Change venv
@@ -25,7 +25,7 @@ SECRET_KEY = 'django-insecure-j4jd&+4j^t_=@zr(#q@n!8e*58vkql6&_6w-t14ju8pw%ei%^s
 # SECURITY WARNING: don't run with debug turned on in production!
 DEBUG = True
 
-cfg_dev_mode = False
+cfg_dev_mode = True
 
 ALLOWED_HOSTS = ["rh.ambulance-clerc.ch"]
 if cfg_dev_mode:
@@ -49,6 +49,7 @@ INSTALLED_APPS = [
     'rangefilter',
     'django.contrib.admin',
     #'carnet_rouge.apps.CarnetRougeConfig',
+    'nextcloud.apps.NextcloudConfig'
 
 
 ]
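The only functional changes in the settings hunks above are the `cfg_dev_mode` flag flipping from False to True and the new `nextcloud.apps.NextcloudConfig` entry in INSTALLED_APPS. The body of the `if cfg_dev_mode:` block falls outside the hunk, so what the flag actually toggles is not visible here; the sketch below shows how such a flag is commonly wired in a Django settings.py and is purely illustrative, not code from this repository. Note that `DEBUG = True` already appears as an unchanged context line, independent of the flag.

```python
# Hypothetical wiring of a dev-mode flag in a Django settings.py.
# The real contents of the `if cfg_dev_mode:` block are not part of this diff,
# so every override below is an assumption made for illustration.
cfg_dev_mode = True

ALLOWED_HOSTS = ["rh.ambulance-clerc.ch"]
if cfg_dev_mode:
    DEBUG = True                                  # detailed error pages
    ALLOWED_HOSTS += ["localhost", "127.0.0.1"]   # accept local requests
    EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
```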
BIN db.sqlite3
Binary file not shown.
@@ -1,18 +1,11 @@
+# don't import any costly modules
 import sys
 import os
-import re
-import importlib
-import warnings
 
 
 is_pypy = '__pypy__' in sys.builtin_module_names
 
 
-warnings.filterwarnings('ignore',
-                        r'.+ distutils\b.+ deprecated',
-                        DeprecationWarning)
-
-
 def warn_distutils_present():
     if 'distutils' not in sys.modules:
         return
@@ -20,20 +13,29 @@ def warn_distutils_present():
         # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
         # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
         return
+    import warnings
+
     warnings.warn(
         "Distutils was imported before Setuptools, but importing Setuptools "
         "also replaces the `distutils` module in `sys.modules`. This may lead "
         "to undesirable behaviors or errors. To avoid these issues, avoid "
         "using distutils directly, ensure that setuptools is installed in the "
         "traditional way (e.g. not an editable install), and/or make sure "
-        "that setuptools is always imported before distutils.")
+        "that setuptools is always imported before distutils."
+    )
 
 
 def clear_distutils():
     if 'distutils' not in sys.modules:
         return
+    import warnings
+
     warnings.warn("Setuptools is replacing distutils.")
-    mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
+    mods = [
+        name
+        for name in sys.modules
+        if name == "distutils" or name.startswith("distutils.")
+    ]
     for name in mods:
         del sys.modules[name]
 
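In clear_distutils() above, the regex-based filter is replaced by an explicit name check, which also lets the module drop its top-level `import re`. A small comparison, illustration only and not from this commit, showing that for ordinary dotted module names the two predicates select the same entries:

```python
# Compare the old regex filter with the new explicit check; for normal module
# names both select 'distutils' itself and its submodules, nothing else.
import re

names = ['distutils', 'distutils.core', 'distutils_hack', 'mydistutils']

old = [n for n in names if re.match(r'distutils\b', n)]
new = [n for n in names if n == 'distutils' or n.startswith('distutils.')]

print(old)  # ['distutils', 'distutils.core']
print(new)  # ['distutils', 'distutils.core']
```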
@@ -42,19 +44,25 @@ def enabled():
     """
     Allow selection of distutils by environment variable.
     """
-    which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')
+    which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
     return which == 'local'
 
 
 def ensure_local_distutils():
-    clear_distutils()
-    distutils = importlib.import_module('setuptools._distutils')
-    distutils.__name__ = 'distutils'
-    sys.modules['distutils'] = distutils
+    import importlib
 
-    # sanity check that submodules load as expected
+    clear_distutils()
+
+    # With the DistutilsMetaFinder in place,
+    # perform an import to cause distutils to be
+    # loaded from setuptools._distutils. Ref #2906.
+    with shim():
+        importlib.import_module('distutils')
+
+    # check that submodules load as expected
     core = importlib.import_module('distutils.core')
     assert '_distutils' in core.__file__, core.__file__
+    assert 'setuptools._distutils.log' not in sys.modules
 
 
 def do_override():
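The behavioural change in this hunk (which appears to be from the venv's _distutils_hack/__init__.py, i.e. a newer setuptools) is the default for SETUPTOOLS_USE_DISTUTILS flipping from 'stdlib' to 'local': the setuptools-bundled distutils now wins unless the environment variable says otherwise. A quick sanity check one could run inside this venv, assuming its distutils-precedence.pth hook is active:

```python
# With the 'local' default, distutils should resolve to setuptools/_distutils.
# Exporting SETUPTOOLS_USE_DISTUTILS=stdlib before starting Python opts back
# into the standard-library copy (on interpreters that still ship it).
import distutils.core

print(distutils.core.__file__)
print('_distutils' in distutils.core.__file__)  # expected: True
```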
@@ -69,9 +77,19 @@ def do_override():
         ensure_local_distutils()
 
 
+class _TrivialRe:
+    def __init__(self, *patterns):
+        self._patterns = patterns
+
+    def match(self, string):
+        return all(pat in string for pat in self._patterns)
+
+
 class DistutilsMetaFinder:
     def find_spec(self, fullname, path, target=None):
-        if path is not None:
+        # optimization: only consider top level modules and those
+        # found in the CPython test suite.
+        if path is not None and not fullname.startswith('test.'):
             return
 
         method_name = 'spec_for_{fullname}'.format(**locals())
@@ -79,18 +97,45 @@ class DistutilsMetaFinder:
             return method()
 
     def spec_for_distutils(self):
+        if self.is_cpython():
+            return
+
+        import importlib
         import importlib.abc
         import importlib.util
 
-        class DistutilsLoader(importlib.abc.Loader):
+        try:
+            mod = importlib.import_module('setuptools._distutils')
+        except Exception:
+            # There are a couple of cases where setuptools._distutils
+            # may not be present:
+            # - An older Setuptools without a local distutils is
+            #   taking precedence. Ref #2957.
+            # - Path manipulation during sitecustomize removes
+            #   setuptools from the path but only after the hook
+            #   has been loaded. Ref #2980.
+            # In either case, fall back to stdlib behavior.
+            return
 
+        class DistutilsLoader(importlib.abc.Loader):
             def create_module(self, spec):
-                return importlib.import_module('setuptools._distutils')
+                mod.__name__ = 'distutils'
+                return mod
 
             def exec_module(self, module):
                 pass
 
-        return importlib.util.spec_from_loader('distutils', DistutilsLoader())
+        return importlib.util.spec_from_loader(
+            'distutils', DistutilsLoader(), origin=mod.__file__
+        )
+
+    @staticmethod
+    def is_cpython():
+        """
+        Suppress supplying distutils for CPython (build and tests).
+        Ref #2965 and #3007.
+        """
+        return os.path.isfile('pybuilddir.txt')
 
     def spec_for_pip(self):
         """
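For context on the machinery being extended here: DistutilsMetaFinder sits on sys.meta_path, and its find_spec() turns each top-level import name into a spec_for_<fullname>() method lookup; returning None lets the normal import machinery continue. A self-contained toy version of that dispatch pattern, illustrative only and not code from this commit:

```python
# Minimal sys.meta_path finder using the same spec_for_<name> dispatch idea.
import importlib.abc
import importlib.util
import sys
import types


class DemoFinder(importlib.abc.MetaPathFinder):
    def find_spec(self, fullname, path, target=None):
        method = getattr(self, f'spec_for_{fullname}', lambda: None)
        return method()

    def spec_for_demo_module(self):
        class DemoLoader(importlib.abc.Loader):
            def create_module(self, spec):
                return types.ModuleType(spec.name)

            def exec_module(self, module):
                module.answer = 42

        return importlib.util.spec_from_loader('demo_module', DemoLoader())


sys.meta_path.insert(0, DemoFinder())
import demo_module  # resolved by DemoFinder, never touches the filesystem

print(demo_module.answer)  # 42
```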
@@ -102,15 +147,52 @@ class DistutilsMetaFinder:
         clear_distutils()
         self.spec_for_distutils = lambda: None
 
-    @staticmethod
-    def pip_imported_during_build():
+    @classmethod
+    def pip_imported_during_build(cls):
         """
         Detect if pip is being imported in a build script. Ref #2355.
         """
         import traceback
+
         return any(
-            frame.f_globals['__file__'].endswith('setup.py')
-            for frame, line in traceback.walk_stack(None)
+            cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
         )
+
+    @staticmethod
+    def frame_file_is_setup(frame):
+        """
+        Return True if the indicated frame suggests a setup.py file.
+        """
+        # some frames may not have __file__ (#2940)
+        return frame.f_globals.get('__file__', '').endswith('setup.py')
+
+    def spec_for_sensitive_tests(self):
+        """
+        Ensure stdlib distutils when running select tests under CPython.
+
+        python/cpython#91169
+        """
+        clear_distutils()
+        self.spec_for_distutils = lambda: None
+
+    sensitive_tests = (
+        [
+            'test.test_distutils',
+            'test.test_peg_generator',
+            'test.test_importlib',
+        ]
+        if sys.version_info < (3, 10)
+        else [
+            'test.test_distutils',
+        ]
+    )
+
+
+for name in DistutilsMetaFinder.sensitive_tests:
+    setattr(
+        DistutilsMetaFinder,
+        f'spec_for_{name}',
+        DistutilsMetaFinder.spec_for_sensitive_tests,
+    )
 
 
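The pip_imported_during_build() rewrite above turns the frame inspection into a classmethod and, via frame_file_is_setup(), tolerates frames that carry no __file__ (Ref #2940). A minimal standalone version of the same frame-walking idea, for illustration only:

```python
# Toy version of the check: True when any calling frame's __file__ ends with
# setup.py, which is how the hack decides pip was imported from a build script.
import traceback


def imported_from_setup_py():
    return any(
        frame.f_globals.get('__file__', '').endswith('setup.py')
        for frame, line in traceback.walk_stack(None)
    )


print(imported_from_setup_py())  # False when run as an ordinary script
```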
@@ -118,6 +200,18 @@ DISTUTILS_FINDER = DistutilsMetaFinder()
 
 
 def add_shim():
+    DISTUTILS_FINDER in sys.meta_path or insert_shim()
+
+
+class shim:
+    def __enter__(self):
+        insert_shim()
+
+    def __exit__(self, exc, value, tb):
+        remove_shim()
+
+
+def insert_shim():
     sys.meta_path.insert(0, DISTUTILS_FINDER)
 
 
@@ -1 +1 @@
-import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim();
+import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim();
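This one-liner is the distutils-precedence.pth hook that setuptools installs into site-packages; because the line begins with `import`, the site module executes it at interpreter startup, so add_shim() runs before any project code. The edit mirrors the default flip in enabled() above: the shim is now installed unless SETUPTOOLS_USE_DISTUTILS is explicitly set to a value other than 'local'. An illustrative check, not part of the commit, to confirm the hook is active inside this venv:

```python
# If distutils-precedence.pth was processed at startup, a DistutilsMetaFinder
# instance sits at the front of sys.meta_path.
import sys

print([type(finder).__name__ for finder in sys.meta_path])
```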
@@ -1 +0,0 @@
-pip
@@ -1,20 +0,0 @@
-Copyright (c) 2008-2021 The pip developers (see AUTHORS.txt file)
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,92 +0,0 @@
-Metadata-Version: 2.1
-Name: pip
-Version: 21.2.4
-Summary: The PyPA recommended tool for installing Python packages.
-Home-page: https://pip.pypa.io/
-Author: The pip developers
-Author-email: distutils-sig@python.org
-License: MIT
-Project-URL: Documentation, https://pip.pypa.io
-Project-URL: Source, https://github.com/pypa/pip
-Project-URL: Changelog, https://pip.pypa.io/en/stable/news/
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Topic :: Software Development :: Build Tools
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-Python: >=3.6
-License-File: LICENSE.txt
-
-pip - The Python Package Installer
-==================================
-
-.. image:: https://img.shields.io/pypi/v/pip.svg
-   :target: https://pypi.org/project/pip/
-
-.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
-   :target: https://pip.pypa.io/en/latest
-
-pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
-
-Please take a look at our documentation for how to install and use pip:
-
-* `Installation`_
-* `Usage`_
-
-We release updates regularly, with a new version every 3 months. Find more details in our documentation:
-
-* `Release notes`_
-* `Release process`_
-
-In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right.
-
-**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3.
-
-If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
-
-* `Issue tracking`_
-* `Discourse channel`_
-* `User IRC`_
-
-If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:
-
-* `GitHub page`_
-* `Development documentation`_
-* `Development mailing list`_
-* `Development IRC`_
-
-Code of Conduct
----------------
-
-Everyone interacting in the pip project's codebases, issue trackers, chat
-rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
-
-.. _package installer: https://packaging.python.org/guides/tool-recommendations/
-.. _Python Package Index: https://pypi.org
-.. _Installation: https://pip.pypa.io/en/stable/installation/
-.. _Usage: https://pip.pypa.io/en/stable/
-.. _Release notes: https://pip.pypa.io/en/stable/news.html
-.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
-.. _GitHub page: https://github.com/pypa/pip
-.. _Development documentation: https://pip.pypa.io/en/latest/development
-.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
-.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020
-.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
-.. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
-.. _Issue tracking: https://github.com/pypa/pip/issues
-.. _Discourse channel: https://discuss.python.org/c/packaging
-.. _Development mailing list: https://mail.python.org/mailman3/lists/distutils-sig.python.org/
-.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
-.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
-.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
-
-
@@ -1,795 +0,0 @@
[pip-21.2.4.dist-info/RECORD deleted: 795 auto-generated entries, each an installed file path with its sha256 digest and size, covering the pip executables under ../../Scripts/ plus the pip/_internal/ and pip/_vendor/ trees.]
|
|
||||||
pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013
|
|
||||||
pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775
|
|
||||||
pip/_vendor/html5lib/_utils.py,sha256=Dx9AKntksRjFT1veBj7I362pf5OgIaT0zglwq43RnfU,4931
|
|
||||||
pip/_vendor/html5lib/constants.py,sha256=Ll-yzLU_jcjyAI_h57zkqZ7aQWE5t5xA4y_jQgoUUhw,83464
|
|
||||||
pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/filters/__pycache__/base.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/filters/__pycache__/lint.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919
|
|
||||||
pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286
|
|
||||||
pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945
|
|
||||||
pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643
|
|
||||||
pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588
|
|
||||||
pip/_vendor/html5lib/filters/sanitizer.py,sha256=m6oGmkBhkGAnn2nV6D4hE78SCZ6WEnK9rKdZB3uXBIc,26897
|
|
||||||
pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214
|
|
||||||
pip/_vendor/html5lib/html5parser.py,sha256=anr-aXre_ImfrkQ35c_rftKXxC80vJCREKe06Tq15HA,117186
|
|
||||||
pip/_vendor/html5lib/serializer.py,sha256=_PpvcZF07cwE7xr9uKkZqh5f4UEaI8ltCU2xPJzaTpk,15759
|
|
||||||
pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679
|
|
||||||
pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715
|
|
||||||
pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776
|
|
||||||
pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592
|
|
||||||
pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treebuilders/base.py,sha256=z-o51vt9r_l2IDG5IioTOKGzZne4Fy3_Fc-7ztrOh4I,14565
|
|
||||||
pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925
|
|
||||||
pip/_vendor/html5lib/treebuilders/etree.py,sha256=w5ZFpKk6bAxnrwD2_BrF5EVC7vzz0L3LMi9Sxrbc_8w,12836
|
|
||||||
pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9gqDjs-IxsPhBYa5cpvv2FZ1KZlG83Giusy2lFmvIkE,14766
|
|
||||||
pip/_vendor/html5lib/treewalkers/__init__.py,sha256=OBPtc1TU5mGyy18QDMxKEyYEz0wxFUUNj5v0-XgmYhY,5719
|
|
||||||
pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-310.pyc,,
|
|
||||||
pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476
|
|
||||||
pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413
|
|
||||||
pip/_vendor/html5lib/treewalkers/etree.py,sha256=xo1L5m9VtkfpFJK0pFmkLVajhqYYVisVZn3k9kYpPkI,4551
|
|
||||||
pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=_b0LAVWLcVu9WaU_-w3D8f0IRSpCbjf667V-3NRdhTw,6357
|
|
||||||
pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309
|
|
||||||
pip/_vendor/idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
|
|
||||||
pip/_vendor/idna/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/idna/__pycache__/codec.cpython-310.pyc,,
|
|
||||||
pip/_vendor/idna/__pycache__/compat.cpython-310.pyc,,
|
|
||||||
pip/_vendor/idna/__pycache__/core.cpython-310.pyc,,
|
|
||||||
pip/_vendor/idna/__pycache__/idnadata.cpython-310.pyc,,
|
|
||||||
pip/_vendor/idna/__pycache__/intranges.cpython-310.pyc,,
|
|
||||||
pip/_vendor/idna/__pycache__/package_data.cpython-310.pyc,,
|
|
||||||
pip/_vendor/idna/__pycache__/uts46data.cpython-310.pyc,,
|
|
||||||
pip/_vendor/idna/codec.py,sha256=QsPFD3Je8gN17rfs14e7zTGRWlnL7bNf2ZqcHTRVYHs,3453
|
|
||||||
pip/_vendor/idna/compat.py,sha256=5A9xR04puRHCsyjBNewZlVSiarth7K1bZqyEOeob1fA,360
|
|
||||||
pip/_vendor/idna/core.py,sha256=icq2P13S6JMjoXgKhhd6ihhby7QsnZlNfniH6fLyf6U,12826
|
|
||||||
pip/_vendor/idna/idnadata.py,sha256=cl4x9RLdw1ZMtEEbvKwAsX-Id3AdIjO5U3HaoKM6VGs,42350
|
|
||||||
pip/_vendor/idna/intranges.py,sha256=EqgXwyATAn-CTACInqH9tYsYAitGB2VcQ50RZt_Cpjs,1933
|
|
||||||
pip/_vendor/idna/package_data.py,sha256=_028B4fvadRIaXMwMYjhuQPP3AxTIt1IRE7X6RDR4Mk,21
|
|
||||||
pip/_vendor/idna/uts46data.py,sha256=DGzwDQv8JijY17I_7ondo3stjFjNnjvVAbA-z0k1XOE,201849
|
|
||||||
pip/_vendor/msgpack/__init__.py,sha256=2gJwcsTIaAtCM0GMi2rU-_Y6kILeeQuqRkrQ22jSANc,1118
|
|
||||||
pip/_vendor/msgpack/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/msgpack/__pycache__/_version.cpython-310.pyc,,
|
|
||||||
pip/_vendor/msgpack/__pycache__/exceptions.cpython-310.pyc,,
|
|
||||||
pip/_vendor/msgpack/__pycache__/ext.cpython-310.pyc,,
|
|
||||||
pip/_vendor/msgpack/__pycache__/fallback.cpython-310.pyc,,
|
|
||||||
pip/_vendor/msgpack/_version.py,sha256=dFR03oACnj4lsKd1RnwD7BPMiVI_FMygdOL1TOBEw_U,20
|
|
||||||
pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081
|
|
||||||
pip/_vendor/msgpack/ext.py,sha256=4l356Y4sVEcvCla2dh_cL57vh4GMhZfa3kuWHFHYz6A,6088
|
|
||||||
pip/_vendor/msgpack/fallback.py,sha256=Rpv1Ldey8f8ueRnQznD4ARKBn9dxM2PywVNkXI8IEeE,38026
|
|
||||||
pip/_vendor/packaging/__about__.py,sha256=p_OQloqH2saadcbUQmWEsWK857dI6_ff5E3aSiCqGFA,661
|
|
||||||
pip/_vendor/packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497
|
|
||||||
pip/_vendor/packaging/__pycache__/__about__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/__pycache__/_manylinux.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/__pycache__/_musllinux.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/__pycache__/_structures.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/__pycache__/markers.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/__pycache__/requirements.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/__pycache__/specifiers.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/__pycache__/tags.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/__pycache__/utils.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/__pycache__/version.cpython-310.pyc,,
|
|
||||||
pip/_vendor/packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488
|
|
||||||
pip/_vendor/packaging/_musllinux.py,sha256=z5yeG1ygOPx4uUyLdqj-p8Dk5UBb5H_b0NIjW9yo8oA,4378
|
|
||||||
pip/_vendor/packaging/_structures.py,sha256=TMiAgFbdUOPmIfDIfiHc3KFhSJ8kMjof2QS5I-2NyQ8,1629
|
|
||||||
pip/_vendor/packaging/markers.py,sha256=AJBOcY8Oq0kYc570KuuPTkvuqjAlhufaE2c9sCUbm64,8487
|
|
||||||
pip/_vendor/packaging/requirements.py,sha256=NtDlPBtojpn1IUC85iMjPNsUmufjpSlwnNA-Xb4m5NA,4676
|
|
||||||
pip/_vendor/packaging/specifiers.py,sha256=MZ-fYcNL3u7pNrt-6g2EQO7AbRXkjc-SPEYwXMQbLmc,30964
|
|
||||||
pip/_vendor/packaging/tags.py,sha256=akIerYw8W0sz4OW9HHozgawWnbt2GGOPm3sviW0jowY,15714
|
|
||||||
pip/_vendor/packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200
|
|
||||||
pip/_vendor/packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665
|
|
||||||
pip/_vendor/pep517/__init__.py,sha256=qDgVbDWpBYpTvtxA2tilifXlxwzOzRqIodLZdbyahyQ,130
|
|
||||||
pip/_vendor/pep517/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/__pycache__/build.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/__pycache__/check.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/__pycache__/colorlog.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/__pycache__/compat.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/__pycache__/dirtools.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/__pycache__/envbuild.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/__pycache__/meta.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/__pycache__/wrappers.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/build.py,sha256=MqN_W6o5a9oauTC0u6W5cILGFjf9x2BV9BdMLeY60hc,3469
|
|
||||||
pip/_vendor/pep517/check.py,sha256=AYG2yvpzmtsL810c75Z5-nhaXa7SxgK8APyw-_x53Ok,6096
|
|
||||||
pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098
|
|
||||||
pip/_vendor/pep517/compat.py,sha256=fw2Py6lqLwJLfp6MKmXvt1m4sbbgoU1D-_gcScvz8OU,1071
|
|
||||||
pip/_vendor/pep517/dirtools.py,sha256=2mkAkAL0mRz_elYFjRKuekTJVipH1zTn4tbf1EDev84,1129
|
|
||||||
pip/_vendor/pep517/envbuild.py,sha256=LcST0MASmcQNLOFqDPxDoS1kjkglx8F6eEhoBJ-DWkg,6112
|
|
||||||
pip/_vendor/pep517/in_process/__init__.py,sha256=MyWoAi8JHdcBv7yXuWpUSVADbx6LSB9rZh7kTIgdA8Y,563
|
|
||||||
pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pep517/in_process/_in_process.py,sha256=YJJf-qaL7BBVdgCHuMhTpx-LtwG1EIGVfly4rtusdiI,10833
|
|
||||||
pip/_vendor/pep517/meta.py,sha256=8mnM5lDnT4zXQpBTliJbRGfesH7iioHwozbDxALPS9Y,2463
|
|
||||||
pip/_vendor/pep517/wrappers.py,sha256=qCWfEUnbE5387PyQl7cT8xv4dDca4uNgro_0bnAO4Rk,13258
|
|
||||||
pip/_vendor/pkg_resources/__init__.py,sha256=XpGBfvS9fafA6bm5rx7vnxdxs7yqyoc_NnpzKApkJ64,108277
|
|
||||||
pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-310.pyc,,
|
|
||||||
pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562
|
|
||||||
pip/_vendor/progress/__init__.py,sha256=fcbQQXo5np2CoQyhSH5XprkicwLZNLePR3uIahznSO0,4857
|
|
||||||
pip/_vendor/progress/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/progress/__pycache__/bar.cpython-310.pyc,,
|
|
||||||
pip/_vendor/progress/__pycache__/counter.cpython-310.pyc,,
|
|
||||||
pip/_vendor/progress/__pycache__/spinner.cpython-310.pyc,,
|
|
||||||
pip/_vendor/progress/bar.py,sha256=QuDuVNcmXgpxtNtxO0Fq72xKigxABaVmxYGBw4J3Z_E,2854
|
|
||||||
pip/_vendor/progress/counter.py,sha256=MznyBrvPWrOlGe4MZAlGUb9q3aODe6_aNYeAE_VNoYA,1372
|
|
||||||
pip/_vendor/progress/spinner.py,sha256=k8JbDW94T0-WXuXfxZIFhdoNPYp3jfnpXqBnfRv5fGs,1380
|
|
||||||
pip/_vendor/pyparsing.py,sha256=J1b4z3S_KwyJW7hKGnoN-hXW9pgMIzIP6QThyY5yJq4,273394
|
|
||||||
pip/_vendor/requests/__init__.py,sha256=g4Bh1QYh6JKjMS4YLobx0uOLq-41sINaXjvbhX2VI8g,5113
|
|
||||||
pip/_vendor/requests/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/__version__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/_internal_utils.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/adapters.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/api.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/auth.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/certs.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/compat.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/cookies.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/exceptions.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/help.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/hooks.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/models.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/packages.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/sessions.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/status_codes.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/structures.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__pycache__/utils.cpython-310.pyc,,
|
|
||||||
pip/_vendor/requests/__version__.py,sha256=PZEyPTSIN_jRIAIB51wV7pw81m3qAw0InSR7OrKZUnE,441
|
|
||||||
pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096
|
|
||||||
pip/_vendor/requests/adapters.py,sha256=e-bmKEApNVqFdylxuMJJfiaHdlmS_zhWhIMEzlHvGuc,21548
|
|
||||||
pip/_vendor/requests/api.py,sha256=hjuoP79IAEmX6Dysrw8t032cLfwLHxbI_wM4gC5G9t0,6402
|
|
||||||
pip/_vendor/requests/auth.py,sha256=OMoJIVKyRLy9THr91y8rxysZuclwPB-K1Xg1zBomUhQ,10207
|
|
||||||
pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465
|
|
||||||
pip/_vendor/requests/compat.py,sha256=LQWuCR4qXk6w7-qQopXyz0WNHUdAD40k0mKnaAEf1-g,2045
|
|
||||||
pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430
|
|
||||||
pip/_vendor/requests/exceptions.py,sha256=dwIi512RCDqXJ2T81nLC88mqPNhUFnOI_CgKKDXhTO8,3250
|
|
||||||
pip/_vendor/requests/help.py,sha256=dyhe3lcmHXnFCzDiZVjcGmVvvO_jtsfAm-AC542ndw8,3972
|
|
||||||
pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757
|
|
||||||
pip/_vendor/requests/models.py,sha256=9_LS_t1t6HbbaWFE3ZkxGmmHN2V8BgxziiOU84rrQ50,34924
|
|
||||||
pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695
|
|
||||||
pip/_vendor/requests/sessions.py,sha256=57O4ud9yRL6eLYh-dtFbqC1kO4d_EwZcCgYXEkujlfs,30168
|
|
||||||
pip/_vendor/requests/status_codes.py,sha256=gT79Pbs_cQjBgp-fvrUgg1dn2DQO32bDj4TInjnMPSc,4188
|
|
||||||
pip/_vendor/requests/structures.py,sha256=msAtr9mq1JxHd-JRyiILfdFlpbJwvvFuP3rfUQT_QxE,3005
|
|
||||||
pip/_vendor/requests/utils.py,sha256=U_-i6WxLw-67KEij43xHbcvL0DdeQ5Jbd4hfifWJzQY,31394
|
|
||||||
pip/_vendor/resolvelib/__init__.py,sha256=uoW0dgWCDwApX59mRffoPISkZGGk_UZ1It_PY4o_PaE,537
|
|
||||||
pip/_vendor/resolvelib/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/resolvelib/__pycache__/providers.cpython-310.pyc,,
|
|
||||||
pip/_vendor/resolvelib/__pycache__/reporters.cpython-310.pyc,,
|
|
||||||
pip/_vendor/resolvelib/__pycache__/resolvers.cpython-310.pyc,,
|
|
||||||
pip/_vendor/resolvelib/__pycache__/structs.cpython-310.pyc,,
|
|
||||||
pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-310.pyc,,
|
|
||||||
pip/_vendor/resolvelib/compat/collections_abc.py,sha256=uy8xUZ-NDEw916tugUXm8HgwCGiMO0f-RcdnpkfXfOs,156
|
|
||||||
pip/_vendor/resolvelib/providers.py,sha256=bfzFDZd7UqkkAS7lUM_HeYbA-HzjKfDlle_pn_79vio,5638
|
|
||||||
pip/_vendor/resolvelib/reporters.py,sha256=hQvvXuuEBOyEWO8KDfLsWKVjX55UFMAUwO0YZMNpzAw,1364
|
|
||||||
pip/_vendor/resolvelib/resolvers.py,sha256=wT83PHiBWRCklL-nLJ1-8sk2B3yBI06Rse1H11crOsI,17225
|
|
||||||
pip/_vendor/resolvelib/structs.py,sha256=IVIYof6sA_N4ZEiE1C1UhzTX495brCNnyCdgq6CYq28,4794
|
|
||||||
pip/_vendor/six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549
|
|
||||||
pip/_vendor/tenacity/__init__.py,sha256=GLLsTFD4Bd5VDgTR6mU_FxyOsrxc48qONorVaRebeD4,18257
|
|
||||||
pip/_vendor/tenacity/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/__pycache__/_asyncio.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/__pycache__/_utils.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/__pycache__/after.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/__pycache__/before.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/__pycache__/before_sleep.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/__pycache__/nap.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/__pycache__/retry.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/__pycache__/stop.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/__pycache__/wait.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tenacity/_asyncio.py,sha256=HEb0BVJEeBJE9P-m9XBxh1KcaF96BwoeqkJCL5sbVcQ,3314
|
|
||||||
pip/_vendor/tenacity/_utils.py,sha256=-y68scDcyoqvTJuJJ0GTfjdSCljEYlbCYvgk7nM4NdM,1944
|
|
||||||
pip/_vendor/tenacity/after.py,sha256=dlmyxxFy2uqpLXDr838DiEd7jgv2AGthsWHGYcGYsaI,1496
|
|
||||||
pip/_vendor/tenacity/before.py,sha256=7XtvRmO0dRWUp8SVn24OvIiGFj8-4OP5muQRUiWgLh0,1376
|
|
||||||
pip/_vendor/tenacity/before_sleep.py,sha256=ThyDvqKU5yle_IvYQz_b6Tp6UjUS0PhVp6zgqYl9U6Y,1908
|
|
||||||
pip/_vendor/tenacity/nap.py,sha256=fRWvnz1aIzbIq9Ap3gAkAZgDH6oo5zxMrU6ZOVByq0I,1383
|
|
||||||
pip/_vendor/tenacity/retry.py,sha256=62R71W59bQjuNyFKsDM7hE2aEkEPtwNBRA0tnsEvgSk,6645
|
|
||||||
pip/_vendor/tenacity/stop.py,sha256=sKHmHaoSaW6sKu3dTxUVKr1-stVkY7lw4Y9yjZU30zQ,2790
|
|
||||||
pip/_vendor/tenacity/tornadoweb.py,sha256=E8lWO2nwe6dJgoB-N2HhQprYLDLB_UdSgFnv-EN6wKE,2145
|
|
||||||
pip/_vendor/tenacity/wait.py,sha256=e_Saa6I2tsNLpCL1t9897wN2fGb0XQMQlE4bU2t9V2w,6691
|
|
||||||
pip/_vendor/tomli/__init__.py,sha256=z1Elt0nLAqU5Y0DOn9p__8QnLWavlEOpRyQikdYgKro,230
|
|
||||||
pip/_vendor/tomli/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tomli/__pycache__/_parser.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tomli/__pycache__/_re.cpython-310.pyc,,
|
|
||||||
pip/_vendor/tomli/_parser.py,sha256=50BD4o9YbzFAGAYyZLqZC8F81DQ7iWWyJnrHNwBKa6A,22415
|
|
||||||
pip/_vendor/tomli/_re.py,sha256=5GPfgXKteg7wRFCF-DzlkAPI2ilHbkMK2-JC49F-AJQ,2681
|
|
||||||
pip/_vendor/urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763
|
|
||||||
pip/_vendor/urllib3/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/__pycache__/_collections.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/__pycache__/_version.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/__pycache__/connection.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/__pycache__/connectionpool.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/__pycache__/exceptions.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/__pycache__/fields.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/__pycache__/filepost.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/__pycache__/poolmanager.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/__pycache__/request.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/__pycache__/response.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
|
|
||||||
pip/_vendor/urllib3/_version.py,sha256=6fJAIPnJkT0m9wzVjHrFcq5wYt65dStDpaRcjj5ugoo,63
|
|
||||||
pip/_vendor/urllib3/connection.py,sha256=kAlubwsW33FUSUroPSVHMF_Zzv-uzX_BwUFMXX9Pt8c,18754
|
|
||||||
pip/_vendor/urllib3/connectionpool.py,sha256=jXNmm4y3LJWYgteNeGcYJx8-0k7bzKRU__AVTXzaIak,37131
|
|
||||||
pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
|
|
||||||
pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=eRy1Mj-wpg7sR6-OSvnSV4jUbjMT464dLN_CWxbIRVw,17649
|
|
||||||
pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=lgIdsSycqfB0Xm5BiJzXGeIKT7ybCQMFPJAgkcwPa1s,13908
|
|
||||||
pip/_vendor/urllib3/contrib/appengine.py,sha256=lfzpHFmJiO82shClLEm3QB62SYgHWnjpZOH_2JhU5Tc,11034
|
|
||||||
pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=ej9gGvfAb2Gt00lafFp45SIoRz-QwrQ4WChm6gQmAlM,4538
|
|
||||||
pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=lYIxGFWTosqbfLnkZXOBg7igY71iRvM3NUOaD0stUQ8,16891
|
|
||||||
pip/_vendor/urllib3/contrib/securetransport.py,sha256=TN5q9dKZ0Sd5_vW9baRzEAEItdJ-4VlHWmAUrlcJNfo,34434
|
|
||||||
pip/_vendor/urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097
|
|
||||||
pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217
|
|
||||||
pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
|
|
||||||
pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
|
|
||||||
pip/_vendor/urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
|
|
||||||
pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/packages/__pycache__/six.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
||||||
pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
|
|
||||||
pip/_vendor/urllib3/packages/six.py,sha256=1LVW7ljqRirFlfExjwl-v1B7vSAUNTmzGMs-qays2zg,34666
|
|
||||||
pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=ZVMwCkHx-py8ERsxxM3Il-MiREZktV-8iLBmCfRRHI4,927
|
|
||||||
pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679
|
|
||||||
pip/_vendor/urllib3/poolmanager.py,sha256=whzlX6UTEgODMOCy0ZDMUONRBCz5wyIM8Z9opXAY-Lk,19763
|
|
||||||
pip/_vendor/urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985
|
|
||||||
pip/_vendor/urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203
|
|
||||||
pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/connection.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/proxy.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/queue.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/request.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/response.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/retry.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/timeout.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/url.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/__pycache__/wait.cpython-310.pyc,,
|
|
||||||
pip/_vendor/urllib3/util/connection.py,sha256=KykjNIXzUZEzeKEOpl5xvKs6IsESXP9o9eTrjE0W_Ys,4920
|
|
||||||
pip/_vendor/urllib3/util/proxy.py,sha256=FGipAEnvZteyldXNjce4DEB7YzwU-a5lep8y5S0qHQg,1604
|
|
||||||
pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
|
|
||||||
pip/_vendor/urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123
|
|
||||||
pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
|
|
||||||
pip/_vendor/urllib3/util/retry.py,sha256=tOWfZpLsuc7Vbk5nWpMwkHdMoXCp90IAvH4xtjSDRqQ,21391
|
|
||||||
pip/_vendor/urllib3/util/ssl_.py,sha256=X4-AqW91aYPhPx6-xbf66yHFQKbqqfC_5Zt4WkLX1Hc,17177
|
|
||||||
pip/_vendor/urllib3/util/ssltransport.py,sha256=F_UncOXGcc-MgeWFTA1H4QCt_RRNQXRbF6onje3SyHY,6931
|
|
||||||
pip/_vendor/urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003
|
|
||||||
pip/_vendor/urllib3/util/url.py,sha256=QVEzcbHipbXyCWwH6R4K4TR-N8T4LM55WEMwNUTBmLE,14047
|
|
||||||
pip/_vendor/urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404
|
|
||||||
pip/_vendor/vendor.txt,sha256=GuFhR0DHZazrSYZyoY7j3X3T_mGJh-ky2opcZ-A7ezo,364
|
|
||||||
pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579
|
|
||||||
pip/_vendor/webencodings/__pycache__/__init__.cpython-310.pyc,,
|
|
||||||
pip/_vendor/webencodings/__pycache__/labels.cpython-310.pyc,,
|
|
||||||
pip/_vendor/webencodings/__pycache__/mklabels.cpython-310.pyc,,
|
|
||||||
pip/_vendor/webencodings/__pycache__/tests.cpython-310.pyc,,
|
|
||||||
pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-310.pyc,,
|
|
||||||
pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979
|
|
||||||
pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305
|
|
||||||
pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563
|
|
||||||
pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307
|
|
||||||
pip/py.typed,sha256=EBVvvPRTn_eIpz5e5QztSCdrMX7Qwd7VP93RSoIlZ2I,286
|
|
||||||
@@ -1,5 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.36.2)
-Root-Is-Purelib: true
-Tag: py3-none-any
-
@@ -1,5 +0,0 @@
-[console_scripts]
-pip = pip._internal.cli.main:main
-pip3 = pip._internal.cli.main:main
-pip3.9 = pip._internal.cli.main:main
-
@@ -1 +0,0 @@
-pip
@@ -1,6 +1,6 @@
 from typing import List, Optional
 
-__version__ = "21.2.4"
+__version__ = "23.1.2"
 
 
 def main(args: Optional[List[str]] = None) -> int:
@@ -1,17 +1,15 @@
 """Build Environment used for isolation during sdist building
 """
 
-import contextlib
 import logging
 import os
 import pathlib
+import site
 import sys
 import textwrap
-import zipfile
 from collections import OrderedDict
-from sysconfig import get_paths
 from types import TracebackType
-from typing import TYPE_CHECKING, Iterable, Iterator, List, Optional, Set, Tuple, Type
+from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
 
 from pip._vendor.certifi import where
 from pip._vendor.packaging.requirements import Requirement
@@ -19,8 +17,8 @@ from pip._vendor.packaging.version import Version
 
 from pip import __file__ as pip_location
 from pip._internal.cli.spinners import open_spinner
-from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib
-from pip._internal.metadata import get_environment
+from pip._internal.locations import get_platlib, get_purelib, get_scheme
+from pip._internal.metadata import get_default_environment, get_environment
 from pip._internal.utils.subprocess import call_subprocess
 from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
 
@@ -30,62 +28,68 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
-class _Prefix:
+def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
+    return (a, b) if a != b else (a,)
 
-    def __init__(self, path):
-        # type: (str) -> None
+
+class _Prefix:
+    def __init__(self, path: str) -> None:
         self.path = path
         self.setup = False
-        self.bin_dir = get_paths(
-            'nt' if os.name == 'nt' else 'posix_prefix',
-            vars={'base': path, 'platbase': path}
-        )['scripts']
-        self.lib_dirs = get_prefixed_libs(path)
+        scheme = get_scheme("", prefix=path)
+        self.bin_dir = scheme.scripts
+        self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
 
 
-@contextlib.contextmanager
-def _create_standalone_pip() -> Iterator[str]:
-    """Create a "standalone pip" zip file.
+def get_runnable_pip() -> str:
+    """Get a file to pass to a Python executable, to run the currently-running pip.
 
-    The zip file's content is identical to the currently-running pip.
-    It will be used to install requirements into the build environment.
+    This is used to run a pip subprocess, for installing requirements into the build
+    environment.
     """
     source = pathlib.Path(pip_location).resolve().parent
 
-    # Return the current instance if `source` is not a directory. We can't build
-    # a zip from this, and it likely means the instance is already standalone.
     if not source.is_dir():
-        yield str(source)
-        return
+        # This would happen if someone is using pip from inside a zip file. In that
+        # case, we can use that directly.
+        return str(source)
 
-    with TempDirectory(kind="standalone-pip") as tmp_dir:
-        pip_zip = os.path.join(tmp_dir.path, "__env_pip__.zip")
-        kwargs = {}
-        if sys.version_info >= (3, 8):
-            kwargs["strict_timestamps"] = False
-        with zipfile.ZipFile(pip_zip, "w", **kwargs) as zf:
-            for child in source.rglob("*"):
-                zf.write(child, child.relative_to(source.parent).as_posix())
-            yield os.path.join(pip_zip, "pip")
+    return os.fsdecode(source / "__pip-runner__.py")
+
+
+def _get_system_sitepackages() -> Set[str]:
+    """Get system site packages
+
+    Usually from site.getsitepackages,
+    but fallback on `get_purelib()/get_platlib()` if unavailable
+    (e.g. in a virtualenv created by virtualenv<20)
+
+    Returns normalized set of strings.
+    """
+    if hasattr(site, "getsitepackages"):
+        system_sites = site.getsitepackages()
+    else:
+        # virtualenv < 20 overwrites site.py without getsitepackages
+        # fallback on get_purelib/get_platlib.
+        # this is known to miss things, but shouldn't in the cases
+        # where getsitepackages() has been removed (inside a virtualenv)
+        system_sites = [get_purelib(), get_platlib()]
+    return {os.path.normcase(path) for path in system_sites}
 
 
 class BuildEnvironment:
-    """Creates and manages an isolated environment to install build deps
-    """
+    """Creates and manages an isolated environment to install build deps"""
 
-    def __init__(self):
-        # type: () -> None
-        temp_dir = TempDirectory(
-            kind=tempdir_kinds.BUILD_ENV, globally_managed=True
-        )
+    def __init__(self) -> None:
+        temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
 
         self._prefixes = OrderedDict(
             (name, _Prefix(os.path.join(temp_dir.path, name)))
-            for name in ('normal', 'overlay')
+            for name in ("normal", "overlay")
         )
 
-        self._bin_dirs = []  # type: List[str]
-        self._lib_dirs = []  # type: List[str]
+        self._bin_dirs: List[str] = []
+        self._lib_dirs: List[str] = []
         for prefix in reversed(list(self._prefixes.values())):
             self._bin_dirs.append(prefix.bin_dir)
             self._lib_dirs.extend(prefix.lib_dirs)
@@ -93,15 +97,17 @@ class BuildEnvironment:
         # Customize site to:
         # - ensure .pth files are honored
         # - prevent access to system site packages
-        system_sites = {
-            os.path.normcase(site) for site in (get_purelib(), get_platlib())
-        }
-        self._site_dir = os.path.join(temp_dir.path, 'site')
+        system_sites = _get_system_sitepackages()
+
+        self._site_dir = os.path.join(temp_dir.path, "site")
         if not os.path.exists(self._site_dir):
             os.mkdir(self._site_dir)
-        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
-            fp.write(textwrap.dedent(
-                '''
+        with open(
+            os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
+        ) as fp:
+            fp.write(
+                textwrap.dedent(
+                    """
                 import os, site, sys
 
                 # First, drop system-sites related paths.
@@ -124,44 +130,46 @@ class BuildEnvironment:
                 for path in {lib_dirs!r}:
                     assert not path in sys.path
                     site.addsitedir(path)
-                '''
-            ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))
+                """
+                ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
+            )
 
-    def __enter__(self):
-        # type: () -> None
+    def __enter__(self) -> None:
         self._save_env = {
             name: os.environ.get(name, None)
-            for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
+            for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
         }
 
         path = self._bin_dirs[:]
-        old_path = self._save_env['PATH']
+        old_path = self._save_env["PATH"]
         if old_path:
             path.extend(old_path.split(os.pathsep))
 
         pythonpath = [self._site_dir]
 
-        os.environ.update({
-            'PATH': os.pathsep.join(path),
-            'PYTHONNOUSERSITE': '1',
-            'PYTHONPATH': os.pathsep.join(pythonpath),
-        })
+        os.environ.update(
+            {
+                "PATH": os.pathsep.join(path),
+                "PYTHONNOUSERSITE": "1",
+                "PYTHONPATH": os.pathsep.join(pythonpath),
+            }
+        )
 
     def __exit__(
         self,
-        exc_type,  # type: Optional[Type[BaseException]]
-        exc_val,  # type: Optional[BaseException]
-        exc_tb  # type: Optional[TracebackType]
-    ):
-        # type: (...) -> None
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
         for varname, old_value in self._save_env.items():
             if old_value is None:
                 os.environ.pop(varname, None)
             else:
                 os.environ[varname] = old_value
 
-    def check_requirements(self, reqs):
-        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
+    def check_requirements(
+        self, reqs: Iterable[str]
+    ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
         """Return 2 sets:
             - conflicting requirements: set of (installed, wanted) reqs tuples
            - missing requirements: set of reqs
@@ -169,9 +177,17 @@ class BuildEnvironment:
         missing = set()
         conflicting = set()
         if reqs:
-            env = get_environment(self._lib_dirs)
+            env = (
+                get_environment(self._lib_dirs)
+                if hasattr(self, "_lib_dirs")
+                else get_default_environment()
+            )
             for req_str in reqs:
                 req = Requirement(req_str)
+                # We're explicitly evaluating with an empty extra value, since build
+                # environments are not provided any mechanism to select specific extras.
+                if req.marker is not None and not req.marker.evaluate({"extra": ""}):
+                    continue
                 dist = env.get_distribution(req.name)
                 if not dist:
                     missing.add(req_str)
@@ -180,39 +196,30 @@ class BuildEnvironment:
                     installed_req_str = f"{req.name}=={dist.version}"
                 else:
                     installed_req_str = f"{req.name}==={dist.version}"
-                if dist.version not in req.specifier:
+                if not req.specifier.contains(dist.version, prereleases=True):
                     conflicting.add((installed_req_str, req_str))
                 # FIXME: Consider direct URL?
         return conflicting, missing
 
     def install_requirements(
         self,
-        finder,  # type: PackageFinder
-        requirements,  # type: Iterable[str]
-        prefix_as_string,  # type: str
-        message  # type: str
-    ):
-        # type: (...) -> None
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix_as_string: str,
+        *,
+        kind: str,
+    ) -> None:
         prefix = self._prefixes[prefix_as_string]
         assert not prefix.setup
         prefix.setup = True
         if not requirements:
             return
-        with contextlib.ExitStack() as ctx:
-            # TODO: Remove this block when dropping 3.6 support. Python 3.6
-            # lacks importlib.resources and pep517 has issues loading files in
-            # a zip, so we fallback to the "old" method by adding the current
-            # pip directory to the child process's sys.path.
-            if sys.version_info < (3, 7):
-                pip_runnable = os.path.dirname(pip_location)
-            else:
-                pip_runnable = ctx.enter_context(_create_standalone_pip())
-            self._install_requirements(
-                pip_runnable,
-                finder,
-                requirements,
-                prefix,
-                message,
-            )
+        self._install_requirements(
+            get_runnable_pip(),
+            finder,
+            requirements,
+            prefix,
+            kind=kind,
+        )
 
     @staticmethod
@@ -221,74 +228,84 @@ class BuildEnvironment:
         finder: "PackageFinder",
         requirements: Iterable[str],
         prefix: _Prefix,
-        message: str,
+        *,
+        kind: str,
     ) -> None:
-        args = [
-            sys.executable, pip_runnable, 'install',
-            '--ignore-installed', '--no-user', '--prefix', prefix.path,
-            '--no-warn-script-location',
-        ]  # type: List[str]
+        args: List[str] = [
+            sys.executable,
+            pip_runnable,
+            "install",
+            "--ignore-installed",
+            "--no-user",
+            "--prefix",
+            prefix.path,
+            "--no-warn-script-location",
+        ]
         if logger.getEffectiveLevel() <= logging.DEBUG:
-            args.append('-v')
-        for format_control in ('no_binary', 'only_binary'):
+            args.append("-v")
+        for format_control in ("no_binary", "only_binary"):
             formats = getattr(finder.format_control, format_control)
-            args.extend(('--' + format_control.replace('_', '-'),
-                         ','.join(sorted(formats or {':none:'}))))
+            args.extend(
+                (
+                    "--" + format_control.replace("_", "-"),
+                    ",".join(sorted(formats or {":none:"})),
+                )
+            )
 
         index_urls = finder.index_urls
         if index_urls:
-            args.extend(['-i', index_urls[0]])
+            args.extend(["-i", index_urls[0]])
             for extra_index in index_urls[1:]:
-                args.extend(['--extra-index-url', extra_index])
+                args.extend(["--extra-index-url", extra_index])
         else:
-            args.append('--no-index')
+            args.append("--no-index")
         for link in finder.find_links:
-            args.extend(['--find-links', link])
+            args.extend(["--find-links", link])
 
         for host in finder.trusted_hosts:
-            args.extend(['--trusted-host', host])
+            args.extend(["--trusted-host", host])
         if finder.allow_all_prereleases:
-            args.append('--pre')
+            args.append("--pre")
         if finder.prefer_binary:
-            args.append('--prefer-binary')
-        args.append('--')
+            args.append("--prefer-binary")
+        args.append("--")
         args.extend(requirements)
         extra_environ = {"_PIP_STANDALONE_CERT": where()}
-        with open_spinner(message) as spinner:
-            call_subprocess(args, spinner=spinner, extra_environ=extra_environ)
+        with open_spinner(f"Installing {kind}") as spinner:
+            call_subprocess(
+                args,
+                command_desc=f"pip subprocess to install {kind}",
+                spinner=spinner,
+                extra_environ=extra_environ,
+            )
 
 
 class NoOpBuildEnvironment(BuildEnvironment):
-    """A no-op drop-in replacement for BuildEnvironment
-    """
+    """A no-op drop-in replacement for BuildEnvironment"""
 
-    def __init__(self):
-        # type: () -> None
+    def __init__(self) -> None:
        pass
 
-    def __enter__(self):
-        # type: () -> None
+    def __enter__(self) -> None:
        pass
 
     def __exit__(
         self,
-        exc_type,  # type: Optional[Type[BaseException]]
-        exc_val,  # type: Optional[BaseException]
-        exc_tb  # type: Optional[TracebackType]
-    ):
-        # type: (...) -> None
+        exc_type: Optional[Type[BaseException]],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[TracebackType],
+    ) -> None:
         pass
 
-    def cleanup(self):
-        # type: () -> None
+    def cleanup(self) -> None:
         pass
 
     def install_requirements(
         self,
-        finder,  # type: PackageFinder
-        requirements,  # type: Iterable[str]
-        prefix_as_string,  # type: str
-        message  # type: str
-    ):
-        # type: (...) -> None
+        finder: "PackageFinder",
+        requirements: Iterable[str],
+        prefix_as_string: str,
+        *,
+        kind: str,
+    ) -> None:
         raise NotImplementedError()
@@ -5,13 +5,14 @@ import hashlib
 import json
 import logging
 import os
-from typing import Any, Dict, List, Optional, Set
+from pathlib import Path
+from typing import Any, Dict, List, Optional
 
 from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
 from pip._vendor.packaging.utils import canonicalize_name
 
 from pip._internal.exceptions import InvalidWheelFilename
-from pip._internal.models.format_control import FormatControl
+from pip._internal.models.direct_url import DirectUrl
 from pip._internal.models.link import Link
 from pip._internal.models.wheel import Wheel
 from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
@@ -19,9 +20,10 @@ from pip._internal.utils.urls import path_to_url
 
 logger = logging.getLogger(__name__)
 
+ORIGIN_JSON_NAME = "origin.json"
+
 
-def _hash_dict(d):
-    # type: (Dict[str, str]) -> str
+def _hash_dict(d: Dict[str, str]) -> str:
     """Return a stable sha224 of a dictionary."""
     s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
     return hashlib.sha224(s.encode("ascii")).hexdigest()
@@ -30,29 +32,16 @@ def _hash_dict(d):
 class Cache:
     """An abstract class - provides cache directories for data from links
 
     :param cache_dir: The root of the cache.
-    :param format_control: An object of FormatControl class to limit
-        binaries being read from the cache.
-    :param allowed_formats: which formats of files the cache should store.
-        ('binary' and 'source' are the only allowed values)
     """
 
-    def __init__(self, cache_dir, format_control, allowed_formats):
-        # type: (str, FormatControl, Set[str]) -> None
+    def __init__(self, cache_dir: str) -> None:
         super().__init__()
         assert not cache_dir or os.path.isabs(cache_dir)
         self.cache_dir = cache_dir or None
-        self.format_control = format_control
-        self.allowed_formats = allowed_formats
-
-        _valid_formats = {"source", "binary"}
-        assert self.allowed_formats.union(_valid_formats) == _valid_formats
 
-    def _get_cache_path_parts(self, link):
-        # type: (Link) -> List[str]
-        """Get parts of part that must be os.path.joined with cache_dir
-        """
+    def _get_cache_path_parts(self, link: Link) -> List[str]:
+        """Get parts of part that must be os.path.joined with cache_dir"""
 
         # We want to generate an url to use as our cache key, we don't want to
         # just re-use the URL because it might have other items in the fragment
@@ -84,22 +73,11 @@ class Cache:
 
         return parts
 
-    def _get_candidates(self, link, canonical_package_name):
-        # type: (Link, str) -> List[Any]
-        can_not_cache = (
-            not self.cache_dir or
-            not canonical_package_name or
-            not link
-        )
+    def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
+        can_not_cache = not self.cache_dir or not canonical_package_name or not link
         if can_not_cache:
             return []
 
-        formats = self.format_control.get_allowed_formats(
-            canonical_package_name
-        )
-        if not self.allowed_formats.intersection(formats):
-            return []
-
         candidates = []
         path = self.get_path_for_link(link)
         if os.path.isdir(path):
@@ -107,19 +85,16 @@ class Cache:
                 candidates.append((candidate, path))
         return candidates
 
-    def get_path_for_link(self, link):
-        # type: (Link) -> str
-        """Return a directory to store cached items in for link.
-        """
+    def get_path_for_link(self, link: Link) -> str:
+        """Return a directory to store cached items in for link."""
         raise NotImplementedError()
 
     def get(
         self,
-        link,  # type: Link
-        package_name,  # type: Optional[str]
-        supported_tags,  # type: List[Tag]
-    ):
-        # type: (...) -> Link
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
         """Returns a link to a cached item if it exists, otherwise returns the
         passed link.
         """
@@ -127,15 +102,12 @@ class Cache:
 
 
 class SimpleWheelCache(Cache):
-    """A cache of wheels for future installs.
-    """
+    """A cache of wheels for future installs."""
 
-    def __init__(self, cache_dir, format_control):
-        # type: (str, FormatControl) -> None
-        super().__init__(cache_dir, format_control, {"binary"})
+    def __init__(self, cache_dir: str) -> None:
+        super().__init__(cache_dir)
 
-    def get_path_for_link(self, link):
-        # type: (Link) -> str
+    def get_path_for_link(self, link: Link) -> str:
         """Return a directory to store cached wheels for link
 
         Because there are M wheels for any one sdist, we provide a directory
@@ -157,20 +129,17 @@ class SimpleWheelCache(Cache):
 
     def get(
         self,
-        link,  # type: Link
-        package_name,  # type: Optional[str]
-        supported_tags,  # type: List[Tag]
-    ):
-        # type: (...) -> Link
+        link: Link,
+        package_name: Optional[str],
+        supported_tags: List[Tag],
+    ) -> Link:
         candidates = []
 
         if not package_name:
             return link
 
         canonical_package_name = canonicalize_name(package_name)
-        for wheel_name, wheel_dir in self._get_candidates(
-            link, canonical_package_name
-        ):
+        for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
             try:
                 wheel = Wheel(wheel_name)
             except InvalidWheelFilename:
@@ -179,7 +148,9 @@ class SimpleWheelCache(Cache):
|
|||||||
logger.debug(
|
logger.debug(
|
||||||
"Ignoring cached wheel %s for %s as it "
|
"Ignoring cached wheel %s for %s as it "
|
||||||
"does not match the expected distribution name %s.",
|
"does not match the expected distribution name %s.",
|
||||||
wheel_name, link, package_name,
|
wheel_name,
|
||||||
|
link,
|
||||||
|
package_name,
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
if not wheel.supported(supported_tags):
|
if not wheel.supported(supported_tags):
|
||||||
@@ -201,27 +172,29 @@ class SimpleWheelCache(Cache):
|
|||||||
|
|
||||||
|
|
||||||
class EphemWheelCache(SimpleWheelCache):
|
class EphemWheelCache(SimpleWheelCache):
|
||||||
"""A SimpleWheelCache that creates it's own temporary cache directory
|
"""A SimpleWheelCache that creates it's own temporary cache directory"""
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, format_control):
|
def __init__(self) -> None:
|
||||||
# type: (FormatControl) -> None
|
|
||||||
self._temp_dir = TempDirectory(
|
self._temp_dir = TempDirectory(
|
||||||
kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
|
kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
|
||||||
globally_managed=True,
|
globally_managed=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
super().__init__(self._temp_dir.path, format_control)
|
super().__init__(self._temp_dir.path)
|
||||||
|
|
||||||
|
|
||||||
class CacheEntry:
|
class CacheEntry:
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
link, # type: Link
|
link: Link,
|
||||||
persistent, # type: bool
|
persistent: bool,
|
||||||
):
|
):
|
||||||
self.link = link
|
self.link = link
|
||||||
self.persistent = persistent
|
self.persistent = persistent
|
||||||
|
self.origin: Optional[DirectUrl] = None
|
||||||
|
origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
|
||||||
|
if origin_direct_url_path.exists():
|
||||||
|
self.origin = DirectUrl.from_json(origin_direct_url_path.read_text())
|
||||||
|
|
||||||
|
|
||||||
class WheelCache(Cache):
|
class WheelCache(Cache):
|
||||||
@@ -231,27 +204,23 @@ class WheelCache(Cache):
|
|||||||
when a certain link is not found in the simple wheel cache first.
|
when a certain link is not found in the simple wheel cache first.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, cache_dir, format_control):
|
def __init__(self, cache_dir: str) -> None:
|
||||||
# type: (str, FormatControl) -> None
|
super().__init__(cache_dir)
|
||||||
super().__init__(cache_dir, format_control, {'binary'})
|
self._wheel_cache = SimpleWheelCache(cache_dir)
|
||||||
self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
|
self._ephem_cache = EphemWheelCache()
|
||||||
self._ephem_cache = EphemWheelCache(format_control)
|
|
||||||
|
|
||||||
def get_path_for_link(self, link):
|
def get_path_for_link(self, link: Link) -> str:
|
||||||
# type: (Link) -> str
|
|
||||||
return self._wheel_cache.get_path_for_link(link)
|
return self._wheel_cache.get_path_for_link(link)
|
||||||
|
|
||||||
def get_ephem_path_for_link(self, link):
|
def get_ephem_path_for_link(self, link: Link) -> str:
|
||||||
# type: (Link) -> str
|
|
||||||
return self._ephem_cache.get_path_for_link(link)
|
return self._ephem_cache.get_path_for_link(link)
|
||||||
|
|
||||||
def get(
|
def get(
|
||||||
self,
|
self,
|
||||||
link, # type: Link
|
link: Link,
|
||||||
package_name, # type: Optional[str]
|
package_name: Optional[str],
|
||||||
supported_tags, # type: List[Tag]
|
supported_tags: List[Tag],
|
||||||
):
|
) -> Link:
|
||||||
# type: (...) -> Link
|
|
||||||
cache_entry = self.get_cache_entry(link, package_name, supported_tags)
|
cache_entry = self.get_cache_entry(link, package_name, supported_tags)
|
||||||
if cache_entry is None:
|
if cache_entry is None:
|
||||||
return link
|
return link
|
||||||
@@ -259,11 +228,10 @@ class WheelCache(Cache):
|
|||||||
|
|
||||||
def get_cache_entry(
|
def get_cache_entry(
|
||||||
self,
|
self,
|
||||||
link, # type: Link
|
link: Link,
|
||||||
package_name, # type: Optional[str]
|
package_name: Optional[str],
|
||||||
supported_tags, # type: List[Tag]
|
supported_tags: List[Tag],
|
||||||
):
|
) -> Optional[CacheEntry]:
|
||||||
# type: (...) -> Optional[CacheEntry]
|
|
||||||
"""Returns a CacheEntry with a link to a cached item if it exists or
|
"""Returns a CacheEntry with a link to a cached item if it exists or
|
||||||
None. The cache entry indicates if the item was found in the persistent
|
None. The cache entry indicates if the item was found in the persistent
|
||||||
or ephemeral cache.
|
or ephemeral cache.
|
||||||
@@ -285,3 +253,20 @@ class WheelCache(Cache):
|
|||||||
return CacheEntry(retval, persistent=False)
|
return CacheEntry(retval, persistent=False)
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
|
||||||
|
origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
|
||||||
|
if origin_path.is_file():
|
||||||
|
origin = DirectUrl.from_json(origin_path.read_text())
|
||||||
|
# TODO: use DirectUrl.equivalent when https://github.com/pypa/pip/pull/10564
|
||||||
|
# is merged.
|
||||||
|
if origin.url != download_info.url:
|
||||||
|
logger.warning(
|
||||||
|
"Origin URL %s in cache entry %s does not match download URL %s. "
|
||||||
|
"This is likely a pip bug or a cache corruption issue.",
|
||||||
|
origin.url,
|
||||||
|
cache_dir,
|
||||||
|
download_info.url,
|
||||||
|
)
|
||||||
|
origin_path.write_text(download_info.to_json(), encoding="utf-8")
|
||||||
|
|||||||
@@ -59,6 +59,14 @@ def autocomplete() -> None:
                 print(dist)
             sys.exit(1)
 
+        should_list_installables = (
+            not current.startswith("-") and subcommand_name == "install"
+        )
+        if should_list_installables:
+            for path in auto_complete_paths(current, "path"):
+                print(path)
+            sys.exit(1)
+
         subcommand = create_command(subcommand_name)
 
         for opt in subcommand.parser.option_list_all:
@@ -138,7 +146,7 @@ def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
     starting with ``current``.
 
     :param current: The word to be completed
-    :param completion_type: path completion type(`file`, `path` or `dir`)i
+    :param completion_type: path completion type(``file``, ``path`` or ``dir``)
     :return: A generator of regular files and/or directories
     """
     directory, filename = os.path.split(current)
@@ -1,5 +1,6 @@
 """Base Command class, and related routines"""
 
+import functools
 import logging
 import logging.config
 import optparse
@@ -7,7 +8,9 @@ import os
 import sys
 import traceback
 from optparse import Values
-from typing import Any, List, Optional, Tuple
+from typing import Any, Callable, List, Optional, Tuple
+
+from pip._vendor.rich import traceback as rich_traceback
 
 from pip._internal.cli import cmdoptions
 from pip._internal.cli.command_context import CommandContextMixIn
@@ -21,12 +24,12 @@ from pip._internal.cli.status_codes import (
 from pip._internal.exceptions import (
     BadCommand,
     CommandError,
+    DiagnosticPipError,
     InstallationError,
     NetworkConnectionError,
     PreviousBuildDirError,
     UninstallationError,
 )
-from pip._internal.utils.deprecation import deprecated
 from pip._internal.utils.filesystem import check_path_owner
 from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
 from pip._internal.utils.misc import get_prog, normalize_path
@@ -85,10 +88,10 @@ class Command(CommandContextMixIn):
         # are present.
         assert not hasattr(options, "no_index")
 
-    def run(self, options: Values, args: List[Any]) -> int:
+    def run(self, options: Values, args: List[str]) -> int:
         raise NotImplementedError
 
-    def parse_args(self, args: List[str]) -> Tuple[Any, Any]:
+    def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
         # factored out for testability
         return self.parser.parse_args(args)
 
@@ -119,6 +122,15 @@ class Command(CommandContextMixIn):
             user_log_file=options.log,
         )
 
+        always_enabled_features = set(options.features_enabled) & set(
+            cmdoptions.ALWAYS_ENABLED_FEATURES
+        )
+        if always_enabled_features:
+            logger.warning(
+                "The following features are always enabled: %s. ",
+                ", ".join(sorted(always_enabled_features)),
+            )
+
         # TODO: Try to get these passing down from the command?
         # without resorting to os.environ to hold these.
         # This also affects isolated builds and it should.
@@ -148,31 +160,20 @@ class Command(CommandContextMixIn):
                 )
                 options.cache_dir = None
 
-        if getattr(options, "build_dir", None):
-            deprecated(
-                reason=(
-                    "The -b/--build/--build-dir/--build-directory "
-                    "option is deprecated and has no effect anymore."
-                ),
-                replacement=(
-                    "use the TMPDIR/TEMP/TMP environment variable, "
-                    "possibly combined with --no-clean"
-                ),
-                gone_in="21.3",
-                issue=8333,
-            )
-
-        if "2020-resolver" in options.features_enabled:
-            logger.warning(
-                "--use-feature=2020-resolver no longer has any effect, "
-                "since it is now the default dependency resolver in pip. "
-                "This will become an error in pip 21.0."
-            )
-
+        def intercepts_unhandled_exc(
+            run_func: Callable[..., int]
+        ) -> Callable[..., int]:
+            @functools.wraps(run_func)
+            def exc_logging_wrapper(*args: Any) -> int:
                 try:
-                    status = self.run(options, args)
+                    status = run_func(*args)
                     assert isinstance(status, int)
                     return status
+                except DiagnosticPipError as exc:
+                    logger.error("[present-rich] %s", exc)
+                    logger.debug("Exception information:", exc_info=True)
+
+                    return ERROR
                 except PreviousBuildDirError as exc:
                     logger.critical(str(exc))
                     logger.debug("Exception information:", exc_info=True)
@@ -194,8 +195,8 @@ class Command(CommandContextMixIn):
 
                     return ERROR
                 except BrokenStdoutLoggingError:
-                    # Bypass our logger and write any remaining messages to stderr
-                    # because stdout no longer works.
+                    # Bypass our logger and write any remaining messages to
+                    # stderr because stdout no longer works.
                     print("ERROR: Pipe to stdout was broken", file=sys.stderr)
                     if level_number <= logging.DEBUG:
                         traceback.print_exc(file=sys.stderr)
@@ -210,5 +211,15 @@ class Command(CommandContextMixIn):
                     logger.critical("Exception:", exc_info=True)
 
                     return UNKNOWN_ERROR
+
+            return exc_logging_wrapper
+
+        try:
+            if not options.debug_mode:
+                run = intercepts_unhandled_exc(self.run)
+            else:
+                run = self.run
+                rich_traceback.install(show_locals=True)
+            return run(options, args)
         finally:
             self.handle_pip_version_check(options)
@@ -10,9 +10,10 @@ pass on state. To be consistent, all options will follow this design.
|
|||||||
# The following comment should be removed at some point in the future.
|
# The following comment should be removed at some point in the future.
|
||||||
# mypy: strict-optional=False
|
# mypy: strict-optional=False
|
||||||
|
|
||||||
|
import importlib.util
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import textwrap
|
import textwrap
|
||||||
import warnings
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
|
from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
@@ -21,7 +22,6 @@ from typing import Any, Callable, Dict, Optional, Tuple
|
|||||||
from pip._vendor.packaging.utils import canonicalize_name
|
from pip._vendor.packaging.utils import canonicalize_name
|
||||||
|
|
||||||
from pip._internal.cli.parser import ConfigOptionParser
|
from pip._internal.cli.parser import ConfigOptionParser
|
||||||
from pip._internal.cli.progress_bars import BAR_TYPES
|
|
||||||
from pip._internal.exceptions import CommandError
|
from pip._internal.exceptions import CommandError
|
||||||
from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
|
from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
|
||||||
from pip._internal.models.format_control import FormatControl
|
from pip._internal.models.format_control import FormatControl
|
||||||
@@ -30,6 +30,8 @@ from pip._internal.models.target_python import TargetPython
|
|||||||
from pip._internal.utils.hashes import STRONG_HASHES
|
from pip._internal.utils.hashes import STRONG_HASHES
|
||||||
from pip._internal.utils.misc import strtobool
|
from pip._internal.utils.misc import strtobool
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
|
def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
|
||||||
"""
|
"""
|
||||||
@@ -57,32 +59,6 @@ def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> Opti
|
|||||||
return option_group
|
return option_group
|
||||||
|
|
||||||
|
|
||||||
def check_install_build_global(
|
|
||||||
options: Values, check_options: Optional[Values] = None
|
|
||||||
) -> None:
|
|
||||||
"""Disable wheels if per-setup.py call options are set.
|
|
||||||
|
|
||||||
:param options: The OptionParser options to update.
|
|
||||||
:param check_options: The options to check, if not supplied defaults to
|
|
||||||
options.
|
|
||||||
"""
|
|
||||||
if check_options is None:
|
|
||||||
check_options = options
|
|
||||||
|
|
||||||
def getname(n: str) -> Optional[Any]:
|
|
||||||
return getattr(check_options, n, None)
|
|
||||||
|
|
||||||
names = ["build_options", "global_options", "install_options"]
|
|
||||||
if any(map(getname, names)):
|
|
||||||
control = options.format_control
|
|
||||||
control.disallow_binaries()
|
|
||||||
warnings.warn(
|
|
||||||
"Disabling all use of wheels due to the use of --build-option "
|
|
||||||
"/ --global-option / --install-option.",
|
|
||||||
stacklevel=2,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def check_dist_restriction(options: Values, check_target: bool = False) -> None:
|
def check_dist_restriction(options: Values, check_target: bool = False) -> None:
|
||||||
"""Function for determining if custom platform options are allowed.
|
"""Function for determining if custom platform options are allowed.
|
||||||
|
|
||||||
@@ -151,6 +127,18 @@ help_: Callable[..., Option] = partial(
|
|||||||
help="Show help.",
|
help="Show help.",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
debug_mode: Callable[..., Option] = partial(
|
||||||
|
Option,
|
||||||
|
"--debug",
|
||||||
|
dest="debug_mode",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help=(
|
||||||
|
"Let unhandled exceptions propagate outside the main subroutine, "
|
||||||
|
"instead of logging them to stderr."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
isolated_mode: Callable[..., Option] = partial(
|
isolated_mode: Callable[..., Option] = partial(
|
||||||
Option,
|
Option,
|
||||||
"--isolated",
|
"--isolated",
|
||||||
@@ -165,13 +153,30 @@ isolated_mode: Callable[..., Option] = partial(
|
|||||||
|
|
||||||
require_virtualenv: Callable[..., Option] = partial(
|
require_virtualenv: Callable[..., Option] = partial(
|
||||||
Option,
|
Option,
|
||||||
# Run only if inside a virtualenv, bail if not.
|
|
||||||
"--require-virtualenv",
|
"--require-virtualenv",
|
||||||
"--require-venv",
|
"--require-venv",
|
||||||
dest="require_venv",
|
dest="require_venv",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
default=False,
|
default=False,
|
||||||
help=SUPPRESS_HELP,
|
help=(
|
||||||
|
"Allow pip to only run in a virtual environment; "
|
||||||
|
"exit with an error otherwise."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
override_externally_managed: Callable[..., Option] = partial(
|
||||||
|
Option,
|
||||||
|
"--break-system-packages",
|
||||||
|
dest="override_externally_managed",
|
||||||
|
action="store_true",
|
||||||
|
help="Allow pip to modify an EXTERNALLY-MANAGED Python installation",
|
||||||
|
)
|
||||||
|
|
||||||
|
python: Callable[..., Option] = partial(
|
||||||
|
Option,
|
||||||
|
"--python",
|
||||||
|
dest="python",
|
||||||
|
help="Run pip with the specified Python interpreter.",
|
||||||
)
|
)
|
||||||
|
|
||||||
verbose: Callable[..., Option] = partial(
|
verbose: Callable[..., Option] = partial(
|
||||||
@@ -221,13 +226,9 @@ progress_bar: Callable[..., Option] = partial(
|
|||||||
"--progress-bar",
|
"--progress-bar",
|
||||||
dest="progress_bar",
|
dest="progress_bar",
|
||||||
type="choice",
|
type="choice",
|
||||||
choices=list(BAR_TYPES.keys()),
|
choices=["on", "off"],
|
||||||
default="on",
|
default="on",
|
||||||
help=(
|
help="Specify whether the progress bar should be used [on, off] (default: on)",
|
||||||
"Specify type of progress to be displayed ["
|
|
||||||
+ "|".join(BAR_TYPES.keys())
|
|
||||||
+ "] (default: %default)"
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
log: Callable[..., Option] = partial(
|
log: Callable[..., Option] = partial(
|
||||||
@@ -251,13 +252,26 @@ no_input: Callable[..., Option] = partial(
|
|||||||
help="Disable prompting for input.",
|
help="Disable prompting for input.",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
keyring_provider: Callable[..., Option] = partial(
|
||||||
|
Option,
|
||||||
|
"--keyring-provider",
|
||||||
|
dest="keyring_provider",
|
||||||
|
choices=["auto", "disabled", "import", "subprocess"],
|
||||||
|
default="auto",
|
||||||
|
help=(
|
||||||
|
"Enable the credential lookup via the keyring library if user input is allowed."
|
||||||
|
" Specify which mechanism to use [disabled, import, subprocess]."
|
||||||
|
" (default: disabled)"
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
proxy: Callable[..., Option] = partial(
|
proxy: Callable[..., Option] = partial(
|
||||||
Option,
|
Option,
|
||||||
"--proxy",
|
"--proxy",
|
||||||
dest="proxy",
|
dest="proxy",
|
||||||
type="str",
|
type="str",
|
||||||
default="",
|
default="",
|
||||||
help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
|
help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
|
||||||
)
|
)
|
||||||
|
|
||||||
retries: Callable[..., Option] = partial(
|
retries: Callable[..., Option] = partial(
|
||||||
@@ -719,18 +733,6 @@ no_deps: Callable[..., Option] = partial(
|
|||||||
help="Don't install package dependencies.",
|
help="Don't install package dependencies.",
|
||||||
)
|
)
|
||||||
|
|
||||||
build_dir: Callable[..., Option] = partial(
|
|
||||||
PipOption,
|
|
||||||
"-b",
|
|
||||||
"--build",
|
|
||||||
"--build-dir",
|
|
||||||
"--build-directory",
|
|
||||||
dest="build_dir",
|
|
||||||
type="path",
|
|
||||||
metavar="dir",
|
|
||||||
help=SUPPRESS_HELP,
|
|
||||||
)
|
|
||||||
|
|
||||||
ignore_requires_python: Callable[..., Option] = partial(
|
ignore_requires_python: Callable[..., Option] = partial(
|
||||||
Option,
|
Option,
|
||||||
"--ignore-requires-python",
|
"--ignore-requires-python",
|
||||||
@@ -750,6 +752,15 @@ no_build_isolation: Callable[..., Option] = partial(
|
|||||||
"if this option is used.",
|
"if this option is used.",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
check_build_deps: Callable[..., Option] = partial(
|
||||||
|
Option,
|
||||||
|
"--check-build-dependencies",
|
||||||
|
dest="check_build_deps",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help="Check the build dependencies when PEP517 is used.",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _handle_no_use_pep517(
|
def _handle_no_use_pep517(
|
||||||
option: Option, opt: str, value: str, parser: OptionParser
|
option: Option, opt: str, value: str, parser: OptionParser
|
||||||
@@ -772,6 +783,16 @@ def _handle_no_use_pep517(
|
|||||||
"""
|
"""
|
||||||
raise_option_error(parser, option=option, msg=msg)
|
raise_option_error(parser, option=option, msg=msg)
|
||||||
|
|
||||||
|
# If user doesn't wish to use pep517, we check if setuptools and wheel are installed
|
||||||
|
# and raise error if it is not.
|
||||||
|
packages = ("setuptools", "wheel")
|
||||||
|
if not all(importlib.util.find_spec(package) for package in packages):
|
||||||
|
msg = (
|
||||||
|
f"It is not possible to use --no-use-pep517 "
|
||||||
|
f"without {' and '.join(packages)} installed."
|
||||||
|
)
|
||||||
|
raise_option_error(parser, option=option, msg=msg)
|
||||||
|
|
||||||
# Otherwise, --no-use-pep517 was passed via the command-line.
|
# Otherwise, --no-use-pep517 was passed via the command-line.
|
||||||
parser.values.use_pep517 = False
|
parser.values.use_pep517 = False
|
||||||
|
|
||||||
@@ -796,17 +817,38 @@ no_use_pep517: Any = partial(
|
|||||||
help=SUPPRESS_HELP,
|
help=SUPPRESS_HELP,
|
||||||
)
|
)
|
||||||
|
|
||||||
install_options: Callable[..., Option] = partial(
|
|
||||||
|
def _handle_config_settings(
|
||||||
|
option: Option, opt_str: str, value: str, parser: OptionParser
|
||||||
|
) -> None:
|
||||||
|
key, sep, val = value.partition("=")
|
||||||
|
if sep != "=":
|
||||||
|
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa
|
||||||
|
dest = getattr(parser.values, option.dest)
|
||||||
|
if dest is None:
|
||||||
|
dest = {}
|
||||||
|
setattr(parser.values, option.dest, dest)
|
||||||
|
if key in dest:
|
||||||
|
if isinstance(dest[key], list):
|
||||||
|
dest[key].append(val)
|
||||||
|
else:
|
||||||
|
dest[key] = [dest[key], val]
|
||||||
|
else:
|
||||||
|
dest[key] = val
|
||||||
|
|
||||||
|
|
||||||
|
config_settings: Callable[..., Option] = partial(
|
||||||
Option,
|
Option,
|
||||||
"--install-option",
|
"-C",
|
||||||
dest="install_options",
|
"--config-settings",
|
||||||
action="append",
|
dest="config_settings",
|
||||||
metavar="options",
|
type=str,
|
||||||
help="Extra arguments to be supplied to the setup.py install "
|
action="callback",
|
||||||
'command (use like --install-option="--install-scripts=/usr/local/'
|
callback=_handle_config_settings,
|
||||||
'bin"). Use multiple --install-option options to pass multiple '
|
metavar="settings",
|
||||||
"options to setup.py install. If you are using an option with a "
|
help="Configuration settings to be passed to the PEP 517 build backend. "
|
||||||
"directory path, be sure to use absolute path.",
|
"Settings take the form KEY=VALUE. Use multiple --config-settings options "
|
||||||
|
"to pass multiple keys to the backend.",
|
||||||
)
|
)
|
||||||
|
|
||||||
build_options: Callable[..., Option] = partial(
|
build_options: Callable[..., Option] = partial(
|
||||||
@@ -855,6 +897,15 @@ disable_pip_version_check: Callable[..., Option] = partial(
|
|||||||
"of pip is available for download. Implied with --no-index.",
|
"of pip is available for download. Implied with --no-index.",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
root_user_action: Callable[..., Option] = partial(
|
||||||
|
Option,
|
||||||
|
"--root-user-action",
|
||||||
|
dest="root_user_action",
|
||||||
|
default="warn",
|
||||||
|
choices=["warn", "ignore"],
|
||||||
|
help="Action if pip is run as a root user. By default, a warning message is shown.",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def _handle_merge_hash(
|
def _handle_merge_hash(
|
||||||
option: Option, opt_str: str, value: str, parser: OptionParser
|
option: Option, opt_str: str, value: str, parser: OptionParser
|
||||||
@@ -943,6 +994,11 @@ no_python_version_warning: Callable[..., Option] = partial(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Features that are now always on. A warning is printed if they are used.
|
||||||
|
ALWAYS_ENABLED_FEATURES = [
|
||||||
|
"no-binary-enable-wheel-cache", # always on since 23.1
|
||||||
|
]
|
||||||
|
|
||||||
use_new_feature: Callable[..., Option] = partial(
|
use_new_feature: Callable[..., Option] = partial(
|
||||||
Option,
|
Option,
|
||||||
"--use-feature",
|
"--use-feature",
|
||||||
@@ -950,7 +1006,11 @@ use_new_feature: Callable[..., Option] = partial(
|
|||||||
metavar="feature",
|
metavar="feature",
|
||||||
action="append",
|
action="append",
|
||||||
default=[],
|
default=[],
|
||||||
choices=["2020-resolver", "fast-deps", "in-tree-build"],
|
choices=[
|
||||||
|
"fast-deps",
|
||||||
|
"truststore",
|
||||||
|
]
|
||||||
|
+ ALWAYS_ENABLED_FEATURES,
|
||||||
help="Enable new functionality, that may be backward incompatible.",
|
help="Enable new functionality, that may be backward incompatible.",
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -961,7 +1021,9 @@ use_deprecated_feature: Callable[..., Option] = partial(
|
|||||||
metavar="feature",
|
metavar="feature",
|
||||||
action="append",
|
action="append",
|
||||||
default=[],
|
default=[],
|
||||||
choices=["legacy-resolver"],
|
choices=[
|
||||||
|
"legacy-resolver",
|
||||||
|
],
|
||||||
help=("Enable deprecated functionality, that will be removed in the future."),
|
help=("Enable deprecated functionality, that will be removed in the future."),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -974,13 +1036,16 @@ general_group: Dict[str, Any] = {
|
|||||||
"name": "General Options",
|
"name": "General Options",
|
||||||
"options": [
|
"options": [
|
||||||
help_,
|
help_,
|
||||||
|
debug_mode,
|
||||||
isolated_mode,
|
isolated_mode,
|
||||||
require_virtualenv,
|
require_virtualenv,
|
||||||
|
python,
|
||||||
verbose,
|
verbose,
|
||||||
version,
|
version,
|
||||||
quiet,
|
quiet,
|
||||||
log,
|
log,
|
||||||
no_input,
|
no_input,
|
||||||
|
keyring_provider,
|
||||||
proxy,
|
proxy,
|
||||||
retries,
|
retries,
|
||||||
timeout,
|
timeout,
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
 from contextlib import ExitStack, contextmanager
-from typing import ContextManager, Iterator, TypeVar
+from typing import ContextManager, Generator, TypeVar
 
 _T = TypeVar("_T", covariant=True)
 
@@ -11,7 +11,7 @@ class CommandContextMixIn:
         self._main_context = ExitStack()
 
     @contextmanager
-    def main_context(self) -> Iterator[None]:
+    def main_context(self) -> Generator[None, None, None]:
         assert not self._in_main_context
 
         self._in_main_context = True
@@ -4,6 +4,7 @@ import locale
 import logging
 import os
 import sys
+import warnings
 from typing import List, Optional
 
 from pip._internal.cli.autocompletion import autocomplete
@@ -46,6 +47,14 @@ def main(args: Optional[List[str]] = None) -> int:
     if args is None:
         args = sys.argv[1:]
 
+    # Suppress the pkg_resources deprecation warning
+    # Note - we use a module of .*pkg_resources to cover
+    # the normal case (pip._vendor.pkg_resources) and the
+    # devendored case (a bare pkg_resources)
+    warnings.filterwarnings(
+        action="ignore", category=DeprecationWarning, module=".*pkg_resources"
+    )
+
     # Configure our deprecation warnings to be sent through loggers
     deprecation.install_warning_logger()
 
@@ -2,9 +2,11 @@
 """
 
 import os
+import subprocess
 import sys
-from typing import List, Tuple
+from typing import List, Optional, Tuple
 
+from pip._internal.build_env import get_runnable_pip
 from pip._internal.cli import cmdoptions
 from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
 from pip._internal.commands import commands_dict, get_similar_commands
@@ -45,6 +47,25 @@ def create_main_parser() -> ConfigOptionParser:
     return parser
 
 
+def identify_python_interpreter(python: str) -> Optional[str]:
+    # If the named file exists, use it.
+    # If it's a directory, assume it's a virtual environment and
+    # look for the environment's Python executable.
+    if os.path.exists(python):
+        if os.path.isdir(python):
+            # bin/python for Unix, Scripts/python.exe for Windows
+            # Try both in case of odd cases like cygwin.
+            for exe in ("bin/python", "Scripts/python.exe"):
+                py = os.path.join(python, exe)
+                if os.path.exists(py):
+                    return py
+        else:
+            return python
+
+    # Could not find the interpreter specified
+    return None
+
+
 def parse_command(args: List[str]) -> Tuple[str, List[str]]:
     parser = create_main_parser()
 
@@ -57,6 +78,32 @@ def parse_command(args: List[str]) -> Tuple[str, List[str]]:
     # args_else: ['install', '--user', 'INITools']
     general_options, args_else = parser.parse_args(args)
 
+    # --python
+    if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
+        # Re-invoke pip using the specified Python interpreter
+        interpreter = identify_python_interpreter(general_options.python)
+        if interpreter is None:
+            raise CommandError(
+                f"Could not locate Python interpreter {general_options.python}"
+            )
+
+        pip_cmd = [
+            interpreter,
+            get_runnable_pip(),
+        ]
+        pip_cmd.extend(args)
+
+        # Set a flag so the child doesn't re-invoke itself, causing
+        # an infinite loop.
+        os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
+        returncode = 0
+        try:
+            proc = subprocess.run(pip_cmd)
+            returncode = proc.returncode
+        except (subprocess.SubprocessError, OSError) as exc:
+            raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
+        sys.exit(returncode)
+
     # --version
     if general_options.version:
         sys.stdout.write(parser.version)
@@ -6,7 +6,7 @@ import shutil
 import sys
 import textwrap
 from contextlib import suppress
-from typing import Any, Dict, Iterator, List, Tuple
+from typing import Any, Dict, Generator, List, Tuple
 
 from pip._internal.cli.status_codes import UNKNOWN_ERROR
 from pip._internal.configuration import Configuration, ConfigurationError
@@ -175,7 +175,9 @@ class ConfigOptionParser(CustomOptionParser):
             print(f"An error occurred during configuration: {exc}")
             sys.exit(3)
 
-    def _get_ordered_configuration_items(self) -> Iterator[Tuple[str, Any]]:
+    def _get_ordered_configuration_items(
+        self,
+    ) -> Generator[Tuple[str, Any], None, None]:
         # Configuration gives keys in an unordered manner. Order them.
         override_order = ["global", self.name, ":env:"]
 
@@ -1,250 +1,68 @@
|
|||||||
import itertools
|
import functools
|
||||||
import sys
|
from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
|
||||||
from signal import SIGINT, default_int_handler, signal
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
|
from pip._vendor.rich.progress import (
|
||||||
from pip._vendor.progress.spinner import Spinner
|
BarColumn,
|
||||||
|
DownloadColumn,
|
||||||
|
FileSizeColumn,
|
||||||
|
Progress,
|
||||||
|
ProgressColumn,
|
||||||
|
SpinnerColumn,
|
||||||
|
TextColumn,
|
||||||
|
TimeElapsedColumn,
|
||||||
|
TimeRemainingColumn,
|
||||||
|
TransferSpeedColumn,
|
||||||
|
)
|
||||||
|
|
||||||
from pip._internal.utils.compat import WINDOWS
|
|
||||||
from pip._internal.utils.logging import get_indentation
|
from pip._internal.utils.logging import get_indentation
|
||||||
from pip._internal.utils.misc import format_size
|
|
||||||
|
|
||||||
try:
|
DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
|
||||||
from pip._vendor import colorama
|
|
||||||
# Lots of different errors can come from this, including SystemError and
|
|
||||||
# ImportError.
|
|
||||||
except Exception:
|
|
||||||
colorama = None
|
|
||||||
|
|
||||||
|
|
||||||
def _select_progress_class(preferred: Bar, fallback: Bar) -> Bar:
|
def _rich_progress_bar(
|
||||||
encoding = getattr(preferred.file, "encoding", None)
|
iterable: Iterable[bytes],
|
||||||
|
*,
|
||||||
|
bar_type: str,
|
||||||
|
size: int,
|
||||||
|
) -> Generator[bytes, None, None]:
|
||||||
|
assert bar_type == "on", "This should only be used in the default mode."
|
||||||
|
|
||||||
# If we don't know what encoding this file is in, then we'll just assume
|
if not size:
|
||||||
# that it doesn't support unicode and use the ASCII bar.
|
total = float("inf")
|
||||||
if not encoding:
|
columns: Tuple[ProgressColumn, ...] = (
|
||||||
return fallback
|
TextColumn("[progress.description]{task.description}"),
|
||||||
|
SpinnerColumn("line", speed=1.5),
|
||||||
# Collect all of the possible characters we want to use with the preferred
|
FileSizeColumn(),
|
||||||
# bar.
|
TransferSpeedColumn(),
|
||||||
characters = [
|
TimeElapsedColumn(),
|
||||||
getattr(preferred, "empty_fill", ""),
|
)
|
||||||
getattr(preferred, "fill", ""),
|
|
||||||
]
|
|
||||||
characters += list(getattr(preferred, "phases", []))
|
|
||||||
|
|
||||||
# Try to decode the characters we're using for the bar using the encoding
|
|
||||||
# of the given file, if this works then we'll assume that we can use the
|
|
||||||
# fancier bar and if not we'll fall back to the plaintext bar.
|
|
||||||
try:
|
|
||||||
"".join(characters).encode(encoding)
|
|
||||||
except UnicodeEncodeError:
|
|
||||||
return fallback
|
|
||||||
else:
|
else:
|
||||||
return preferred
|
total = size
|
||||||
|
columns = (
|
||||||
|
TextColumn("[progress.description]{task.description}"),
|
||||||
_BaseBar: Any = _select_progress_class(IncrementalBar, Bar)
|
BarColumn(),
|
||||||
|
DownloadColumn(),
|
||||||
|
TransferSpeedColumn(),
|
||||||
class InterruptibleMixin:
|
TextColumn("eta"),
|
||||||
"""
|
TimeRemainingColumn(),
|
||||||
Helper to ensure that self.finish() gets called on keyboard interrupt.
|
|
||||||
|
|
||||||
This allows downloads to be interrupted without leaving temporary state
|
|
||||||
(like hidden cursors) behind.
|
|
||||||
|
|
||||||
This class is similar to the progress library's existing SigIntMixin
|
|
||||||
helper, but as of version 1.2, that helper has the following problems:
|
|
||||||
|
|
||||||
1. It calls sys.exit().
|
|
||||||
2. It discards the existing SIGINT handler completely.
|
|
||||||
3. It leaves its own handler in place even after an uninterrupted finish,
|
|
||||||
which will have unexpected delayed effects if the user triggers an
|
|
||||||
unrelated keyboard interrupt some time after a progress-displaying
|
|
||||||
download has already completed, for example.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
|
||||||
"""
|
|
||||||
Save the original SIGINT handler for later.
|
|
||||||
"""
|
|
||||||
# https://github.com/python/mypy/issues/5887
|
|
||||||
super().__init__(*args, **kwargs) # type: ignore
|
|
||||||
|
|
||||||
self.original_handler = signal(SIGINT, self.handle_sigint)
|
|
||||||
|
|
||||||
# If signal() returns None, the previous handler was not installed from
|
|
||||||
# Python, and we cannot restore it. This probably should not happen,
|
|
||||||
# but if it does, we must restore something sensible instead, at least.
|
|
||||||
# The least bad option should be Python's default SIGINT handler, which
|
|
||||||
# just raises KeyboardInterrupt.
|
|
||||||
if self.original_handler is None:
|
|
||||||
self.original_handler = default_int_handler
|
|
||||||
|
|
||||||
def finish(self) -> None:
|
|
||||||
"""
|
|
||||||
Restore the original SIGINT handler after finishing.
|
|
||||||
|
|
||||||
This should happen regardless of whether the progress display finishes
|
|
||||||
normally, or gets interrupted.
|
|
||||||
"""
|
|
||||||
super().finish() # type: ignore
|
|
||||||
signal(SIGINT, self.original_handler)
|
|
||||||
|
|
||||||
def handle_sigint(self, signum, frame): # type: ignore
|
|
||||||
"""
|
|
||||||
Call self.finish() before delegating to the original SIGINT handler.
|
|
||||||
|
|
||||||
This handler should only be in place while the progress display is
|
|
||||||
active.
|
|
||||||
"""
|
|
||||||
self.finish()
|
|
||||||
self.original_handler(signum, frame)
|
|
||||||
|
|
||||||
|
|
||||||
class SilentBar(Bar):
|
|
||||||
def update(self) -> None:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class BlueEmojiBar(IncrementalBar):
|
|
||||||
|
|
||||||
suffix = "%(percent)d%%"
|
|
||||||
bar_prefix = " "
|
|
||||||
bar_suffix = " "
|
|
||||||
phases = ("\U0001F539", "\U0001F537", "\U0001F535")
|
|
||||||
|
|
||||||
|
|
||||||
class DownloadProgressMixin:
|
|
||||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
|
||||||
# https://github.com/python/mypy/issues/5887
|
|
||||||
super().__init__(*args, **kwargs) # type: ignore
|
|
||||||
self.message: str = (" " * (get_indentation() + 2)) + self.message
|
|
||||||
|
|
||||||
@property
|
|
||||||
def downloaded(self) -> str:
|
|
||||||
return format_size(self.index) # type: ignore
|
|
||||||
|
|
||||||
@property
|
|
||||||
def download_speed(self) -> str:
|
|
||||||
# Avoid zero division errors...
|
|
||||||
if self.avg == 0.0: # type: ignore
|
|
||||||
return "..."
|
|
||||||
return format_size(1 / self.avg) + "/s" # type: ignore
|
|
||||||
|
|
||||||
@property
|
|
||||||
def pretty_eta(self) -> str:
|
|
||||||
if self.eta: # type: ignore
|
|
||||||
return f"eta {self.eta_td}" # type: ignore
|
|
||||||
return ""
|
|
||||||
|
|
||||||
def iter(self, it): # type: ignore
|
|
||||||
for x in it:
|
|
||||||
yield x
|
|
||||||
# B305 is incorrectly raised here
|
|
||||||
# https://github.com/PyCQA/flake8-bugbear/issues/59
|
|
||||||
self.next(len(x)) # noqa: B305
|
|
||||||
self.finish()
|
|
||||||
|
|
||||||
|
|
||||||
class WindowsMixin:
|
|
||||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
|
||||||
# The Windows terminal does not support the hide/show cursor ANSI codes
|
|
||||||
# even with colorama. So we'll ensure that hide_cursor is False on
|
|
||||||
# Windows.
|
|
||||||
# This call needs to go before the super() call, so that hide_cursor
|
|
||||||
# is set in time. The base progress bar class writes the "hide cursor"
|
|
||||||
# code to the terminal in its init, so if we don't set this soon
|
|
||||||
# enough, we get a "hide" with no corresponding "show"...
|
|
||||||
if WINDOWS and self.hide_cursor: # type: ignore
|
|
||||||
self.hide_cursor = False
|
|
||||||
|
|
||||||
# https://github.com/python/mypy/issues/5887
|
|
||||||
super().__init__(*args, **kwargs) # type: ignore
|
|
||||||
|
|
||||||
# Check if we are running on Windows and we have the colorama module,
|
|
||||||
# if we do then wrap our file with it.
|
|
||||||
if WINDOWS and colorama:
|
|
||||||
self.file = colorama.AnsiToWin32(self.file) # type: ignore
|
|
||||||
# The progress code expects to be able to call self.file.isatty()
|
|
||||||
# but the colorama.AnsiToWin32() object doesn't have that, so we'll
|
|
||||||
# add it.
|
|
||||||
self.file.isatty = lambda: self.file.wrapped.isatty()
|
|
||||||
# The progress code expects to be able to call self.file.flush()
|
|
||||||
# but the colorama.AnsiToWin32() object doesn't have that, so we'll
|
|
||||||
# add it.
|
|
||||||
self.file.flush = lambda: self.file.wrapped.flush()
|
|
||||||
|
|
||||||
|
|
||||||
class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):
|
|
||||||
|
|
||||||
file = sys.stdout
|
|
||||||
message = "%(percent)d%%"
|
|
||||||
suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
|
|
||||||
|
|
||||||
|
|
||||||
class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DownloadBar(BaseDownloadProgressBar, Bar):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DownloadProgressSpinner(
|
|
||||||
WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
|
|
||||||
):
|
|
||||||
|
|
||||||
file = sys.stdout
|
|
||||||
suffix = "%(downloaded)s %(download_speed)s"
|
|
||||||
|
|
||||||
def next_phase(self) -> str:
|
|
||||||
if not hasattr(self, "_phaser"):
|
|
||||||
self._phaser = itertools.cycle(self.phases)
|
|
||||||
return next(self._phaser)
|
|
||||||
|
|
||||||
def update(self) -> None:
|
|
||||||
message = self.message % self
|
|
||||||
phase = self.next_phase()
|
|
||||||
suffix = self.suffix % self
|
|
||||||
line = "".join(
|
|
||||||
[
|
|
||||||
message,
|
|
||||||
" " if message else "",
|
|
||||||
phase,
|
|
||||||
" " if suffix else "",
|
|
||||||
suffix,
|
|
||||||
]
|
|
||||||
)
|
)
|
||||||
|
|
||||||
self.writeln(line)
|
progress = Progress(*columns, refresh_per_second=30)
|
||||||
|
task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
|
||||||
|
with progress:
|
||||||
|
for chunk in iterable:
|
||||||
|
yield chunk
|
||||||
|
progress.update(task_id, advance=len(chunk))
|
||||||
|
|
||||||
|
|
||||||
BAR_TYPES = {
|
def get_download_progress_renderer(
|
||||||
"off": (DownloadSilentBar, DownloadSilentBar),
|
*, bar_type: str, size: Optional[int] = None
|
||||||
"on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
|
) -> DownloadProgressRenderer:
|
||||||
"ascii": (DownloadBar, DownloadProgressSpinner),
|
"""Get an object that can be used to render the download progress.
|
||||||
"pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
|
|
||||||
"emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
Returns a callable, that takes an iterable to "wrap".
|
||||||
def DownloadProgressProvider(progress_bar, max=None): # type: ignore
|
"""
|
||||||
if max is None or max == 0:
|
if bar_type == "on":
|
||||||
return BAR_TYPES[progress_bar][1]().iter
|
return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
|
||||||
else:
|
else:
|
||||||
return BAR_TYPES[progress_bar][0](max=max).iter
|
return iter # no-op, when passed an iterator
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import os
|
|||||||
import sys
|
import sys
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from optparse import Values
|
from optparse import Values
|
||||||
from typing import Any, List, Optional, Tuple
|
from typing import TYPE_CHECKING, Any, List, Optional, Tuple
|
||||||
|
|
||||||
from pip._internal.cache import WheelCache
|
from pip._internal.cache import WheelCache
|
||||||
from pip._internal.cli import cmdoptions
|
from pip._internal.cli import cmdoptions
|
||||||
@@ -22,6 +22,7 @@ from pip._internal.index.package_finder import PackageFinder
|
|||||||
from pip._internal.models.selection_prefs import SelectionPreferences
|
from pip._internal.models.selection_prefs import SelectionPreferences
|
||||||
from pip._internal.models.target_python import TargetPython
|
from pip._internal.models.target_python import TargetPython
|
||||||
from pip._internal.network.session import PipSession
|
from pip._internal.network.session import PipSession
|
||||||
|
from pip._internal.operations.build.build_tracker import BuildTracker
|
||||||
from pip._internal.operations.prepare import RequirementPreparer
|
from pip._internal.operations.prepare import RequirementPreparer
|
||||||
from pip._internal.req.constructors import (
|
from pip._internal.req.constructors import (
|
||||||
install_req_from_editable,
|
install_req_from_editable,
|
||||||
@@ -31,7 +32,6 @@ from pip._internal.req.constructors import (
|
|||||||
)
|
)
|
||||||
from pip._internal.req.req_file import parse_requirements
|
from pip._internal.req.req_file import parse_requirements
|
||||||
from pip._internal.req.req_install import InstallRequirement
|
from pip._internal.req.req_install import InstallRequirement
|
||||||
from pip._internal.req.req_tracker import RequirementTracker
|
|
||||||
from pip._internal.resolution.base import BaseResolver
|
from pip._internal.resolution.base import BaseResolver
|
||||||
from pip._internal.self_outdated_check import pip_self_version_check
|
from pip._internal.self_outdated_check import pip_self_version_check
|
||||||
from pip._internal.utils.temp_dir import (
|
from pip._internal.utils.temp_dir import (
|
||||||
@@ -41,9 +41,33 @@ from pip._internal.utils.temp_dir import (
|
|||||||
)
|
)
|
||||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from ssl import SSLContext
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _create_truststore_ssl_context() -> Optional["SSLContext"]:
|
||||||
|
if sys.version_info < (3, 10):
|
||||||
|
raise CommandError("The truststore feature is only available for Python 3.10+")
|
||||||
|
|
||||||
|
try:
|
||||||
|
import ssl
|
||||||
|
except ImportError:
|
||||||
|
logger.warning("Disabling truststore since ssl support is missing")
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
import truststore
|
||||||
|
except ImportError:
|
||||||
|
raise CommandError(
|
||||||
|
"To use the truststore feature, 'truststore' must be installed into "
|
||||||
|
"pip's current environment."
|
||||||
|
)
|
||||||
|
|
||||||
|
return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
||||||
|
|
||||||
|
|
||||||
class SessionCommandMixin(CommandContextMixIn):
|
class SessionCommandMixin(CommandContextMixIn):
|
||||||
|
|
||||||
"""
|
"""
|
||||||
@@ -83,15 +107,27 @@ class SessionCommandMixin(CommandContextMixIn):
|
|||||||
options: Values,
|
options: Values,
|
||||||
retries: Optional[int] = None,
|
retries: Optional[int] = None,
|
||||||
timeout: Optional[int] = None,
|
timeout: Optional[int] = None,
|
||||||
|
fallback_to_certifi: bool = False,
|
||||||
) -> PipSession:
|
) -> PipSession:
|
||||||
assert not options.cache_dir or os.path.isabs(options.cache_dir)
|
cache_dir = options.cache_dir
|
||||||
|
assert not cache_dir or os.path.isabs(cache_dir)
|
||||||
|
|
||||||
|
if "truststore" in options.features_enabled:
|
||||||
|
try:
|
||||||
|
ssl_context = _create_truststore_ssl_context()
|
||||||
|
except Exception:
|
||||||
|
if not fallback_to_certifi:
|
||||||
|
raise
|
||||||
|
ssl_context = None
|
||||||
|
else:
|
||||||
|
ssl_context = None
|
||||||
|
|
||||||
session = PipSession(
|
session = PipSession(
|
||||||
cache=(
|
cache=os.path.join(cache_dir, "http") if cache_dir else None,
|
||||||
os.path.join(options.cache_dir, "http") if options.cache_dir else None
|
|
||||||
),
|
|
||||||
retries=retries if retries is not None else options.retries,
|
retries=retries if retries is not None else options.retries,
|
||||||
trusted_hosts=options.trusted_hosts,
|
trusted_hosts=options.trusted_hosts,
|
||||||
index_urls=self._get_index_urls(options),
|
index_urls=self._get_index_urls(options),
|
||||||
|
ssl_context=ssl_context,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Handle custom ca-bundles from the user
|
# Handle custom ca-bundles from the user
|
||||||
@@ -115,6 +151,7 @@ class SessionCommandMixin(CommandContextMixIn):
|
|||||||
|
|
||||||
# Determine if we can prompt the user for authentication or not
|
# Determine if we can prompt the user for authentication or not
|
||||||
session.auth.prompting = not options.no_input
|
session.auth.prompting = not options.no_input
|
||||||
|
session.auth.keyring_provider = options.keyring_provider
|
||||||
|
|
||||||
return session
|
return session
|
||||||
|
|
||||||
@@ -141,7 +178,14 @@ class IndexGroupCommand(Command, SessionCommandMixin):
|
|||||||
|
|
||||||
# Otherwise, check if we're using the latest version of pip available.
|
# Otherwise, check if we're using the latest version of pip available.
|
||||||
session = self._build_session(
|
session = self._build_session(
|
||||||
options, retries=0, timeout=min(5, options.timeout)
|
options,
|
||||||
|
retries=0,
|
||||||
|
timeout=min(5, options.timeout),
|
||||||
|
# This is set to ensure the function does not fail when truststore is
|
||||||
|
# specified in use-feature but cannot be loaded. This usually raises a
|
||||||
|
# CommandError and shows a nice user-facing error, but this function is not
|
||||||
|
# called in that try-except block.
|
+fallback_to_certifi=True,
 )
 with session:
 pip_self_version_check(session, options)
@@ -172,9 +216,10 @@ def warn_if_run_as_root() -> None:
 # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
 if sys.platform == "win32" or sys.platform == "cygwin":
 return
-if sys.platform == "darwin" or sys.platform == "linux":
 if os.getuid() != 0:
 return

 logger.warning(
 "Running pip as the 'root' user can result in broken permissions and "
 "conflicting behaviour with the system package manager. "
@@ -230,11 +275,12 @@ class RequirementCommand(IndexGroupCommand):
 cls,
 temp_build_dir: TempDirectory,
 options: Values,
-req_tracker: RequirementTracker,
+build_tracker: BuildTracker,
 session: PipSession,
 finder: PackageFinder,
 use_user_site: bool,
 download_dir: Optional[str] = None,
+verbosity: int = 0,
 ) -> RequirementPreparer:
 """
 Create a RequirementPreparer instance for the given parameters.
@@ -265,14 +311,15 @@ class RequirementCommand(IndexGroupCommand):
 src_dir=options.src_dir,
 download_dir=download_dir,
 build_isolation=options.build_isolation,
-req_tracker=req_tracker,
+check_build_deps=options.check_build_deps,
+build_tracker=build_tracker,
 session=session,
 progress_bar=options.progress_bar,
 finder=finder,
 require_hashes=options.require_hashes,
 use_user_site=use_user_site,
 lazy_wheel=lazy_wheel,
-in_tree_build="in-tree-build" in options.features_enabled,
+verbosity=verbosity,
 )

 @classmethod
@@ -363,10 +410,11 @@ class RequirementCommand(IndexGroupCommand):
 for req in args:
 req_to_add = install_req_from_line(
 req,
-None,
+comes_from=None,
 isolated=options.isolated_mode,
 use_pep517=options.use_pep517,
 user_supplied=True,
+config_settings=getattr(options, "config_settings", None),
 )
 requirements.append(req_to_add)

@@ -376,6 +424,7 @@ class RequirementCommand(IndexGroupCommand):
 user_supplied=True,
 isolated=options.isolated_mode,
 use_pep517=options.use_pep517,
+config_settings=getattr(options, "config_settings", None),
 )
 requirements.append(req_to_add)

@@ -389,6 +438,9 @@ class RequirementCommand(IndexGroupCommand):
 isolated=options.isolated_mode,
 use_pep517=options.use_pep517,
 user_supplied=True,
+config_settings=parsed_req.options.get("config_settings")
+if parsed_req.options
+else None,
 )
 requirements.append(req_to_add)
@@ -3,9 +3,7 @@ import itertools
 import logging
 import sys
 import time
-from typing import IO, Iterator
+from typing import IO, Generator, Optional

-from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR
-
 from pip._internal.utils.compat import WINDOWS
 from pip._internal.utils.logging import get_indentation
@@ -25,7 +23,7 @@ class InteractiveSpinner(SpinnerInterface):
 def __init__(
 self,
 message: str,
-file: IO[str] = None,
+file: Optional[IO[str]] = None,
 spin_chars: str = "-\\|/",
 # Empirically, 8 updates/second looks nice
 min_update_interval_seconds: float = 0.125,
@@ -115,7 +113,7 @@ class RateLimiter:


 @contextlib.contextmanager
-def open_spinner(message: str) -> Iterator[SpinnerInterface]:
+def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
 # Interactive spinner goes directly to sys.stdout rather than being routed
 # through the logging system, but it acts like it has level INFO,
 # i.e. it's only displayed if we're at level INFO or better.
@@ -138,8 +136,12 @@ def open_spinner(message: str) -> Iterator[SpinnerInterface]:
 spinner.finish("done")


+HIDE_CURSOR = "\x1b[?25l"
+SHOW_CURSOR = "\x1b[?25h"
+
+
 @contextlib.contextmanager
-def hidden_cursor(file: IO[str]) -> Iterator[None]:
+def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
 # The Windows terminal does not support the hide/show cursor ANSI codes,
 # even via colorama. So don't even try.
 if WINDOWS:
@@ -3,87 +3,107 @@ Package containing all pip commands
 """

 import importlib
-from collections import OrderedDict, namedtuple
+from collections import namedtuple
 from typing import Any, Dict, Optional

 from pip._internal.cli.base_command import Command

-CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary')
+CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")

-# The ordering matters for help display.
-# Also, even though the module path starts with the same
-# "pip._internal.commands" prefix in each case, we include the full path
-# because it makes testing easier (specifically when modifying commands_dict
-# in test setup / teardown by adding info for a FakeCommand class defined
-# in a test-related module).
-# Finally, we need to pass an iterable of pairs here rather than a dict
-# so that the ordering won't be lost when using Python 2.7.
-commands_dict: Dict[str, CommandInfo] = OrderedDict([
-('install', CommandInfo(
-'pip._internal.commands.install', 'InstallCommand',
-'Install packages.',
-)),
-('download', CommandInfo(
-'pip._internal.commands.download', 'DownloadCommand',
-'Download packages.',
-)),
-('uninstall', CommandInfo(
-'pip._internal.commands.uninstall', 'UninstallCommand',
-'Uninstall packages.',
-)),
-('freeze', CommandInfo(
-'pip._internal.commands.freeze', 'FreezeCommand',
-'Output installed packages in requirements format.',
-)),
-('list', CommandInfo(
-'pip._internal.commands.list', 'ListCommand',
-'List installed packages.',
-)),
-('show', CommandInfo(
-'pip._internal.commands.show', 'ShowCommand',
-'Show information about installed packages.',
-)),
-('check', CommandInfo(
-'pip._internal.commands.check', 'CheckCommand',
-'Verify installed packages have compatible dependencies.',
-)),
-('config', CommandInfo(
-'pip._internal.commands.configuration', 'ConfigurationCommand',
-'Manage local and global configuration.',
-)),
-('search', CommandInfo(
-'pip._internal.commands.search', 'SearchCommand',
-'Search PyPI for packages.',
-)),
-('cache', CommandInfo(
-'pip._internal.commands.cache', 'CacheCommand',
+# This dictionary does a bunch of heavy lifting for help output:
+# - Enables avoiding additional (costly) imports for presenting `--help`.
+# - The ordering matters for help display.
+#
+# Even though the module path starts with the same "pip._internal.commands"
+# prefix, the full path makes testing easier (specifically when modifying
+# `commands_dict` in test setup / teardown).
+commands_dict: Dict[str, CommandInfo] = {
+"install": CommandInfo(
+"pip._internal.commands.install",
+"InstallCommand",
+"Install packages.",
+),
+"download": CommandInfo(
+"pip._internal.commands.download",
+"DownloadCommand",
+"Download packages.",
+),
+"uninstall": CommandInfo(
+"pip._internal.commands.uninstall",
+"UninstallCommand",
+"Uninstall packages.",
+),
+"freeze": CommandInfo(
+"pip._internal.commands.freeze",
+"FreezeCommand",
+"Output installed packages in requirements format.",
+),
+"inspect": CommandInfo(
+"pip._internal.commands.inspect",
+"InspectCommand",
+"Inspect the python environment.",
+),
+"list": CommandInfo(
+"pip._internal.commands.list",
+"ListCommand",
+"List installed packages.",
+),
+"show": CommandInfo(
+"pip._internal.commands.show",
+"ShowCommand",
+"Show information about installed packages.",
+),
+"check": CommandInfo(
+"pip._internal.commands.check",
+"CheckCommand",
+"Verify installed packages have compatible dependencies.",
+),
+"config": CommandInfo(
+"pip._internal.commands.configuration",
+"ConfigurationCommand",
+"Manage local and global configuration.",
+),
+"search": CommandInfo(
+"pip._internal.commands.search",
+"SearchCommand",
+"Search PyPI for packages.",
+),
+"cache": CommandInfo(
+"pip._internal.commands.cache",
+"CacheCommand",
 "Inspect and manage pip's wheel cache.",
-)),
-('index', CommandInfo(
-'pip._internal.commands.index', 'IndexCommand',
+),
+"index": CommandInfo(
+"pip._internal.commands.index",
+"IndexCommand",
 "Inspect information available from package indexes.",
-)),
-('wheel', CommandInfo(
-'pip._internal.commands.wheel', 'WheelCommand',
-'Build wheels from your requirements.',
-)),
-('hash', CommandInfo(
-'pip._internal.commands.hash', 'HashCommand',
-'Compute hashes of package archives.',
-)),
-('completion', CommandInfo(
-'pip._internal.commands.completion', 'CompletionCommand',
-'A helper command used for command completion.',
-)),
-('debug', CommandInfo(
-'pip._internal.commands.debug', 'DebugCommand',
-'Show information useful for debugging.',
-)),
-('help', CommandInfo(
-'pip._internal.commands.help', 'HelpCommand',
-'Show help for commands.',
-)),
-])
+),
+"wheel": CommandInfo(
+"pip._internal.commands.wheel",
+"WheelCommand",
+"Build wheels from your requirements.",
+),
+"hash": CommandInfo(
+"pip._internal.commands.hash",
+"HashCommand",
+"Compute hashes of package archives.",
+),
+"completion": CommandInfo(
+"pip._internal.commands.completion",
+"CompletionCommand",
+"A helper command used for command completion.",
+),
+"debug": CommandInfo(
+"pip._internal.commands.debug",
+"DebugCommand",
+"Show information useful for debugging.",
+),
+"help": CommandInfo(
+"pip._internal.commands.help",
+"HelpCommand",
+"Show help for commands.",
+),
+}


 def create_command(name: str, **kwargs: Any) -> Command:
@@ -37,19 +37,18 @@ class CacheCommand(Command):
 """

 def add_options(self) -> None:

 self.cmd_opts.add_option(
-'--format',
-action='store',
-dest='list_format',
+"--format",
+action="store",
+dest="list_format",
 default="human",
-choices=('human', 'abspath'),
-help="Select the output format among: human (default) or abspath"
+choices=("human", "abspath"),
+help="Select the output format among: human (default) or abspath",
 )

 self.parser.insert_option_group(0, self.cmd_opts)

-def run(self, options: Values, args: List[Any]) -> int:
+def run(self, options: Values, args: List[str]) -> int:
 handlers = {
 "dir": self.get_cache_dir,
 "info": self.get_cache_info,
@@ -59,8 +58,7 @@ class CacheCommand(Command):
 }

 if not options.cache_dir:
-logger.error("pip cache commands can not "
-"function since cache is disabled.")
+logger.error("pip cache commands can not function since cache is disabled.")
 return ERROR

 # Determine action
@@ -84,69 +82,73 @@ class CacheCommand(Command):

 def get_cache_dir(self, options: Values, args: List[Any]) -> None:
 if args:
-raise CommandError('Too many arguments')
+raise CommandError("Too many arguments")

 logger.info(options.cache_dir)

 def get_cache_info(self, options: Values, args: List[Any]) -> None:
 if args:
-raise CommandError('Too many arguments')
+raise CommandError("Too many arguments")

 num_http_files = len(self._find_http_files(options))
-num_packages = len(self._find_wheels(options, '*'))
+num_packages = len(self._find_wheels(options, "*"))

-http_cache_location = self._cache_dir(options, 'http')
-wheels_cache_location = self._cache_dir(options, 'wheels')
+http_cache_location = self._cache_dir(options, "http")
+wheels_cache_location = self._cache_dir(options, "wheels")
 http_cache_size = filesystem.format_directory_size(http_cache_location)
-wheels_cache_size = filesystem.format_directory_size(
-wheels_cache_location
-)
+wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)

-message = textwrap.dedent("""
+message = (
+textwrap.dedent(
+"""
 Package index page cache location: {http_cache_location}
 Package index page cache size: {http_cache_size}
 Number of HTTP files: {num_http_files}
-Wheels location: {wheels_cache_location}
-Wheels size: {wheels_cache_size}
-Number of wheels: {package_count}
-""").format(
+Locally built wheels location: {wheels_cache_location}
+Locally built wheels size: {wheels_cache_size}
+Number of locally built wheels: {package_count}
+"""
+)
+.format(
 http_cache_location=http_cache_location,
 http_cache_size=http_cache_size,
 num_http_files=num_http_files,
 wheels_cache_location=wheels_cache_location,
 package_count=num_packages,
 wheels_cache_size=wheels_cache_size,
-).strip()
+)
+.strip()
+)

 logger.info(message)

 def list_cache_items(self, options: Values, args: List[Any]) -> None:
 if len(args) > 1:
-raise CommandError('Too many arguments')
+raise CommandError("Too many arguments")

 if args:
 pattern = args[0]
 else:
-pattern = '*'
+pattern = "*"

 files = self._find_wheels(options, pattern)
-if options.list_format == 'human':
+if options.list_format == "human":
 self.format_for_human(files)
 else:
 self.format_for_abspath(files)

 def format_for_human(self, files: List[str]) -> None:
 if not files:
-logger.info('Nothing cached.')
+logger.info("No locally built wheels cached.")
 return

 results = []
 for filename in files:
 wheel = os.path.basename(filename)
 size = filesystem.format_file_size(filename)
-results.append(f' - {wheel} ({size})')
-logger.info('Cache contents:\n')
-logger.info('\n'.join(sorted(results)))
+results.append(f" - {wheel} ({size})")
+logger.info("Cache contents:\n")
+logger.info("\n".join(sorted(results)))

 def format_for_abspath(self, files: List[str]) -> None:
 if not files:
@@ -156,23 +158,27 @@ class CacheCommand(Command):
 for filename in files:
 results.append(filename)

-logger.info('\n'.join(sorted(results)))
+logger.info("\n".join(sorted(results)))

 def remove_cache_items(self, options: Values, args: List[Any]) -> None:
 if len(args) > 1:
-raise CommandError('Too many arguments')
+raise CommandError("Too many arguments")

 if not args:
-raise CommandError('Please provide a pattern')
+raise CommandError("Please provide a pattern")

 files = self._find_wheels(options, args[0])

+no_matching_msg = "No matching packages"
+if args[0] == "*":
 # Only fetch http files if no specific pattern given
-if args[0] == '*':
 files += self._find_http_files(options)
+else:
+# Add the pattern to the log message
+no_matching_msg += ' for pattern "{}"'.format(args[0])

 if not files:
-raise CommandError('No matching packages')
+logger.warning(no_matching_msg)

 for filename in files:
 os.unlink(filename)
@@ -181,19 +187,19 @@ class CacheCommand(Command):

 def purge_cache(self, options: Values, args: List[Any]) -> None:
 if args:
-raise CommandError('Too many arguments')
+raise CommandError("Too many arguments")

-return self.remove_cache_items(options, ['*'])
+return self.remove_cache_items(options, ["*"])

 def _cache_dir(self, options: Values, subdir: str) -> str:
 return os.path.join(options.cache_dir, subdir)

 def _find_http_files(self, options: Values) -> List[str]:
-http_dir = self._cache_dir(options, 'http')
-return filesystem.find_files(http_dir, '*')
+http_dir = self._cache_dir(options, "http")
+return filesystem.find_files(http_dir, "*")

 def _find_wheels(self, options: Values, pattern: str) -> List[str]:
-wheel_dir = self._cache_dir(options, 'wheels')
+wheel_dir = self._cache_dir(options, "wheels")

 # The wheel filename format, as specified in PEP 427, is:
 # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
@@ -1,6 +1,6 @@
 import logging
 from optparse import Values
-from typing import Any, List
+from typing import List

 from pip._internal.cli.base_command import Command
 from pip._internal.cli.status_codes import ERROR, SUCCESS
@@ -19,8 +19,7 @@ class CheckCommand(Command):
 usage = """
 %prog [options]"""

-def run(self, options: Values, args: List[Any]) -> int:
-
+def run(self, options: Values, args: List[str]) -> int:
 package_set, parsing_probs = create_package_set_from_installed()
 missing, conflicting = check_package_set(package_set)

@@ -29,7 +28,9 @@ class CheckCommand(Command):
 for dependency in missing[project_name]:
 write_output(
 "%s %s requires %s, which is not installed.",
-project_name, version, dependency[0],
+project_name,
+version,
+dependency[0],
 )

 for project_name in conflicting:
@@ -37,7 +38,11 @@ class CheckCommand(Command):
 for dep_name, dep_version, req in conflicting[project_name]:
 write_output(
 "%s %s has requirement %s, but you have %s %s.",
-project_name, version, req, dep_name, dep_version,
+project_name,
+version,
+req,
+dep_name,
+dep_version,
 )

 if missing or conflicting or parsing_probs:
@@ -12,7 +12,7 @@ BASE_COMPLETION = """
 """

 COMPLETION_SCRIPTS = {
-'bash': """
+"bash": """
 _pip_completion()
 {{
 COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
@@ -21,7 +21,7 @@ COMPLETION_SCRIPTS = {
 }}
 complete -o default -F _pip_completion {prog}
 """,
-'zsh': """
+"zsh": """
 function _pip_completion {{
 local words cword
 read -Ac words
@@ -32,7 +32,7 @@ COMPLETION_SCRIPTS = {
 }}
 compctl -K _pip_completion {prog}
 """,
-'fish': """
+"fish": """
 function __fish_complete_pip
 set -lx COMP_WORDS (commandline -o) ""
 set -lx COMP_CWORD ( \\
@@ -43,6 +43,28 @@ COMPLETION_SCRIPTS = {
 end
 complete -fa "(__fish_complete_pip)" -c {prog}
 """,
+"powershell": """
+if ((Test-Path Function:\\TabExpansion) -and -not `
+(Test-Path Function:\\_pip_completeBackup)) {{
+Rename-Item Function:\\TabExpansion _pip_completeBackup
+}}
+function TabExpansion($line, $lastWord) {{
+$lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
+if ($lastBlock.StartsWith("{prog} ")) {{
+$Env:COMP_WORDS=$lastBlock
+$Env:COMP_CWORD=$lastBlock.Split().Length - 1
+$Env:PIP_AUTO_COMPLETE=1
+(& {prog}).Split()
+Remove-Item Env:COMP_WORDS
+Remove-Item Env:COMP_CWORD
+Remove-Item Env:PIP_AUTO_COMPLETE
+}}
+elseif (Test-Path Function:\\_pip_completeBackup) {{
+# Fall back on existing tab expansion
+_pip_completeBackup $line $lastWord
+}}
+}}
+""",
 }


@@ -53,39 +75,52 @@ class CompletionCommand(Command):

 def add_options(self) -> None:
 self.cmd_opts.add_option(
-'--bash', '-b',
-action='store_const',
-const='bash',
-dest='shell',
-help='Emit completion code for bash')
+"--bash",
+"-b",
+action="store_const",
+const="bash",
+dest="shell",
+help="Emit completion code for bash",
+)
 self.cmd_opts.add_option(
-'--zsh', '-z',
-action='store_const',
-const='zsh',
-dest='shell',
-help='Emit completion code for zsh')
+"--zsh",
+"-z",
+action="store_const",
+const="zsh",
+dest="shell",
+help="Emit completion code for zsh",
+)
 self.cmd_opts.add_option(
-'--fish', '-f',
-action='store_const',
-const='fish',
-dest='shell',
-help='Emit completion code for fish')
+"--fish",
+"-f",
+action="store_const",
+const="fish",
+dest="shell",
+help="Emit completion code for fish",
+)
+self.cmd_opts.add_option(
+"--powershell",
+"-p",
+action="store_const",
+const="powershell",
+dest="shell",
+help="Emit completion code for powershell",
+)

 self.parser.insert_option_group(0, self.cmd_opts)

 def run(self, options: Values, args: List[str]) -> int:
 """Prints the completion code of the given shell"""
 shells = COMPLETION_SCRIPTS.keys()
-shell_options = ['--' + shell for shell in sorted(shells)]
+shell_options = ["--" + shell for shell in sorted(shells)]
 if options.shell in shells:
 script = textwrap.dedent(
-COMPLETION_SCRIPTS.get(options.shell, '').format(
-prog=get_prog())
+COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
 )
 print(BASE_COMPLETION.format(script=script, shell=options.shell))
 return SUCCESS
 else:
 sys.stderr.write(
-'ERROR: You must pass {}\n' .format(' or '.join(shell_options))
+"ERROR: You must pass {}\n".format(" or ".join(shell_options))
 )
 return SUCCESS
@@ -27,14 +27,20 @@ class ConfigurationCommand(Command):

 - list: List the active configuration (or from the file specified)
 - edit: Edit the configuration file in an editor
-- get: Get the value associated with name
-- set: Set the name=value
-- unset: Unset the value associated with name
+- get: Get the value associated with command.option
+- set: Set the command.option=value
+- unset: Unset the value associated with command.option
 - debug: List the configuration files and values defined under them

+Configuration keys should be dot separated command and option name,
+with the special prefix "global" affecting any command. For example,
+"pip config set global.index-url https://example.org/" would configure
+the index url for all commands, but "pip config set download.timeout 10"
+would configure a 10 second timeout only for "pip download" commands.
+
 If none of --user, --global and --site are passed, a virtual
 environment configuration file is used if one is active and the file
-exists. Otherwise, all modifications happen on the to the user file by
+exists. Otherwise, all modifications happen to the user file by
 default.
 """

@@ -43,46 +49,46 @@ class ConfigurationCommand(Command):
 %prog [<file-option>] list
 %prog [<file-option>] [--editor <editor-path>] edit

-%prog [<file-option>] get name
-%prog [<file-option>] set name value
-%prog [<file-option>] unset name
+%prog [<file-option>] get command.option
+%prog [<file-option>] set command.option value
+%prog [<file-option>] unset command.option
 %prog [<file-option>] debug
 """

 def add_options(self) -> None:
 self.cmd_opts.add_option(
-'--editor',
-dest='editor',
-action='store',
+"--editor",
+dest="editor",
+action="store",
 default=None,
 help=(
-'Editor to use to edit the file. Uses VISUAL or EDITOR '
-'environment variables if not provided.'
-)
+"Editor to use to edit the file. Uses VISUAL or EDITOR "
+"environment variables if not provided."
+),
 )

 self.cmd_opts.add_option(
-'--global',
-dest='global_file',
-action='store_true',
+"--global",
+dest="global_file",
+action="store_true",
 default=False,
-help='Use the system-wide configuration file only'
+help="Use the system-wide configuration file only",
 )

 self.cmd_opts.add_option(
-'--user',
-dest='user_file',
-action='store_true',
+"--user",
+dest="user_file",
+action="store_true",
 default=False,
-help='Use the user configuration file only'
+help="Use the user configuration file only",
 )

 self.cmd_opts.add_option(
-'--site',
-dest='site_file',
-action='store_true',
+"--site",
+dest="site_file",
+action="store_true",
 default=False,
-help='Use the current environment configuration file only'
+help="Use the current environment configuration file only",
 )

 self.parser.insert_option_group(0, self.cmd_opts)
@@ -133,11 +139,15 @@ class ConfigurationCommand(Command):
 return SUCCESS

 def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
-file_options = [key for key, value in (
+file_options = [
+key
+for key, value in (
 (kinds.USER, options.user_file),
 (kinds.GLOBAL, options.global_file),
 (kinds.SITE, options.site_file),
-) if value]
+)
+if value
+]

 if not file_options:
 if not need_value:
@@ -194,24 +204,22 @@ class ConfigurationCommand(Command):
 for fname in files:
 with indent_log():
 file_exists = os.path.exists(fname)
-write_output("%s, exists: %r",
-fname, file_exists)
+write_output("%s, exists: %r", fname, file_exists)
 if file_exists:
 self.print_config_file_values(variant)

 def print_config_file_values(self, variant: Kind) -> None:
 """Get key-value pairs from the file of a variant"""
-for name, value in self.configuration.\
-get_values_in_config(variant).items():
+for name, value in self.configuration.get_values_in_config(variant).items():
 with indent_log():
 write_output("%s: %s", name, value)

 def print_env_var_values(self) -> None:
 """Get key-values pairs present as environment variables"""
-write_output("%s:", 'env_var')
+write_output("%s:", "env_var")
 with indent_log():
 for key, value in sorted(self.configuration.get_environ_vars()):
-env_var = f'PIP_{key.upper()}'
+env_var = f"PIP_{key.upper()}"
 write_output("%s=%r", env_var, value)

 def open_in_editor(self, options: Values, args: List[str]) -> None:
@@ -220,21 +228,29 @@ class ConfigurationCommand(Command):
 fname = self.configuration.get_file_to_edit()
 if fname is None:
 raise PipError("Could not determine appropriate file.")
+elif '"' in fname:
+# This shouldn't happen, unless we see a username like that.
+# If that happens, we'd appreciate a pull request fixing this.
+raise PipError(
+f'Can not open an editor for a file name containing "\n{fname}'
+)

 try:
-subprocess.check_call([editor, fname])
+subprocess.check_call(f'{editor} "{fname}"', shell=True)
+except FileNotFoundError as e:
+if not e.filename:
+e.filename = editor
+raise
 except subprocess.CalledProcessError as e:
 raise PipError(
-"Editor Subprocess exited with exit code {}"
-.format(e.returncode)
+"Editor Subprocess exited with exit code {}".format(e.returncode)
 )

 def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
-"""Helper to make sure the command got the right number of arguments
-"""
+"""Helper to make sure the command got the right number of arguments"""
 if len(args) != n:
 msg = (
-'Got unexpected number of arguments, expected {}. '
+"Got unexpected number of arguments, expected {}. "
 '(example: "{} config {}")'
 ).format(n, get_prog(), example)
 raise PipError(msg)
@@ -1,3 +1,4 @@
+import importlib.resources
 import locale
 import logging
 import os
@@ -10,7 +11,6 @@ import pip._vendor
 from pip._vendor.certifi import where
 from pip._vendor.packaging.version import parse as parse_version

-from pip import __file__ as pip_location
 from pip._internal.cli import cmdoptions
 from pip._internal.cli.base_command import Command
 from pip._internal.cli.cmdoptions import make_target_python
@@ -24,55 +24,46 @@ logger = logging.getLogger(__name__)


 def show_value(name: str, value: Any) -> None:
-logger.info('%s: %s', name, value)
+logger.info("%s: %s", name, value)


 def show_sys_implementation() -> None:
-logger.info('sys.implementation:')
+logger.info("sys.implementation:")
 implementation_name = sys.implementation.name
 with indent_log():
-show_value('name', implementation_name)
+show_value("name", implementation_name)


 def create_vendor_txt_map() -> Dict[str, str]:
-vendor_txt_path = os.path.join(
-os.path.dirname(pip_location),
-'_vendor',
-'vendor.txt'
-)
-
-with open(vendor_txt_path) as f:
+with importlib.resources.open_text("pip._vendor", "vendor.txt") as f:
 # Purge non version specifying lines.
 # Also, remove any space prefix or suffixes (including comments).
-lines = [line.strip().split(' ', 1)[0]
-for line in f.readlines() if '==' in line]
+lines = [
+line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line
+]

 # Transform into "module" -> version dict.
-return dict(line.split('==', 1) for line in lines) # type: ignore
+return dict(line.split("==", 1) for line in lines)


 def get_module_from_module_name(module_name: str) -> ModuleType:
 # Module name can be uppercase in vendor.txt for some reason...
-module_name = module_name.lower()
+module_name = module_name.lower().replace("-", "_")
 # PATCH: setuptools is actually only pkg_resources.
-if module_name == 'setuptools':
-module_name = 'pkg_resources'
+if module_name == "setuptools":
+module_name = "pkg_resources"

-__import__(
-f'pip._vendor.{module_name}',
-globals(),
-locals(),
-level=0
-)
+__import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
 return getattr(pip._vendor, module_name)


 def get_vendor_version_from_module(module_name: str) -> Optional[str]:
 module = get_module_from_module_name(module_name)
-version = getattr(module, '__version__', None)
+version = getattr(module, "__version__", None)

 if not version:
 # Try to find version in debundled module info.
+assert module.__file__ is not None
 env = get_environment([os.path.dirname(module.__file__)])
 dist = env.get_distribution(module_name)
 if dist:
@@ -86,20 +77,24 @@ def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
 a conflict or if the actual version could not be imported.
 """
 for module_name, expected_version in vendor_txt_versions.items():
-extra_message = ''
+extra_message = ""
 actual_version = get_vendor_version_from_module(module_name)
 if not actual_version:
-extra_message = ' (Unable to locate actual module version, using'\
-' vendor.txt specified version)'
+extra_message = (
+" (Unable to locate actual module version, using"
+" vendor.txt specified version)"
+)
 actual_version = expected_version
 elif parse_version(actual_version) != parse_version(expected_version):
-extra_message = ' (CONFLICT: vendor.txt suggests version should'\
-' be {})'.format(expected_version)
-logger.info('%s==%s%s', module_name, actual_version, extra_message)
+extra_message = (
+" (CONFLICT: vendor.txt suggests version should"
+" be {})".format(expected_version)
+)
+logger.info("%s==%s%s", module_name, actual_version, extra_message)


 def show_vendor_versions() -> None:
-logger.info('vendored library versions:')
+logger.info("vendored library versions:")

 vendor_txt_versions = create_vendor_txt_map()
 with indent_log():
@@ -114,11 +109,11 @@ def show_tags(options: Values) -> None:

 # Display the target options that were explicitly provided.
 formatted_target = target_python.format_given()
-suffix = ''
+suffix = ""
 if formatted_target:
-suffix = f' (target: {formatted_target})'
+suffix = f" (target: {formatted_target})"

-msg = 'Compatible tags: {}{}'.format(len(tags), suffix)
+msg = "Compatible tags: {}{}".format(len(tags), suffix)
 logger.info(msg)

 if options.verbose < 1 and len(tags) > tag_limit:
@@ -133,8 +128,7 @@ def show_tags(options: Values) -> None:

 if tags_limited:
 msg = (
-'...\n'
-'[First {tag_limit} tags shown. Pass --verbose to show all.]'
+"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
 ).format(tag_limit=tag_limit)
 logger.info(msg)

@@ -142,20 +136,20 @@ def show_tags(options: Values) -> None:
 def ca_bundle_info(config: Configuration) -> str:
 levels = set()
 for key, _ in config.items():
-levels.add(key.split('.')[0])
+levels.add(key.split(".")[0])

 if not levels:
 return "Not specified"

-levels_that_override_global = ['install', 'wheel', 'download']
+levels_that_override_global = ["install", "wheel", "download"]
 global_overriding_level = [
 level for level in levels if level in levels_that_override_global
 ]
 if not global_overriding_level:
-return 'global'
+return "global"

-if 'global' in levels:
-levels.remove('global')
+if "global" in levels:
+levels.remove("global")
 return ", ".join(levels)


@@ -180,20 +174,21 @@ class DebugCommand(Command):
 "details, since the output and options of this command may "
 "change without notice."
 )
-show_value('pip version', get_pip_version())
-show_value('sys.version', sys.version)
-show_value('sys.executable', sys.executable)
-show_value('sys.getdefaultencoding', sys.getdefaultencoding())
-show_value('sys.getfilesystemencoding', sys.getfilesystemencoding())
+show_value("pip version", get_pip_version())
+show_value("sys.version", sys.version)
+show_value("sys.executable", sys.executable)
+show_value("sys.getdefaultencoding", sys.getdefaultencoding())
+show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
 show_value(
-'locale.getpreferredencoding', locale.getpreferredencoding(),
+"locale.getpreferredencoding",
+locale.getpreferredencoding(),
 )
-show_value('sys.platform', sys.platform)
+show_value("sys.platform", sys.platform)
 show_sys_implementation()

 show_value("'cert' config value", ca_bundle_info(self.parser.config))
-show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE'))
-show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE'))
+show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
+show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
 show_value("pip._vendor.certifi.where()", where())
 show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)
@@ -7,7 +7,8 @@ from pip._internal.cli import cmdoptions
 from pip._internal.cli.cmdoptions import make_target_python
 from pip._internal.cli.req_command import RequirementCommand, with_cleanup
 from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import check_legacy_setup_py_options
 from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
 from pip._internal.utils.temp_dir import TempDirectory

@@ -37,7 +38,6 @@ class DownloadCommand(RequirementCommand):
 def add_options(self) -> None:
 self.cmd_opts.add_option(cmdoptions.constraints())
 self.cmd_opts.add_option(cmdoptions.requirements())
-self.cmd_opts.add_option(cmdoptions.build_dir())
 self.cmd_opts.add_option(cmdoptions.no_deps())
 self.cmd_opts.add_option(cmdoptions.global_options())
 self.cmd_opts.add_option(cmdoptions.no_binary())
@@ -50,14 +50,18 @@ class DownloadCommand(RequirementCommand):
 self.cmd_opts.add_option(cmdoptions.no_build_isolation())
 self.cmd_opts.add_option(cmdoptions.use_pep517())
 self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+self.cmd_opts.add_option(cmdoptions.check_build_deps())
 self.cmd_opts.add_option(cmdoptions.ignore_requires_python())

 self.cmd_opts.add_option(
-'-d', '--dest', '--destination-dir', '--destination-directory',
-dest='download_dir',
-metavar='dir',
+"-d",
+"--dest",
+"--destination-dir",
+"--destination-directory",
+dest="download_dir",
+metavar="dir",
 default=os.curdir,
-help=("Download packages into <dir>."),
+help="Download packages into <dir>.",
 )

 cmdoptions.add_target_python_options(self.cmd_opts)
@@ -72,7 +76,6 @@ class DownloadCommand(RequirementCommand):

 @with_cleanup
 def run(self, options: Values, args: List[str]) -> int:
-
 options.ignore_installed = True
 # editable doesn't really make sense for `pip download`, but the bowels
 # of the RequirementSet code require that property.
@@ -93,7 +96,7 @@ class DownloadCommand(RequirementCommand):
 ignore_requires_python=options.ignore_requires_python,
 )

-req_tracker = self.enter_context(get_requirement_tracker())
+build_tracker = self.enter_context(get_build_tracker())

 directory = TempDirectory(
 delete=not options.no_clean,
@@ -102,15 +105,17 @@ class DownloadCommand(RequirementCommand):
 )

 reqs = self.get_requirements(args, options, finder, session)
+check_legacy_setup_py_options(options, reqs)

 preparer = self.make_requirement_preparer(
 temp_build_dir=directory,
 options=options,
-req_tracker=req_tracker,
+build_tracker=build_tracker,
 session=session,
 finder=finder,
 download_dir=options.download_dir,
 use_user_site=False,
+verbosity=self.verbosity,
 )

 resolver = self.make_resolver(
@@ -118,14 +123,13 @@ class DownloadCommand(RequirementCommand):
 finder=finder,
 options=options,
 ignore_requires_python=options.ignore_requires_python,
+use_pep517=options.use_pep517,
 py_version_info=options.python_version,
 )

 self.trace_basic_info(finder)

-requirement_set = resolver.resolve(
-reqs, check_supported_wheels=True
-)
+requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

 downloaded: List[str] = []
 for req in requirement_set.requirements.values():
@@ -134,6 +138,6 @@ class DownloadCommand(RequirementCommand):
 preparer.save_linked_requirement(req)
 downloaded.append(req.name)
 if downloaded:
-write_output('Successfully downloaded %s', ' '.join(downloaded))
+write_output("Successfully downloaded %s", " ".join(downloaded))

 return SUCCESS
@@ -8,7 +8,7 @@ from pip._internal.cli.status_codes import SUCCESS
 from pip._internal.operations.freeze import freeze
 from pip._internal.utils.compat import stdlib_pkgs

-DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
+DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"}


 class FreezeCommand(Command):
@@ -24,39 +24,52 @@ class FreezeCommand(Command):

 def add_options(self) -> None:
 self.cmd_opts.add_option(
-'-r', '--requirement',
-dest='requirements',
-action='append',
+"-r",
+"--requirement",
+dest="requirements",
+action="append",
 default=[],
-metavar='file',
-help="Use the order in the given requirements file and its "
+metavar="file",
+help=(
+"Use the order in the given requirements file and its "
 "comments when generating output. This option can be "
-"used multiple times.")
+"used multiple times."
+),
+)
 self.cmd_opts.add_option(
-'-l', '--local',
-dest='local',
-action='store_true',
+"-l",
+"--local",
+dest="local",
+action="store_true",
 default=False,
-help='If in a virtualenv that has global access, do not output '
-'globally-installed packages.')
+help=(
+"If in a virtualenv that has global access, do not output "
+"globally-installed packages."
+),
+)
 self.cmd_opts.add_option(
-'--user',
-dest='user',
-action='store_true',
+"--user",
+dest="user",
+action="store_true",
 default=False,
-help='Only output packages installed in user-site.')
+help="Only output packages installed in user-site.",
+)
 self.cmd_opts.add_option(cmdoptions.list_path())
 self.cmd_opts.add_option(
-'--all',
-dest='freeze_all',
-action='store_true',
-help='Do not skip these packages in the output:'
-' {}'.format(', '.join(DEV_PKGS)))
+"--all",
+dest="freeze_all",
+action="store_true",
+help=(
+"Do not skip these packages in the output:"
+" {}".format(", ".join(DEV_PKGS))
+),
+)
 self.cmd_opts.add_option(
-'--exclude-editable',
-dest='exclude_editable',
-action='store_true',
-help='Exclude editable package from output.')
+"--exclude-editable",
+dest="exclude_editable",
+action="store_true",
+help="Exclude editable package from output.",
+)
 self.cmd_opts.add_option(cmdoptions.list_exclude())

 self.parser.insert_option_group(0, self.cmd_opts)
@@ -80,5 +93,5 @@ class FreezeCommand(Command):
 skip=skip,
 exclude_editable=options.exclude_editable,
 ):
-sys.stdout.write(line + '\n')
+sys.stdout.write(line + "\n")
 return SUCCESS
@@ -20,18 +20,21 @@ class HashCommand(Command):
 installs.
 """

-usage = '%prog [options] <file> ...'
+usage = "%prog [options] <file> ..."
 ignore_require_venv = True

 def add_options(self) -> None:
 self.cmd_opts.add_option(
-'-a', '--algorithm',
-dest='algorithm',
+"-a",
+"--algorithm",
+dest="algorithm",
 choices=STRONG_HASHES,
-action='store',
+action="store",
 default=FAVORITE_HASH,
-help='The hash algorithm to use: one of {}'.format(
-', '.join(STRONG_HASHES)))
+help="The hash algorithm to use: one of {}".format(
+", ".join(STRONG_HASHES)
+),
+)
 self.parser.insert_option_group(0, self.cmd_opts)

 def run(self, options: Values, args: List[str]) -> int:
@@ -41,14 +44,15 @@ class HashCommand(Command):

 algorithm = options.algorithm
 for path in args:
-write_output('%s:\n--hash=%s:%s',
-path, algorithm, _hash_of_file(path, algorithm))
+write_output(
+"%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
+)
 return SUCCESS


 def _hash_of_file(path: str, algorithm: str) -> str:
 """Return the hash digest of a file."""
-with open(path, 'rb') as archive:
+with open(path, "rb") as archive:
 hash = hashlib.new(algorithm)
 for chunk in read_chunks(archive):
 hash.update(chunk)
@@ -33,7 +33,7 @@ class HelpCommand(Command):
 if guess:
 msg.append(f'maybe you meant "{guess}"')

-raise CommandError(' - '.join(msg))
+raise CommandError(" - ".join(msg))

 command = create_command(cmd_name)
 command.parser.print_help()
@@ -24,6 +24,7 @@ class IndexCommand(IndexGroupCommand):
 Inspect information available from package indexes.
 """

+ignore_require_venv = True
 usage = """
 %prog versions <package>
 """
@@ -44,7 +45,7 @@ class IndexCommand(IndexGroupCommand):
 self.parser.insert_option_group(0, index_opts)
 self.parser.insert_option_group(0, self.cmd_opts)

-def run(self, options: Values, args: List[Any]) -> int:
+def run(self, options: Values, args: List[str]) -> int:
 handlers = {
 "versions": self.get_available_package_versions,
 }
@@ -101,7 +102,7 @@ class IndexCommand(IndexGroupCommand):

 def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
 if len(args) != 1:
-raise CommandError('You need to specify exactly one argument')
+raise CommandError("You need to specify exactly one argument")

 target_python = cmdoptions.make_target_python(options)
 query = args[0]
@@ -115,25 +116,24 @@ class IndexCommand(IndexGroupCommand):
 )

 versions: Iterable[Union[LegacyVersion, Version]] = (
-candidate.version
-for candidate in finder.find_all_candidates(query)
+candidate.version for candidate in finder.find_all_candidates(query)
 )

 if not options.pre:
 # Remove prereleases
-versions = (version for version in versions
-if not version.is_prerelease)
+versions = (
+version for version in versions if not version.is_prerelease
+)
 versions = set(versions)

 if not versions:
 raise DistributionNotFound(
-'No matching distribution found for {}'.format(query))
+"No matching distribution found for {}".format(query)
+)

-formatted_versions = [str(ver) for ver in sorted(
-versions, reverse=True)]
+formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
 latest = formatted_versions[0]

-write_output('{} ({})'.format(query, latest))
-write_output('Available versions: {}'.format(
-', '.join(formatted_versions)))
+write_output("{} ({})".format(query, latest))
+write_output("Available versions: {}".format(", ".join(formatted_versions)))
 print_dist_installation_info(query, latest)
@@ -1,12 +1,13 @@
 import errno
+import json
 import operator
 import os
 import shutil
 import site
 from optparse import SUPPRESS_HELP, Values
-from typing import Iterable, List, Optional
+from typing import List, Optional

-from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.rich import print_json

 from pip._internal.cache import WheelCache
 from pip._internal.cli import cmdoptions
@@ -20,16 +21,19 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS
 from pip._internal.exceptions import CommandError, InstallationError
 from pip._internal.locations import get_scheme
 from pip._internal.metadata import get_environment
-from pip._internal.models.format_control import FormatControl
+from pip._internal.models.installation_report import InstallationReport
+from pip._internal.operations.build.build_tracker import get_build_tracker
 from pip._internal.operations.check import ConflictDetails, check_install_conflicts
 from pip._internal.req import install_given_reqs
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.req.req_install import (
+InstallRequirement,
+check_legacy_setup_py_options,
+)
 from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.distutils_args import parse_distutils_args
 from pip._internal.utils.filesystem import test_writable_dir
 from pip._internal.utils.logging import getLogger
 from pip._internal.utils.misc import (
+check_externally_managed,
 ensure_dir,
 get_pip_version,
 protect_pip_from_modification_on_windows,
@@ -40,24 +44,11 @@ from pip._internal.utils.virtualenv import (
 running_under_virtualenv,
 virtualenv_no_global,
 )
-from pip._internal.wheel_builder import (
-BinaryAllowedPredicate,
-build,
-should_build_for_install_command,
-)
+from pip._internal.wheel_builder import build, should_build_for_install_command

 logger = getLogger(__name__)


-def get_check_binary_allowed(format_control: FormatControl) -> BinaryAllowedPredicate:
-def check_binary_allowed(req: InstallRequirement) -> bool:
-canonical_name = canonicalize_name(req.name or "")
-allowed_formats = format_control.get_allowed_formats(canonical_name)
-return "binary" in allowed_formats

-return check_binary_allowed


 class InstallCommand(RequirementCommand):
 """
 Install packages from:
@@ -86,95 +77,129 @@ class InstallCommand(RequirementCommand):

 self.cmd_opts.add_option(cmdoptions.editable())
 self.cmd_opts.add_option(
-'-t', '--target',
-dest='target_dir',
-metavar='dir',
+"--dry-run",
+action="store_true",
+dest="dry_run",
+default=False,
+help=(
+"Don't actually install anything, just print what would be. "
+"Can be used in combination with --ignore-installed "
+"to 'resolve' the requirements."
+),
+)
+self.cmd_opts.add_option(
+"-t",
+"--target",
+dest="target_dir",
+metavar="dir",
 default=None,
-help='Install packages into <dir>. '
-'By default this will not replace existing files/folders in '
-'<dir>. Use --upgrade to replace existing packages in <dir> '
-'with new versions.'
+help=(
+"Install packages into <dir>. "
+"By default this will not replace existing files/folders in "
+"<dir>. Use --upgrade to replace existing packages in <dir> "
+"with new versions."
+),
 )
 cmdoptions.add_target_python_options(self.cmd_opts)

 self.cmd_opts.add_option(
-'--user',
-dest='use_user_site',
-action='store_true',
-help="Install to the Python user install directory for your "
+"--user",
+dest="use_user_site",
+action="store_true",
+help=(
+"Install to the Python user install directory for your "
 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
 "Windows. (See the Python documentation for site.USER_BASE "
-"for full details.)")
+"for full details.)"
+),
+)
 self.cmd_opts.add_option(
-'--no-user',
-dest='use_user_site',
-action='store_false',
-help=SUPPRESS_HELP)
+"--no-user",
+dest="use_user_site",
+action="store_false",
+help=SUPPRESS_HELP,
+)
 self.cmd_opts.add_option(
-'--root',
-dest='root_path',
-metavar='dir',
+"--root",
+dest="root_path",
+metavar="dir",
 default=None,
-help="Install everything relative to this alternate root "
-"directory.")
+help="Install everything relative to this alternate root directory.",
+)
 self.cmd_opts.add_option(
-'--prefix',
-dest='prefix_path',
-metavar='dir',
+"--prefix",
+dest="prefix_path",
+metavar="dir",
 default=None,
-help="Installation prefix where lib, bin and other top-level "
-"folders are placed")
-self.cmd_opts.add_option(cmdoptions.build_dir())
+help=(
+"Installation prefix where lib, bin and other top-level "
+"folders are placed. Note that the resulting installation may "
+"contain scripts and other resources which reference the "
+"Python interpreter of pip, and not that of ``--prefix``. "
+"See also the ``--python`` option if the intention is to "
+"install packages into another (possibly pip-free) "
+"environment."
+),
+)

 self.cmd_opts.add_option(cmdoptions.src())

 self.cmd_opts.add_option(
-'-U', '--upgrade',
-dest='upgrade',
-action='store_true',
-help='Upgrade all specified packages to the newest available '
-'version. The handling of dependencies depends on the '
-'upgrade-strategy used.'
+"-U",
+"--upgrade",
+dest="upgrade",
+action="store_true",
+help=(
+"Upgrade all specified packages to the newest available "
+"version. The handling of dependencies depends on the "
+"upgrade-strategy used."
+),
 )

 self.cmd_opts.add_option(
-'--upgrade-strategy',
-dest='upgrade_strategy',
-default='only-if-needed',
-choices=['only-if-needed', 'eager'],
-help='Determines how dependency upgrading should be handled '
-'[default: %default]. '
+"--upgrade-strategy",
+dest="upgrade_strategy",
+default="only-if-needed",
+choices=["only-if-needed", "eager"],
+help=(
+"Determines how dependency upgrading should be handled "
+"[default: %default]. "
 '"eager" - dependencies are upgraded regardless of '
-'whether the currently installed version satisfies the '
-'requirements of the upgraded package(s). '
+"whether the currently installed version satisfies the "
+"requirements of the upgraded package(s). "
 '"only-if-needed" - are upgraded only when they do not '
-'satisfy the requirements of the upgraded package(s).'
+"satisfy the requirements of the upgraded package(s)."
+),
 )

 self.cmd_opts.add_option(
-'--force-reinstall',
-dest='force_reinstall',
-action='store_true',
-help='Reinstall all packages even if they are already '
-'up-to-date.')
+"--force-reinstall",
+dest="force_reinstall",
+action="store_true",
+help="Reinstall all packages even if they are already up-to-date.",
+)

 self.cmd_opts.add_option(
-'-I', '--ignore-installed',
-dest='ignore_installed',
-action='store_true',
-help='Ignore the installed packages, overwriting them. '
-'This can break your system if the existing package '
-'is of a different version or was installed '
-'with a different package manager!'
+"-I",
+"--ignore-installed",
+dest="ignore_installed",
+action="store_true",
+help=(
+"Ignore the installed packages, overwriting them. "
+"This can break your system if the existing package "
+"is of a different version or was installed "
+"with a different package manager!"
+),
 )

 self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
 self.cmd_opts.add_option(cmdoptions.no_build_isolation())
 self.cmd_opts.add_option(cmdoptions.use_pep517())
 self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+self.cmd_opts.add_option(cmdoptions.check_build_deps())
+self.cmd_opts.add_option(cmdoptions.override_externally_managed())

-self.cmd_opts.add_option(cmdoptions.install_options())
+self.cmd_opts.add_option(cmdoptions.config_settings())
 self.cmd_opts.add_option(cmdoptions.global_options())

 self.cmd_opts.add_option(
@@ -206,12 +231,12 @@ class InstallCommand(RequirementCommand):
 default=True,
 help="Do not warn about broken dependencies",
 )

 self.cmd_opts.add_option(cmdoptions.no_binary())
 self.cmd_opts.add_option(cmdoptions.only_binary())
 self.cmd_opts.add_option(cmdoptions.prefer_binary())
 self.cmd_opts.add_option(cmdoptions.require_hashes())
 self.cmd_opts.add_option(cmdoptions.progress_bar())
+self.cmd_opts.add_option(cmdoptions.root_user_action())

 index_opts = cmdoptions.make_option_group(
 cmdoptions.index_group,
@@ -221,20 +246,50 @@ class InstallCommand(RequirementCommand):
 self.parser.insert_option_group(0, index_opts)
 self.parser.insert_option_group(0, self.cmd_opts)

+self.cmd_opts.add_option(
+"--report",
+dest="json_report_file",
+metavar="file",
+default=None,
+help=(
+"Generate a JSON file describing what pip did to install "
+"the provided requirements. "
+"Can be used in combination with --dry-run and --ignore-installed "
+"to 'resolve' the requirements. "
+"When - is used as file name it writes to stdout. "
+"When writing to stdout, please combine with the --quiet option "
+"to avoid mixing pip logging output with JSON output."
+),
+)

 @with_cleanup
 def run(self, options: Values, args: List[str]) -> int:
 if options.use_user_site and options.target_dir is not None:
 raise CommandError("Can not combine '--user' and '--target'")

-cmdoptions.check_install_build_global(options)
+# Check whether the environment we're installing into is externally
+# managed, as specified in PEP 668. Specifying --root, --target, or
+# --prefix disables the check, since there's no reliable way to locate
+# the EXTERNALLY-MANAGED file for those cases. An exception is also
+# made specifically for "--dry-run --report" for convenience.
+installing_into_current_environment = (
+not (options.dry_run and options.json_report_file)
+and options.root_path is None
+and options.target_dir is None
+and options.prefix_path is None
+)
+if (
+installing_into_current_environment
+and not options.override_externally_managed
+):
+check_externally_managed()

 upgrade_strategy = "to-satisfy-only"
 if options.upgrade:
 upgrade_strategy = options.upgrade_strategy

 cmdoptions.check_dist_restriction(options, check_target=True)

-install_options = options.install_options or []

 logger.verbose("Using %s", get_pip_version())
 options.use_user_site = decide_user_install(
 options.use_user_site,
@@ -249,11 +304,14 @@ class InstallCommand(RequirementCommand):
 if options.target_dir:
 options.ignore_installed = True
 options.target_dir = os.path.abspath(options.target_dir)
-if (os.path.exists(options.target_dir) and not
-os.path.isdir(options.target_dir)):
+if (
+# fmt: off
+os.path.exists(options.target_dir) and
+not os.path.isdir(options.target_dir)
+# fmt: on
+):
 raise CommandError(
-"Target path exists but is not a directory, will not "
-"continue."
+"Target path exists but is not a directory, will not continue."
 )

 # Create a target directory for using with the target option
@@ -272,9 +330,7 @@ class InstallCommand(RequirementCommand):
 target_python=target_python,
 ignore_requires_python=options.ignore_requires_python,
 )
-wheel_cache = WheelCache(options.cache_dir, options.format_control)
+build_tracker = self.enter_context(get_build_tracker())

-req_tracker = self.enter_context(get_requirement_tracker())

 directory = TempDirectory(
 delete=not options.no_clean,
@@ -284,18 +340,24 @@ class InstallCommand(RequirementCommand):

 try:
 reqs = self.get_requirements(args, options, finder, session)
+check_legacy_setup_py_options(options, reqs)

-reject_location_related_install_options(
-reqs, options.install_options
-)
+wheel_cache = WheelCache(options.cache_dir)
+# Only when installing is it permitted to use PEP 660.
+# In other circumstances (pip wheel, pip download) we generate
+# regular (i.e. non editable) metadata and wheels.
+for req in reqs:
+req.permit_editable_wheels = True

 preparer = self.make_requirement_preparer(
 temp_build_dir=directory,
 options=options,
-req_tracker=req_tracker,
+build_tracker=build_tracker,
 session=session,
 finder=finder,
 use_user_site=options.use_user_site,
+verbosity=self.verbosity,
 )
 resolver = self.make_resolver(
 preparer=preparer,
@@ -316,6 +378,26 @@ class InstallCommand(RequirementCommand):
 reqs, check_supported_wheels=not options.target_dir
 )

+if options.json_report_file:
+report = InstallationReport(requirement_set.requirements_to_install)
+if options.json_report_file == "-":
+print_json(data=report.to_dict())
+else:
+with open(options.json_report_file, "w", encoding="utf-8") as f:
+json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)

+if options.dry_run:
+would_install_items = sorted(
+(r.metadata["name"], r.metadata["version"])
+for r in requirement_set.requirements_to_install
+)
+if would_install_items:
+write_output(
+"Would install %s",
+" ".join("-".join(item) for item in would_install_items),
+)
+return SUCCESS

 try:
 pip_req = requirement_set.get_requirement("pip")
 except KeyError:
@@ -324,19 +406,12 @@ class InstallCommand(RequirementCommand):
 # If we're not replacing an already installed pip,
 # we're not modifying it.
 modifying_pip = pip_req.satisfied_by is None
-protect_pip_from_modification_on_windows(
-modifying_pip=modifying_pip
-)
+protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)

-check_binary_allowed = get_check_binary_allowed(
-finder.format_control
-)

 reqs_to_build = [
-r for r in requirement_set.requirements.values()
-if should_build_for_install_command(
-r, check_binary_allowed
-)
+r
+for r in requirement_set.requirements.values()
+if should_build_for_install_command(r)
 ]

 _, build_failures = build(
@@ -344,39 +419,23 @@ class InstallCommand(RequirementCommand):
 wheel_cache=wheel_cache,
 verify=True,
 build_options=[],
-global_options=[],
+global_options=global_options,
 )

-# If we're using PEP 517, we cannot do a direct install
-# so we fail here.
-pep517_build_failure_names: List[str] = [
-r.name # type: ignore
-for r in build_failures if r.use_pep517
-]
-if pep517_build_failure_names:
+if build_failures:
 raise InstallationError(
-"Could not build wheels for {} which use"
-" PEP 517 and cannot be installed directly".format(
-", ".join(pep517_build_failure_names)
+"Could not build wheels for {}, which is required to "
+"install pyproject.toml-based projects".format(
+", ".join(r.name for r in build_failures) # type: ignore
 )
 )

-# For now, we just warn about failures building legacy
-# requirements, as we'll fall through to a direct
-# install for those.
-for r in build_failures:
-if not r.use_pep517:
-r.legacy_install_reason = 8368
+to_install = resolver.get_installation_order(requirement_set)

-to_install = resolver.get_installation_order(
-requirement_set
-)

 # Check for conflicts in the package set we're installing.
 conflicts: Optional[ConflictDetails] = None
 should_warn_about_conflicts = (
-not options.ignore_dependencies and
-options.warn_about_conflicts
+not options.ignore_dependencies and options.warn_about_conflicts
 )
 if should_warn_about_conflicts:
 conflicts = self._determine_conflicts(to_install)
@@ -389,7 +448,6 @@ class InstallCommand(RequirementCommand):

 installed = install_given_reqs(
 to_install,
-install_options,
 global_options,
 root=options.root_path,
 home=target_temp_dir_path,
@@ -408,7 +466,7 @@ class InstallCommand(RequirementCommand):
 )
 env = get_environment(lib_locations)

-installed.sort(key=operator.attrgetter('name'))
+installed.sort(key=operator.attrgetter("name"))
 items = []
 for result in installed:
 item = result.name
@@ -426,16 +484,19 @@ class InstallCommand(RequirementCommand):
 resolver_variant=self.determine_resolver_variant(options),
 )

-installed_desc = ' '.join(items)
+installed_desc = " ".join(items)
 if installed_desc:
 write_output(
-'Successfully installed %s', installed_desc,
+"Successfully installed %s",
+installed_desc,
 )
 except OSError as error:
-show_traceback = (self.verbosity >= 1)
+show_traceback = self.verbosity >= 1

 message = create_os_error_message(
-error, show_traceback, options.use_user_site,
+error,
+show_traceback,
+options.use_user_site,
 )
 logger.error(message, exc_info=show_traceback) # noqa

@@ -446,7 +507,7 @@ class InstallCommand(RequirementCommand):
 self._handle_target_dir(
 options.target_dir, target_temp_dir, options.upgrade
 )
+if options.root_user_action == "warn":
 warn_if_run_as_root()
 return SUCCESS

@@ -461,7 +522,7 @@ class InstallCommand(RequirementCommand):

 # Checking both purelib and platlib directories for installed
 # packages to be moved to target directory
-scheme = get_scheme('', home=target_temp_dir.path)
+scheme = get_scheme("", home=target_temp_dir.path)
 purelib_dir = scheme.purelib
 platlib_dir = scheme.platlib
 data_dir = scheme.data
@@ -483,18 +544,18 @@ class InstallCommand(RequirementCommand):
 if os.path.exists(target_item_dir):
 if not upgrade:
 logger.warning(
-'Target directory %s already exists. Specify '
-'--upgrade to force replacement.',
-target_item_dir
+"Target directory %s already exists. Specify "
+"--upgrade to force replacement.",
+target_item_dir,
 )
 continue
 if os.path.islink(target_item_dir):
 logger.warning(
-'Target directory %s already exists and is '
-'a link. pip will not automatically replace '
-'links, please remove if replacement is '
-'desired.',
-target_item_dir
+"Target directory %s already exists and is "
+"a link. pip will not automatically replace "
+"links, please remove if replacement is "
+"desired.",
+target_item_dir,
 )
 continue
 if os.path.isdir(target_item_dir):
@@ -502,10 +563,7 @@ class InstallCommand(RequirementCommand):
 else:
 os.remove(target_item_dir)

-shutil.move(
-os.path.join(lib_dir, item),
-target_item_dir
-)
+shutil.move(os.path.join(lib_dir, item), target_item_dir)

 def _determine_conflicts(
 self, to_install: List[InstallRequirement]
@@ -567,7 +625,7 @@ class InstallCommand(RequirementCommand):
 requirement=req,
 dep_name=dep_name,
 dep_version=dep_version,
-you=("you" if resolver_variant == "2020-resolver" else "you'll")
+you=("you" if resolver_variant == "2020-resolver" else "you'll"),
 )
 parts.append(message)

@@ -579,10 +637,10 @@ def get_lib_location_guesses(
 home: Optional[str] = None,
 root: Optional[str] = None,
 isolated: bool = False,
-prefix: Optional[str] = None
+prefix: Optional[str] = None,
 ) -> List[str]:
 scheme = get_scheme(
-'',
+"",
 user=user,
 home=home,
 root=root,
@@ -594,8 +652,8 @@ def get_lib_location_guesses(

 def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
 return all(
-test_writable_dir(d) for d in set(
-get_lib_location_guesses(root=root, isolated=isolated))
+test_writable_dir(d)
+for d in set(get_lib_location_guesses(root=root, isolated=isolated))
 )

@@ -653,53 +711,13 @@ def decide_user_install(
 logger.debug("Non-user install because site-packages writeable")
 return False

-logger.info("Defaulting to user installation because normal site-packages "
-"is not writeable")
+logger.info(
+"Defaulting to user installation because normal site-packages "
+"is not writeable"
+)
 return True


-def reject_location_related_install_options(
-requirements: List[InstallRequirement], options: Optional[List[str]]
-) -> None:
-"""If any location-changing --install-option arguments were passed for
-requirements or on the command-line, then show a deprecation warning.
-"""
-def format_options(option_names: Iterable[str]) -> List[str]:
-return ["--{}".format(name.replace("_", "-")) for name in option_names]

-offenders = []

-for requirement in requirements:
-install_options = requirement.install_options
-location_options = parse_distutils_args(install_options)
-if location_options:
-offenders.append(
-"{!r} from {}".format(
-format_options(location_options.keys()), requirement
-)
-)

-if options:
-location_options = parse_distutils_args(options)
-if location_options:
-offenders.append(
-"{!r} from command line".format(
-format_options(location_options.keys())
-)
-)

-if not offenders:
-return

-raise CommandError(
-"Location-changing options found in --install-option: {}."
-" This is unsupported, use pip-level options like --user,"
-" --prefix, --root, and --target instead.".format(
-"; ".join(offenders)
-)
-)


 def create_os_error_message(
 error: OSError, show_traceback: bool, using_user_site: bool
 ) -> str:
@@ -727,18 +745,25 @@ def create_os_error_message(
 permissions_part = "Check the permissions"

 if not running_under_virtualenv() and not using_user_site:
-parts.extend([
-user_option_part, " or ",
+parts.extend(
+[
+user_option_part,
+" or ",
 permissions_part.lower(),
-])
+]
+)
 else:
 parts.append(permissions_part)
 parts.append(".\n")

 # Suggest the user to enable Long Paths if path length is
 # more than 260
-if (WINDOWS and error.errno == errno.ENOENT and error.filename and
-len(error.filename) > 260):
+if (
+WINDOWS
+and error.errno == errno.ENOENT
+and error.filename
+and len(error.filename) > 260
+):
 parts.append(
 "HINT: This error might have occurred since "
 "this system does not have Windows Long Path "
@@ -1,7 +1,7 @@
 import json
 import logging
 from optparse import Values
-from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Tuple, cast
+from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast

 from pip._vendor.packaging.utils import canonicalize_name

@@ -14,8 +14,8 @@ from pip._internal.index.package_finder import PackageFinder
 from pip._internal.metadata import BaseDistribution, get_environment
 from pip._internal.models.selection_prefs import SelectionPreferences
 from pip._internal.network.session import PipSession
-from pip._internal.utils.misc import stdlib_pkgs, tabulate, write_output
-from pip._internal.utils.parallel import map_multithread
+from pip._internal.utils.compat import stdlib_pkgs
+from pip._internal.utils.misc import tabulate, write_output

 if TYPE_CHECKING:
 from pip._internal.metadata.base import DistributionVersion
@@ -26,6 +26,7 @@ if TYPE_CHECKING:
 These will be populated during ``get_outdated()``. This is dirty but
 makes the rest of the code much cleaner.
 """

 latest_version: DistributionVersion
 latest_filetype: str

@@ -48,77 +49,85 @@ class ListCommand(IndexGroupCommand):

 def add_options(self) -> None:
 self.cmd_opts.add_option(
-'-o', '--outdated',
-action='store_true',
+"-o",
+"--outdated",
+action="store_true",
 default=False,
-help='List outdated packages')
-self.cmd_opts.add_option(
-'-u', '--uptodate',
-action='store_true',
-default=False,
-help='List uptodate packages')
-self.cmd_opts.add_option(
-'-e', '--editable',
-action='store_true',
-default=False,
-help='List editable projects.')
-self.cmd_opts.add_option(
-'-l', '--local',
-action='store_true',
-default=False,
-help=('If in a virtualenv that has global access, do not list '
-'globally-installed packages.'),
+help="List outdated packages",
 )
 self.cmd_opts.add_option(
-'--user',
-dest='user',
-action='store_true',
+"-u",
+"--uptodate",
+action="store_true",
 default=False,
-help='Only output packages installed in user-site.')
+help="List uptodate packages",
+)
+self.cmd_opts.add_option(
+"-e",
+"--editable",
+action="store_true",
+default=False,
+help="List editable projects.",
+)
+self.cmd_opts.add_option(
+"-l",
+"--local",
+action="store_true",
+default=False,
+help=(
+"If in a virtualenv that has global access, do not list "
+"globally-installed packages."
+),
+)
+self.cmd_opts.add_option(
+"--user",
+dest="user",
+action="store_true",
+default=False,
+help="Only output packages installed in user-site.",
+)
 self.cmd_opts.add_option(cmdoptions.list_path())
 self.cmd_opts.add_option(
-'--pre',
-action='store_true',
+"--pre",
+action="store_true",
 default=False,
-help=("Include pre-release and development versions. By default, "
-"pip only finds stable versions."),
+help=(
+"Include pre-release and development versions. By default, "
+"pip only finds stable versions."
+),
 )

 self.cmd_opts.add_option(
-'--format',
-action='store',
-dest='list_format',
+"--format",
+action="store",
+dest="list_format",
 default="columns",
-choices=('columns', 'freeze', 'json'),
-help="Select the output format among: columns (default), freeze, "
-"or json",
+choices=("columns", "freeze", "json"),
+help="Select the output format among: columns (default), freeze, or json",
 )

 self.cmd_opts.add_option(
-'--not-required',
-action='store_true',
-dest='not_required',
-help="List packages that are not dependencies of "
-"installed packages.",
+"--not-required",
+action="store_true",
+dest="not_required",
+help="List packages that are not dependencies of installed packages.",
 )

 self.cmd_opts.add_option(
-'--exclude-editable',
-action='store_false',
-dest='include_editable',
-help='Exclude editable package from output.',
+"--exclude-editable",
+action="store_false",
+dest="include_editable",
+help="Exclude editable package from output.",
 )
 self.cmd_opts.add_option(
-'--include-editable',
-action='store_true',
-dest='include_editable',
-help='Include editable package from output.',
+"--include-editable",
+action="store_true",
+dest="include_editable",
+help="Include editable package from output.",
 default=True,
 )
 self.cmd_opts.add_option(cmdoptions.list_exclude())
-index_opts = cmdoptions.make_option_group(
-cmdoptions.index_group, self.parser
-)
+index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)

 self.parser.insert_option_group(0, index_opts)
 self.parser.insert_option_group(0, self.cmd_opts)
@@ -144,8 +153,12 @@ class ListCommand(IndexGroupCommand):

 def run(self, options: Values, args: List[str]) -> int:
 if options.outdated and options.uptodate:
+raise CommandError("Options --outdated and --uptodate cannot be combined.")

+if options.outdated and options.list_format == "freeze":
 raise CommandError(
-"Options --outdated and --uptodate cannot be combined.")
+"List format 'freeze' can not be used with the --outdated option."
+)

 cmdoptions.check_list_path_option(options)

@@ -183,7 +196,8 @@ class ListCommand(IndexGroupCommand):
 self, packages: "_ProcessedDists", options: Values
 ) -> "_ProcessedDists":
 return [
-dist for dist in self.iter_packages_latest_infos(packages, options)
+dist
+for dist in self.iter_packages_latest_infos(packages, options)
 if dist.latest_version > dist.version
 ]

@@ -191,7 +205,8 @@ class ListCommand(IndexGroupCommand):
 self, packages: "_ProcessedDists", options: Values
 ) -> "_ProcessedDists":
 return [
-dist for dist in self.iter_packages_latest_infos(packages, options)
+dist
+for dist in self.iter_packages_latest_infos(packages, options)
 if dist.latest_version == dist.version
 ]

@@ -211,18 +226,21 @@ class ListCommand(IndexGroupCommand):

 def iter_packages_latest_infos(
 self, packages: "_ProcessedDists", options: Values
-) -> Iterator["_DistWithLatestInfo"]:
+) -> Generator["_DistWithLatestInfo", None, None]:
 with self._build_session(options) as session:
 finder = self._build_package_finder(options, session)

 def latest_info(
-dist: "_DistWithLatestInfo"
+dist: "_DistWithLatestInfo",
 ) -> Optional["_DistWithLatestInfo"]:
 all_candidates = finder.find_all_candidates(dist.canonical_name)
 if not options.pre:
 # Remove prereleases
-all_candidates = [candidate for candidate in all_candidates
-if not candidate.version.is_prerelease]
+all_candidates = [
+candidate
+for candidate in all_candidates
+if not candidate.version.is_prerelease
+]

 evaluator = finder.make_candidate_evaluator(
 project_name=dist.canonical_name,
@@ -233,14 +251,14 @@ class ListCommand(IndexGroupCommand):

 remote_version = best_candidate.version
 if best_candidate.link.is_wheel:
-typ = 'wheel'
+typ = "wheel"
 else:
-typ = 'sdist'
+typ = "sdist"
 dist.latest_version = remote_version
 dist.latest_filetype = typ
 return dist

-for dist in map_multithread(latest_info, packages):
+for dist in map(latest_info, packages):
 if dist is not None:
 yield dist

@@ -251,17 +269,18 @@ class ListCommand(IndexGroupCommand):
 packages,
 key=lambda dist: dist.canonical_name,
 )
-if options.list_format == 'columns' and packages:
+if options.list_format == "columns" and packages:
 data, header = format_for_columns(packages, options)
 self.output_package_listing_columns(data, header)
-elif options.list_format == 'freeze':
+elif options.list_format == "freeze":
 for dist in packages:
 if options.verbose >= 1:
-write_output("%s==%s (%s)", dist.raw_name,
-dist.version, dist.location)
+write_output(
+"%s==%s (%s)", dist.raw_name, dist.version, dist.location
+)
 else:
 write_output("%s==%s", dist.raw_name, dist.version)
-elif options.list_format == 'json':
+elif options.list_format == "json":
 write_output(format_for_json(packages, options))

 def output_package_listing_columns(
@@ -275,7 +294,7 @@ class ListCommand(IndexGroupCommand):

 # Create and add a separator.
 if len(data) > 0:
-pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
+pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))

 for val in pkg_strings:
 write_output(val)
@@ -288,19 +307,22 @@ def format_for_columns(
 Convert the package data into something usable
 by output_package_listing_columns.
 """
-running_outdated = options.outdated
-# Adjust the header for the `pip list --outdated` case.
-if running_outdated:
-header = ["Package", "Version", "Latest", "Type"]
-else:
 header = ["Package", "Version"]

-data = []
-if options.verbose >= 1 or any(x.editable for x in pkgs):
+running_outdated = options.outdated
+if running_outdated:
+header.extend(["Latest", "Type"])

+has_editables = any(x.editable for x in pkgs)
+if has_editables:
+header.append("Editable project location")

+if options.verbose >= 1:
 header.append("Location")
 if options.verbose >= 1:
 header.append("Installer")

+data = []
 for proj in pkgs:
 # if we're working on the 'outdated' list, separate out the
 # latest_version and type
@@ -310,7 +332,10 @@ def format_for_columns(
 row.append(str(proj.latest_version))
 row.append(proj.latest_filetype)

-if options.verbose >= 1 or proj.editable:
+if has_editables:
+row.append(proj.editable_project_location or "")

+if options.verbose >= 1:
 row.append(proj.location or "")
 if options.verbose >= 1:
 row.append(proj.installer)
@@ -324,14 +349,17 @@ def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
 data = []
 for dist in packages:
 info = {
-'name': dist.raw_name,
-'version': str(dist.version),
+"name": dist.raw_name,
+"version": str(dist.version),
 }
 if options.verbose >= 1:
-info['location'] = dist.location or ""
-info['installer'] = dist.installer
+info["location"] = dist.location or ""
+info["installer"] = dist.installer
 if options.outdated:
-info['latest_version'] = str(dist.latest_version)
-info['latest_filetype'] = dist.latest_filetype
+info["latest_version"] = str(dist.latest_version)
+info["latest_filetype"] = dist.latest_filetype
+editable_project_location = dist.editable_project_location
+if editable_project_location:
+info["editable_project_location"] = editable_project_location
 data.append(info)
 return json.dumps(data)
@@ -27,6 +27,7 @@ if TYPE_CHECKING:
 summary: str
 versions: List[str]


 logger = logging.getLogger(__name__)


@@ -39,17 +40,19 @@ class SearchCommand(Command, SessionCommandMixin):

 def add_options(self) -> None:
 self.cmd_opts.add_option(
-'-i', '--index',
-dest='index',
-metavar='URL',
+"-i",
+"--index",
+dest="index",
+metavar="URL",
 default=PyPI.pypi_url,
-help='Base URL of Python Package Index (default %default)')
+help="Base URL of Python Package Index (default %default)",
+)

 self.parser.insert_option_group(0, self.cmd_opts)

 def run(self, options: Values, args: List[str]) -> int:
 if not args:
-raise CommandError('Missing required argument (search query).')
+raise CommandError("Missing required argument (search query).")
 query = args
 pypi_hits = self.search(query, options)
 hits = transform_hits(pypi_hits)
@@ -71,7 +74,7 @@ class SearchCommand(Command, SessionCommandMixin):
 transport = PipXmlrpcTransport(index_url, session)
 pypi = xmlrpc.client.ServerProxy(index_url, transport)
 try:
-hits = pypi.search({'name': query, 'summary': query}, 'or')
+hits = pypi.search({"name": query, "summary": query}, "or")
 except xmlrpc.client.Fault as fault:
 message = "XMLRPC request failed [code: {code}]\n{string}".format(
 code=fault.faultCode,
@@ -90,22 +93,22 @@ def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
 """
 packages: Dict[str, "TransformedHit"] = OrderedDict()
 for hit in hits:
-name = hit['name']
-summary = hit['summary']
-version = hit['version']
+name = hit["name"]
+summary = hit["summary"]
+version = hit["version"]

 if name not in packages.keys():
 packages[name] = {
-'name': name,
-'summary': summary,
-'versions': [version],
+"name": name,
+"summary": summary,
+"versions": [version],
 }
 else:
-packages[name]['versions'].append(version)
+packages[name]["versions"].append(version)

 # if this is the highest version, replace summary and score
-if version == highest_version(packages[name]['versions']):
-packages[name]['summary'] = summary
+if version == highest_version(packages[name]["versions"]):
+packages[name]["summary"] = summary

 return list(packages.values())

@@ -116,14 +119,17 @@ def print_dist_installation_info(name: str, latest: str) -> None:
 if dist is not None:
 with indent_log():
 if dist.version == latest:
-write_output('INSTALLED: %s (latest)', dist.version)
+write_output("INSTALLED: %s (latest)", dist.version)
 else:
-write_output('INSTALLED: %s', dist.version)
+write_output("INSTALLED: %s", dist.version)
 if parse_version(latest).pre:
-write_output('LATEST: %s (pre-release; install'
-' with "pip install --pre")', latest)
+write_output(
+"LATEST: %s (pre-release; install"
+" with `pip install --pre`)",
+latest,
+)
 else:
-write_output('LATEST: %s', latest)
+write_output("LATEST: %s", latest)


 def print_results(
@@ -134,25 +140,29 @@ def print_results(
 if not hits:
 return
 if name_column_width is None:
-name_column_width = max([
-len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
+name_column_width = (
+max(
+[
+len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
 for hit in hits
-]) + 4
+]
+)
++ 4
+)

 for hit in hits:
-name = hit['name']
-summary = hit['summary'] or ''
-latest = highest_version(hit.get('versions', ['-']))
+name = hit["name"]
+summary = hit["summary"] or ""
+latest = highest_version(hit.get("versions", ["-"]))
 if terminal_width is not None:
 target_width = terminal_width - name_column_width - 5
 if target_width > 10:
 # wrap and indent summary to fit terminal
 summary_lines = textwrap.wrap(summary, target_width)
-summary = ('\n' + ' ' * (name_column_width + 3)).join(
-summary_lines)
+summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)

-name_latest = f'{name} ({latest})'
-line = f'{name_latest:{name_column_width}} - {summary}'
+name_latest = f"{name} ({latest})"
+line = f"{name_latest:{name_column_width}} - {summary}"
 try:
 write_output(line)
 print_dist_installation_info(name, latest)
@@ -1,8 +1,6 @@
-import csv
import logging
-import pathlib
from optparse import Values
-from typing import Iterator, List, NamedTuple, Optional, Tuple
+from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional

from pip._vendor.packaging.utils import canonicalize_name

@@ -27,23 +25,26 @@ class ShowCommand(Command):

    def add_options(self) -> None:
        self.cmd_opts.add_option(
-            '-f', '--files',
-            dest='files',
-            action='store_true',
+            "-f",
+            "--files",
+            dest="files",
+            action="store_true",
            default=False,
-            help='Show the full list of installed files for each package.')
+            help="Show the full list of installed files for each package.",
+        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        if not args:
-            logger.warning('ERROR: Please provide a package name or names.')
+            logger.warning("ERROR: Please provide a package name or names.")
            return ERROR
        query = args

        results = search_packages_info(query)
        if not print_results(
-            results, list_files=options.files, verbose=options.verbose):
+            results, list_files=options.files, verbose=options.verbose
+        ):
            return ERROR
        return SUCCESS

@@ -52,6 +53,7 @@ class _PackageInfo(NamedTuple):
    name: str
    version: str
    location: str
+    editable_project_location: Optional[str]
    requires: List[str]
    required_by: List[str]
    installer: str
@@ -59,6 +61,7 @@ class _PackageInfo(NamedTuple):
    classifiers: List[str]
    summary: str
    homepage: str
+    project_urls: List[str]
    author: str
    author_email: str
    license: str
@@ -66,34 +69,7 @@ class _PackageInfo(NamedTuple):
    files: Optional[List[str]]


-def _covert_legacy_entry(entry: Tuple[str, ...], info: Tuple[str, ...]) -> str:
-    """Convert a legacy installed-files.txt path into modern RECORD path.
-
-    The legacy format stores paths relative to the info directory, while the
-    modern format stores paths relative to the package root, e.g. the
-    site-packages directory.
-
-    :param entry: Path parts of the installed-files.txt entry.
-    :param info: Path parts of the egg-info directory relative to package root.
-    :returns: The converted entry.
-
-    For best compatibility with symlinks, this does not use ``abspath()`` or
-    ``Path.resolve()``, but tries to work with path parts:
-
-    1. While ``entry`` starts with ``..``, remove the equal amounts of parts
-       from ``info``; if ``info`` is empty, start appending ``..`` instead.
-    2. Join the two directly.
-    """
-    while entry and entry[0] == "..":
-        if not info or info[-1] == "..":
-            info += ("..",)
-        else:
-            info = info[:-1]
-        entry = entry[1:]
-    return str(pathlib.Path(*info, *entry))
-
-
-def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
+def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
@@ -102,53 +78,20 @@ def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
    """
    env = get_default_environment()

-    installed = {
-        dist.canonical_name: dist
-        for dist in env.iter_distributions()
-    }
+    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
    query_names = [canonicalize_name(name) for name in query]
    missing = sorted(
        [name for name, pkg in zip(query, query_names) if pkg not in installed]
    )
    if missing:
-        logger.warning('Package(s) not found: %s', ', '.join(missing))
+        logger.warning("Package(s) not found: %s", ", ".join(missing))

-    def _get_requiring_packages(current_dist: BaseDistribution) -> List[str]:
-        return [
+    def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
+        return (
            dist.metadata["Name"] or "UNKNOWN"
            for dist in installed.values()
-            if current_dist.canonical_name in {
-                canonicalize_name(d.name) for d in dist.iter_dependencies()
-            }
-        ]
-
-    def _files_from_record(dist: BaseDistribution) -> Optional[Iterator[str]]:
-        try:
-            text = dist.read_text('RECORD')
-        except FileNotFoundError:
-            return None
-        # This extra Path-str cast normalizes entries.
-        return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))
-
-    def _files_from_legacy(dist: BaseDistribution) -> Optional[Iterator[str]]:
-        try:
-            text = dist.read_text('installed-files.txt')
-        except FileNotFoundError:
-            return None
-        paths = (p for p in text.splitlines(keepends=False) if p)
-        root = dist.location
-        info = dist.info_directory
-        if root is None or info is None:
-            return paths
-        try:
-            info_rel = pathlib.Path(info).relative_to(root)
-        except ValueError:  # info is not relative to root.
-            return paths
-        if not info_rel.parts:  # info *is* root.
-            return paths
-        return (
-            _covert_legacy_entry(pathlib.Path(p).parts, info_rel.parts)
-            for p in paths
+            if current_dist.canonical_name
+            in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
        )

    for query_name in query_names:
@@ -157,13 +100,16 @@ def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
        except KeyError:
            continue

+        requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
+        required_by = sorted(_get_requiring_packages(dist), key=str.lower)

        try:
-            entry_points_text = dist.read_text('entry_points.txt')
+            entry_points_text = dist.read_text("entry_points.txt")
            entry_points = entry_points_text.splitlines(keepends=False)
        except FileNotFoundError:
            entry_points = []

-        files_iter = _files_from_record(dist) or _files_from_legacy(dist)
+        files_iter = dist.iter_declared_entries()
        if files_iter is None:
            files: Optional[List[str]] = None
        else:
@@ -175,13 +121,15 @@ def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
            name=dist.raw_name,
            version=str(dist.version),
            location=dist.location or "",
-            requires=[req.name for req in dist.iter_dependencies()],
-            required_by=_get_requiring_packages(dist),
+            editable_project_location=dist.editable_project_location,
+            requires=requires,
+            required_by=required_by,
            installer=dist.installer,
            metadata_version=dist.metadata_version or "",
            classifiers=metadata.get_all("Classifier", []),
            summary=metadata.get("Summary", ""),
            homepage=metadata.get("Home-page", ""),
+            project_urls=metadata.get_all("Project-URL", []),
            author=metadata.get("Author", ""),
            author_email=metadata.get("Author-email", ""),
            license=metadata.get("License", ""),
@@ -191,7 +139,7 @@ def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:


def print_results(
-    distributions: Iterator[_PackageInfo],
+    distributions: Iterable[_PackageInfo],
    list_files: bool,
    verbose: bool,
) -> bool:
@@ -212,8 +160,12 @@ def print_results(
        write_output("Author-email: %s", dist.author_email)
        write_output("License: %s", dist.license)
        write_output("Location: %s", dist.location)
-        write_output("Requires: %s", ', '.join(dist.requires))
-        write_output("Required-by: %s", ', '.join(dist.required_by))
+        if dist.editable_project_location is not None:
+            write_output(
+                "Editable project location: %s", dist.editable_project_location
+            )
+        write_output("Requires: %s", ", ".join(dist.requires))
+        write_output("Required-by: %s", ", ".join(dist.required_by))

        if verbose:
            write_output("Metadata-Version: %s", dist.metadata_version)
@@ -224,6 +176,9 @@ def print_results(
            write_output("Entry-points:")
            for entry in dist.entry_points:
                write_output(" %s", entry.strip())
+            write_output("Project-URLs:")
+            for project_url in dist.project_urls:
+                write_output(" %s", project_url)
        if list_files:
            write_output("Files:")
            if dist.files is None:
@@ -4,6 +4,7 @@ from typing import List

from pip._vendor.packaging.utils import canonicalize_name

+from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
from pip._internal.cli.status_codes import SUCCESS
@@ -13,7 +14,10 @@ from pip._internal.req.constructors import (
    install_req_from_line,
    install_req_from_parsed_requirement,
)
-from pip._internal.utils.misc import protect_pip_from_modification_on_windows
+from pip._internal.utils.misc import (
+    check_externally_managed,
+    protect_pip_from_modification_on_windows,
+)

logger = logging.getLogger(__name__)

@@ -35,20 +39,26 @@ class UninstallCommand(Command, SessionCommandMixin):

    def add_options(self) -> None:
        self.cmd_opts.add_option(
-            '-r', '--requirement',
-            dest='requirements',
-            action='append',
+            "-r",
+            "--requirement",
+            dest="requirements",
+            action="append",
            default=[],
-            metavar='file',
-            help='Uninstall all the packages listed in the given requirements '
-            'file. This option can be used multiple times.',
+            metavar="file",
+            help=(
+                "Uninstall all the packages listed in the given requirements "
+                "file. This option can be used multiple times."
+            ),
        )
        self.cmd_opts.add_option(
-            '-y', '--yes',
-            dest='yes',
-            action='store_true',
-            help="Don't ask for confirmation of uninstall deletions.")
+            "-y",
+            "--yes",
+            dest="yes",
+            action="store_true",
+            help="Don't ask for confirmation of uninstall deletions.",
+        )
+        self.cmd_opts.add_option(cmdoptions.root_user_action())
+        self.cmd_opts.add_option(cmdoptions.override_externally_managed())
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
@@ -57,7 +67,8 @@ class UninstallCommand(Command, SessionCommandMixin):
        reqs_to_uninstall = {}
        for name in args:
            req = install_req_from_line(
-                name, isolated=options.isolated_mode,
+                name,
+                isolated=options.isolated_mode,
            )
            if req.name:
                reqs_to_uninstall[canonicalize_name(req.name)] = req
@@ -70,31 +81,33 @@ class UninstallCommand(Command, SessionCommandMixin):
        )
        for filename in options.requirements:
            for parsed_req in parse_requirements(
-                filename,
-                options=options,
-                session=session):
+                filename, options=options, session=session
+            ):
                req = install_req_from_parsed_requirement(
-                    parsed_req,
-                    isolated=options.isolated_mode
+                    parsed_req, isolated=options.isolated_mode
                )
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
        if not reqs_to_uninstall:
            raise InstallationError(
-                f'You must give at least one requirement to {self.name} (see '
+                f"You must give at least one requirement to {self.name} (see "
                f'"pip help {self.name}")'
            )

+        if not options.override_externally_managed:
+            check_externally_managed()
+
        protect_pip_from_modification_on_windows(
            modifying_pip="pip" in reqs_to_uninstall
        )

        for req in reqs_to_uninstall.values():
            uninstall_pathset = req.uninstall(
-                auto_confirm=options.yes, verbose=self.verbosity > 0,
+                auto_confirm=options.yes,
+                verbose=self.verbosity > 0,
            )
            if uninstall_pathset:
                uninstall_pathset.commit()
+        if options.root_user_action == "warn":
            warn_if_run_as_root()
        return SUCCESS
@@ -9,8 +9,11 @@ from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import CommandError
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_tracker import get_requirement_tracker
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import (
+    InstallRequirement,
+    check_legacy_setup_py_options,
+)
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel_builder import build, should_build_for_wheel_command
@@ -26,10 +29,8 @@ class WheelCommand(RequirementCommand):
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

-    Requirements: setuptools>=0.8, and wheel.
-    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
-    package to build individual wheels.
+    'pip wheel' uses the build system interface as described here:
+    https://pip.pypa.io/en/stable/reference/build-system/

    """

@@ -41,14 +42,16 @@ class WheelCommand(RequirementCommand):
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:

        self.cmd_opts.add_option(
-            '-w', '--wheel-dir',
-            dest='wheel_dir',
-            metavar='dir',
+            "-w",
+            "--wheel-dir",
+            dest="wheel_dir",
+            metavar="dir",
            default=os.curdir,
-            help=("Build wheels into <dir>, where the default is the "
-                  "current working directory."),
+            help=(
+                "Build wheels into <dir>, where the default is the "
+                "current working directory."
+            ),
        )
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
@@ -56,32 +59,35 @@ class WheelCommand(RequirementCommand):
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.editable())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.no_deps())
-        self.cmd_opts.add_option(cmdoptions.build_dir())
        self.cmd_opts.add_option(cmdoptions.progress_bar())

        self.cmd_opts.add_option(
-            '--no-verify',
-            dest='no_verify',
-            action='store_true',
+            "--no-verify",
+            dest="no_verify",
+            action="store_true",
            default=False,
            help="Don't verify if built wheel is valid.",
        )

+        self.cmd_opts.add_option(cmdoptions.config_settings())
        self.cmd_opts.add_option(cmdoptions.build_options())
        self.cmd_opts.add_option(cmdoptions.global_options())

        self.cmd_opts.add_option(
-            '--pre',
-            action='store_true',
+            "--pre",
+            action="store_true",
            default=False,
-            help=("Include pre-release and development versions. By default, "
-                  "pip only finds stable versions."),
+            help=(
+                "Include pre-release and development versions. By default, "
+                "pip only finds stable versions."
+            ),
        )

        self.cmd_opts.add_option(cmdoptions.require_hashes())
@@ -96,17 +102,14 @@ class WheelCommand(RequirementCommand):

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
-        cmdoptions.check_install_build_global(options)
-
        session = self.get_default_session(options)

        finder = self._build_package_finder(options, session)
-        wheel_cache = WheelCache(options.cache_dir, options.format_control)

        options.wheel_dir = normalize_path(options.wheel_dir)
        ensure_dir(options.wheel_dir)

-        req_tracker = self.enter_context(get_requirement_tracker())
+        build_tracker = self.enter_context(get_build_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
@@ -115,15 +118,19 @@ class WheelCommand(RequirementCommand):
        )

        reqs = self.get_requirements(args, options, finder, session)
+        check_legacy_setup_py_options(options, reqs)
+
+        wheel_cache = WheelCache(options.cache_dir)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
-            req_tracker=req_tracker,
+            build_tracker=build_tracker,
            session=session,
            finder=finder,
            download_dir=options.wheel_dir,
            use_user_site=False,
+            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
@@ -137,9 +144,7 @@ class WheelCommand(RequirementCommand):

        self.trace_basic_info(finder)

-        requirement_set = resolver.resolve(
-            reqs, check_supported_wheels=True
-        )
+        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        reqs_to_build: List[InstallRequirement] = []
        for req in requirement_set.requirements.values():
@@ -165,12 +170,11 @@ class WheelCommand(RequirementCommand):
            except OSError as e:
                logger.warning(
                    "Building wheel for %s failed: %s",
-                    req.name, e,
+                    req.name,
+                    e,
                )
                build_failures.append(req)
        if len(build_failures) != 0:
-            raise CommandError(
-                "Failed to build one or more wheels"
-            )
+            raise CommandError("Failed to build one or more wheels")

        return SUCCESS
@@ -13,7 +13,6 @@ Some terminology:

import configparser
import locale
-import logging
import os
import sys
from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
@@ -24,12 +23,13 @@ from pip._internal.exceptions import (
)
from pip._internal.utils import appdirs
from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.logging import getLogger
from pip._internal.utils.misc import ensure_dir, enum

RawConfigParser = configparser.RawConfigParser  # Shorthand
Kind = NewType("Kind", str)

-CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
+CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
ENV_NAMES_IGNORED = "version", "help"

# The kinds of configurations there are.
@@ -43,22 +43,19 @@ kinds = enum(
OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE

-logger = logging.getLogger(__name__)
+logger = getLogger(__name__)


# NOTE: Maybe use the optionx attribute to normalize keynames.
-def _normalize_name(name):
-    # type: (str) -> str
-    """Make a name consistent regardless of source (environment or file)
-    """
-    name = name.lower().replace('_', '-')
-    if name.startswith('--'):
+def _normalize_name(name: str) -> str:
+    """Make a name consistent regardless of source (environment or file)"""
+    name = name.lower().replace("_", "-")
+    if name.startswith("--"):
        name = name[2:]  # only prefer long opts
    return name


-def _disassemble_key(name):
-    # type: (str) -> List[str]
+def _disassemble_key(name: str) -> List[str]:
    if "." not in name:
        error_message = (
            "Key does not contain dot separated section and key. "
@@ -68,22 +65,18 @@ def _disassemble_key(name):
    return name.split(".", 1)


-def get_configuration_files():
-    # type: () -> Dict[Kind, List[str]]
+def get_configuration_files() -> Dict[Kind, List[str]]:
    global_config_files = [
-        os.path.join(path, CONFIG_BASENAME)
-        for path in appdirs.site_config_dirs('pip')
+        os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
    ]

    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
    legacy_config_file = os.path.join(
-        os.path.expanduser('~'),
-        'pip' if WINDOWS else '.pip',
+        os.path.expanduser("~"),
+        "pip" if WINDOWS else ".pip",
        CONFIG_BASENAME,
    )
-    new_config_file = os.path.join(
-        appdirs.user_config_dir("pip"), CONFIG_BASENAME
-    )
+    new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
    return {
        kinds.GLOBAL: global_config_files,
        kinds.SITE: [site_config_file],
@@ -105,8 +98,7 @@ class Configuration:
    and the data stored is also nice.
    """

-    def __init__(self, isolated, load_only=None):
-        # type: (bool, Optional[Kind]) -> None
+    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
        super().__init__()

        if load_only is not None and load_only not in VALID_LOAD_ONLY:
@@ -119,54 +111,50 @@ class Configuration:
        self.load_only = load_only

        # Because we keep track of where we got the data from
-        self._parsers = {
+        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
            variant: [] for variant in OVERRIDE_ORDER
-        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
-        self._config = {
+        }
+        self._config: Dict[Kind, Dict[str, Any]] = {
            variant: {} for variant in OVERRIDE_ORDER
-        }  # type: Dict[Kind, Dict[str, Any]]
-        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]
+        }
+        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []

-    def load(self):
-        # type: () -> None
-        """Loads configuration from configuration files and environment
-        """
+    def load(self) -> None:
+        """Loads configuration from configuration files and environment"""
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

-    def get_file_to_edit(self):
-        # type: () -> Optional[str]
-        """Returns the file with highest priority in configuration
-        """
-        assert self.load_only is not None, \
-            "Need to be specified a file to be editing"
+    def get_file_to_edit(self) -> Optional[str]:
+        """Returns the file with highest priority in configuration"""
+        assert self.load_only is not None, "Need to be specified a file to be editing"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

-    def items(self):
-        # type: () -> Iterable[Tuple[str, Any]]
+    def items(self) -> Iterable[Tuple[str, Any]]:
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

-    def get_value(self, key):
-        # type: (str) -> Any
-        """Get a value from the configuration.
-        """
+    def get_value(self, key: str) -> Any:
+        """Get a value from the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
        try:
            return self._dictionary[key]
        except KeyError:
-            raise ConfigurationError(f"No such key - {key}")
+            # disassembling triggers a more useful error message than simply
+            # "No such key" in the case that the key isn't in the form command.option
+            _disassemble_key(key)
+            raise ConfigurationError(f"No such key - {orig_key}")

-    def set_value(self, key, value):
-        # type: (str, Any) -> None
-        """Modify a value in the configuration.
-        """
+    def set_value(self, key: str, value: Any) -> None:
+        """Modify a value in the configuration."""
+        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
@@ -183,21 +171,23 @@ class Configuration:
            self._config[self.load_only][key] = value
            self._mark_as_modified(fname, parser)

-    def unset_value(self, key):
-        # type: (str) -> None
+    def unset_value(self, key: str) -> None:
        """Unset a value in the configuration."""
+        orig_key = key
+        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        if key not in self._config[self.load_only]:
-            raise ConfigurationError(f"No such key - {key}")
+            raise ConfigurationError(f"No such key - {orig_key}")

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)
-            if not (parser.has_section(section)
-                    and parser.remove_option(section, name)):
+            if not (
+                parser.has_section(section) and parser.remove_option(section, name)
+            ):
                # The option was not removed.
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
@@ -210,10 +200,8 @@ class Configuration:

        del self._config[self.load_only][key]

-    def save(self):
-        # type: () -> None
-        """Save the current in-memory state.
-        """
+    def save(self) -> None:
+        """Save the current in-memory state."""
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
@@ -229,17 +217,14 @@ class Configuration:
    # Private routines
    #

-    def _ensure_have_load_only(self):
-        # type: () -> None
+    def _ensure_have_load_only(self) -> None:
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
-    def _dictionary(self):
-        # type: () -> Dict[str, Any]
-        """A dictionary representing the loaded configuration.
-        """
+    def _dictionary(self) -> Dict[str, Any]:
+        """A dictionary representing the loaded configuration."""
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}
@@ -249,10 +234,8 @@ class Configuration:

        return retval

-    def _load_config_files(self):
-        # type: () -> None
-        """Loads configuration from configuration files
-        """
+    def _load_config_files(self) -> None:
+        """Loads configuration from configuration files"""
        config_files = dict(self.iter_config_files())
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
@@ -266,9 +249,7 @@ class Configuration:
            # If there's specific variant set in `load_only`, load only
            # that variant, not the others.
            if self.load_only is not None and variant != self.load_only:
-                logger.debug(
-                    "Skipping file '%s' (variant: %s)", fname, variant
-                )
+                logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
                continue

            parser = self._load_file(variant, fname)
@@ -276,9 +257,8 @@ class Configuration:
            # Keeping track of the parsers used
            self._parsers[variant].append((fname, parser))

-    def _load_file(self, variant, fname):
-        # type: (Kind, str) -> RawConfigParser
-        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
+    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
+        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
@@ -287,22 +267,20 @@ class Configuration:

        return parser

-    def _construct_parser(self, fname):
-        # type: (str) -> RawConfigParser
+    def _construct_parser(self, fname: str) -> RawConfigParser:
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
+            locale_encoding = locale.getpreferredencoding(False)
            try:
-                parser.read(fname)
+                parser.read(fname, encoding=locale_encoding)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
-                    reason="contains invalid {} characters".format(
-                        locale.getpreferredencoding(False)
-                    ),
+                    reason=f"contains invalid {locale_encoding} characters",
                    fname=fname,
                )
            except configparser.Error as error:
@@ -310,16 +288,15 @@ class Configuration:
            raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

-    def _load_environment_vars(self):
-        # type: () -> None
-        """Loads configuration from environment variables
-        """
+    def _load_environment_vars(self) -> None:
+        """Loads configuration from environment variables"""
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self.get_environ_vars())
        )

-    def _normalized_keys(self, section, items):
-        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
+    def _normalized_keys(
+        self, section: str, items: Iterable[Tuple[str, Any]]
+    ) -> Dict[str, Any]:
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
@@ -331,8 +308,7 @@ class Configuration:
            normalized[key] = val
        return normalized

-    def get_environ_vars(self):
-        # type: () -> Iterable[Tuple[str, str]]
+    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if key.startswith("PIP_"):
@@ -341,8 +317,7 @@ class Configuration:
                yield name, val

    # XXX: This is patched in the tests.
-    def iter_config_files(self):
-        # type: () -> Iterable[Tuple[Kind, List[str]]]
+    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary.
@@ -350,7 +325,7 @@ class Configuration:
        # SMELL: Move the conditions out of this function

        # environment variables have the lowest priority
-        config_file = os.environ.get('PIP_CONFIG_FILE', None)
+        config_file = os.environ.get("PIP_CONFIG_FILE", None)
        if config_file is not None:
            yield kinds.ENV, [config_file]
        else:
@@ -372,13 +347,11 @@ class Configuration:
        # finally virtualenv configuration first trumping others
        yield kinds.SITE, config_files[kinds.SITE]

-    def get_values_in_config(self, variant):
-        # type: (Kind) -> Dict[str, Any]
+    def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
        """Get values present in a config file"""
        return self._config[variant]

-    def _get_parser_to_modify(self):
-        # type: () -> Tuple[str, RawConfigParser]
+    def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
        # Determine which parser to modify
        assert self.load_only
        parsers = self._parsers[self.load_only]
@@ -392,12 +365,10 @@ class Configuration:
        return parsers[-1]

    # XXX: This is patched in the tests.
-    def _mark_as_modified(self, fname, parser):
-        # type: (str, RawConfigParser) -> None
+    def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)

-    def __repr__(self):
-        # type: () -> str
+    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self._dictionary!r})"
@@ -1,9 +1,7 @@
import abc
-from typing import Optional
-
-from pip._vendor.pkg_resources import Distribution

from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata.base import BaseDistribution
from pip._internal.req import InstallRequirement

@@ -28,11 +26,14 @@ class AbstractDistribution(metaclass=abc.ABCMeta):
        self.req = req

    @abc.abstractmethod
-    def get_pkg_resources_distribution(self) -> Optional[Distribution]:
+    def get_metadata_distribution(self) -> BaseDistribution:
        raise NotImplementedError()

    @abc.abstractmethod
    def prepare_distribution_metadata(
-        self, finder: PackageFinder, build_isolation: bool
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
    ) -> None:
        raise NotImplementedError()
@@ -1,9 +1,6 @@
-from typing import Optional
-
-from pip._vendor.pkg_resources import Distribution
-
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution


class InstalledDistribution(AbstractDistribution):
@@ -13,10 +10,14 @@ class InstalledDistribution(AbstractDistribution):
    been computed.
    """

-    def get_pkg_resources_distribution(self) -> Optional[Distribution]:
+    def get_metadata_distribution(self) -> BaseDistribution:
+        assert self.req.satisfied_by is not None, "not actually installed"
        return self.req.satisfied_by

    def prepare_distribution_metadata(
-        self, finder: PackageFinder, build_isolation: bool
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
    ) -> None:
        pass
@@ -1,12 +1,11 @@
import logging
-from typing import Set, Tuple
+from typing import Iterable, Set, Tuple

-from pip._vendor.pkg_resources import Distribution
-
from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.exceptions import InstallationError
from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
from pip._internal.utils.subprocess import runner_with_spinner_message

logger = logging.getLogger(__name__)
@@ -19,11 +18,14 @@ class SourceDistribution(AbstractDistribution):
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
    """

-    def get_pkg_resources_distribution(self) -> Distribution:
+    def get_metadata_distribution(self) -> BaseDistribution:
        return self.req.get_dist()

    def prepare_distribution_metadata(
-        self, finder: PackageFinder, build_isolation: bool
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
    ) -> None:
        # Load pyproject.toml, to determine whether PEP 517 is to be used
        self.req.load_pyproject_toml()
@@ -31,28 +33,34 @@ class SourceDistribution(AbstractDistribution):
        # Set up the build isolation, if this requirement should be isolated
        should_isolate = self.req.use_pep517 and build_isolation
        if should_isolate:
-            self._setup_isolation(finder)
+            # Setup an isolated environment and install the build backend static
+            # requirements in it.
+            self._prepare_build_backend(finder)
+            # Check that if the requirement is editable, it either supports PEP 660 or
+            # has a setup.py or a setup.cfg. This cannot be done earlier because we need
+            # to setup the build backend to verify it supports build_editable, nor can
+            # it be done later, because we want to avoid installing build requirements
+            # needlessly. Doing it here also works around setuptools generating
+            # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
+            # without setup.py nor setup.cfg.
+            self.req.isolated_editable_sanity_check()
+            # Install the dynamic build requirements.
+            self._install_build_reqs(finder)
+        # Check if the current environment provides build dependencies
+        should_check_deps = self.req.use_pep517 and check_build_deps
+        if should_check_deps:
+            pyproject_requires = self.req.pyproject_requires
+            assert pyproject_requires is not None
+            conflicting, missing = self.req.build_env.check_requirements(
+                pyproject_requires
+            )
+            if conflicting:
+                self._raise_conflicts("the backend dependencies", conflicting)
+            if missing:
+                self._raise_missing_reqs(missing)
        self.req.prepare_metadata()

-    def _setup_isolation(self, finder: PackageFinder) -> None:
-        def _raise_conflicts(
-            conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
-        ) -> None:
-            format_string = (
-                "Some build dependencies for {requirement} "
-                "conflict with {conflicting_with}: {description}."
-            )
-            error_message = format_string.format(
-                requirement=self.req,
-                conflicting_with=conflicting_with,
-                description=", ".join(
-                    f"{installed} is incompatible with {wanted}"
-                    for installed, wanted in sorted(conflicting)
-                ),
-            )
-            raise InstallationError(error_message)
-
+    def _prepare_build_backend(self, finder: PackageFinder) -> None:
        # Isolate in a BuildEnvironment and install the build-time
        # requirements.
        pyproject_requires = self.req.pyproject_requires
@@ -60,13 +68,13 @@ class SourceDistribution(AbstractDistribution):

        self.req.build_env = BuildEnvironment()
        self.req.build_env.install_requirements(
-            finder, pyproject_requires, "overlay", "Installing build dependencies"
+            finder, pyproject_requires, "overlay", kind="build dependencies"
        )
        conflicting, missing = self.req.build_env.check_requirements(
            self.req.requirements_to_check
        )
        if conflicting:
-            _raise_conflicts("PEP 517/518 supported requirements", conflicting)
+            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
        if missing:
            logger.warning(
                "Missing build requirements in pyproject.toml for %s.",
@@ -77,19 +85,66 @@ class SourceDistribution(AbstractDistribution):
                "pip cannot fall back to setuptools without %s.",
                " and ".join(map(repr, sorted(missing))),
            )
-        # Install any extra build dependencies that the backend requests.
-        # This must be done in a second pass, as the pyproject.toml
-        # dependencies must be installed before we can call the backend.
+    def _get_build_requires_wheel(self) -> Iterable[str]:
        with self.req.build_env:
            runner = runner_with_spinner_message("Getting requirements to build wheel")
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
-                reqs = backend.get_requires_for_build_wheel()
+                return backend.get_requires_for_build_wheel()

-        conflicting, missing = self.req.build_env.check_requirements(reqs)
-        if conflicting:
-            _raise_conflicts("the backend dependencies", conflicting)
-        self.req.build_env.install_requirements(
-            finder, missing, "normal", "Installing backend dependencies"
+    def _get_build_requires_editable(self) -> Iterable[str]:
+        with self.req.build_env:
+            runner = runner_with_spinner_message(
+                "Getting requirements to build editable"
            )
+            backend = self.req.pep517_backend
+            assert backend is not None
+            with backend.subprocess_runner(runner):
+                return backend.get_requires_for_build_editable()
+
+    def _install_build_reqs(self, finder: PackageFinder) -> None:
+        # Install any extra build dependencies that the backend requests.
+        # This must be done in a second pass, as the pyproject.toml
+        # dependencies must be installed before we can call the backend.
+        if (
+            self.req.editable
+            and self.req.permit_editable_wheels
+            and self.req.supports_pyproject_editable()
+        ):
+            build_reqs = self._get_build_requires_editable()
+        else:
+            build_reqs = self._get_build_requires_wheel()
+        conflicting, missing = self.req.build_env.check_requirements(build_reqs)
+        if conflicting:
+            self._raise_conflicts("the backend dependencies", conflicting)
+        self.req.build_env.install_requirements(
+            finder, missing, "normal", kind="backend dependencies"
+        )
+
+    def _raise_conflicts(
+        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
+    ) -> None:
+        format_string = (
+            "Some build dependencies for {requirement} "
+            "conflict with {conflicting_with}: {description}."
+        )
+        error_message = format_string.format(
+            requirement=self.req,
+            conflicting_with=conflicting_with,
+            description=", ".join(
|
f"{installed} is incompatible with {wanted}"
|
||||||
|
for installed, wanted in sorted(conflicting_reqs)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
raise InstallationError(error_message)
|
||||||
|
|
||||||
|
def _raise_missing_reqs(self, missing: Set[str]) -> None:
|
||||||
|
format_string = (
|
||||||
|
"Some build dependencies for {requirement} are missing: {missing}."
|
||||||
|
)
|
||||||
|
error_message = format_string.format(
|
||||||
|
requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
|
||||||
|
)
|
||||||
|
raise InstallationError(error_message)
|
||||||
|
|||||||
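The hunks above appear to update pip's _internal/distributions/sdist.py: metadata preparation is split into _prepare_build_backend() for the static build-system.requires and _install_build_reqs() for the dynamic requirements that the PEP 517 backend reports through get_requires_for_build_wheel() / get_requires_for_build_editable(). As a rough, hedged illustration of that hook outside pip (no build isolation, in-process import; the file path and backend default are assumptions, not pip's code):

    # Minimal sketch: ask the PEP 517 backend named in pyproject.toml which
    # extra requirements it needs to build a wheel.
    import importlib
    import tomllib  # assumes Python 3.11+

    with open("pyproject.toml", "rb") as f:  # path is an assumption
        build_system = tomllib.load(f).get("build-system", {})

    static_requires = build_system.get("requires", [])
    backend_name = build_system.get("build-backend", "setuptools.build_meta")

    backend = importlib.import_module(backend_name)
    # PEP 517 hook; pip invokes it through a subprocess runner inside the
    # isolated build environment, as shown in the diff above.
    dynamic_requires = backend.get_requires_for_build_wheel(config_settings=None)
    print(static_requires, dynamic_requires)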
@@ -1,10 +1,12 @@
-from zipfile import ZipFile
+from pip._vendor.packaging.utils import canonicalize_name
 
-from pip._vendor.pkg_resources import Distribution
-
 from pip._internal.distributions.base import AbstractDistribution
 from pip._internal.index.package_finder import PackageFinder
-from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
+from pip._internal.metadata import (
+    BaseDistribution,
+    FilesystemWheel,
+    get_wheel_distribution,
+)
 
 
 class WheelDistribution(AbstractDistribution):
@@ -13,22 +15,20 @@ class WheelDistribution(AbstractDistribution):
     This does not need any preparation as wheels can be directly unpacked.
     """
 
-    def get_pkg_resources_distribution(self) -> Distribution:
+    def get_metadata_distribution(self) -> BaseDistribution:
         """Loads the metadata from the wheel file into memory and returns a
         Distribution that uses it, not relying on the wheel file or
         requirement.
         """
-        # Set as part of preparation during download.
-        assert self.req.local_file_path
-        # Wheels are never unnamed.
-        assert self.req.name
-
-        with ZipFile(self.req.local_file_path, allowZip64=True) as z:
-            return pkg_resources_distribution_for_wheel(
-                z, self.req.name, self.req.local_file_path
-            )
+        assert self.req.local_file_path, "Set as part of preparation during download"
+        assert self.req.name, "Wheels are never unnamed"
+        wheel = FilesystemWheel(self.req.local_file_path)
+        return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
 
     def prepare_distribution_metadata(
-        self, finder: PackageFinder, build_isolation: bool
+        self,
+        finder: PackageFinder,
+        build_isolation: bool,
+        check_build_deps: bool,
     ) -> None:
         pass
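The hunks above appear to be pip's _internal/distributions/wheel.py, where the pkg_resources-based metadata loading is replaced by pip's own metadata abstraction (FilesystemWheel / get_wheel_distribution). The underlying idea is unchanged: a wheel is a plain zip archive, so its metadata can be read without building anything. A small standalone sketch of that idea (not pip's implementation; the filename is a placeholder):

    import zipfile
    from email.parser import Parser

    def wheel_metadata(path: str) -> dict:
        # Read <name>.dist-info/METADATA straight out of the wheel archive.
        with zipfile.ZipFile(path) as zf:
            metadata_name = next(
                n for n in zf.namelist() if n.endswith(".dist-info/METADATA")
            )
            msg = Parser().parsestr(zf.read(metadata_name).decode("utf-8"))
        return {"name": msg["Name"], "version": msg["Version"]}

    print(wheel_metadata("example_pkg-1.0-py3-none-any.whl"))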
@@ -1,22 +1,181 @@
-"""Exceptions used throughout package"""
+"""Exceptions used throughout package.
+
+This module MUST NOT try to import from anything within `pip._internal` to
+operate. This is expected to be importable from any/all files within the
+subpackage and, thus, should not depend on them.
+"""
 
 import configparser
+import contextlib
+import locale
+import logging
+import pathlib
+import re
+import sys
 from itertools import chain, groupby, repeat
-from typing import TYPE_CHECKING, Dict, List, Optional
+from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union
 
-from pip._vendor.pkg_resources import Distribution
 from pip._vendor.requests.models import Request, Response
+from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
+from pip._vendor.rich.markup import escape
+from pip._vendor.rich.text import Text
 
 if TYPE_CHECKING:
     from hashlib import _Hash
+    from typing import Literal
 
+    from pip._internal.metadata import BaseDistribution
     from pip._internal.req.req_install import InstallRequirement
 
+logger = logging.getLogger(__name__)
+
+
+#
+# Scaffolding
+#
+def _is_kebab_case(s: str) -> bool:
+    return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None
+
+
+def _prefix_with_indent(
+    s: Union[Text, str],
+    console: Console,
+    *,
+    prefix: str,
+    indent: str,
+) -> Text:
+    if isinstance(s, Text):
+        text = s
+    else:
+        text = console.render_str(s)
+
+    return console.render_str(prefix, overflow="ignore") + console.render_str(
+        f"\n{indent}", overflow="ignore"
+    ).join(text.split(allow_blank=True))
+
 
 class PipError(Exception):
-    """Base pip exception"""
+    """The base pip error."""
+
+
+class DiagnosticPipError(PipError):
+    """An error, that presents diagnostic information to the user.
+
+    This contains a bunch of logic, to enable pretty presentation of our error
+    messages. Each error gets a unique reference. Each error can also include
+    additional context, a hint and/or a note -- which are presented with the
+    main error message in a consistent style.
+
+    This is adapted from the error output styling in `sphinx-theme-builder`.
+    """
+
+    reference: str
+
+    def __init__(
+        self,
+        *,
+        kind: 'Literal["error", "warning"]' = "error",
+        reference: Optional[str] = None,
+        message: Union[str, Text],
+        context: Optional[Union[str, Text]],
+        hint_stmt: Optional[Union[str, Text]],
+        note_stmt: Optional[Union[str, Text]] = None,
+        link: Optional[str] = None,
+    ) -> None:
+        # Ensure a proper reference is provided.
+        if reference is None:
+            assert hasattr(self, "reference"), "error reference not provided!"
+            reference = self.reference
+        assert _is_kebab_case(reference), "error reference must be kebab-case!"
+
+        self.kind = kind
+        self.reference = reference
+
+        self.message = message
+        self.context = context
+
+        self.note_stmt = note_stmt
+        self.hint_stmt = hint_stmt
+
+        self.link = link
+
+        super().__init__(f"<{self.__class__.__name__}: {self.reference}>")
+
+    def __repr__(self) -> str:
+        return (
+            f"<{self.__class__.__name__}("
+            f"reference={self.reference!r}, "
+            f"message={self.message!r}, "
+            f"context={self.context!r}, "
+            f"note_stmt={self.note_stmt!r}, "
+            f"hint_stmt={self.hint_stmt!r}"
+            ")>"
+        )
+
+    def __rich_console__(
+        self,
+        console: Console,
+        options: ConsoleOptions,
+    ) -> RenderResult:
+        colour = "red" if self.kind == "error" else "yellow"
+
+        yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
+        yield ""
+
+        if not options.ascii_only:
+            # Present the main message, with relevant context indented.
+            if self.context is not None:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix=f"[{colour}]×[/] ",
+                    indent=f"[{colour}]│[/] ",
+                )
+                yield _prefix_with_indent(
+                    self.context,
+                    console,
+                    prefix=f"[{colour}]╰─>[/] ",
+                    indent=f"[{colour}]   [/] ",
+                )
+            else:
+                yield _prefix_with_indent(
+                    self.message,
+                    console,
+                    prefix="[red]×[/] ",
+                    indent="  ",
+                )
+        else:
+            yield self.message
+            if self.context is not None:
+                yield ""
+                yield self.context
+
+        if self.note_stmt is not None or self.hint_stmt is not None:
+            yield ""
+
+        if self.note_stmt is not None:
+            yield _prefix_with_indent(
+                self.note_stmt,
+                console,
+                prefix="[magenta bold]note[/]: ",
+                indent="      ",
+            )
+        if self.hint_stmt is not None:
+            yield _prefix_with_indent(
+                self.hint_stmt,
+                console,
+                prefix="[cyan bold]hint[/]: ",
+                indent="      ",
+            )
+
+        if self.link is not None:
+            yield ""
+            yield f"Link: {self.link}"
 
 
+#
+# Actual Errors
+#
 class ConfigurationError(PipError):
     """General exception in configuration"""
@@ -29,17 +188,54 @@ class UninstallationError(PipError):
     """General exception during uninstallation"""
 
 
+class MissingPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""
+
+    reference = "missing-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid pyproject.toml file.\n"
+                "The [build-system] table is missing the mandatory `requires` key."
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
+class InvalidPyProjectBuildRequires(DiagnosticPipError):
+    """Raised when pyproject.toml an invalid `build-system.requires`."""
+
+    reference = "invalid-pyproject-build-system-requires"
+
+    def __init__(self, *, package: str, reason: str) -> None:
+        super().__init__(
+            message=f"Can not process {escape(package)}",
+            context=Text(
+                "This package has an invalid `build-system.requires` key in "
+                f"pyproject.toml.\n{reason}"
+            ),
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+            hint_stmt=Text("See PEP 518 for the detailed specification."),
+        )
+
+
 class NoneMetadataError(PipError):
-    """
-    Raised when accessing "METADATA" or "PKG-INFO" metadata for a
-    pip._vendor.pkg_resources.Distribution object and
-    `dist.has_metadata('METADATA')` returns True but
-    `dist.get_metadata('METADATA')` returns None (and similarly for
-    "PKG-INFO").
+    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
+
+    This signifies an inconsistency, when the Distribution claims to have
+    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
+    not actually able to produce its content. This may be due to permission
+    errors.
     """
 
-    def __init__(self, dist, metadata_name):
-        # type: (Distribution, str) -> None
+    def __init__(
+        self,
+        dist: "BaseDistribution",
+        metadata_name: str,
+    ) -> None:
         """
         :param dist: A Distribution object.
         :param metadata_name: The name of the metadata being accessed
@@ -48,28 +244,24 @@ class NoneMetadataError(PipError):
         self.dist = dist
         self.metadata_name = metadata_name
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         # Use `dist` in the error message because its stringification
         # includes more information, like the version and location.
-        return (
-            'None {} metadata found for distribution: {}'.format(
-                self.metadata_name, self.dist,
-            )
+        return "None {} metadata found for distribution: {}".format(
+            self.metadata_name,
+            self.dist,
         )
 
 
 class UserInstallationInvalid(InstallationError):
     """A --user install is requested on an environment without user site."""
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         return "User base directory is not specified"
 
 
 class InvalidSchemeCombination(InstallationError):
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         before = ", ".join(str(a) for a in self.args[:-1])
         return f"Cannot set {before} and {self.args[-1]} together"
 
@@ -102,8 +294,12 @@ class PreviousBuildDirError(PipError):
 class NetworkConnectionError(PipError):
     """HTTP connection error"""
 
-    def __init__(self, error_msg, response=None, request=None):
-        # type: (str, Response, Request) -> None
+    def __init__(
+        self,
+        error_msg: str,
+        response: Optional[Response] = None,
+        request: Optional[Request] = None,
+    ) -> None:
         """
         Initialize NetworkConnectionError with `request` and `response`
         objects.
@@ -111,13 +307,15 @@ class NetworkConnectionError(PipError):
         self.response = response
         self.request = request
         self.error_msg = error_msg
-        if (self.response is not None and not self.request and
-                hasattr(response, 'request')):
+        if (
+            self.response is not None
+            and not self.request
+            and hasattr(response, "request")
+        ):
             self.request = self.response.request
         super().__init__(error_msg, response, request)
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         return str(self.error_msg)
 
 
@@ -129,74 +327,122 @@ class UnsupportedWheel(InstallationError):
     """Unsupported wheel."""
 
 
+class InvalidWheel(InstallationError):
+    """Invalid (e.g. corrupt) wheel."""
+
+    def __init__(self, location: str, name: str):
+        self.location = location
+        self.name = name
+
+    def __str__(self) -> str:
+        return f"Wheel '{self.name}' located at {self.location} is invalid."
+
+
 class MetadataInconsistent(InstallationError):
     """Built metadata contains inconsistent information.
 
     This is raised when the metadata contains values (e.g. name and version)
-    that do not match the information previously obtained from sdist filename
-    or user-supplied ``#egg=`` value.
+    that do not match the information previously obtained from sdist filename,
+    user-supplied ``#egg=`` value, or an install requirement name.
     """
-    def __init__(self, ireq, field, f_val, m_val):
-        # type: (InstallRequirement, str, str, str) -> None
+
+    def __init__(
+        self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
+    ) -> None:
         self.ireq = ireq
         self.field = field
         self.f_val = f_val
         self.m_val = m_val
 
-    def __str__(self):
-        # type: () -> str
-        template = (
-            "Requested {} has inconsistent {}: "
-            "filename has {!r}, but metadata has {!r}"
-        )
-        return template.format(self.ireq, self.field, self.f_val, self.m_val)
-
-
-class InstallationSubprocessError(InstallationError):
-    """A subprocess call failed during installation."""
-    def __init__(self, returncode, description):
-        # type: (int, str) -> None
-        self.returncode = returncode
-        self.description = description
-
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         return (
-            "Command errored out with exit status {}: {} "
-            "Check the logs for full command output."
-        ).format(self.returncode, self.description)
+            f"Requested {self.ireq} has inconsistent {self.field}: "
+            f"expected {self.f_val!r}, but metadata has {self.m_val!r}"
+        )
+
+
+class InstallationSubprocessError(DiagnosticPipError, InstallationError):
+    """A subprocess call failed."""
+
+    reference = "subprocess-exited-with-error"
+
+    def __init__(
+        self,
+        *,
+        command_description: str,
+        exit_code: int,
+        output_lines: Optional[List[str]],
+    ) -> None:
+        if output_lines is None:
+            output_prompt = Text("See above for output.")
+        else:
+            output_prompt = (
+                Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
+                + Text("".join(output_lines))
+                + Text.from_markup(R"[red]\[end of output][/]")
+            )
+
+        super().__init__(
+            message=(
+                f"[green]{escape(command_description)}[/] did not run successfully.\n"
+                f"exit code: {exit_code}"
+            ),
+            context=output_prompt,
+            hint_stmt=None,
+            note_stmt=(
+                "This error originates from a subprocess, and is likely not a "
+                "problem with pip."
+            ),
+        )
+
+        self.command_description = command_description
+        self.exit_code = exit_code
+
+    def __str__(self) -> str:
+        return f"{self.command_description} exited with {self.exit_code}"
+
+
+class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
+    reference = "metadata-generation-failed"
+
+    def __init__(
+        self,
+        *,
+        package_details: str,
+    ) -> None:
+        super(InstallationSubprocessError, self).__init__(
+            message="Encountered error while generating package metadata.",
+            context=escape(package_details),
+            hint_stmt="See above for details.",
+            note_stmt="This is an issue with the package mentioned above, not pip.",
+        )
+
+    def __str__(self) -> str:
+        return "metadata generation failed"
 
 
 class HashErrors(InstallationError):
     """Multiple HashError instances rolled into one for reporting"""
 
-    def __init__(self):
-        # type: () -> None
-        self.errors = []  # type: List[HashError]
+    def __init__(self) -> None:
+        self.errors: List["HashError"] = []
 
-    def append(self, error):
-        # type: (HashError) -> None
+    def append(self, error: "HashError") -> None:
         self.errors.append(error)
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         lines = []
         self.errors.sort(key=lambda e: e.order)
         for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
             lines.append(cls.head)
             lines.extend(e.body() for e in errors_of_cls)
         if lines:
-            return '\n'.join(lines)
-        return ''
+            return "\n".join(lines)
+        return ""
 
-    def __nonzero__(self):
-        # type: () -> bool
+    def __bool__(self) -> bool:
         return bool(self.errors)
 
-    def __bool__(self):
-        # type: () -> bool
-        return self.__nonzero__()
-
 
 class HashError(InstallationError):
     """
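The exceptions above (InstallationSubprocessError, MetadataGenerationFailed) now build on the new DiagnosticPipError base, which rich renders through __rich_console__ with a reference, message, context, note and hint. A hedged sketch of that pattern — the subclass and its wording are made up for illustration; only the base class comes from the diff above:

    from pip._vendor.rich.console import Console
    from pip._vendor.rich.text import Text

    class DemoError(DiagnosticPipError):  # hypothetical subclass
        reference = "demo-error"  # must be kebab-case, see _is_kebab_case()

        def __init__(self) -> None:
            super().__init__(
                message="example-command did not run successfully",
                context=Text("exit code: 1"),
                hint_stmt="Re-run with -v to see the full output.",
            )

    # rich picks up __rich_console__ and prints the reference/message/hint layout.
    Console(stderr=True).print(DemoError())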
@@ -214,12 +460,12 @@ class HashError(InstallationError):
     typically available earlier.
 
     """
-    req = None  # type: Optional[InstallRequirement]
-    head = ''
-    order = -1  # type: int
 
-    def body(self):
-        # type: () -> str
+    req: Optional["InstallRequirement"] = None
+    head = ""
+    order: int = -1
+
+    def body(self) -> str:
         """Return a summary of me for display under the heading.
 
         This default implementation simply prints a description of the
@@ -229,21 +475,19 @@ class HashError(InstallationError):
         its link already populated by the resolver's _populate_link().
 
         """
-        return f' {self._requirement_name()}'
+        return f"    {self._requirement_name()}"
 
-    def __str__(self):
-        # type: () -> str
-        return f'{self.head}\n{self.body()}'
+    def __str__(self) -> str:
+        return f"{self.head}\n{self.body()}"
 
-    def _requirement_name(self):
-        # type: () -> str
+    def _requirement_name(self) -> str:
         """Return a description of the requirement that triggered me.
 
         This default implementation returns long description of the req, with
         line numbers
 
         """
-        return str(self.req) if self.req else 'unknown package'
+        return str(self.req) if self.req else "unknown package"
 
 
 class VcsHashUnsupported(HashError):
@@ -251,8 +495,10 @@ class VcsHashUnsupported(HashError):
     we don't have a method for hashing those."""
 
     order = 0
-    head = ("Can't verify hashes for these requirements because we don't "
-            "have a way to hash version control repositories:")
+    head = (
+        "Can't verify hashes for these requirements because we don't "
+        "have a way to hash version control repositories:"
+    )
 
 
 class DirectoryUrlHashUnsupported(HashError):
@@ -260,32 +506,34 @@ class DirectoryUrlHashUnsupported(HashError):
     we don't have a method for hashing those."""
 
     order = 1
-    head = ("Can't verify hashes for these file:// requirements because they "
-            "point to directories:")
+    head = (
+        "Can't verify hashes for these file:// requirements because they "
+        "point to directories:"
+    )
 
 
 class HashMissing(HashError):
     """A hash was needed for a requirement but is absent."""
 
     order = 2
-    head = ('Hashes are required in --require-hashes mode, but they are '
-            'missing from some requirements. Here is a list of those '
-            'requirements along with the hashes their downloaded archives '
-            'actually had. Add lines like these to your requirements files to '
-            'prevent tampering. (If you did not enable --require-hashes '
-            'manually, note that it turns on automatically when any package '
-            'has a hash.)')
+    head = (
+        "Hashes are required in --require-hashes mode, but they are "
+        "missing from some requirements. Here is a list of those "
+        "requirements along with the hashes their downloaded archives "
+        "actually had. Add lines like these to your requirements files to "
+        "prevent tampering. (If you did not enable --require-hashes "
+        "manually, note that it turns on automatically when any package "
+        "has a hash.)"
+    )
 
-    def __init__(self, gotten_hash):
-        # type: (str) -> None
+    def __init__(self, gotten_hash: str) -> None:
         """
         :param gotten_hash: The hash of the (possibly malicious) archive we
             just downloaded
         """
         self.gotten_hash = gotten_hash
 
-    def body(self):
-        # type: () -> str
+    def body(self) -> str:
         # Dodge circular import.
         from pip._internal.utils.hashes import FAVORITE_HASH
 
@@ -294,13 +542,16 @@ class HashMissing(HashError):
         # In the case of URL-based requirements, display the original URL
         # seen in the requirements file rather than the package name,
         # so the output can be directly copied into the requirements file.
-        package = (self.req.original_link if self.req.original_link
-                   # In case someone feeds something downright stupid
-                   # to InstallRequirement's constructor.
-                   else getattr(self.req, 'req', None))
-        return ' {} --hash={}:{}'.format(package or 'unknown package',
-                                         FAVORITE_HASH,
-                                         self.gotten_hash)
+        package = (
+            self.req.original_link
+            if self.req.original_link
+            # In case someone feeds something downright stupid
+            # to InstallRequirement's constructor.
+            else getattr(self.req, "req", None)
+        )
+        return "    {} --hash={}:{}".format(
+            package or "unknown package", FAVORITE_HASH, self.gotten_hash
+        )
 
 
 class HashUnpinned(HashError):
@@ -308,8 +559,10 @@ class HashUnpinned(HashError):
     version."""
 
     order = 3
-    head = ('In --require-hashes mode, all requirements must have their '
-            'versions pinned with ==. These do not:')
+    head = (
+        "In --require-hashes mode, all requirements must have their "
+        "versions pinned with ==. These do not:"
+    )
 
 
 class HashMismatch(HashError):
@@ -321,14 +574,16 @@ class HashMismatch(HashError):
     improve its error message.
 
     """
-    order = 4
-    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
-            'FILE. If you have updated the package versions, please update '
-            'the hashes. Otherwise, examine the package contents carefully; '
-            'someone may have tampered with them.')
 
-    def __init__(self, allowed, gots):
-        # type: (Dict[str, List[str]], Dict[str, _Hash]) -> None
+    order = 4
+    head = (
+        "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
+        "FILE. If you have updated the package versions, please update "
+        "the hashes. Otherwise, examine the package contents carefully; "
+        "someone may have tampered with them."
+    )
+
+    def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
         """
         :param allowed: A dict of algorithm names pointing to lists of allowed
             hex digests
@@ -338,13 +593,10 @@ class HashMismatch(HashError):
         self.allowed = allowed
         self.gots = gots
 
-    def body(self):
-        # type: () -> str
-        return ' {}:\n{}'.format(self._requirement_name(),
-                                 self._hash_comparison())
+    def body(self) -> str:
+        return "    {}:\n{}".format(self._requirement_name(), self._hash_comparison())
 
-    def _hash_comparison(self):
-        # type: () -> str
+    def _hash_comparison(self) -> str:
         """
         Return a comparison of actual and expected hash values.
 
@@ -355,20 +607,22 @@ class HashMismatch(HashError):
             Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
 
         """
-        def hash_then_or(hash_name):
-            # type: (str) -> chain[str]
+
+        def hash_then_or(hash_name: str) -> "chain[str]":
             # For now, all the decent hashes have 6-char names, so we can get
             # away with hard-coding space literals.
-            return chain([hash_name], repeat(' or'))
+            return chain([hash_name], repeat("    or"))
 
-        lines = []  # type: List[str]
+        lines: List[str] = []
         for hash_name, expecteds in self.allowed.items():
             prefix = hash_then_or(hash_name)
-            lines.extend((' Expected {} {}'.format(next(prefix), e))
-                         for e in expecteds)
-            lines.append(' Got {}\n'.format(
-                self.gots[hash_name].hexdigest()))
-        return '\n'.join(lines)
+            lines.extend(
+                ("        Expected {} {}".format(next(prefix), e)) for e in expecteds
+            )
+            lines.append(
+                "             Got        {}\n".format(self.gots[hash_name].hexdigest())
+            )
+        return "\n".join(lines)
 
 
 class UnsupportedPythonVersion(InstallationError):
@@ -377,21 +631,103 @@ class UnsupportedPythonVersion(InstallationError):
 
 
 class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
-    """When there are errors while loading a configuration file
-    """
+    """When there are errors while loading a configuration file"""
 
-    def __init__(self, reason="could not be loaded", fname=None, error=None):
-        # type: (str, Optional[str], Optional[configparser.Error]) -> None
+    def __init__(
+        self,
+        reason: str = "could not be loaded",
+        fname: Optional[str] = None,
+        error: Optional[configparser.Error] = None,
+    ) -> None:
         super().__init__(error)
         self.reason = reason
         self.fname = fname
         self.error = error
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         if self.fname is not None:
             message_part = f" in {self.fname}."
         else:
             assert self.error is not None
             message_part = f".\n{self.error}\n"
         return f"Configuration file {self.reason}{message_part}"
+
+
+_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\
+The Python environment under {sys.prefix} is managed externally, and may not be
+manipulated by the user. Please use specific tooling from the distributor of
+the Python installation to interact with this environment instead.
+"""
+
+
+class ExternallyManagedEnvironment(DiagnosticPipError):
+    """The current environment is externally managed.
+
+    This is raised when the current environment is externally managed, as
+    defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked
+    and displayed when the error is bubbled up to the user.
+
+    :param error: The error message read from ``EXTERNALLY-MANAGED``.
+    """
+
+    reference = "externally-managed-environment"
+
+    def __init__(self, error: Optional[str]) -> None:
+        if error is None:
+            context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR)
+        else:
+            context = Text(error)
+        super().__init__(
+            message="This environment is externally managed",
+            context=context,
+            note_stmt=(
+                "If you believe this is a mistake, please contact your "
+                "Python installation or OS distribution provider. "
+                "You can override this, at the risk of breaking your Python "
+                "installation or OS, by passing --break-system-packages."
+            ),
+            hint_stmt=Text("See PEP 668 for the detailed specification."),
+        )
+
+    @staticmethod
+    def _iter_externally_managed_error_keys() -> Iterator[str]:
+        # LC_MESSAGES is in POSIX, but not the C standard. The most common
+        # platform that does not implement this category is Windows, where
+        # using other categories for console message localization is equally
+        # unreliable, so we fall back to the locale-less vendor message. This
+        # can always be re-evaluated when a vendor proposes a new alternative.
+        try:
+            category = locale.LC_MESSAGES
+        except AttributeError:
+            lang: Optional[str] = None
+        else:
+            lang, _ = locale.getlocale(category)
+        if lang is not None:
+            yield f"Error-{lang}"
+            for sep in ("-", "_"):
+                before, found, _ = lang.partition(sep)
+                if not found:
+                    continue
+                yield f"Error-{before}"
+        yield "Error"
+
+    @classmethod
+    def from_config(
+        cls,
+        config: Union[pathlib.Path, str],
+    ) -> "ExternallyManagedEnvironment":
+        parser = configparser.ConfigParser(interpolation=None)
+        try:
+            parser.read(config, encoding="utf-8")
+            section = parser["externally-managed"]
+            for key in cls._iter_externally_managed_error_keys():
+                with contextlib.suppress(KeyError):
+                    return cls(section[key])
+        except KeyError:
+            pass
+        except (OSError, UnicodeDecodeError, configparser.ParsingError):
+            from pip._internal.utils._log import VERBOSE
+
+            exc_info = logger.isEnabledFor(VERBOSE)
+            logger.warning("Failed to read %s", config, exc_info=exc_info)
+        return cls(None)
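The final hunk above adds ExternallyManagedEnvironment for PEP 668: when the interpreter ships an EXTERNALLY-MANAGED marker file, pip refuses to modify the environment unless --break-system-packages is passed. A sketch of the marker file that from_config() parses and of a round-trip through the new class (wording and file location are illustrative, not taken from any real distribution):

    # EXTERNALLY-MANAGED (INI syntax, read with configparser):
    #
    #   [externally-managed]
    #   Error=This Python is managed by the OS package manager; use a virtual
    #    environment or the system package manager to install Python packages.
    #
    import pathlib
    import sys

    marker = pathlib.Path(sys.prefix) / "EXTERNALLY-MANAGED"  # location is illustrative
    exc = ExternallyManagedEnvironment.from_config(marker)
    print(exc.reference)  # "externally-managed-environment"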
@@ -2,30 +2,32 @@
 The main purpose of this module is to expose LinkCollector.collect_sources().
 """
 
-import cgi
 import collections
+import email.message
 import functools
-import html
 import itertools
+import json
 import logging
 import os
-import re
 import urllib.parse
 import urllib.request
-import xml.etree.ElementTree
+from html.parser import HTMLParser
 from optparse import Values
 from typing import (
+    TYPE_CHECKING,
     Callable,
+    Dict,
     Iterable,
     List,
     MutableMapping,
     NamedTuple,
     Optional,
     Sequence,
+    Tuple,
     Union,
 )
 
-from pip._vendor import html5lib, requests
+from pip._vendor import requests
 from pip._vendor.requests import Response
 from pip._vendor.requests.exceptions import RetryError, SSLError
 
@@ -35,14 +37,18 @@ from pip._internal.models.search_scope import SearchScope
 from pip._internal.network.session import PipSession
 from pip._internal.network.utils import raise_for_status
 from pip._internal.utils.filetypes import is_archive_file
-from pip._internal.utils.misc import pairwise, redact_auth_from_url
+from pip._internal.utils.misc import redact_auth_from_url
 from pip._internal.vcs import vcs
 
 from .sources import CandidatesFromPage, LinkSource, build_source
 
+if TYPE_CHECKING:
+    from typing import Protocol
+else:
+    Protocol = object
+
 logger = logging.getLogger(__name__)
 
-HTMLElement = xml.etree.ElementTree.Element
 ResponseHeaders = MutableMapping[str, str]
 
 
@@ -52,70 +58,90 @@ def _match_vcs_scheme(url: str) -> Optional[str]:
     Returns the matched VCS scheme, or None if there's no match.
     """
     for scheme in vcs.schemes:
-        if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
+        if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
            return scheme
     return None
 
 
-class _NotHTML(Exception):
+class _NotAPIContent(Exception):
     def __init__(self, content_type: str, request_desc: str) -> None:
         super().__init__(content_type, request_desc)
         self.content_type = content_type
         self.request_desc = request_desc
 
 
-def _ensure_html_header(response: Response) -> None:
-    """Check the Content-Type header to ensure the response contains HTML.
-
-    Raises `_NotHTML` if the content type is not text/html.
-    """
-    content_type = response.headers.get("Content-Type", "")
-    if not content_type.lower().startswith("text/html"):
-        raise _NotHTML(content_type, response.request.method)
+def _ensure_api_header(response: Response) -> None:
+    """
+    Check the Content-Type header to ensure the response contains a Simple
+    API Response.
+
+    Raises `_NotAPIContent` if the content type is not a valid content-type.
+    """
+    content_type = response.headers.get("Content-Type", "Unknown")
+
+    content_type_l = content_type.lower()
+    if content_type_l.startswith(
+        (
+            "text/html",
+            "application/vnd.pypi.simple.v1+html",
+            "application/vnd.pypi.simple.v1+json",
+        )
+    ):
+        return
+
+    raise _NotAPIContent(content_type, response.request.method)
 
 
 class _NotHTTP(Exception):
     pass
 
 
-def _ensure_html_response(url: str, session: PipSession) -> None:
-    """Send a HEAD request to the URL, and ensure the response contains HTML.
+def _ensure_api_response(url: str, session: PipSession) -> None:
+    """
+    Send a HEAD request to the URL, and ensure the response contains a simple
+    API Response.
 
     Raises `_NotHTTP` if the URL is not available for a HEAD request, or
-    `_NotHTML` if the content type is not text/html.
+    `_NotAPIContent` if the content type is not a valid content type.
     """
     scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
-    if scheme not in {'http', 'https'}:
+    if scheme not in {"http", "https"}:
         raise _NotHTTP()
 
     resp = session.head(url, allow_redirects=True)
     raise_for_status(resp)
 
-    _ensure_html_header(resp)
+    _ensure_api_header(resp)
 
 
-def _get_html_response(url: str, session: PipSession) -> Response:
-    """Access an HTML page with GET, and return the response.
+def _get_simple_response(url: str, session: PipSession) -> Response:
+    """Access an Simple API response with GET, and return the response.
 
     This consists of three parts:
 
     1. If the URL looks suspiciously like an archive, send a HEAD first to
-       check the Content-Type is HTML, to avoid downloading a large file.
-       Raise `_NotHTTP` if the content type cannot be determined, or
-       `_NotHTML` if it is not HTML.
+       check the Content-Type is HTML or Simple API, to avoid downloading a
+       large file. Raise `_NotHTTP` if the content type cannot be determined, or
+       `_NotAPIContent` if it is not HTML or a Simple API.
     2. Actually perform the request. Raise HTTP exceptions on network failures.
-    3. Check the Content-Type header to make sure we got HTML, and raise
-       `_NotHTML` otherwise.
+    3. Check the Content-Type header to make sure we got a Simple API response,
+       and raise `_NotAPIContent` otherwise.
     """
     if is_archive_file(Link(url).filename):
-        _ensure_html_response(url, session=session)
+        _ensure_api_response(url, session=session)
 
-    logger.debug('Getting page %s', redact_auth_from_url(url))
+    logger.debug("Getting page %s", redact_auth_from_url(url))
 
     resp = session.get(
         url,
         headers={
-            "Accept": "text/html",
+            "Accept": ", ".join(
+                [
+                    "application/vnd.pypi.simple.v1+json",
+                    "application/vnd.pypi.simple.v1+html; q=0.1",
+                    "text/html; q=0.01",
+                ]
+            ),
             # We don't want to blindly returned cached data for
             # /simple/, because authors generally expecting that
            # twine upload && pip install will function, but if
@@ -137,153 +163,52 @@ def _get_html_response(url: str, session: PipSession) -> Response:
     # The check for archives above only works if the url ends with
     # something that looks like an archive. However that is not a
     # requirement of an url. Unless we issue a HEAD request on every
-    # url we cannot know ahead of time for sure if something is HTML
-    # or not. However we can check after we've downloaded it.
-    _ensure_html_header(resp)
+    # url we cannot know ahead of time for sure if something is a
+    # Simple API response or not. However we can check after we've
+    # downloaded it.
+    _ensure_api_header(resp)
+
+    logger.debug(
+        "Fetched page %s as %s",
+        redact_auth_from_url(url),
+        resp.headers.get("Content-Type", "Unknown"),
+    )
 
     return resp
 
 
 def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
-    """Determine if we have any encoding information in our headers.
-    """
+    """Determine if we have any encoding information in our headers."""
     if headers and "Content-Type" in headers:
-        content_type, params = cgi.parse_header(headers["Content-Type"])
-        if "charset" in params:
-            return params['charset']
+        m = email.message.Message()
+        m["content-type"] = headers["Content-Type"]
+        charset = m.get_param("charset")
+        if charset:
+            return str(charset)
     return None
 
 
-def _determine_base_url(document: HTMLElement, page_url: str) -> str:
-    """Determine the HTML document's base URL.
-
-    This looks for a ``<base>`` tag in the HTML document. If present, its href
-    attribute denotes the base URL of anchor tags in the document. If there is
-    no such tag (or if it does not have a valid href attribute), the HTML
-    file's URL is used as the base URL.
-
-    :param document: An HTML document representation. The current
-        implementation expects the result of ``html5lib.parse()``.
-    :param page_url: The URL of the HTML document.
-    """
-    for base in document.findall(".//base"):
-        href = base.get("href")
-        if href is not None:
-            return href
-    return page_url
-
-
-def _clean_url_path_part(part: str) -> str:
-    """
-    Clean a "part" of a URL path (i.e. after splitting on "@" characters).
-    """
-    # We unquote prior to quoting to make sure nothing is double quoted.
-    return urllib.parse.quote(urllib.parse.unquote(part))
-
-
-def _clean_file_url_path(part: str) -> str:
-    """
-    Clean the first part of a URL path that corresponds to a local
-    filesystem path (i.e. the first part after splitting on "@" characters).
-    """
-    # We unquote prior to quoting to make sure nothing is double quoted.
-    # Also, on Windows the path part might contain a drive letter which
-    # should not be quoted. On Linux where drive letters do not
-    # exist, the colon should be quoted. We rely on urllib.request
-    # to do the right thing here.
-    return urllib.request.pathname2url(urllib.request.url2pathname(part))
-
-
-# percent-encoded:                   /
-_reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE)
-
-
-def _clean_url_path(path: str, is_local_path: bool) -> str:
-    """
-    Clean the path portion of a URL.
-    """
-    if is_local_path:
-        clean_func = _clean_file_url_path
-    else:
-        clean_func = _clean_url_path_part
-
-    # Split on the reserved characters prior to cleaning so that
-    # revision strings in VCS URLs are properly preserved.
-    parts = _reserved_chars_re.split(path)
-
-    cleaned_parts = []
-    for to_clean, reserved in pairwise(itertools.chain(parts, [''])):
-        cleaned_parts.append(clean_func(to_clean))
-        # Normalize %xx escapes (e.g. %2f -> %2F)
-        cleaned_parts.append(reserved.upper())
-
-    return ''.join(cleaned_parts)
-
-
-def _clean_link(url: str) -> str:
-    """
-    Make sure a link is fully quoted.
-    For example, if ' ' occurs in the URL, it will be replaced with "%20",
-    and without double-quoting other characters.
-    """
-    # Split the URL into parts according to the general structure
-    # `scheme://netloc/path;parameters?query#fragment`.
-    result = urllib.parse.urlparse(url)
-    # If the netloc is empty, then the URL refers to a local filesystem path.
-    is_local_path = not result.netloc
-    path = _clean_url_path(result.path, is_local_path=is_local_path)
-    return urllib.parse.urlunparse(result._replace(path=path))
-
-
-def _create_link_from_element(
-    anchor: HTMLElement,
-    page_url: str,
-    base_url: str,
-) -> Optional[Link]:
-    """
-    Convert an anchor element in a simple repository page to a Link.
-    """
-    href = anchor.get("href")
-    if not href:
-        return None
-
-    url = _clean_link(urllib.parse.urljoin(base_url, href))
-    pyrequire = anchor.get('data-requires-python')
-    pyrequire = html.unescape(pyrequire) if pyrequire else None
-
-    yanked_reason = anchor.get('data-yanked')
-    if yanked_reason:
-        yanked_reason = html.unescape(yanked_reason)
-
-    link = Link(
-        url,
-        comes_from=page_url,
-        requires_python=pyrequire,
-        yanked_reason=yanked_reason,
-    )
-
-    return link
-
-
 class CacheablePageContent:
-    def __init__(self, page: "HTMLPage") -> None:
+    def __init__(self, page: "IndexContent") -> None:
         assert page.cache_link_parsing
         self.page = page
 
     def __eq__(self, other: object) -> bool:
-        return (isinstance(other, type(self)) and
-                self.page.url == other.page.url)
+        return isinstance(other, type(self)) and self.page.url == other.page.url
 
     def __hash__(self) -> int:
         return hash(self.page.url)
 
 
-def with_cached_html_pages(
-    fn: Callable[["HTMLPage"], Iterable[Link]],
-) -> Callable[["HTMLPage"], List[Link]]:
+class ParseLinks(Protocol):
+    def __call__(self, page: "IndexContent") -> Iterable[Link]:
+        ...
+
+
+def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
     """
-    Given a function that parses an Iterable[Link] from an HTMLPage, cache the
-    function's result (keyed by CacheablePageContent), unless the HTMLPage
+    Given a function that parses an Iterable[Link] from an IndexContent, cache the
+    function's result (keyed by CacheablePageContent), unless the IndexContent
     `page` has `page.cache_link_parsing == False`.
     """
@@ -292,7 +217,7 @@ def with_cached_html_pages(
         return list(fn(cacheable_page.page))
 
     @functools.wraps(fn)
-    def wrapper_wrapper(page: "HTMLPage") -> List[Link]:
+    def wrapper_wrapper(page: "IndexContent") -> List[Link]:
         if page.cache_link_parsing:
             return wrapper(CacheablePageContent(page))
         return list(fn(page))
@@ -300,36 +225,42 @@ def with_cached_html_pages(
     return wrapper_wrapper
 
 
-@with_cached_html_pages
-def parse_links(page: "HTMLPage") -> Iterable[Link]:
+@with_cached_index_content
+def parse_links(page: "IndexContent") -> Iterable[Link]:
     """
-    Parse an HTML document, and yield its anchor elements as Link objects.
+    Parse a Simple API's Index Content, and yield its anchor elements as Link objects.
     """
-    document = html5lib.parse(
-        page.content,
-        transport_encoding=page.encoding,
-        namespaceHTMLElements=False,
-    )
+
+    content_type_l = page.content_type.lower()
+    if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
+        data = json.loads(page.content)
+        for file in data.get("files", []):
+            link = Link.from_json(file, page.url)
+            if link is None:
+                continue
+            yield link
+        return
+
+    parser = HTMLLinkParser(page.url)
+    encoding = page.encoding or "utf-8"
+    parser.feed(page.content.decode(encoding))
 
     url = page.url
-    base_url = _determine_base_url(document, url)
-    for anchor in document.findall(".//a"):
-        link = _create_link_from_element(
-            anchor,
-            page_url=url,
-            base_url=base_url,
-        )
+    base_url = parser.base_url or url
+    for anchor in parser.anchors:
+        link = Link.from_element(anchor, page_url=url, base_url=base_url)
         if link is None:
            continue
         yield link
 
 
-class HTMLPage:
-    """Represents one page, along with its URL"""
+class IndexContent:
+    """Represents one response (or page), along with its URL"""
 
     def __init__(
         self,
         content: bytes,
+        content_type: str,
         encoding: Optional[str],
         url: str,
         cache_link_parsing: bool = True,
@@ -342,6 +273,7 @@ class HTMLPage:
|
|||||||
have this set to False, for example.
|
have this set to False, for example.
|
||||||
"""
|
"""
|
||||||
self.content = content
|
self.content = content
|
||||||
|
self.content_type = content_type
|
||||||
self.encoding = encoding
|
self.encoding = encoding
|
||||||
self.url = url
|
self.url = url
|
||||||
self.cache_link_parsing = cache_link_parsing
|
self.cache_link_parsing = cache_link_parsing
|
||||||
@@ -350,80 +282,115 @@ class HTMLPage:
|
|||||||
return redact_auth_from_url(self.url)
|
return redact_auth_from_url(self.url)
|
||||||
|
|
||||||
|
|
||||||
def _handle_get_page_fail(
|
class HTMLLinkParser(HTMLParser):
|
||||||
|
"""
|
||||||
|
HTMLParser that keeps the first base HREF and a list of all anchor
|
||||||
|
elements' attributes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, url: str) -> None:
|
||||||
|
super().__init__(convert_charrefs=True)
|
||||||
|
|
||||||
|
self.url: str = url
|
||||||
|
self.base_url: Optional[str] = None
|
||||||
|
self.anchors: List[Dict[str, Optional[str]]] = []
|
||||||
|
|
||||||
|
def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
|
||||||
|
if tag == "base" and self.base_url is None:
|
||||||
|
href = self.get_href(attrs)
|
||||||
|
if href is not None:
|
||||||
|
self.base_url = href
|
||||||
|
elif tag == "a":
|
||||||
|
self.anchors.append(dict(attrs))
|
||||||
|
|
||||||
|
def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
|
||||||
|
for name, value in attrs:
|
||||||
|
if name == "href":
|
||||||
|
return value
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_get_simple_fail(
|
||||||
link: Link,
|
link: Link,
|
||||||
reason: Union[str, Exception],
|
reason: Union[str, Exception],
|
||||||
meth: Optional[Callable[..., None]] = None
|
meth: Optional[Callable[..., None]] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
if meth is None:
|
if meth is None:
|
||||||
meth = logger.debug
|
meth = logger.debug
|
||||||
meth("Could not fetch URL %s: %s - skipping", link, reason)
|
meth("Could not fetch URL %s: %s - skipping", link, reason)
|
||||||
|
|
||||||
|
|
||||||
def _make_html_page(response: Response, cache_link_parsing: bool = True) -> HTMLPage:
|
def _make_index_content(
|
||||||
|
response: Response, cache_link_parsing: bool = True
|
||||||
|
) -> IndexContent:
|
||||||
encoding = _get_encoding_from_headers(response.headers)
|
encoding = _get_encoding_from_headers(response.headers)
|
||||||
return HTMLPage(
|
return IndexContent(
|
||||||
response.content,
|
response.content,
|
||||||
|
response.headers["Content-Type"],
|
||||||
encoding=encoding,
|
encoding=encoding,
|
||||||
url=response.url,
|
url=response.url,
|
||||||
cache_link_parsing=cache_link_parsing)
|
cache_link_parsing=cache_link_parsing,
|
||||||
|
|
||||||
|
|
||||||
def _get_html_page(
|
|
||||||
link: Link, session: Optional[PipSession] = None
|
|
||||||
) -> Optional["HTMLPage"]:
|
|
||||||
if session is None:
|
|
||||||
raise TypeError(
|
|
||||||
"_get_html_page() missing 1 required keyword argument: 'session'"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
url = link.url.split('#', 1)[0]
|
|
||||||
|
def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
|
||||||
|
url = link.url.split("#", 1)[0]
|
||||||
|
|
||||||
# Check for VCS schemes that do not support lookup as web pages.
|
# Check for VCS schemes that do not support lookup as web pages.
|
||||||
vcs_scheme = _match_vcs_scheme(url)
|
vcs_scheme = _match_vcs_scheme(url)
|
||||||
if vcs_scheme:
|
if vcs_scheme:
|
||||||
logger.warning('Cannot look at %s URL %s because it does not support '
|
logger.warning(
|
||||||
'lookup as web pages.', vcs_scheme, link)
|
"Cannot look at %s URL %s because it does not support lookup as web pages.",
|
||||||
|
vcs_scheme,
|
||||||
|
link,
|
||||||
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# Tack index.html onto file:// URLs that point to directories
|
# Tack index.html onto file:// URLs that point to directories
|
||||||
scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
|
scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
|
||||||
if (scheme == 'file' and os.path.isdir(urllib.request.url2pathname(path))):
|
if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
|
||||||
# add trailing slash if not present so urljoin doesn't trim
|
# add trailing slash if not present so urljoin doesn't trim
|
||||||
# final segment
|
# final segment
|
||||||
if not url.endswith('/'):
|
if not url.endswith("/"):
|
||||||
url += '/'
|
url += "/"
|
||||||
url = urllib.parse.urljoin(url, 'index.html')
|
# TODO: In the future, it would be nice if pip supported PEP 691
|
||||||
logger.debug(' file: URL is directory, getting %s', url)
|
# style responses in the file:// URLs, however there's no
|
||||||
|
# standard file extension for application/vnd.pypi.simple.v1+json
|
||||||
|
# so we'll need to come up with something on our own.
|
||||||
|
url = urllib.parse.urljoin(url, "index.html")
|
||||||
|
logger.debug(" file: URL is directory, getting %s", url)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
resp = _get_html_response(url, session=session)
|
resp = _get_simple_response(url, session=session)
|
||||||
except _NotHTTP:
|
except _NotHTTP:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
'Skipping page %s because it looks like an archive, and cannot '
|
"Skipping page %s because it looks like an archive, and cannot "
|
||||||
'be checked by a HTTP HEAD request.', link,
|
"be checked by a HTTP HEAD request.",
|
||||||
|
link,
|
||||||
)
|
)
|
||||||
except _NotHTML as exc:
|
except _NotAPIContent as exc:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
'Skipping page %s because the %s request got Content-Type: %s.'
|
"Skipping page %s because the %s request got Content-Type: %s. "
|
||||||
'The only supported Content-Type is text/html',
|
"The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
|
||||||
link, exc.request_desc, exc.content_type,
|
"application/vnd.pypi.simple.v1+html, and text/html",
|
||||||
|
link,
|
||||||
|
exc.request_desc,
|
||||||
|
exc.content_type,
|
||||||
)
|
)
|
||||||
except NetworkConnectionError as exc:
|
except NetworkConnectionError as exc:
|
||||||
_handle_get_page_fail(link, exc)
|
_handle_get_simple_fail(link, exc)
|
||||||
except RetryError as exc:
|
except RetryError as exc:
|
||||||
_handle_get_page_fail(link, exc)
|
_handle_get_simple_fail(link, exc)
|
||||||
except SSLError as exc:
|
except SSLError as exc:
|
||||||
reason = "There was a problem confirming the ssl certificate: "
|
reason = "There was a problem confirming the ssl certificate: "
|
||||||
reason += str(exc)
|
reason += str(exc)
|
||||||
_handle_get_page_fail(link, reason, meth=logger.info)
|
_handle_get_simple_fail(link, reason, meth=logger.info)
|
||||||
except requests.ConnectionError as exc:
|
except requests.ConnectionError as exc:
|
||||||
_handle_get_page_fail(link, f"connection error: {exc}")
|
_handle_get_simple_fail(link, f"connection error: {exc}")
|
||||||
except requests.Timeout:
|
except requests.Timeout:
|
||||||
_handle_get_page_fail(link, "timed out")
|
_handle_get_simple_fail(link, "timed out")
|
||||||
else:
|
else:
|
||||||
return _make_html_page(resp,
|
return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
|
||||||
cache_link_parsing=link.cache_link_parsing)
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
@@ -451,9 +418,10 @@ class LinkCollector:
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def create(
|
def create(
|
||||||
cls, session: PipSession,
|
cls,
|
||||||
|
session: PipSession,
|
||||||
options: Values,
|
options: Values,
|
||||||
suppress_no_index: bool = False
|
suppress_no_index: bool = False,
|
||||||
) -> "LinkCollector":
|
) -> "LinkCollector":
|
||||||
"""
|
"""
|
||||||
:param session: The Session to use to make requests.
|
:param session: The Session to use to make requests.
|
||||||
@@ -463,8 +431,8 @@ class LinkCollector:
|
|||||||
index_urls = [options.index_url] + options.extra_index_urls
|
index_urls = [options.index_url] + options.extra_index_urls
|
||||||
if options.no_index and not suppress_no_index:
|
if options.no_index and not suppress_no_index:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Ignoring indexes: %s',
|
"Ignoring indexes: %s",
|
||||||
','.join(redact_auth_from_url(url) for url in index_urls),
|
",".join(redact_auth_from_url(url) for url in index_urls),
|
||||||
)
|
)
|
||||||
index_urls = []
|
index_urls = []
|
||||||
|
|
||||||
@@ -472,10 +440,13 @@ class LinkCollector:
|
|||||||
find_links = options.find_links or []
|
find_links = options.find_links or []
|
||||||
|
|
||||||
search_scope = SearchScope.create(
|
search_scope = SearchScope.create(
|
||||||
find_links=find_links, index_urls=index_urls,
|
find_links=find_links,
|
||||||
|
index_urls=index_urls,
|
||||||
|
no_index=options.no_index,
|
||||||
)
|
)
|
||||||
link_collector = LinkCollector(
|
link_collector = LinkCollector(
|
||||||
session=session, search_scope=search_scope,
|
session=session,
|
||||||
|
search_scope=search_scope,
|
||||||
)
|
)
|
||||||
return link_collector
|
return link_collector
|
||||||
|
|
||||||
@@ -483,11 +454,11 @@ class LinkCollector:
|
|||||||
def find_links(self) -> List[str]:
|
def find_links(self) -> List[str]:
|
||||||
return self.search_scope.find_links
|
return self.search_scope.find_links
|
||||||
|
|
||||||
def fetch_page(self, location: Link) -> Optional[HTMLPage]:
|
def fetch_response(self, location: Link) -> Optional[IndexContent]:
|
||||||
"""
|
"""
|
||||||
Fetch an HTML page containing package links.
|
Fetch an HTML page containing package links.
|
||||||
"""
|
"""
|
||||||
return _get_html_page(location, session=self.session)
|
return _get_index_content(location, session=self.session)
|
||||||
|
|
||||||
def collect_sources(
|
def collect_sources(
|
||||||
self,
|
self,
|
||||||
|
|||||||
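Note: the reworked parse_links above dispatches on the response's Content-Type: PEP 691 JSON indexes are decoded with json.loads, and anything else falls back to the stdlib html.parser-based HTMLLinkParser. A rough standalone sketch of that dispatch; the file_entries helper, the AnchorCollector class, and the sample payload are invented for illustration and stand in for pip's real IndexContent handling:

import json
from html.parser import HTMLParser

class AnchorCollector(HTMLParser):
    def __init__(self) -> None:
        super().__init__(convert_charrefs=True)
        self.anchors = []

    def handle_starttag(self, tag, attrs):
        # Collect every <a> element's attributes as a dict.
        if tag == "a":
            self.anchors.append(dict(attrs))

def file_entries(content: bytes, content_type: str):
    # PEP 691: JSON Simple API responses carry a top-level "files" list.
    if content_type.lower().startswith("application/vnd.pypi.simple.v1+json"):
        return json.loads(content).get("files", [])
    # Fallback: legacy HTML index, collect <a> attributes instead.
    parser = AnchorCollector()
    parser.feed(content.decode("utf-8"))
    return parser.anchors

print(file_entries(b'{"files": [{"filename": "pkg-1.0.tar.gz"}]}',
                   "application/vnd.pypi.simple.v1+json"))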
@@ -1,13 +1,11 @@
"""Routines related to PyPI, indexes"""

-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
+import enum

import functools
import itertools
import logging
import re
-from typing import FrozenSet, Iterable, List, Optional, Set, Tuple, Union
+from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union

from pip._vendor.packaging import specifiers
from pip._vendor.packaging.tags import Tag

@@ -37,17 +35,17 @@ from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import build_netloc
from pip._internal.utils.packaging import check_requires_python
from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
-from pip._internal.utils.urls import url_to_path

-__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder']
+if TYPE_CHECKING:
+    from pip._vendor.typing_extensions import TypeGuard
+
+__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]


logger = getLogger(__name__)

BuildTag = Union[Tuple[()], Tuple[int, str]]
-CandidateSortingKey = (
-    Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
-)
+CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]


def _check_link_requires_python(

@@ -66,39 +64,54 @@ def _check_link_requires_python(
    """
    try:
        is_compatible = check_requires_python(
-            link.requires_python, version_info=version_info,
+            link.requires_python,
+            version_info=version_info,
        )
    except specifiers.InvalidSpecifier:
        logger.debug(
            "Ignoring invalid Requires-Python (%r) for link: %s",
-            link.requires_python, link,
+            link.requires_python,
+            link,
        )
    else:
        if not is_compatible:
-            version = '.'.join(map(str, version_info))
+            version = ".".join(map(str, version_info))
            if not ignore_requires_python:
                logger.verbose(
-                    'Link requires a different Python (%s not in: %r): %s',
-                    version, link.requires_python, link,
+                    "Link requires a different Python (%s not in: %r): %s",
+                    version,
+                    link.requires_python,
+                    link,
                )
                return False

            logger.debug(
-                'Ignoring failed Requires-Python check (%s not in: %r) '
-                'for link: %s',
-                version, link.requires_python, link,
+                "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
+                version,
+                link.requires_python,
+                link,
            )

    return True


+class LinkType(enum.Enum):
+    candidate = enum.auto()
+    different_project = enum.auto()
+    yanked = enum.auto()
+    format_unsupported = enum.auto()
+    format_invalid = enum.auto()
+    platform_mismatch = enum.auto()
+    requires_python_mismatch = enum.auto()
+
+
class LinkEvaluator:

    """
    Responsible for evaluating links for a particular project.
    """

-    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
+    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes

@@ -141,19 +154,20 @@ class LinkEvaluator:

        self.project_name = project_name

-    def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]:
+    def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
        """
        Determine whether a link is a candidate for installation.

-        :return: A tuple (is_candidate, result), where `result` is (1) a
-            version string if `is_candidate` is True, and (2) if
-            `is_candidate` is False, an optional string to log the reason
-            the link fails to qualify.
+        :return: A tuple (result, detail), where *result* is an enum
+            representing whether the evaluation found a candidate, or the reason
+            why one is not found. If a candidate is found, *detail* will be the
+            candidate's version string; if one is not found, it contains the
+            reason the link fails to qualify.
        """
        version = None
        if link.is_yanked and not self._allow_yanked:
-            reason = link.yanked_reason or '<none given>'
-            return (False, f'yanked for reason: {reason}')
+            reason = link.yanked_reason or "<none given>"
+            return (LinkType.yanked, f"yanked for reason: {reason}")

        if link.egg_fragment:
            egg_info = link.egg_fragment

@@ -161,77 +175,83 @@ class LinkEvaluator:
        else:
            egg_info, ext = link.splitext()
            if not ext:
-                return (False, 'not a file')
+                return (LinkType.format_unsupported, "not a file")
            if ext not in SUPPORTED_EXTENSIONS:
-                return (False, f'unsupported archive format: {ext}')
+                return (
+                    LinkType.format_unsupported,
+                    f"unsupported archive format: {ext}",
+                )
        if "binary" not in self._formats and ext == WHEEL_EXTENSION:
-            reason = 'No binaries permitted for {}'.format(
-                self.project_name)
-            return (False, reason)
-        if "macosx10" in link.path and ext == '.zip':
-            return (False, 'macosx10 one')
+            reason = f"No binaries permitted for {self.project_name}"
+            return (LinkType.format_unsupported, reason)
+        if "macosx10" in link.path and ext == ".zip":
+            return (LinkType.format_unsupported, "macosx10 one")
        if ext == WHEEL_EXTENSION:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
-                return (False, 'invalid wheel filename')
+                return (
+                    LinkType.format_invalid,
+                    "invalid wheel filename",
+                )
            if canonicalize_name(wheel.name) != self._canonical_name:
-                reason = 'wrong project name (not {})'.format(
-                    self.project_name)
-                return (False, reason)
+                reason = f"wrong project name (not {self.project_name})"
+                return (LinkType.different_project, reason)

            supported_tags = self._target_python.get_tags()
            if not wheel.supported(supported_tags):
                # Include the wheel's tags in the reason string to
                # simplify troubleshooting compatibility issues.
-                file_tags = wheel.get_formatted_file_tags()
+                file_tags = ", ".join(wheel.get_formatted_file_tags())
                reason = (
-                    "none of the wheel's tags ({}) are compatible "
-                    "(run pip debug --verbose to show compatible tags)".format(
-                        ', '.join(file_tags)
-                    )
+                    f"none of the wheel's tags ({file_tags}) are compatible "
+                    f"(run pip debug --verbose to show compatible tags)"
                )
-                return (False, reason)
+                return (LinkType.platform_mismatch, reason)

            version = wheel.version

        # This should be up by the self.ok_binary check, but see issue 2700.
        if "source" not in self._formats and ext != WHEEL_EXTENSION:
-            reason = f'No sources permitted for {self.project_name}'
-            return (False, reason)
+            reason = f"No sources permitted for {self.project_name}"
+            return (LinkType.format_unsupported, reason)

        if not version:
            version = _extract_version_from_fragment(
-                egg_info, self._canonical_name,
+                egg_info,
+                self._canonical_name,
            )
        if not version:
-            reason = f'Missing project version for {self.project_name}'
-            return (False, reason)
+            reason = f"Missing project version for {self.project_name}"
+            return (LinkType.format_invalid, reason)

        match = self._py_version_re.search(version)
        if match:
-            version = version[:match.start()]
+            version = version[: match.start()]
            py_version = match.group(1)
            if py_version != self._target_python.py_version:
-                return (False, 'Python version is incorrect')
+                return (
+                    LinkType.platform_mismatch,
+                    "Python version is incorrect",
+                )

        supports_python = _check_link_requires_python(
-            link, version_info=self._target_python.py_version_info,
+            link,
+            version_info=self._target_python.py_version_info,
            ignore_requires_python=self._ignore_requires_python,
        )
        if not supports_python:
-            # Return None for the reason text to suppress calling
-            # _log_skipped_link().
-            return (False, None)
+            reason = f"{version} Requires-Python {link.requires_python}"
+            return (LinkType.requires_python_mismatch, reason)

-        logger.debug('Found link %s, version: %s', link, version)
+        logger.debug("Found link %s, version: %s", link, version)

-        return (True, version)
+        return (LinkType.candidate, version)

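Note: evaluate_link now reports why a link was rejected through the LinkType enum instead of a bare bool, and the detail string always carries a reason, so callers can aggregate skip reasons (as the requires_python_skipped_reasons method added further down does). A toy illustration of consuming that contract; the evaluate function and LinkType subset below are stand-ins, not pip's real classes:

import enum
from typing import Tuple

class LinkType(enum.Enum):
    candidate = enum.auto()
    format_unsupported = enum.auto()
    requires_python_mismatch = enum.auto()

def evaluate(filename: str) -> Tuple[LinkType, str]:
    # Stand-in for LinkEvaluator.evaluate_link: returns (result, detail).
    if not filename.endswith((".whl", ".tar.gz")):
        return (LinkType.format_unsupported, "not a supported archive")
    return (LinkType.candidate, "1.0")

result, detail = evaluate("example-1.0.linux.deb")
if result != LinkType.candidate:
    print(f"Skipping link: {detail}")   # detail always carries a reason now
else:
    print(f"Found version {detail}")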
def filter_unallowed_hashes(
    candidates: List[InstallationCandidate],
-    hashes: Hashes,
+    hashes: Optional[Hashes],
    project_name: str,
) -> List[InstallationCandidate]:
    """

@@ -251,8 +271,8 @@ def filter_unallowed_hashes(
    """
    if not hashes:
        logger.debug(
-            'Given no hashes to check %s links for project %r: '
-            'discarding no candidates',
+            "Given no hashes to check %s links for project %r: "
+            "discarding no candidates",
            len(candidates),
            project_name,
        )

@@ -282,22 +302,22 @@ def filter_unallowed_hashes(
        filtered = list(candidates)

    if len(filtered) == len(candidates):
-        discard_message = 'discarding no candidates'
+        discard_message = "discarding no candidates"
    else:
-        discard_message = 'discarding {} non-matches:\n {}'.format(
+        discard_message = "discarding {} non-matches:\n {}".format(
            len(non_matches),
-            '\n '.join(str(candidate.link) for candidate in non_matches)
+            "\n ".join(str(candidate.link) for candidate in non_matches),
        )

    logger.debug(
-        'Checked %s links for project %r against %s hashes '
-        '(%s matches, %s no digest): %s',
+        "Checked %s links for project %r against %s hashes "
+        "(%s matches, %s no digest): %s",
        len(candidates),
        project_name,
        hashes.digest_count,
        match_count,
        len(matches_or_no_digest) - match_count,
-        discard_message
+        discard_message,
    )

    return filtered

@@ -354,13 +374,11 @@ class BestCandidateResult:
        self.best_candidate = best_candidate

    def iter_all(self) -> Iterable[InstallationCandidate]:
-        """Iterate through all candidates.
-        """
+        """Iterate through all candidates."""
        return iter(self._candidates)

    def iter_applicable(self) -> Iterable[InstallationCandidate]:
-        """Iterate through the applicable candidates.
-        """
+        """Iterate through the applicable candidates."""
        return iter(self._applicable_candidates)


@@ -444,7 +462,8 @@ class CandidateEvaluator:
        allow_prereleases = self._allow_all_prereleases or None
        specifier = self._specifier
        versions = {
-            str(v) for v in specifier.filter(
+            str(v)
+            for v in specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and

@@ -458,9 +477,7 @@ class CandidateEvaluator:
        }

        # Again, converting version to str to deal with debundling.
-        applicable_candidates = [
-            c for c in candidates if str(c.version) in versions
-        ]
+        applicable_candidates = [c for c in candidates if str(c.version) in versions]

        filtered_applicable_candidates = filter_unallowed_hashes(
            candidates=applicable_candidates,

@@ -509,9 +526,11 @@ class CandidateEvaluator:
            # can raise InvalidWheelFilename
            wheel = Wheel(link.filename)
            try:
-                pri = -(wheel.find_most_preferred_tag(
-                    valid_tags, self._wheel_tag_preferences
-                ))
+                pri = -(
+                    wheel.find_most_preferred_tag(
+                        valid_tags, self._wheel_tag_preferences
+                    )
+                )
            except ValueError:
                raise UnsupportedWheel(
                    "{} is not a supported wheel for this platform. It "

@@ -520,7 +539,8 @@ class CandidateEvaluator:
        if self._prefer_binary:
            binary_preference = 1
        if wheel.build_tag is not None:
-            match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
+            match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
+            assert match is not None, "guaranteed by filename validation"
            build_tag_groups = match.groups()
            build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist

@@ -528,8 +548,12 @@ class CandidateEvaluator:
        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
        return (
-            has_allowed_hash, yank_value, binary_preference, candidate.version,
-            pri, build_tag,
+            has_allowed_hash,
+            yank_value,
+            binary_preference,
+            candidate.version,
+            pri,
+            build_tag,
        )

    def sort_best_candidate(

@@ -603,7 +627,7 @@ class PackageFinder:
        self.format_control = format_control

        # These are boring links that have already been logged somehow.
-        self._logged_links: Set[Link] = set()
+        self._logged_links: Set[Tuple[Link, LinkType, str]] = set()

        # Don't include an allow_yanked default value to make sure each call
        # site considers whether yanked releases are allowed. This also causes

@@ -680,6 +704,14 @@ class PackageFinder:
    def set_prefer_binary(self) -> None:
        self._candidate_prefs.prefer_binary = True

+    def requires_python_skipped_reasons(self) -> List[str]:
+        reasons = {
+            detail
+            for _, result, detail in self._logged_links
+            if result == LinkType.requires_python_mismatch
+        }
+        return sorted(reasons)
+
    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
        canonical_name = canonicalize_name(project_name)
        formats = self.format_control.get_allowed_formats(canonical_name)

@@ -709,12 +741,13 @@ class PackageFinder:
            no_eggs.append(link)
        return no_eggs + eggs

-    def _log_skipped_link(self, link: Link, reason: str) -> None:
-        if link not in self._logged_links:
+    def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
+        entry = (link, result, detail)
+        if entry not in self._logged_links:
            # Put the link at the end so the reason is more visible and because
            # the link string is usually very long.
-            logger.debug('Skipping link: %s: %s', reason, link)
-            self._logged_links.add(link)
+            logger.debug("Skipping link: %s: %s", detail, link)
+            self._logged_links.add(entry)

    def get_install_candidate(
        self, link_evaluator: LinkEvaluator, link: Link

@@ -723,16 +756,15 @@ class PackageFinder:
        If the link is a candidate for install, convert it to an
        InstallationCandidate and return it. Otherwise, return None.
        """
-        is_candidate, result = link_evaluator.evaluate_link(link)
-        if not is_candidate:
-            if result:
-                self._log_skipped_link(link, reason=result)
+        result, detail = link_evaluator.evaluate_link(link)
+        if result != LinkType.candidate:
+            self._log_skipped_link(link, result, detail)
            return None

        return InstallationCandidate(
            name=link_evaluator.project_name,
            link=link,
-            version=result,
+            version=detail,
        )

    def evaluate_links(

@@ -753,13 +785,14 @@ class PackageFinder:
        self, project_url: Link, link_evaluator: LinkEvaluator
    ) -> List[InstallationCandidate]:
        logger.debug(
-            'Fetching project page and analyzing links: %s', project_url,
+            "Fetching project page and analyzing links: %s",
+            project_url,
        )
-        html_page = self._link_collector.fetch_page(project_url)
-        if html_page is None:
+        index_response = self._link_collector.fetch_response(project_url)
+        if index_response is None:
            return []

-        page_links = list(parse_links(html_page))
+        page_links = list(parse_links(index_response))

        with indent_log():
            package_links = self.evaluate_links(

@@ -809,7 +842,14 @@ class PackageFinder:
        )

        if logger.isEnabledFor(logging.DEBUG) and file_candidates:
-            paths = [url_to_path(c.link.url) for c in file_candidates]
+            paths = []
+            for candidate in file_candidates:
+                assert candidate.link.url  # we need to have a URL
+                try:
+                    paths.append(candidate.link.file_path)
+                except Exception:
+                    paths.append(candidate.link.url)  # it's not a local file
+
            logger.debug("Local files found: %s", ", ".join(paths))

        # This is an intentional priority ordering

@@ -821,8 +861,7 @@ class PackageFinder:
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> CandidateEvaluator:
-        """Create a CandidateEvaluator object to use.
-        """
+        """Create a CandidateEvaluator object to use."""
        candidate_prefs = self._candidate_prefs
        return CandidateEvaluator.create(
            project_name=project_name,

@@ -867,76 +906,84 @@ class PackageFinder:
        """
        hashes = req.hashes(trust_internet=False)
        best_candidate_result = self.find_best_candidate(
-            req.name, specifier=req.specifier, hashes=hashes,
+            req.name,
+            specifier=req.specifier,
+            hashes=hashes,
        )
        best_candidate = best_candidate_result.best_candidate

        installed_version: Optional[_BaseVersion] = None
        if req.satisfied_by is not None:
-            installed_version = parse_version(req.satisfied_by.version)
+            installed_version = req.satisfied_by.version

        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
            # This repeated parse_version and str() conversion is needed to
            # handle different vendoring sources from pip and pkg_resources.
            # If we stop using the pkg_resources provided specifier and start
            # using our own, we can drop the cast to str().
-            return ", ".join(sorted(
-                {str(c.version) for c in cand_iter},
-                key=parse_version,
-            )) or "none"
+            return (
+                ", ".join(
+                    sorted(
+                        {str(c.version) for c in cand_iter},
+                        key=parse_version,
+                    )
+                )
+                or "none"
+            )

        if installed_version is None and best_candidate is None:
            logger.critical(
-                'Could not find a version that satisfies the requirement %s '
-                '(from versions: %s)',
+                "Could not find a version that satisfies the requirement %s "
+                "(from versions: %s)",
                req,
                _format_versions(best_candidate_result.iter_all()),
            )

            raise DistributionNotFound(
-                'No matching distribution found for {}'.format(
-                    req)
+                "No matching distribution found for {}".format(req)
            )

-        best_installed = False
-        if installed_version and (
-                best_candidate is None or
-                best_candidate.version <= installed_version):
-            best_installed = True
+        def _should_install_candidate(
+            candidate: Optional[InstallationCandidate],
+        ) -> "TypeGuard[InstallationCandidate]":
+            if installed_version is None:
+                return True
+            if best_candidate is None:
+                return False
+            return best_candidate.version > installed_version

        if not upgrade and installed_version is not None:
-            if best_installed:
-                logger.debug(
-                    'Existing installed version (%s) is most up-to-date and '
-                    'satisfies requirement',
-                    installed_version,
-                )
-            else:
+            if _should_install_candidate(best_candidate):
                logger.debug(
-                    'Existing installed version (%s) satisfies requirement '
-                    '(most up-to-date version is %s)',
+                    "Existing installed version (%s) satisfies requirement "
+                    "(most up-to-date version is %s)",
                    installed_version,
                    best_candidate.version,
                )
+            else:
+                logger.debug(
+                    "Existing installed version (%s) is most up-to-date and "
+                    "satisfies requirement",
+                    installed_version,
+                )
            return None

-        if best_installed:
-            # We have an existing version, and its the best version
+        if _should_install_candidate(best_candidate):
            logger.debug(
-                'Installed version (%s) is most up-to-date (past versions: '
-                '%s)',
-                installed_version,
-                _format_versions(best_candidate_result.iter_applicable()),
-            )
-            raise BestVersionAlreadyInstalled
-
-        logger.debug(
-            'Using version %s (newest of versions: %s)',
-            best_candidate.version,
-            _format_versions(best_candidate_result.iter_applicable()),
-        )
-        return best_candidate
+                "Using version %s (newest of versions: %s)",
+                best_candidate.version,
+                _format_versions(best_candidate_result.iter_applicable()),
+            )
+            return best_candidate
+
+        # We have an existing version, and its the best version
+        logger.debug(
+            "Installed version (%s) is most up-to-date (past versions: %s)",
+            installed_version,
+            _format_versions(best_candidate_result.iter_applicable()),
+        )
+        raise BestVersionAlreadyInstalled

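Note: the new _should_install_candidate closure is annotated with TypeGuard (imported from pip's vendored typing_extensions under TYPE_CHECKING), so a True result also narrows the Optional[InstallationCandidate] argument for the type checker. A minimal sketch of the same pattern, independent of pip's classes; it assumes typing_extensions is available on Pythons older than 3.10:

from typing import Optional

try:
    from typing import TypeGuard  # Python 3.10+
except ImportError:
    from typing_extensions import TypeGuard  # assumed installed on older Pythons

def is_present(value: Optional[str]) -> TypeGuard[str]:
    # A True result narrows `value` from Optional[str] to str for the type checker.
    return value is not None

def shout(value: Optional[str]) -> str:
    if is_present(value):
        return value.upper()  # no Optional[...] complaint here
    return ""

print(shout("pip"), shout(None))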
def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
    """Find the separator's index based on the package's canonical name.

@@ -171,7 +171,6 @@ def build_source(
    expand_dir: bool,
    cache_link_parsing: bool,
) -> Tuple[Optional[str], Optional[LinkSource]]:

    path: Optional[str] = None
    url: Optional[str] = None
    if os.path.exists(location):  # Is a local path.
@@ -4,14 +4,14 @@ import os
import pathlib
import sys
import sysconfig
-from typing import Any, Dict, Iterator, List, Optional, Tuple
+from typing import Any, Dict, Generator, Optional, Tuple

from pip._internal.models.scheme import SCHEME_KEYS, Scheme
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.virtualenv import running_under_virtualenv

-from . import _distutils, _sysconfig
+from . import _sysconfig
from .base import (
    USER_CACHE_DIR,
    get_major_minor_version,

@@ -27,7 +27,6 @@ __all__ = [
    "get_bin_user",
    "get_major_minor_version",
    "get_platlib",
-    "get_prefixed_libs",
    "get_purelib",
    "get_scheme",
    "get_src_prefix",

@@ -38,20 +37,48 @@ __all__ = [

logger = logging.getLogger(__name__)

-if os.environ.get("_PIP_LOCATIONS_NO_WARN_ON_MISMATCH"):
-    _MISMATCH_LEVEL = logging.DEBUG
-else:
-    _MISMATCH_LEVEL = logging.WARNING
-
_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")

+_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
+
+
+def _should_use_sysconfig() -> bool:
+    """This function determines the value of _USE_SYSCONFIG.
+
+    By default, pip uses sysconfig on Python 3.10+.
+    But Python distributors can override this decision by setting:
+        sysconfig._PIP_USE_SYSCONFIG = True / False
+    Rationale in https://github.com/pypa/pip/issues/10647
+
+    This is a function for testability, but should be constant during any one
+    run.
+    """
+    return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
+
+
+_USE_SYSCONFIG = _should_use_sysconfig()
+
+if not _USE_SYSCONFIG:
+    # Import distutils lazily to avoid deprecation warnings,
+    # but import it soon enough that it is in memory and available during
+    # a pip reinstall.
+    from . import _distutils
+
+# Be noisy about incompatibilities if this platforms "should" be using
+# sysconfig, but is explicitly opting out and using distutils instead.
+if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
+    _MISMATCH_LEVEL = logging.WARNING
+else:
+    _MISMATCH_LEVEL = logging.DEBUG

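Note: the new module-level gate above decides once, at import time, whether pip should consult distutils at all. The distributor override mentioned in the docstring can be simulated by setting the private attribute on sysconfig before this module is imported; a rough illustration (the attribute name comes straight from the diff, the rest is a made-up demo, not something pip ships):

import sys
import sysconfig

# Pretend a distributor wants distutils-based paths even on Python 3.10+.
sysconfig._PIP_USE_SYSCONFIG = False  # type: ignore[attr-defined]

use_sysconfig_default = sys.version_info >= (3, 10)
use_sysconfig = bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", use_sysconfig_default))

print("sysconfig backend:", use_sysconfig)  # False because of the override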
def _looks_like_bpo_44860() -> bool:
|
def _looks_like_bpo_44860() -> bool:
|
||||||
"""The resolution to bpo-44860 will change this incorrect platlib.
|
"""The resolution to bpo-44860 will change this incorrect platlib.
|
||||||
|
|
||||||
See <https://bugs.python.org/issue44860>.
|
See <https://bugs.python.org/issue44860>.
|
||||||
"""
|
"""
|
||||||
from distutils.command.install import INSTALL_SCHEMES # type: ignore
|
from distutils.command.install import INSTALL_SCHEMES
|
||||||
|
|
||||||
try:
|
try:
|
||||||
unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
|
unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
|
||||||
@@ -62,6 +89,8 @@ def _looks_like_bpo_44860() -> bool:
|
|||||||
|
|
||||||
def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
|
def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
|
||||||
platlib = scheme["platlib"]
|
platlib = scheme["platlib"]
|
||||||
|
if "/$platlibdir/" in platlib:
|
||||||
|
platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
|
||||||
if "/lib64/" not in platlib:
|
if "/lib64/" not in platlib:
|
||||||
return False
|
return False
|
||||||
unpatched = platlib.replace("/lib64/", "/lib/")
|
unpatched = platlib.replace("/lib64/", "/lib/")
|
||||||
@@ -74,7 +103,7 @@ def _looks_like_red_hat_lib() -> bool:
|
|||||||
|
|
||||||
This is the only way I can see to tell a Red Hat-patched Python.
|
This is the only way I can see to tell a Red Hat-patched Python.
|
||||||
"""
|
"""
|
||||||
from distutils.command.install import INSTALL_SCHEMES # type: ignore
|
from distutils.command.install import INSTALL_SCHEMES
|
||||||
|
|
||||||
return all(
|
return all(
|
||||||
k in INSTALL_SCHEMES
|
k in INSTALL_SCHEMES
|
||||||
@@ -86,7 +115,7 @@ def _looks_like_red_hat_lib() -> bool:
|
|||||||
@functools.lru_cache(maxsize=None)
|
@functools.lru_cache(maxsize=None)
|
||||||
def _looks_like_debian_scheme() -> bool:
|
def _looks_like_debian_scheme() -> bool:
|
||||||
"""Debian adds two additional schemes."""
|
"""Debian adds two additional schemes."""
|
||||||
from distutils.command.install import INSTALL_SCHEMES # type: ignore
|
from distutils.command.install import INSTALL_SCHEMES
|
||||||
|
|
||||||
return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
|
return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
|
||||||
|
|
||||||
@@ -111,6 +140,22 @@ def _looks_like_red_hat_scheme() -> bool:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@functools.lru_cache(maxsize=None)
|
||||||
|
def _looks_like_slackware_scheme() -> bool:
|
||||||
|
"""Slackware patches sysconfig but fails to patch distutils and site.
|
||||||
|
|
||||||
|
Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
|
||||||
|
path, but does not do the same to the site module.
|
||||||
|
"""
|
||||||
|
if user_site is None: # User-site not available.
|
||||||
|
return False
|
||||||
|
try:
|
||||||
|
paths = sysconfig.get_paths(scheme="posix_user", expand=False)
|
||||||
|
except KeyError: # User-site not available.
|
||||||
|
return False
|
||||||
|
return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site
|
||||||
|
|
||||||
|
|
||||||
@functools.lru_cache(maxsize=None)
|
@functools.lru_cache(maxsize=None)
|
||||||
def _looks_like_msys2_mingw_scheme() -> bool:
|
def _looks_like_msys2_mingw_scheme() -> bool:
|
||||||
"""MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
|
"""MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
|
||||||
@@ -129,9 +174,9 @@ def _looks_like_msys2_mingw_scheme() -> bool:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _fix_abiflags(parts: Tuple[str]) -> Iterator[str]:
|
def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
|
||||||
ldversion = sysconfig.get_config_var("LDVERSION")
|
ldversion = sysconfig.get_config_var("LDVERSION")
|
||||||
abiflags: str = getattr(sys, "abiflags", None)
|
abiflags = getattr(sys, "abiflags", None)
|
||||||
|
|
||||||
# LDVERSION does not end with sys.abiflags. Just return the path unchanged.
|
# LDVERSION does not end with sys.abiflags. Just return the path unchanged.
|
||||||
if not ldversion or not abiflags or not ldversion.endswith(abiflags):
|
if not ldversion or not abiflags or not ldversion.endswith(abiflags):
|
||||||
@@ -190,7 +235,7 @@ def get_scheme(
|
|||||||
isolated: bool = False,
|
isolated: bool = False,
|
||||||
prefix: Optional[str] = None,
|
prefix: Optional[str] = None,
|
||||||
) -> Scheme:
|
) -> Scheme:
|
||||||
old = _distutils.get_scheme(
|
new = _sysconfig.get_scheme(
|
||||||
dist_name,
|
dist_name,
|
||||||
user=user,
|
user=user,
|
||||||
home=home,
|
home=home,
|
||||||
@@ -198,7 +243,10 @@ def get_scheme(
|
|||||||
isolated=isolated,
|
isolated=isolated,
|
||||||
prefix=prefix,
|
prefix=prefix,
|
||||||
)
|
)
|
||||||
new = _sysconfig.get_scheme(
|
if _USE_SYSCONFIG:
|
||||||
|
return new
|
||||||
|
|
||||||
|
old = _distutils.get_scheme(
|
||||||
dist_name,
|
dist_name,
|
||||||
user=user,
|
user=user,
|
||||||
home=home,
|
home=home,
|
||||||
@@ -263,6 +311,17 @@ def get_scheme(
|
|||||||
if skip_bpo_44860:
|
if skip_bpo_44860:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
# Slackware incorrectly patches posix_user to use lib64 instead of lib,
|
||||||
|
# but not usersite to match the location.
|
||||||
|
skip_slackware_user_scheme = (
|
||||||
|
user
|
||||||
|
and k in ("platlib", "purelib")
|
||||||
|
and not WINDOWS
|
||||||
|
and _looks_like_slackware_scheme()
|
||||||
|
)
|
||||||
|
if skip_slackware_user_scheme:
|
||||||
|
continue
|
||||||
|
|
||||||
# Both Debian and Red Hat patch Python to place the system site under
|
# Both Debian and Red Hat patch Python to place the system site under
|
||||||
# /usr/local instead of /usr. Debian also places lib in dist-packages
|
# /usr/local instead of /usr. Debian also places lib in dist-packages
|
||||||
# instead of site-packages, but the /usr/local check should cover it.
|
# instead of site-packages, but the /usr/local check should cover it.
|
||||||
@@ -296,6 +355,18 @@ def get_scheme(
|
|||||||
if skip_msys2_mingw_bug:
|
if skip_msys2_mingw_bug:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
# CPython's POSIX install script invokes pip (via ensurepip) against the
|
||||||
|
# interpreter located in the source tree, not the install site. This
|
||||||
|
# triggers special logic in sysconfig that's not present in distutils.
|
||||||
|
# https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
|
||||||
|
skip_cpython_build = (
|
||||||
|
sysconfig.is_python_build(check_home=True)
|
||||||
|
and not WINDOWS
|
||||||
|
and k in ("headers", "include", "platinclude")
|
||||||
|
)
|
||||||
|
if skip_cpython_build:
|
||||||
|
continue
|
||||||
|
|
||||||
warning_contexts.append((old_v, new_v, f"scheme.{k}"))
|
warning_contexts.append((old_v, new_v, f"scheme.{k}"))
|
||||||
|
|
||||||
if not warning_contexts:
|
if not warning_contexts:
|
||||||
@@ -315,10 +386,12 @@ def get_scheme(
|
|||||||
)
|
)
|
||||||
if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
|
if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
|
||||||
deprecated(
|
deprecated(
|
||||||
|
reason=(
|
||||||
"Configuring installation scheme with distutils config files "
|
"Configuring installation scheme with distutils config files "
|
||||||
"is deprecated and will no longer work in the near future. If you "
|
"is deprecated and will no longer work in the near future. If you "
|
||||||
"are using a Homebrew or Linuxbrew Python, please see discussion "
|
"are using a Homebrew or Linuxbrew Python, please see discussion "
|
||||||
"at https://github.com/Homebrew/homebrew-core/issues/76621",
|
"at https://github.com/Homebrew/homebrew-core/issues/76621"
|
||||||
|
),
|
||||||
replacement=None,
|
replacement=None,
|
||||||
gone_in=None,
|
gone_in=None,
|
||||||
)
|
)
|
||||||
@@ -333,8 +406,11 @@ def get_scheme(
|
|||||||
|
|
||||||
|
|
||||||
def get_bin_prefix() -> str:
|
def get_bin_prefix() -> str:
|
||||||
old = _distutils.get_bin_prefix()
|
|
||||||
new = _sysconfig.get_bin_prefix()
|
new = _sysconfig.get_bin_prefix()
|
||||||
|
if _USE_SYSCONFIG:
|
||||||
|
return new
|
||||||
|
|
||||||
|
old = _distutils.get_bin_prefix()
|
||||||
if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
|
if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
|
||||||
_log_context()
|
_log_context()
|
||||||
return old
|
return old
|
||||||
@@ -363,8 +439,11 @@ def _looks_like_deb_system_dist_packages(value: str) -> bool:
|
|||||||
|
|
||||||
def get_purelib() -> str:
|
def get_purelib() -> str:
|
||||||
"""Return the default pure-Python lib location."""
|
"""Return the default pure-Python lib location."""
|
||||||
old = _distutils.get_purelib()
|
|
||||||
new = _sysconfig.get_purelib()
|
new = _sysconfig.get_purelib()
|
||||||
|
if _USE_SYSCONFIG:
|
||||||
|
return new
|
||||||
|
|
||||||
|
old = _distutils.get_purelib()
|
||||||
if _looks_like_deb_system_dist_packages(old):
|
if _looks_like_deb_system_dist_packages(old):
|
||||||
return old
|
return old
|
||||||
if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
|
if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
|
||||||
@@ -374,35 +453,15 @@ def get_purelib() -> str:

 def get_platlib() -> str:
     """Return the default platform-shared lib location."""
-    old = _distutils.get_platlib()
     new = _sysconfig.get_platlib()
+    if _USE_SYSCONFIG:
+        return new
+
+    from . import _distutils
+
+    old = _distutils.get_platlib()
     if _looks_like_deb_system_dist_packages(old):
         return old
     if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
         _log_context()
     return old
-
-
-def get_prefixed_libs(prefix: str) -> List[str]:
-    """Return the lib locations under ``prefix``."""
-    old_pure, old_plat = _distutils.get_prefixed_libs(prefix)
-    new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix)
-
-    warned = [
-        _warn_if_mismatch(
-            pathlib.Path(old_pure),
-            pathlib.Path(new_pure),
-            key="prefixed-purelib",
-        ),
-        _warn_if_mismatch(
-            pathlib.Path(old_plat),
-            pathlib.Path(new_plat),
-            key="prefixed-platlib",
-        ),
-    ]
-    if any(warned):
-        _log_context(prefix=prefix)
-
-    if old_pure == old_plat:
-        return [old_pure]
-    return [old_pure, old_plat]
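
Editor's sketch (assumption, not pip code): the shared pattern in the three hunks above. The sysconfig-derived path becomes authoritative when _USE_SYSCONFIG is true; otherwise pip still returns the distutils value and only warns when the two backends disagree. The same comparison can be reproduced directly on Pythons that still ship distutils:

import sysconfig
from distutils.sysconfig import get_python_lib  # removed in Python 3.12+

new = sysconfig.get_paths()["purelib"]
old = get_python_lib(plat_specific=False)
if old != new:
    print(f"purelib mismatch: distutils={old!r} sysconfig={new!r}")
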
@@ -3,6 +3,17 @@
 # The following comment should be removed at some point in the future.
 # mypy: strict-optional=False

+# If pip's going to use distutils, it should not be using the copy that setuptools
+# might have injected into the environment. This is done by removing the injected
+# shim, if it's injected.
+#
+# See https://github.com/pypa/pip/issues/8761 for the original discussion and
+# rationale for why this is done within pip.
+try:
+    __import__("_distutils_hack").remove_shim()
+except (ImportError, AttributeError):
+    pass
+
 import logging
 import os
 import sys
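
Editor's illustration (assumption, not part of the diff): what the injected shim is. setuptools' _distutils_hack puts a DistutilsMetaFinder on sys.meta_path so that "import distutils" resolves to setuptools' bundled copy; remove_shim() takes that finder back out for the current process, which is exactly what the added block does before pip imports distutils itself.

import sys

try:
    __import__("_distutils_hack").remove_shim()
except (ImportError, AttributeError):
    pass  # setuptools absent, or too old to provide the shim

still_shimmed = any(
    type(finder).__name__ == "DistutilsMetaFinder" for finder in sys.meta_path
)
print("distutils shim active:", still_shimmed)
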
@@ -10,7 +21,7 @@ from distutils.cmd import Command as DistutilsCommand
|
|||||||
from distutils.command.install import SCHEME_KEYS
|
from distutils.command.install import SCHEME_KEYS
|
||||||
from distutils.command.install import install as distutils_install_command
|
from distutils.command.install import install as distutils_install_command
|
||||||
from distutils.sysconfig import get_python_lib
|
from distutils.sysconfig import get_python_lib
|
||||||
from typing import Dict, List, Optional, Tuple, Union, cast
|
from typing import Dict, List, Optional, Union, cast
|
||||||
|
|
||||||
from pip._internal.models.scheme import Scheme
|
from pip._internal.models.scheme import Scheme
|
||||||
from pip._internal.utils.compat import WINDOWS
|
from pip._internal.utils.compat import WINDOWS
|
||||||
@@ -24,10 +35,10 @@ logger = logging.getLogger(__name__)
|
|||||||
def distutils_scheme(
|
def distutils_scheme(
|
||||||
dist_name: str,
|
dist_name: str,
|
||||||
user: bool = False,
|
user: bool = False,
|
||||||
home: str = None,
|
home: Optional[str] = None,
|
||||||
root: str = None,
|
root: Optional[str] = None,
|
||||||
isolated: bool = False,
|
isolated: bool = False,
|
||||||
prefix: str = None,
|
prefix: Optional[str] = None,
|
||||||
*,
|
*,
|
||||||
ignore_config_files: bool = False,
|
ignore_config_files: bool = False,
|
||||||
) -> Dict[str, str]:
|
) -> Dict[str, str]:
|
||||||
@@ -84,7 +95,7 @@ def distutils_scheme(
|
|||||||
if home:
|
if home:
|
||||||
prefix = home
|
prefix = home
|
||||||
elif user:
|
elif user:
|
||||||
prefix = i.install_userbase # type: ignore
|
prefix = i.install_userbase
|
||||||
else:
|
else:
|
||||||
prefix = i.prefix
|
prefix = i.prefix
|
||||||
scheme["headers"] = os.path.join(
|
scheme["headers"] = os.path.join(
|
||||||
@@ -160,10 +171,3 @@ def get_purelib() -> str:
|
|||||||
|
|
||||||
def get_platlib() -> str:
|
def get_platlib() -> str:
|
||||||
return get_python_lib(plat_specific=True)
|
return get_python_lib(plat_specific=True)
|
||||||
|
|
||||||
|
|
||||||
def get_prefixed_libs(prefix: str) -> Tuple[str, str]:
|
|
||||||
return (
|
|
||||||
get_python_lib(plat_specific=False, prefix=prefix),
|
|
||||||
get_python_lib(plat_specific=True, prefix=prefix),
|
|
||||||
)
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
import distutils.util # FIXME: For change_root.
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
@@ -9,7 +8,7 @@ from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationI
|
|||||||
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
||||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||||
|
|
||||||
from .base import get_major_minor_version, is_osx_framework
|
from .base import change_root, get_major_minor_version, is_osx_framework
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -194,7 +193,7 @@ def get_scheme(
|
|||||||
)
|
)
|
||||||
if root is not None:
|
if root is not None:
|
||||||
for key in SCHEME_KEYS:
|
for key in SCHEME_KEYS:
|
||||||
value = distutils.util.change_root(root, getattr(scheme, key))
|
value = change_root(root, getattr(scheme, key))
|
||||||
setattr(scheme, key, value)
|
setattr(scheme, key, value)
|
||||||
return scheme
|
return scheme
|
||||||
|
|
||||||
@@ -212,8 +211,3 @@ def get_purelib() -> str:
|
|||||||
|
|
||||||
def get_platlib() -> str:
|
def get_platlib() -> str:
|
||||||
return sysconfig.get_paths()["platlib"]
|
return sysconfig.get_paths()["platlib"]
|
||||||
|
|
||||||
|
|
||||||
def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]:
|
|
||||||
paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix})
|
|
||||||
return (paths["purelib"], paths["platlib"])
|
|
||||||
@@ -5,6 +5,7 @@ import sys
 import sysconfig
 import typing

+from pip._internal.exceptions import InstallationError
 from pip._internal.utils import appdirs
 from pip._internal.utils.virtualenv import running_under_virtualenv

@@ -12,7 +13,7 @@ from pip._internal.utils.virtualenv import running_under_virtualenv
 USER_CACHE_DIR = appdirs.user_cache_dir("pip")

 # FIXME doesn't account for venv linked to global site-packages
-site_packages: typing.Optional[str] = sysconfig.get_path("purelib")
+site_packages: str = sysconfig.get_path("purelib")


 def get_major_minor_version() -> str:
@@ -23,6 +24,34 @@ def get_major_minor_version() -> str:
     return "{}.{}".format(*sys.version_info)


+def change_root(new_root: str, pathname: str) -> str:
+    """Return 'pathname' with 'new_root' prepended.
+
+    If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname).
+    Otherwise, it requires making 'pathname' relative and then joining the
+    two, which is tricky on DOS/Windows and Mac OS.
+
+    This is borrowed from Python's standard library's distutils module.
+    """
+    if os.name == "posix":
+        if not os.path.isabs(pathname):
+            return os.path.join(new_root, pathname)
+        else:
+            return os.path.join(new_root, pathname[1:])
+
+    elif os.name == "nt":
+        (drive, path) = os.path.splitdrive(pathname)
+        if path[0] == "\\":
+            path = path[1:]
+        return os.path.join(new_root, path)
+
+    else:
+        raise InstallationError(
+            f"Unknown platform: {os.name}\n"
+            "Can not change root path prefix on unknown platform."
+        )
+
+
 def get_src_prefix() -> str:
     if running_under_virtualenv():
         src_prefix = os.path.join(sys.prefix, "src")
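
Editor's illustration of the vendored change_root() added above, with made-up paths and assuming the module is pip/_internal/locations/base.py (which the neighbouring imports suggest). On POSIX an absolute pathname is re-rooted under new_root, a relative one is simply joined:

from pip._internal.locations.base import change_root  # assumed module path

print(change_root("/tmp/stage", "/usr/lib/python3.10/site-packages"))
# /tmp/stage/usr/lib/python3.10/site-packages
print(change_root("/tmp/stage", "relative/dir"))
# /tmp/stage/relative/dir
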
@@ -1,8 +1,7 @@
 from typing import List, Optional


-def main(args=None):
-    # type: (Optional[List[str]]) -> int
+def main(args: Optional[List[str]] = None) -> int:
     """This is preserved for old console scripts that may still be referencing
     it.
@@ -1,16 +1,70 @@
-from typing import List, Optional
+import contextlib
+import functools
+import os
+import sys
+from typing import TYPE_CHECKING, List, Optional, Type, cast

-from .base import BaseDistribution, BaseEnvironment
+from pip._internal.utils.misc import strtobool
+
+from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
+
+if TYPE_CHECKING:
+    from typing import Protocol
+else:
+    Protocol = object

 __all__ = [
     "BaseDistribution",
     "BaseEnvironment",
+    "FilesystemWheel",
+    "MemoryWheel",
+    "Wheel",
     "get_default_environment",
     "get_environment",
     "get_wheel_distribution",
+    "select_backend",
 ]


+def _should_use_importlib_metadata() -> bool:
+    """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.
+
+    By default, pip uses ``importlib.metadata`` on Python 3.11+, and
+    ``pkg_resources`` otherwise. This can be overridden in a couple of ways:
+
+    * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it
+      dictates whether ``importlib.metadata`` is used, regardless of Python
+      version.
+    * On Python 3.11+, Python distributors can patch ``importlib.metadata``
+      to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This
+      makes pip use ``pkg_resources`` (unless the user set the aforementioned
+      environment variable to *True*).
+    """
+    with contextlib.suppress(KeyError, ValueError):
+        return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
+    if sys.version_info < (3, 11):
+        return False
+    import importlib.metadata
+
+    return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))
+
+
+class Backend(Protocol):
+    Distribution: Type[BaseDistribution]
+    Environment: Type[BaseEnvironment]
+
+
+@functools.lru_cache(maxsize=None)
+def select_backend() -> Backend:
+    if _should_use_importlib_metadata():
+        from . import importlib
+
+        return cast(Backend, importlib)
+    from . import pkg_resources
+
+    return cast(Backend, pkg_resources)
+
+
 def get_default_environment() -> BaseEnvironment:
     """Get the default representation for the current environment.

@@ -18,9 +72,7 @@ def get_default_environment() -> BaseEnvironment:
     Environment instance should be built from ``sys.path`` and may use caching
     to share instance state across calls.
     """
-    from .pkg_resources import Environment
-
-    return Environment.default()
+    return select_backend().Environment.default()


 def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
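
Editor's sketch (pip internals, not a supported interface): forcing the pkg_resources backend regardless of Python version, as the docstring above describes. The environment variable has to be set before select_backend() runs for the first time, because its result is cached by functools.lru_cache.

import os

os.environ["_PIP_USE_IMPORTLIB_METADATA"] = "0"  # any strtobool-false value

from pip._internal.metadata import get_default_environment, select_backend

print(select_backend().__name__)  # ends in ".pkg_resources"
env = get_default_environment()   # an Environment from the selected backend
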
@@ -30,12 +82,19 @@ def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
|
|||||||
given import paths. The backend must build a fresh instance representing
|
given import paths. The backend must build a fresh instance representing
|
||||||
the state of installed distributions when this function is called.
|
the state of installed distributions when this function is called.
|
||||||
"""
|
"""
|
||||||
from .pkg_resources import Environment
|
return select_backend().Environment.from_paths(paths)
|
||||||
|
|
||||||
return Environment.from_paths(paths)
|
|
||||||
|
|
||||||
|
|
||||||
def get_wheel_distribution(wheel_path: str, canonical_name: str) -> BaseDistribution:
|
def get_directory_distribution(directory: str) -> BaseDistribution:
|
||||||
|
"""Get the distribution metadata representation in the specified directory.
|
||||||
|
|
||||||
|
This returns a Distribution instance from the chosen backend based on
|
||||||
|
the given on-disk ``.dist-info`` directory.
|
||||||
|
"""
|
||||||
|
return select_backend().Distribution.from_directory(directory)
|
||||||
|
|
||||||
|
|
||||||
|
def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
|
||||||
"""Get the representation of the specified wheel's distribution metadata.
|
"""Get the representation of the specified wheel's distribution metadata.
|
||||||
|
|
||||||
This returns a Distribution instance from the chosen backend based on
|
This returns a Distribution instance from the chosen backend based on
|
||||||
@@ -43,6 +102,26 @@ def get_wheel_distribution(wheel_path: str, canonical_name: str) -> BaseDistribu
|
|||||||
|
|
||||||
:param canonical_name: Normalized project name of the given wheel.
|
:param canonical_name: Normalized project name of the given wheel.
|
||||||
"""
|
"""
|
||||||
from .pkg_resources import Distribution
|
return select_backend().Distribution.from_wheel(wheel, canonical_name)
|
||||||
|
|
||||||
return Distribution.from_wheel(wheel_path, canonical_name)
|
|
||||||
|
def get_metadata_distribution(
|
||||||
|
metadata_contents: bytes,
|
||||||
|
filename: str,
|
||||||
|
canonical_name: str,
|
||||||
|
) -> BaseDistribution:
|
||||||
|
"""Get the dist representation of the specified METADATA file contents.
|
||||||
|
|
||||||
|
This returns a Distribution instance from the chosen backend sourced from the data
|
||||||
|
in `metadata_contents`.
|
||||||
|
|
||||||
|
:param metadata_contents: Contents of a METADATA file within a dist, or one served
|
||||||
|
via PEP 658.
|
||||||
|
:param filename: Filename for the dist this metadata represents.
|
||||||
|
:param canonical_name: Normalized project name of the given dist.
|
||||||
|
"""
|
||||||
|
return select_backend().Distribution.from_metadata_file_contents(
|
||||||
|
metadata_contents,
|
||||||
|
filename,
|
||||||
|
canonical_name,
|
||||||
|
)
|
||||||
|
|||||||
@@ -1,37 +1,56 @@
|
|||||||
|
import csv
|
||||||
import email.message
|
import email.message
|
||||||
|
import functools
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
import pathlib
|
||||||
import re
|
import re
|
||||||
|
import zipfile
|
||||||
from typing import (
|
from typing import (
|
||||||
|
IO,
|
||||||
TYPE_CHECKING,
|
TYPE_CHECKING,
|
||||||
|
Any,
|
||||||
Collection,
|
Collection,
|
||||||
Container,
|
Container,
|
||||||
|
Dict,
|
||||||
Iterable,
|
Iterable,
|
||||||
Iterator,
|
Iterator,
|
||||||
List,
|
List,
|
||||||
|
NamedTuple,
|
||||||
Optional,
|
Optional,
|
||||||
|
Tuple,
|
||||||
Union,
|
Union,
|
||||||
)
|
)
|
||||||
|
|
||||||
from pip._vendor.packaging.requirements import Requirement
|
from pip._vendor.packaging.requirements import Requirement
|
||||||
|
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
|
||||||
|
from pip._vendor.packaging.utils import NormalizedName
|
||||||
from pip._vendor.packaging.version import LegacyVersion, Version
|
from pip._vendor.packaging.version import LegacyVersion, Version
|
||||||
|
|
||||||
|
from pip._internal.exceptions import NoneMetadataError
|
||||||
|
from pip._internal.locations import site_packages, user_site
|
||||||
from pip._internal.models.direct_url import (
|
from pip._internal.models.direct_url import (
|
||||||
DIRECT_URL_METADATA_NAME,
|
DIRECT_URL_METADATA_NAME,
|
||||||
DirectUrl,
|
DirectUrl,
|
||||||
DirectUrlValidationError,
|
DirectUrlValidationError,
|
||||||
)
|
)
|
||||||
from pip._internal.utils.misc import stdlib_pkgs # TODO: Move definition here.
|
from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
|
||||||
|
from pip._internal.utils.egg_link import egg_link_path_from_sys_path
|
||||||
|
from pip._internal.utils.misc import is_local, normalize_path
|
||||||
|
from pip._internal.utils.packaging import safe_extra
|
||||||
|
from pip._internal.utils.urls import url_to_path
|
||||||
|
|
||||||
|
from ._json import msg_to_json
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from typing import Protocol
|
from typing import Protocol
|
||||||
|
|
||||||
from pip._vendor.packaging.utils import NormalizedName
|
|
||||||
else:
|
else:
|
||||||
Protocol = object
|
Protocol = object
|
||||||
|
|
||||||
DistributionVersion = Union[LegacyVersion, Version]
|
DistributionVersion = Union[LegacyVersion, Version]
|
||||||
|
|
||||||
|
InfoPath = Union[str, pathlib.PurePath]
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@@ -49,7 +68,89 @@ class BaseEntryPoint(Protocol):
|
|||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
|
def _convert_installed_files_path(
|
||||||
|
entry: Tuple[str, ...],
|
||||||
|
info: Tuple[str, ...],
|
||||||
|
) -> str:
|
||||||
|
"""Convert a legacy installed-files.txt path into modern RECORD path.
|
||||||
|
|
||||||
|
The legacy format stores paths relative to the info directory, while the
|
||||||
|
modern format stores paths relative to the package root, e.g. the
|
||||||
|
site-packages directory.
|
||||||
|
|
||||||
|
:param entry: Path parts of the installed-files.txt entry.
|
||||||
|
:param info: Path parts of the egg-info directory relative to package root.
|
||||||
|
:returns: The converted entry.
|
||||||
|
|
||||||
|
For best compatibility with symlinks, this does not use ``abspath()`` or
|
||||||
|
``Path.resolve()``, but tries to work with path parts:
|
||||||
|
|
||||||
|
1. While ``entry`` starts with ``..``, remove the equal amounts of parts
|
||||||
|
from ``info``; if ``info`` is empty, start appending ``..`` instead.
|
||||||
|
2. Join the two directly.
|
||||||
|
"""
|
||||||
|
while entry and entry[0] == "..":
|
||||||
|
if not info or info[-1] == "..":
|
||||||
|
info += ("..",)
|
||||||
|
else:
|
||||||
|
info = info[:-1]
|
||||||
|
entry = entry[1:]
|
||||||
|
return str(pathlib.Path(*info, *entry))
|
||||||
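
Editor's worked example (hypothetical values) of the conversion implemented above: installed-files.txt entries are relative to the *.egg-info directory, while RECORD entries are relative to the package root (site-packages).

# "../mod.py" recorded next to example_pkg-1.0.egg-info
entry = ("..", "mod.py")
info = ("example_pkg-1.0.egg-info",)
# the leading ".." consumes the single info part -> entry=("mod.py",), info=()
# joined result: "mod.py", i.e. the module sits directly in site-packages

# "../../bin/example-tool": the second ".." has nothing left to consume in
# info, so a ".." is kept, giving "../bin/example-tool" relative to
# site-packages (e.g. a script installed next to it).
entry = ("..", "..", "bin", "example-tool")
info = ("example_pkg-1.0.egg-info",)
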
|
|
||||||
|
|
||||||
|
class RequiresEntry(NamedTuple):
|
||||||
|
requirement: str
|
||||||
|
extra: str
|
||||||
|
marker: str
|
||||||
|
|
||||||
|
|
||||||
class BaseDistribution(Protocol):
|
class BaseDistribution(Protocol):
|
||||||
|
@classmethod
|
||||||
|
def from_directory(cls, directory: str) -> "BaseDistribution":
|
||||||
|
"""Load the distribution from a metadata directory.
|
||||||
|
|
||||||
|
:param directory: Path to a metadata directory, e.g. ``.dist-info``.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_metadata_file_contents(
|
||||||
|
cls,
|
||||||
|
metadata_contents: bytes,
|
||||||
|
filename: str,
|
||||||
|
project_name: str,
|
||||||
|
) -> "BaseDistribution":
|
||||||
|
"""Load the distribution from the contents of a METADATA file.
|
||||||
|
|
||||||
|
This is used to implement PEP 658 by generating a "shallow" dist object that can
|
||||||
|
be used for resolution without downloading or building the actual dist yet.
|
||||||
|
|
||||||
|
:param metadata_contents: The contents of a METADATA file.
|
||||||
|
:param filename: File name for the dist with this metadata.
|
||||||
|
:param project_name: Name of the project this dist represents.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":
|
||||||
|
"""Load the distribution from a given wheel.
|
||||||
|
|
||||||
|
:param wheel: A concrete wheel definition.
|
||||||
|
:param name: File name of the wheel.
|
||||||
|
|
||||||
|
:raises InvalidWheel: Whenever loading of the wheel causes a
|
||||||
|
:py:exc:`zipfile.BadZipFile` exception to be thrown.
|
||||||
|
:raises UnsupportedWheel: If the wheel is a valid zip, but malformed
|
||||||
|
internally.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"{self.raw_name} {self.version} ({self.location})"
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return f"{self.raw_name} {self.version}"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def location(self) -> Optional[str]:
|
def location(self) -> Optional[str]:
|
||||||
"""Where the distribution is loaded from.
|
"""Where the distribution is loaded from.
|
||||||
@@ -65,8 +166,43 @@ class BaseDistribution(Protocol):
|
|||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def info_directory(self) -> Optional[str]:
|
def editable_project_location(self) -> Optional[str]:
|
||||||
"""Location of the .[egg|dist]-info directory.
|
"""The project location for editable distributions.
|
||||||
|
|
||||||
|
This is the directory where pyproject.toml or setup.py is located.
|
||||||
|
None if the distribution is not installed in editable mode.
|
||||||
|
"""
|
||||||
|
# TODO: this property is relatively costly to compute, memoize it ?
|
||||||
|
direct_url = self.direct_url
|
||||||
|
if direct_url:
|
||||||
|
if direct_url.is_local_editable():
|
||||||
|
return url_to_path(direct_url.url)
|
||||||
|
else:
|
||||||
|
# Search for an .egg-link file by walking sys.path, as it was
|
||||||
|
# done before by dist_is_editable().
|
||||||
|
egg_link_path = egg_link_path_from_sys_path(self.raw_name)
|
||||||
|
if egg_link_path:
|
||||||
|
# TODO: get project location from second line of egg_link file
|
||||||
|
# (https://github.com/pypa/pip/issues/10243)
|
||||||
|
return self.location
|
||||||
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def installed_location(self) -> Optional[str]:
|
||||||
|
"""The distribution's "installed" location.
|
||||||
|
|
||||||
|
This should generally be a ``site-packages`` directory. This is
|
||||||
|
usually ``dist.location``, except for legacy develop-installed packages,
|
||||||
|
where ``dist.location`` is the source code location, and this is where
|
||||||
|
the ``.egg-link`` file is.
|
||||||
|
|
||||||
|
The returned location is normalized (in particular, with symlinks removed).
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def info_location(self) -> Optional[str]:
|
||||||
|
"""Location of the .[egg|dist]-info directory or file.
|
||||||
|
|
||||||
Similarly to ``location``, a string value is not necessarily a
|
Similarly to ``location``, a string value is not necessarily a
|
||||||
filesystem path. ``None`` means the distribution is created in-memory.
|
filesystem path. ``None`` means the distribution is created in-memory.
|
||||||
@@ -81,13 +217,80 @@ class BaseDistribution(Protocol):
|
|||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def canonical_name(self) -> "NormalizedName":
|
def installed_by_distutils(self) -> bool:
|
||||||
|
"""Whether this distribution is installed with legacy distutils format.
|
||||||
|
|
||||||
|
A distribution installed with "raw" distutils not patched by setuptools
|
||||||
|
uses one single file at ``info_location`` to store metadata. We need to
|
||||||
|
treat this specially on uninstallation.
|
||||||
|
"""
|
||||||
|
info_location = self.info_location
|
||||||
|
if not info_location:
|
||||||
|
return False
|
||||||
|
return pathlib.Path(info_location).is_file()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def installed_as_egg(self) -> bool:
|
||||||
|
"""Whether this distribution is installed as an egg.
|
||||||
|
|
||||||
|
This usually indicates the distribution was installed by (older versions
|
||||||
|
of) easy_install.
|
||||||
|
"""
|
||||||
|
location = self.location
|
||||||
|
if not location:
|
||||||
|
return False
|
||||||
|
return location.endswith(".egg")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def installed_with_setuptools_egg_info(self) -> bool:
|
||||||
|
"""Whether this distribution is installed with the ``.egg-info`` format.
|
||||||
|
|
||||||
|
This usually indicates the distribution was installed with setuptools
|
||||||
|
with an old pip version or with ``single-version-externally-managed``.
|
||||||
|
|
||||||
|
Note that this ensures the metadata store is a directory. distutils can
|
||||||
|
also install an ``.egg-info``, but as a file, not a directory. This
|
||||||
|
property is *False* for that case. Also see ``installed_by_distutils``.
|
||||||
|
"""
|
||||||
|
info_location = self.info_location
|
||||||
|
if not info_location:
|
||||||
|
return False
|
||||||
|
if not info_location.endswith(".egg-info"):
|
||||||
|
return False
|
||||||
|
return pathlib.Path(info_location).is_dir()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def installed_with_dist_info(self) -> bool:
|
||||||
|
"""Whether this distribution is installed with the "modern format".
|
||||||
|
|
||||||
|
This indicates a "modern" installation, e.g. storing metadata in the
|
||||||
|
``.dist-info`` directory. This applies to installations made by
|
||||||
|
setuptools (but through pip, not directly), or anything using the
|
||||||
|
standardized build backend interface (PEP 517).
|
||||||
|
"""
|
||||||
|
info_location = self.info_location
|
||||||
|
if not info_location:
|
||||||
|
return False
|
||||||
|
if not info_location.endswith(".dist-info"):
|
||||||
|
return False
|
||||||
|
return pathlib.Path(info_location).is_dir()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def canonical_name(self) -> NormalizedName:
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def version(self) -> DistributionVersion:
|
def version(self) -> DistributionVersion:
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def setuptools_filename(self) -> str:
|
||||||
|
"""Convert a project name to its setuptools-compatible filename.
|
||||||
|
|
||||||
|
This is a copy of ``pkg_resources.to_filename()`` for compatibility.
|
||||||
|
"""
|
||||||
|
return self.raw_name.replace("-", "_")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def direct_url(self) -> Optional[DirectUrl]:
|
def direct_url(self) -> Optional[DirectUrl]:
|
||||||
"""Obtain a DirectUrl from this distribution.
|
"""Obtain a DirectUrl from this distribution.
|
||||||
@@ -116,39 +319,102 @@ class BaseDistribution(Protocol):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def installer(self) -> str:
|
def installer(self) -> str:
|
||||||
raise NotImplementedError()
|
try:
|
||||||
|
installer_text = self.read_text("INSTALLER")
|
||||||
|
except (OSError, ValueError, NoneMetadataError):
|
||||||
|
return "" # Fail silently if the installer file cannot be read.
|
||||||
|
for line in installer_text.splitlines():
|
||||||
|
cleaned_line = line.strip()
|
||||||
|
if cleaned_line:
|
||||||
|
return cleaned_line
|
||||||
|
return ""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def requested(self) -> bool:
|
||||||
|
return self.is_file("REQUESTED")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def editable(self) -> bool:
|
def editable(self) -> bool:
|
||||||
raise NotImplementedError()
|
return bool(self.editable_project_location)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def local(self) -> bool:
|
def local(self) -> bool:
|
||||||
raise NotImplementedError()
|
"""If distribution is installed in the current virtual environment.
|
||||||
|
|
||||||
|
Always True if we're not in a virtualenv.
|
||||||
|
"""
|
||||||
|
if self.installed_location is None:
|
||||||
|
return False
|
||||||
|
return is_local(self.installed_location)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def in_usersite(self) -> bool:
|
def in_usersite(self) -> bool:
|
||||||
raise NotImplementedError()
|
if self.installed_location is None or user_site is None:
|
||||||
|
return False
|
||||||
|
return self.installed_location.startswith(normalize_path(user_site))
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def in_site_packages(self) -> bool:
|
def in_site_packages(self) -> bool:
|
||||||
|
if self.installed_location is None or site_packages is None:
|
||||||
|
return False
|
||||||
|
return self.installed_location.startswith(normalize_path(site_packages))
|
||||||
|
|
||||||
|
def is_file(self, path: InfoPath) -> bool:
|
||||||
|
"""Check whether an entry in the info directory is a file."""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def read_text(self, name: str) -> str:
|
def iter_distutils_script_names(self) -> Iterator[str]:
|
||||||
"""Read a file in the .dist-info (or .egg-info) directory.
|
"""Find distutils 'scripts' entries metadata.
|
||||||
|
|
||||||
Should raise ``FileNotFoundError`` if ``name`` does not exist in the
|
If 'scripts' is supplied in ``setup.py``, distutils records those in the
|
||||||
metadata directory.
|
installed distribution's ``scripts`` directory, a file for each script.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def read_text(self, path: InfoPath) -> str:
|
||||||
|
"""Read a file in the info directory.
|
||||||
|
|
||||||
|
:raise FileNotFoundError: If ``path`` does not exist in the directory.
|
||||||
|
:raise NoneMetadataError: If ``path`` exists in the info directory, but
|
||||||
|
cannot be read.
|
||||||
"""
|
"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
|
def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def _metadata_impl(self) -> email.message.Message:
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@functools.lru_cache(maxsize=1)
|
||||||
|
def _metadata_cached(self) -> email.message.Message:
|
||||||
|
# When we drop python 3.7 support, move this to the metadata property and use
|
||||||
|
# functools.cached_property instead of lru_cache.
|
||||||
|
metadata = self._metadata_impl()
|
||||||
|
self._add_egg_info_requires(metadata)
|
||||||
|
return metadata
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def metadata(self) -> email.message.Message:
|
def metadata(self) -> email.message.Message:
|
||||||
"""Metadata of distribution parsed from e.g. METADATA or PKG-INFO."""
|
"""Metadata of distribution parsed from e.g. METADATA or PKG-INFO.
|
||||||
raise NotImplementedError()
|
|
||||||
|
This should return an empty message if the metadata file is unavailable.
|
||||||
|
|
||||||
|
:raises NoneMetadataError: If the metadata file is available, but does
|
||||||
|
not contain valid metadata.
|
||||||
|
"""
|
||||||
|
return self._metadata_cached()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def metadata_dict(self) -> Dict[str, Any]:
|
||||||
|
"""PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO.
|
||||||
|
|
||||||
|
This should return an empty dict if the metadata file is unavailable.
|
||||||
|
|
||||||
|
:raises NoneMetadataError: If the metadata file is available, but does
|
||||||
|
not contain valid metadata.
|
||||||
|
"""
|
||||||
|
return msg_to_json(self.metadata)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def metadata_version(self) -> Optional[str]:
|
def metadata_version(self) -> Optional[str]:
|
||||||
@@ -159,12 +425,159 @@ class BaseDistribution(Protocol):
|
|||||||
def raw_name(self) -> str:
|
def raw_name(self) -> str:
|
||||||
"""Value of "Name:" in distribution metadata."""
|
"""Value of "Name:" in distribution metadata."""
|
||||||
# The metadata should NEVER be missing the Name: key, but if it somehow
|
# The metadata should NEVER be missing the Name: key, but if it somehow
|
||||||
# does not, fall back to the known canonical name.
|
# does, fall back to the known canonical name.
|
||||||
return self.metadata.get("Name", self.canonical_name)
|
return self.metadata.get("Name", self.canonical_name)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def requires_python(self) -> SpecifierSet:
|
||||||
|
"""Value of "Requires-Python:" in distribution metadata.
|
||||||
|
|
||||||
|
If the key does not exist or contains an invalid value, an empty
|
||||||
|
SpecifierSet should be returned.
|
||||||
|
"""
|
||||||
|
value = self.metadata.get("Requires-Python")
|
||||||
|
if value is None:
|
||||||
|
return SpecifierSet()
|
||||||
|
try:
|
||||||
|
# Convert to str to satisfy the type checker; this can be a Header object.
|
||||||
|
spec = SpecifierSet(str(value))
|
||||||
|
except InvalidSpecifier as e:
|
||||||
|
message = "Package %r has an invalid Requires-Python: %s"
|
||||||
|
logger.warning(message, self.raw_name, e)
|
||||||
|
return SpecifierSet()
|
||||||
|
return spec
|
||||||
|
|
||||||
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
|
def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
|
||||||
|
"""Dependencies of this distribution.
|
||||||
|
|
||||||
|
For modern .dist-info distributions, this is the collection of
|
||||||
|
"Requires-Dist:" entries in distribution metadata.
|
||||||
|
"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def iter_provided_extras(self) -> Iterable[str]:
|
||||||
|
"""Extras provided by this distribution.
|
||||||
|
|
||||||
|
For modern .dist-info distributions, this is the collection of
|
||||||
|
"Provides-Extra:" entries in distribution metadata.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
|
||||||
|
try:
|
||||||
|
text = self.read_text("RECORD")
|
||||||
|
except FileNotFoundError:
|
||||||
|
return None
|
||||||
|
# This extra Path-str cast normalizes entries.
|
||||||
|
return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))
|
||||||
|
|
||||||
|
def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
|
||||||
|
try:
|
||||||
|
text = self.read_text("installed-files.txt")
|
||||||
|
except FileNotFoundError:
|
||||||
|
return None
|
||||||
|
paths = (p for p in text.splitlines(keepends=False) if p)
|
||||||
|
root = self.location
|
||||||
|
info = self.info_location
|
||||||
|
if root is None or info is None:
|
||||||
|
return paths
|
||||||
|
try:
|
||||||
|
info_rel = pathlib.Path(info).relative_to(root)
|
||||||
|
except ValueError: # info is not relative to root.
|
||||||
|
return paths
|
||||||
|
if not info_rel.parts: # info *is* root.
|
||||||
|
return paths
|
||||||
|
return (
|
||||||
|
_convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
|
||||||
|
for p in paths
|
||||||
|
)
|
||||||
|
|
||||||
|
def iter_declared_entries(self) -> Optional[Iterator[str]]:
|
||||||
|
"""Iterate through file entries declared in this distribution.
|
||||||
|
|
||||||
|
For modern .dist-info distributions, this is the files listed in the
|
||||||
|
``RECORD`` metadata file. For legacy setuptools distributions, this
|
||||||
|
comes from ``installed-files.txt``, with entries normalized to be
|
||||||
|
compatible with the format used by ``RECORD``.
|
||||||
|
|
||||||
|
:return: An iterator for listed entries, or None if the distribution
|
||||||
|
contains neither ``RECORD`` nor ``installed-files.txt``.
|
||||||
|
"""
|
||||||
|
return (
|
||||||
|
self._iter_declared_entries_from_record()
|
||||||
|
or self._iter_declared_entries_from_legacy()
|
||||||
|
)
|
||||||
|
|
||||||
|
def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]:
|
||||||
|
"""Parse a ``requires.txt`` in an egg-info directory.
|
||||||
|
|
||||||
|
This is an INI-ish format where an egg-info stores dependencies. A
|
||||||
|
section name describes an extra (and, optionally, environment markers), while each entry
|
||||||
|
is an arbitrary string (not a key-value pair) representing a dependency
|
||||||
|
as a requirement string (no markers).
|
||||||
|
|
||||||
|
There is a construct in ``importlib.metadata`` called ``Sectioned`` that
|
||||||
|
does mostly the same, but the format is currently considered private.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
content = self.read_text("requires.txt")
|
||||||
|
except FileNotFoundError:
|
||||||
|
return
|
||||||
|
extra = marker = "" # Section-less entries don't have markers.
|
||||||
|
for line in content.splitlines():
|
||||||
|
line = line.strip()
|
||||||
|
if not line or line.startswith("#"): # Comment; ignored.
|
||||||
|
continue
|
||||||
|
if line.startswith("[") and line.endswith("]"): # A section header.
|
||||||
|
extra, _, marker = line.strip("[]").partition(":")
|
||||||
|
continue
|
||||||
|
yield RequiresEntry(requirement=line, extra=extra, marker=marker)
|
||||||
|
|
||||||
|
def _iter_egg_info_extras(self) -> Iterable[str]:
|
||||||
|
"""Get extras from the egg-info directory."""
|
||||||
|
known_extras = {""}
|
||||||
|
for entry in self._iter_requires_txt_entries():
|
||||||
|
if entry.extra in known_extras:
|
||||||
|
continue
|
||||||
|
known_extras.add(entry.extra)
|
||||||
|
yield entry.extra
|
||||||
|
|
||||||
|
def _iter_egg_info_dependencies(self) -> Iterable[str]:
|
||||||
|
"""Get distribution dependencies from the egg-info directory.
|
||||||
|
|
||||||
|
To ease parsing, this converts a legacy dependency entry into a PEP 508
|
||||||
|
requirement string. Like ``_iter_requires_txt_entries()``, there is code
|
||||||
|
in ``importlib.metadata`` that does mostly the same, but does not do exactly
|
||||||
|
what we need.
|
||||||
|
|
||||||
|
Namely, ``importlib.metadata`` does not normalize the extra name before
|
||||||
|
putting it into the requirement string, which causes marker comparison
|
||||||
|
to fail because the dist-info format does normalize. This is consistent in
|
||||||
|
all currently available PEP 517 backends, although not standardized.
|
||||||
|
"""
|
||||||
|
for entry in self._iter_requires_txt_entries():
|
||||||
|
if entry.extra and entry.marker:
|
||||||
|
marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"'
|
||||||
|
elif entry.extra:
|
||||||
|
marker = f'extra == "{safe_extra(entry.extra)}"'
|
||||||
|
elif entry.marker:
|
||||||
|
marker = entry.marker
|
||||||
|
else:
|
||||||
|
marker = ""
|
||||||
|
if marker:
|
||||||
|
yield f"{entry.requirement} ; {marker}"
|
||||||
|
else:
|
||||||
|
yield entry.requirement
|
||||||
|
|
||||||
|
def _add_egg_info_requires(self, metadata: email.message.Message) -> None:
|
||||||
|
"""Add egg-info requires.txt information to the metadata."""
|
||||||
|
if not metadata.get_all("Requires-Dist"):
|
||||||
|
for dep in self._iter_egg_info_dependencies():
|
||||||
|
metadata["Requires-Dist"] = dep
|
||||||
|
if not metadata.get_all("Provides-Extra"):
|
||||||
|
for extra in self._iter_egg_info_extras():
|
||||||
|
metadata["Provides-Extra"] = extra
|
||||||
|
|
||||||
|
|
||||||
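
Editor's illustration (hypothetical file contents): the requires.txt layout parsed above, and the PEP 508 strings that _iter_egg_info_dependencies() derives from it.

requires_txt = """\
requests>=2.0

[socks]
PySocks!=1.5.7

[test:python_version < "3.10"]
pytest
"""
# parsed RequiresEntry values (requirement, extra, marker):
#   ("requests>=2.0",  "",      "")
#   ("PySocks!=1.5.7", "socks", "")
#   ("pytest",         "test",  'python_version < "3.10"')
# converted to PEP 508 requirement strings:
#   requests>=2.0
#   PySocks!=1.5.7 ; extra == "socks"
#   pytest ; (python_version < "3.10") and extra == "test"
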
class BaseEnvironment:
|
class BaseEnvironment:
|
||||||
"""An environment containing distributions to introspect."""
|
"""An environment containing distributions to introspect."""
|
||||||
@@ -178,7 +591,11 @@ class BaseEnvironment:
|
|||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
|
def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
|
||||||
"""Given a requirement name, return the installed distributions."""
|
"""Given a requirement name, return the installed distributions.
|
||||||
|
|
||||||
|
The name may not be normalized. The implementation must canonicalize
|
||||||
|
it for lookup.
|
||||||
|
"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def _iter_distributions(self) -> Iterator["BaseDistribution"]:
|
def _iter_distributions(self) -> Iterator["BaseDistribution"]:
|
||||||
@@ -190,8 +607,8 @@ class BaseEnvironment:
|
|||||||
"""
|
"""
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def iter_distributions(self) -> Iterator["BaseDistribution"]:
|
def iter_all_distributions(self) -> Iterator[BaseDistribution]:
|
||||||
"""Iterate through installed distributions."""
|
"""Iterate through all installed distributions without any filtering."""
|
||||||
for dist in self._iter_distributions():
|
for dist in self._iter_distributions():
|
||||||
# Make sure the distribution actually comes from a valid Python
|
# Make sure the distribution actually comes from a valid Python
|
||||||
# packaging distribution. Pip's AdjacentTempDirectory leaves folders
|
# packaging distribution. Pip's AdjacentTempDirectory leaves folders
|
||||||
@@ -221,6 +638,11 @@ class BaseEnvironment:
|
|||||||
) -> Iterator[BaseDistribution]:
|
) -> Iterator[BaseDistribution]:
|
||||||
"""Return a list of installed distributions.
|
"""Return a list of installed distributions.
|
||||||
|
|
||||||
|
This is based on ``iter_all_distributions()`` with additional filtering
|
||||||
|
options. Note that ``iter_installed_distributions()`` without arguments
|
||||||
|
is *not* equal to ``iter_all_distributions()``, since some of the
|
||||||
|
configurations exclude packages by default.
|
||||||
|
|
||||||
:param local_only: If True (default), only return installations
|
:param local_only: If True (default), only return installations
|
||||||
local to the current virtualenv, if in a virtualenv.
|
local to the current virtualenv, if in a virtualenv.
|
||||||
:param skip: An iterable of canonicalized project names to ignore;
|
:param skip: An iterable of canonicalized project names to ignore;
|
||||||
@@ -230,7 +652,7 @@ class BaseEnvironment:
|
|||||||
:param user_only: If True, only report installations in the user
|
:param user_only: If True, only report installations in the user
|
||||||
site directory.
|
site directory.
|
||||||
"""
|
"""
|
||||||
it = self.iter_distributions()
|
it = self.iter_all_distributions()
|
||||||
if local_only:
|
if local_only:
|
||||||
it = (d for d in it if d.local)
|
it = (d for d in it if d.local)
|
||||||
if not include_editables:
|
if not include_editables:
|
||||||
@@ -240,3 +662,27 @@ class BaseEnvironment:
|
|||||||
if user_only:
|
if user_only:
|
||||||
it = (d for d in it if d.in_usersite)
|
it = (d for d in it if d.in_usersite)
|
||||||
return (d for d in it if d.canonical_name not in skip)
|
return (d for d in it if d.canonical_name not in skip)
|
||||||
|
|
||||||
|
|
||||||
|
class Wheel(Protocol):
|
||||||
|
location: str
|
||||||
|
|
||||||
|
def as_zipfile(self) -> zipfile.ZipFile:
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
|
class FilesystemWheel(Wheel):
|
||||||
|
def __init__(self, location: str) -> None:
|
||||||
|
self.location = location
|
||||||
|
|
||||||
|
def as_zipfile(self) -> zipfile.ZipFile:
|
||||||
|
return zipfile.ZipFile(self.location, allowZip64=True)
|
||||||
|
|
||||||
|
|
||||||
|
class MemoryWheel(Wheel):
|
||||||
|
def __init__(self, location: str, stream: IO[bytes]) -> None:
|
||||||
|
self.location = location
|
||||||
|
self.stream = stream
|
||||||
|
|
||||||
|
def as_zipfile(self) -> zipfile.ZipFile:
|
||||||
|
return zipfile.ZipFile(self.stream, allowZip64=True)
|
||||||
|
|||||||
@@ -1,29 +1,28 @@
|
|||||||
import email.message
|
import email.message
|
||||||
|
import email.parser
|
||||||
import logging
|
import logging
|
||||||
|
import os
|
||||||
import zipfile
|
import zipfile
|
||||||
from typing import (
|
from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional
|
||||||
TYPE_CHECKING,
|
|
||||||
Collection,
|
|
||||||
Iterable,
|
|
||||||
Iterator,
|
|
||||||
List,
|
|
||||||
NamedTuple,
|
|
||||||
Optional,
|
|
||||||
)
|
|
||||||
|
|
||||||
from pip._vendor import pkg_resources
|
from pip._vendor import pkg_resources
|
||||||
from pip._vendor.packaging.requirements import Requirement
|
from pip._vendor.packaging.requirements import Requirement
|
||||||
from pip._vendor.packaging.utils import canonicalize_name
|
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||||
from pip._vendor.packaging.version import parse as parse_version
|
from pip._vendor.packaging.version import parse as parse_version
|
||||||
|
|
||||||
from pip._internal.utils import misc # TODO: Move definition here.
|
from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel
|
||||||
from pip._internal.utils.packaging import get_installer, get_metadata
|
from pip._internal.utils.egg_link import egg_link_path_from_location
|
||||||
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
|
from pip._internal.utils.misc import display_path, normalize_path
|
||||||
|
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
|
||||||
|
|
||||||
from .base import BaseDistribution, BaseEntryPoint, BaseEnvironment, DistributionVersion
|
from .base import (
|
||||||
|
BaseDistribution,
|
||||||
if TYPE_CHECKING:
|
BaseEntryPoint,
|
||||||
from pip._vendor.packaging.utils import NormalizedName
|
BaseEnvironment,
|
||||||
|
DistributionVersion,
|
||||||
|
InfoPath,
|
||||||
|
Wheel,
|
||||||
|
)
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -34,14 +33,101 @@ class EntryPoint(NamedTuple):
|
|||||||
group: str
|
group: str
|
||||||
|
|
||||||
|
|
||||||
|
class InMemoryMetadata:
|
||||||
|
"""IMetadataProvider that reads metadata files from a dictionary.
|
||||||
|
|
||||||
|
This also maps metadata decoding exceptions to our internal exception type.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None:
|
||||||
|
self._metadata = metadata
|
||||||
|
self._wheel_name = wheel_name
|
||||||
|
|
||||||
|
def has_metadata(self, name: str) -> bool:
|
||||||
|
return name in self._metadata
|
||||||
|
|
||||||
|
def get_metadata(self, name: str) -> str:
|
||||||
|
try:
|
||||||
|
return self._metadata[name].decode()
|
||||||
|
except UnicodeDecodeError as e:
|
||||||
|
# Augment the default error with the origin of the file.
|
||||||
|
raise UnsupportedWheel(
|
||||||
|
f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_metadata_lines(self, name: str) -> Iterable[str]:
|
||||||
|
return pkg_resources.yield_lines(self.get_metadata(name))
|
||||||
|
|
||||||
|
def metadata_isdir(self, name: str) -> bool:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def metadata_listdir(self, name: str) -> List[str]:
|
||||||
|
return []
|
||||||
|
|
||||||
|
def run_script(self, script_name: str, namespace: str) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class Distribution(BaseDistribution):
|
class Distribution(BaseDistribution):
|
||||||
def __init__(self, dist: pkg_resources.Distribution) -> None:
|
def __init__(self, dist: pkg_resources.Distribution) -> None:
|
||||||
self._dist = dist
|
self._dist = dist
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_wheel(cls, path: str, name: str) -> "Distribution":
|
def from_directory(cls, directory: str) -> BaseDistribution:
|
||||||
with zipfile.ZipFile(path, allowZip64=True) as zf:
|
dist_dir = directory.rstrip(os.sep)
|
||||||
dist = pkg_resources_distribution_for_wheel(zf, name, path)
|
|
||||||
|
# Build a PathMetadata object, from path to metadata. :wink:
|
||||||
|
base_dir, dist_dir_name = os.path.split(dist_dir)
|
||||||
|
metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
|
||||||
|
|
||||||
|
# Determine the correct Distribution object type.
|
||||||
|
if dist_dir.endswith(".egg-info"):
|
||||||
|
dist_cls = pkg_resources.Distribution
|
||||||
|
dist_name = os.path.splitext(dist_dir_name)[0]
|
||||||
|
else:
|
||||||
|
assert dist_dir.endswith(".dist-info")
|
||||||
|
dist_cls = pkg_resources.DistInfoDistribution
|
||||||
|
dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
|
||||||
|
|
||||||
|
dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
|
||||||
|
return cls(dist)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_metadata_file_contents(
|
||||||
|
cls,
|
||||||
|
metadata_contents: bytes,
|
||||||
|
filename: str,
|
||||||
|
project_name: str,
|
||||||
|
) -> BaseDistribution:
|
||||||
|
metadata_dict = {
|
||||||
|
"METADATA": metadata_contents,
|
||||||
|
}
|
||||||
|
dist = pkg_resources.DistInfoDistribution(
|
||||||
|
location=filename,
|
||||||
|
metadata=InMemoryMetadata(metadata_dict, filename),
|
||||||
|
project_name=project_name,
|
||||||
|
)
|
||||||
|
return cls(dist)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
|
||||||
|
try:
|
||||||
|
with wheel.as_zipfile() as zf:
|
||||||
|
info_dir, _ = parse_wheel(zf, name)
|
||||||
|
metadata_dict = {
|
||||||
|
path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
|
||||||
|
for path in zf.namelist()
|
||||||
|
if path.startswith(f"{info_dir}/")
|
||||||
|
}
|
||||||
|
except zipfile.BadZipFile as e:
|
||||||
|
raise InvalidWheel(wheel.location, name) from e
|
||||||
|
except UnsupportedWheel as e:
|
||||||
|
raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
|
||||||
|
dist = pkg_resources.DistInfoDistribution(
|
||||||
|
location=wheel.location,
|
||||||
|
metadata=InMemoryMetadata(metadata_dict, wheel.location),
|
||||||
|
project_name=name,
|
||||||
|
)
|
||||||
return cls(dist)
|
return cls(dist)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -49,41 +135,52 @@ class Distribution(BaseDistribution):
|
|||||||
return self._dist.location
|
return self._dist.location
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def info_directory(self) -> Optional[str]:
|
def installed_location(self) -> Optional[str]:
|
||||||
|
egg_link = egg_link_path_from_location(self.raw_name)
|
||||||
|
if egg_link:
|
||||||
|
location = egg_link
|
||||||
|
elif self.location:
|
||||||
|
location = self.location
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
return normalize_path(location)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def info_location(self) -> Optional[str]:
|
||||||
return self._dist.egg_info
|
return self._dist.egg_info
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def canonical_name(self) -> "NormalizedName":
|
def installed_by_distutils(self) -> bool:
|
||||||
|
# A distutils-installed distribution is provided by FileMetadata. This
|
||||||
|
# provider has a "path" attribute not present anywhere else. Not the
|
||||||
|
# best introspection logic, but pip has been doing this for a long time.
|
||||||
|
try:
|
||||||
|
return bool(self._dist._provider.path)
|
||||||
|
except AttributeError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def canonical_name(self) -> NormalizedName:
|
||||||
return canonicalize_name(self._dist.project_name)
|
return canonicalize_name(self._dist.project_name)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def version(self) -> DistributionVersion:
|
def version(self) -> DistributionVersion:
|
||||||
return parse_version(self._dist.version)
|
return parse_version(self._dist.version)
|
||||||
|
|
||||||
@property
|
def is_file(self, path: InfoPath) -> bool:
|
||||||
def installer(self) -> str:
|
return self._dist.has_metadata(str(path))
|
||||||
return get_installer(self._dist)
|
|
||||||
|
|
||||||
@property
|
def iter_distutils_script_names(self) -> Iterator[str]:
|
||||||
def editable(self) -> bool:
|
yield from self._dist.metadata_listdir("scripts")
|
||||||
return misc.dist_is_editable(self._dist)
|
|
||||||
|
|
||||||
@property
|
def read_text(self, path: InfoPath) -> str:
|
||||||
def local(self) -> bool:
|
name = str(path)
|
||||||
return misc.dist_is_local(self._dist)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def in_usersite(self) -> bool:
|
|
||||||
return misc.dist_in_usersite(self._dist)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def in_site_packages(self) -> bool:
|
|
||||||
return misc.dist_in_site_packages(self._dist)
|
|
||||||
|
|
||||||
def read_text(self, name: str) -> str:
|
|
||||||
if not self._dist.has_metadata(name):
|
if not self._dist.has_metadata(name):
|
||||||
raise FileNotFoundError(name)
|
raise FileNotFoundError(name)
|
||||||
-        return self._dist.get_metadata(name)
+        content = self._dist.get_metadata(name)
+        if content is None:
+            raise NoneMetadataError(self, name)
+        return content
 
     def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
         for group, entries in self._dist.get_entry_map().items():
@@ -91,15 +188,36 @@ class Distribution(BaseDistribution):
                 name, _, value = str(entry_point).partition("=")
                 yield EntryPoint(name=name.strip(), value=value.strip(), group=group)
 
-    @property
-    def metadata(self) -> email.message.Message:
-        return get_metadata(self._dist)
+    def _metadata_impl(self) -> email.message.Message:
+        """
+        :raises NoneMetadataError: if the distribution reports `has_metadata()`
+            True but `get_metadata()` returns None.
+        """
+        if isinstance(self._dist, pkg_resources.DistInfoDistribution):
+            metadata_name = "METADATA"
+        else:
+            metadata_name = "PKG-INFO"
+        try:
+            metadata = self.read_text(metadata_name)
+        except FileNotFoundError:
+            if self.location:
+                displaying_path = display_path(self.location)
+            else:
+                displaying_path = repr(self.location)
+            logger.warning("No metadata found in %s", displaying_path)
+            metadata = ""
+        feed_parser = email.parser.FeedParser()
+        feed_parser.feed(metadata)
+        return feed_parser.close()
 
     def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
         if extras:  # pkg_resources raises on invalid extras, so we sanitize.
             extras = frozenset(extras).intersection(self._dist.extras)
         return self._dist.requires(extras)
 
+    def iter_provided_extras(self) -> Iterable[str]:
+        return self._dist.extras
+
 
 class Environment(BaseEnvironment):
     def __init__(self, ws: pkg_resources.WorkingSet) -> None:
@@ -113,6 +231,10 @@ class Environment(BaseEnvironment):
     def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
         return cls(pkg_resources.WorkingSet(paths))
 
+    def _iter_distributions(self) -> Iterator[BaseDistribution]:
+        for dist in self._ws:
+            yield Distribution(dist)
+
     def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
         """Find a distribution matching the ``name`` in the environment.
 
@@ -120,13 +242,12 @@ class Environment(BaseEnvironment):
         match the behavior of ``pkg_resources.get_distribution()``.
         """
         canonical_name = canonicalize_name(name)
-        for dist in self.iter_distributions():
+        for dist in self.iter_all_distributions():
             if dist.canonical_name == canonical_name:
                 return dist
         return None
 
     def get_distribution(self, name: str) -> Optional[BaseDistribution]:
-
         # Search the distribution by looking through the working set.
         dist = self._search_distribution(name)
         if dist:
@@ -147,7 +268,3 @@ class Environment(BaseEnvironment):
         except pkg_resources.DistributionNotFound:
             return None
         return self._search_distribution(name)
-
-    def _iter_distributions(self) -> Iterator[BaseDistribution]:
-        for dist in self._ws:
-            yield Distribution(dist)
@@ -5,8 +5,7 @@ from pip._internal.utils.models import KeyBasedCompareMixin
 
 
 class InstallationCandidate(KeyBasedCompareMixin):
-    """Represents a potential "candidate" for installation.
-    """
+    """Represents a potential "candidate" for installation."""
 
     __slots__ = ["name", "version", "link"]
 
@@ -17,15 +16,19 @@ class InstallationCandidate(KeyBasedCompareMixin):
 
         super().__init__(
             key=(self.name, self.version, self.link),
-            defining_class=InstallationCandidate
+            defining_class=InstallationCandidate,
         )
 
     def __repr__(self) -> str:
         return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
-            self.name, self.version, self.link,
+            self.name,
+            self.version,
+            self.link,
         )
 
     def __str__(self) -> str:
-        return '{!r} candidate (version {} at {})'.format(
-            self.name, self.version, self.link,
+        return "{!r} candidate (version {} at {})".format(
+            self.name,
+            self.version,
+            self.link,
         )
@@ -74,14 +74,10 @@ class VcsInfo:
         vcs: str,
         commit_id: str,
         requested_revision: Optional[str] = None,
-        resolved_revision: Optional[str] = None,
-        resolved_revision_type: Optional[str] = None,
     ) -> None:
         self.vcs = vcs
         self.requested_revision = requested_revision
         self.commit_id = commit_id
-        self.resolved_revision = resolved_revision
-        self.resolved_revision_type = resolved_revision_type
 
     @classmethod
     def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
@@ -91,8 +87,6 @@ class VcsInfo:
             vcs=_get_required(d, str, "vcs"),
             commit_id=_get_required(d, str, "commit_id"),
             requested_revision=_get(d, str, "requested_revision"),
-            resolved_revision=_get(d, str, "resolved_revision"),
-            resolved_revision_type=_get(d, str, "resolved_revision_type"),
         )
 
     def _to_dict(self) -> Dict[str, Any]:
@@ -100,8 +94,6 @@ class VcsInfo:
             vcs=self.vcs,
             requested_revision=self.requested_revision,
             commit_id=self.commit_id,
-            resolved_revision=self.resolved_revision,
-            resolved_revision_type=self.resolved_revision_type,
         )
 
 
@@ -111,17 +103,42 @@ class ArchiveInfo:
     def __init__(
         self,
         hash: Optional[str] = None,
+        hashes: Optional[Dict[str, str]] = None,
     ) -> None:
+        # set hashes before hash, since the hash setter will further populate hashes
+        self.hashes = hashes
         self.hash = hash
 
+    @property
+    def hash(self) -> Optional[str]:
+        return self._hash
+
+    @hash.setter
+    def hash(self, value: Optional[str]) -> None:
+        if value is not None:
+            # Auto-populate the hashes key to upgrade to the new format automatically.
+            # We don't back-populate the legacy hash key from hashes.
+            try:
+                hash_name, hash_value = value.split("=", 1)
+            except ValueError:
+                raise DirectUrlValidationError(
+                    f"invalid archive_info.hash format: {value!r}"
+                )
+            if self.hashes is None:
+                self.hashes = {hash_name: hash_value}
+            elif hash_name not in self.hashes:
+                self.hashes = self.hashes.copy()
+                self.hashes[hash_name] = hash_value
+        self._hash = value
+
     @classmethod
     def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
         if d is None:
             return None
-        return cls(hash=_get(d, str, "hash"))
+        return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))
 
     def _to_dict(self) -> Dict[str, Any]:
-        return _filter_none(hash=self.hash)
+        return _filter_none(hash=self.hash, hashes=self.hashes)
 
 
 class DirInfo:
@@ -137,9 +154,7 @@ class DirInfo:
     def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
         if d is None:
             return None
-        return cls(
-            editable=_get_required(d, bool, "editable", default=False)
-        )
+        return cls(editable=_get_required(d, bool, "editable", default=False))
 
     def _to_dict(self) -> Dict[str, Any]:
         return _filter_none(editable=self.editable or None)
@@ -149,7 +164,6 @@ InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
 
 
 class DirectUrl:
-
     def __init__(
         self,
         url: str,
@@ -165,9 +179,9 @@ class DirectUrl:
             return netloc
         user_pass, netloc_no_user_pass = netloc.split("@", 1)
         if (
-            isinstance(self.info, VcsInfo) and
-            self.info.vcs == "git" and
-            user_pass == "git"
+            isinstance(self.info, VcsInfo)
+            and self.info.vcs == "git"
+            and user_pass == "git"
         ):
             return netloc
         if ENV_VAR_RE.match(user_pass):
@@ -218,3 +232,6 @@ class DirectUrl:
 
     def to_json(self) -> str:
         return json.dumps(self.to_dict(), sort_keys=True)
+
+    def is_local_editable(self) -> bool:
+        return isinstance(self.info, DirInfo) and self.info.editable
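For context on the ArchiveInfo hunk above: the new hash setter accepts the legacy "<name>=<hexdigest>" string and folds it into the hashes mapping so older callers keep working, while hashes alone carries the multi-hash PEP 610 form. A minimal standalone sketch of that merge logic (illustrative only, not part of the vendored code; the helper name is made up):

from typing import Dict, Optional

def merge_legacy_hash(
    value: Optional[str], hashes: Optional[Dict[str, str]]
) -> Optional[Dict[str, str]]:
    # Split "sha256=<hex>" and add it to the mapping without overwriting
    # an entry that is already present (mirrors the setter in the diff).
    if value is None:
        return hashes
    try:
        hash_name, hash_value = value.split("=", 1)
    except ValueError:
        raise ValueError(f"invalid archive_info.hash format: {value!r}")
    if hashes is None:
        return {hash_name: hash_value}
    if hash_name not in hashes:
        hashes = {**hashes, hash_name: hash_value}
    return hashes

# merge_legacy_hash("sha256=deadbeef", None) == {"sha256": "deadbeef"}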
@@ -6,15 +6,14 @@ from pip._internal.exceptions import CommandError
 
 
 class FormatControl:
-    """Helper for managing formats from which a package can be installed.
-    """
+    """Helper for managing formats from which a package can be installed."""
 
     __slots__ = ["no_binary", "only_binary"]
 
     def __init__(
         self,
         no_binary: Optional[Set[str]] = None,
-        only_binary: Optional[Set[str]] = None
+        only_binary: Optional[Set[str]] = None,
     ) -> None:
         if no_binary is None:
             no_binary = set()
@@ -31,35 +30,30 @@ class FormatControl:
         if self.__slots__ != other.__slots__:
             return False
 
-        return all(
-            getattr(self, k) == getattr(other, k)
-            for k in self.__slots__
-        )
+        return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
 
     def __repr__(self) -> str:
         return "{}({}, {})".format(
-            self.__class__.__name__,
-            self.no_binary,
-            self.only_binary
+            self.__class__.__name__, self.no_binary, self.only_binary
         )
 
     @staticmethod
     def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
-        if value.startswith('-'):
+        if value.startswith("-"):
            raise CommandError(
                 "--no-binary / --only-binary option requires 1 argument."
             )
-        new = value.split(',')
-        while ':all:' in new:
+        new = value.split(",")
+        while ":all:" in new:
             other.clear()
             target.clear()
-            target.add(':all:')
-            del new[:new.index(':all:') + 1]
+            target.add(":all:")
+            del new[: new.index(":all:") + 1]
             # Without a none, we want to discard everything as :all: covers it
-            if ':none:' not in new:
+            if ":none:" not in new:
                 return
         for name in new:
-            if name == ':none:':
+            if name == ":none:":
                 target.clear()
                 continue
             name = canonicalize_name(name)
@@ -69,16 +63,18 @@ class FormatControl:
     def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
         result = {"binary", "source"}
         if canonical_name in self.only_binary:
-            result.discard('source')
+            result.discard("source")
         elif canonical_name in self.no_binary:
-            result.discard('binary')
-        elif ':all:' in self.only_binary:
-            result.discard('source')
-        elif ':all:' in self.no_binary:
-            result.discard('binary')
+            result.discard("binary")
+        elif ":all:" in self.only_binary:
+            result.discard("source")
+        elif ":all:" in self.no_binary:
+            result.discard("binary")
         return frozenset(result)
 
     def disallow_binaries(self) -> None:
         self.handle_mutual_excludes(
-            ':all:', self.no_binary, self.only_binary,
+            ":all:",
+            self.no_binary,
+            self.only_binary,
         )
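For reference, handle_mutual_excludes above is what turns --no-binary / --only-binary values into the two exclusion sets. A rough usage sketch, assuming the class is importable from pip._internal.models.format_control as in this vendored layout:

from pip._internal.models.format_control import FormatControl

fc = FormatControl(no_binary=set(), only_binary=set())
# ":all:" clears the opposite set and pins this one to everything.
FormatControl.handle_mutual_excludes(":all:", fc.no_binary, fc.only_binary)
assert fc.no_binary == {":all:"} and fc.only_binary == set()
# With ":all:" in no_binary, only source distributions remain allowed.
assert fc.get_allowed_formats("django") == frozenset({"source"})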
@@ -2,18 +2,16 @@ import urllib.parse
 
 
 class PackageIndex:
-    """Represents a Package Index and provides easier access to endpoints
-    """
+    """Represents a Package Index and provides easier access to endpoints"""
 
-    __slots__ = ['url', 'netloc', 'simple_url', 'pypi_url',
-                 'file_storage_domain']
+    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
 
     def __init__(self, url: str, file_storage_domain: str) -> None:
         super().__init__()
         self.url = url
         self.netloc = urllib.parse.urlsplit(url).netloc
-        self.simple_url = self._url_for_path('simple')
-        self.pypi_url = self._url_for_path('pypi')
+        self.simple_url = self._url_for_path("simple")
+        self.pypi_url = self._url_for_path("pypi")
 
         # This is part of a temporary hack used to block installs of PyPI
         # packages which depend on external urls only necessary until PyPI can
@@ -24,9 +22,7 @@ class PackageIndex:
         return urllib.parse.urljoin(self.url, path)
 
 
-PyPI = PackageIndex(
-    'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
-)
+PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
 TestPyPI = PackageIndex(
-    'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
+    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
 )
@@ -1,14 +1,28 @@
|
|||||||
import functools
|
import functools
|
||||||
|
import itertools
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import posixpath
|
import posixpath
|
||||||
import re
|
import re
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
from typing import TYPE_CHECKING, Dict, List, NamedTuple, Optional, Tuple, Union
|
from dataclasses import dataclass
|
||||||
|
from typing import (
|
||||||
|
TYPE_CHECKING,
|
||||||
|
Any,
|
||||||
|
Dict,
|
||||||
|
List,
|
||||||
|
Mapping,
|
||||||
|
NamedTuple,
|
||||||
|
Optional,
|
||||||
|
Tuple,
|
||||||
|
Union,
|
||||||
|
)
|
||||||
|
|
||||||
|
from pip._internal.utils.deprecation import deprecated
|
||||||
from pip._internal.utils.filetypes import WHEEL_EXTENSION
|
from pip._internal.utils.filetypes import WHEEL_EXTENSION
|
||||||
from pip._internal.utils.hashes import Hashes
|
from pip._internal.utils.hashes import Hashes
|
||||||
from pip._internal.utils.misc import (
|
from pip._internal.utils.misc import (
|
||||||
|
pairwise,
|
||||||
redact_auth_from_url,
|
redact_auth_from_url,
|
||||||
split_auth_from_netloc,
|
split_auth_from_netloc,
|
||||||
splitext,
|
splitext,
|
||||||
@@ -17,38 +31,172 @@ from pip._internal.utils.models import KeyBasedCompareMixin
|
|||||||
from pip._internal.utils.urls import path_to_url, url_to_path
|
from pip._internal.utils.urls import path_to_url, url_to_path
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from pip._internal.index.collector import HTMLPage
|
from pip._internal.index.collector import IndexContent
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
_SUPPORTED_HASHES = ("sha1", "sha224", "sha384", "sha256", "sha512", "md5")
|
# Order matters, earlier hashes have a precedence over later hashes for what
|
||||||
|
# we will pick to use.
|
||||||
|
_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class LinkHash:
|
||||||
|
"""Links to content may have embedded hash values. This class parses those.
|
||||||
|
|
||||||
|
`name` must be any member of `_SUPPORTED_HASHES`.
|
||||||
|
|
||||||
|
This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
|
||||||
|
be JSON-serializable to conform to PEP 610, this class contains the logic for
|
||||||
|
parsing a hash name and value for correctness, and then checking whether that hash
|
||||||
|
conforms to a schema with `.is_hash_allowed()`."""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
value: str
|
||||||
|
|
||||||
|
_hash_url_fragment_re = re.compile(
|
||||||
|
# NB: we do not validate that the second group (.*) is a valid hex
|
||||||
|
# digest. Instead, we simply keep that string in this class, and then check it
|
||||||
|
# against Hashes when hash-checking is needed. This is easier to debug than
|
||||||
|
# proactively discarding an invalid hex digest, as we handle incorrect hashes
|
||||||
|
# and malformed hashes in the same place.
|
||||||
|
r"[#&]({choices})=([^&]*)".format(
|
||||||
|
choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def __post_init__(self) -> None:
|
||||||
|
assert self.name in _SUPPORTED_HASHES
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]:
|
||||||
|
"""Parse a PEP 658 data-dist-info-metadata hash."""
|
||||||
|
if dist_info_metadata == "true":
|
||||||
|
return None
|
||||||
|
name, sep, value = dist_info_metadata.partition("=")
|
||||||
|
if not sep:
|
||||||
|
return None
|
||||||
|
if name not in _SUPPORTED_HASHES:
|
||||||
|
return None
|
||||||
|
return cls(name=name, value=value)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@functools.lru_cache(maxsize=None)
|
||||||
|
def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
|
||||||
|
"""Search a string for a checksum algorithm name and encoded output value."""
|
||||||
|
match = cls._hash_url_fragment_re.search(url)
|
||||||
|
if match is None:
|
||||||
|
return None
|
||||||
|
name, value = match.groups()
|
||||||
|
return cls(name=name, value=value)
|
||||||
|
|
||||||
|
def as_dict(self) -> Dict[str, str]:
|
||||||
|
return {self.name: self.value}
|
||||||
|
|
||||||
|
def as_hashes(self) -> Hashes:
|
||||||
|
"""Return a Hashes instance which checks only for the current hash."""
|
||||||
|
return Hashes({self.name: [self.value]})
|
||||||
|
|
||||||
|
def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
|
||||||
|
"""
|
||||||
|
Return True if the current hash is allowed by `hashes`.
|
||||||
|
"""
|
||||||
|
if hashes is None:
|
||||||
|
return False
|
||||||
|
return hashes.is_hash_allowed(self.name, hex_digest=self.value)
|
||||||
|
|
||||||
|
|
||||||
|
def _clean_url_path_part(part: str) -> str:
|
||||||
|
"""
|
||||||
|
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
|
||||||
|
"""
|
||||||
|
# We unquote prior to quoting to make sure nothing is double quoted.
|
||||||
|
return urllib.parse.quote(urllib.parse.unquote(part))
|
||||||
|
|
||||||
|
|
||||||
|
def _clean_file_url_path(part: str) -> str:
|
||||||
|
"""
|
||||||
|
Clean the first part of a URL path that corresponds to a local
|
||||||
|
filesystem path (i.e. the first part after splitting on "@" characters).
|
||||||
|
"""
|
||||||
|
# We unquote prior to quoting to make sure nothing is double quoted.
|
||||||
|
# Also, on Windows the path part might contain a drive letter which
|
||||||
|
# should not be quoted. On Linux where drive letters do not
|
||||||
|
# exist, the colon should be quoted. We rely on urllib.request
|
||||||
|
# to do the right thing here.
|
||||||
|
return urllib.request.pathname2url(urllib.request.url2pathname(part))
|
||||||
|
|
||||||
|
|
||||||
|
# percent-encoded: /
|
||||||
|
_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
|
||||||
|
|
||||||
|
|
||||||
|
def _clean_url_path(path: str, is_local_path: bool) -> str:
|
||||||
|
"""
|
||||||
|
Clean the path portion of a URL.
|
||||||
|
"""
|
||||||
|
if is_local_path:
|
||||||
|
clean_func = _clean_file_url_path
|
||||||
|
else:
|
||||||
|
clean_func = _clean_url_path_part
|
||||||
|
|
||||||
|
# Split on the reserved characters prior to cleaning so that
|
||||||
|
# revision strings in VCS URLs are properly preserved.
|
||||||
|
parts = _reserved_chars_re.split(path)
|
||||||
|
|
||||||
|
cleaned_parts = []
|
||||||
|
for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
|
||||||
|
cleaned_parts.append(clean_func(to_clean))
|
||||||
|
# Normalize %xx escapes (e.g. %2f -> %2F)
|
||||||
|
cleaned_parts.append(reserved.upper())
|
||||||
|
|
||||||
|
return "".join(cleaned_parts)
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_quoted_url(url: str) -> str:
|
||||||
|
"""
|
||||||
|
Make sure a link is fully quoted.
|
||||||
|
For example, if ' ' occurs in the URL, it will be replaced with "%20",
|
||||||
|
and without double-quoting other characters.
|
||||||
|
"""
|
||||||
|
# Split the URL into parts according to the general structure
|
||||||
|
# `scheme://netloc/path;parameters?query#fragment`.
|
||||||
|
result = urllib.parse.urlparse(url)
|
||||||
|
# If the netloc is empty, then the URL refers to a local filesystem path.
|
||||||
|
is_local_path = not result.netloc
|
||||||
|
path = _clean_url_path(result.path, is_local_path=is_local_path)
|
||||||
|
return urllib.parse.urlunparse(result._replace(path=path))
|
||||||
|
|
||||||
|
|
||||||
class Link(KeyBasedCompareMixin):
|
class Link(KeyBasedCompareMixin):
|
||||||
"""Represents a parsed link from a Package Index's simple URL
|
"""Represents a parsed link from a Package Index's simple URL"""
|
||||||
"""
|
|
||||||
|
|
||||||
__slots__ = [
|
__slots__ = [
|
||||||
"_parsed_url",
|
"_parsed_url",
|
||||||
"_url",
|
"_url",
|
||||||
|
"_hashes",
|
||||||
"comes_from",
|
"comes_from",
|
||||||
"requires_python",
|
"requires_python",
|
||||||
"yanked_reason",
|
"yanked_reason",
|
||||||
|
"dist_info_metadata",
|
||||||
"cache_link_parsing",
|
"cache_link_parsing",
|
||||||
|
"egg_fragment",
|
||||||
]
|
]
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
url: str,
|
url: str,
|
||||||
comes_from: Optional[Union[str, "HTMLPage"]] = None,
|
comes_from: Optional[Union[str, "IndexContent"]] = None,
|
||||||
requires_python: Optional[str] = None,
|
requires_python: Optional[str] = None,
|
||||||
yanked_reason: Optional[str] = None,
|
yanked_reason: Optional[str] = None,
|
||||||
|
dist_info_metadata: Optional[str] = None,
|
||||||
cache_link_parsing: bool = True,
|
cache_link_parsing: bool = True,
|
||||||
|
hashes: Optional[Mapping[str, str]] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
"""
|
||||||
:param url: url of the resource pointed to (href of the link)
|
:param url: url of the resource pointed to (href of the link)
|
||||||
:param comes_from: instance of HTMLPage where the link was found,
|
:param comes_from: instance of IndexContent where the link was found,
|
||||||
or string.
|
or string.
|
||||||
:param requires_python: String containing the `Requires-Python`
|
:param requires_python: String containing the `Requires-Python`
|
||||||
metadata field, specified in PEP 345. This may be specified by
|
metadata field, specified in PEP 345. This may be specified by
|
||||||
@@ -60,15 +208,20 @@ class Link(KeyBasedCompareMixin):
|
|||||||
a simple repository HTML link. If the file has been yanked but
|
a simple repository HTML link. If the file has been yanked but
|
||||||
no reason was provided, this should be the empty string. See
|
no reason was provided, this should be the empty string. See
|
||||||
PEP 592 for more information and the specification.
|
PEP 592 for more information and the specification.
|
||||||
|
:param dist_info_metadata: the metadata attached to the file, or None if no such
|
||||||
|
metadata is provided. This is the value of the "data-dist-info-metadata"
|
||||||
|
attribute, if present, in a simple repository HTML link. This may be parsed
|
||||||
|
into its own `Link` by `self.metadata_link()`. See PEP 658 for more
|
||||||
|
information and the specification.
|
||||||
:param cache_link_parsing: A flag that is used elsewhere to determine
|
:param cache_link_parsing: A flag that is used elsewhere to determine
|
||||||
whether resources retrieved from this link
|
whether resources retrieved from this link should be cached. PyPI
|
||||||
should be cached. PyPI index urls should
|
URLs should generally have this set to False, for example.
|
||||||
generally have this set to False, for
|
:param hashes: A mapping of hash names to digests to allow us to
|
||||||
example.
|
determine the validity of a download.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# url can be a UNC windows share
|
# url can be a UNC windows share
|
||||||
if url.startswith('\\\\'):
|
if url.startswith("\\\\"):
|
||||||
url = path_to_url(url)
|
url = path_to_url(url)
|
||||||
|
|
||||||
self._parsed_url = urllib.parse.urlsplit(url)
|
self._parsed_url = urllib.parse.urlsplit(url)
|
||||||
@@ -76,27 +229,99 @@ class Link(KeyBasedCompareMixin):
|
|||||||
# trying to set a new value.
|
# trying to set a new value.
|
||||||
self._url = url
|
self._url = url
|
||||||
|
|
||||||
|
link_hash = LinkHash.find_hash_url_fragment(url)
|
||||||
|
hashes_from_link = {} if link_hash is None else link_hash.as_dict()
|
||||||
|
if hashes is None:
|
||||||
|
self._hashes = hashes_from_link
|
||||||
|
else:
|
||||||
|
self._hashes = {**hashes, **hashes_from_link}
|
||||||
|
|
||||||
self.comes_from = comes_from
|
self.comes_from = comes_from
|
||||||
self.requires_python = requires_python if requires_python else None
|
self.requires_python = requires_python if requires_python else None
|
||||||
self.yanked_reason = yanked_reason
|
self.yanked_reason = yanked_reason
|
||||||
|
self.dist_info_metadata = dist_info_metadata
|
||||||
|
|
||||||
super().__init__(key=url, defining_class=Link)
|
super().__init__(key=url, defining_class=Link)
|
||||||
|
|
||||||
self.cache_link_parsing = cache_link_parsing
|
self.cache_link_parsing = cache_link_parsing
|
||||||
|
self.egg_fragment = self._egg_fragment()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_json(
|
||||||
|
cls,
|
||||||
|
file_data: Dict[str, Any],
|
||||||
|
page_url: str,
|
||||||
|
) -> Optional["Link"]:
|
||||||
|
"""
|
||||||
|
Convert an pypi json document from a simple repository page into a Link.
|
||||||
|
"""
|
||||||
|
file_url = file_data.get("url")
|
||||||
|
if file_url is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
|
||||||
|
pyrequire = file_data.get("requires-python")
|
||||||
|
yanked_reason = file_data.get("yanked")
|
||||||
|
dist_info_metadata = file_data.get("dist-info-metadata")
|
||||||
|
hashes = file_data.get("hashes", {})
|
||||||
|
|
||||||
|
# The Link.yanked_reason expects an empty string instead of a boolean.
|
||||||
|
if yanked_reason and not isinstance(yanked_reason, str):
|
||||||
|
yanked_reason = ""
|
||||||
|
# The Link.yanked_reason expects None instead of False.
|
||||||
|
elif not yanked_reason:
|
||||||
|
yanked_reason = None
|
||||||
|
|
||||||
|
return cls(
|
||||||
|
url,
|
||||||
|
comes_from=page_url,
|
||||||
|
requires_python=pyrequire,
|
||||||
|
yanked_reason=yanked_reason,
|
||||||
|
hashes=hashes,
|
||||||
|
dist_info_metadata=dist_info_metadata,
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_element(
|
||||||
|
cls,
|
||||||
|
anchor_attribs: Dict[str, Optional[str]],
|
||||||
|
page_url: str,
|
||||||
|
base_url: str,
|
||||||
|
) -> Optional["Link"]:
|
||||||
|
"""
|
||||||
|
Convert an anchor element's attributes in a simple repository page to a Link.
|
||||||
|
"""
|
||||||
|
href = anchor_attribs.get("href")
|
||||||
|
if not href:
|
||||||
|
return None
|
||||||
|
|
||||||
|
url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
|
||||||
|
pyrequire = anchor_attribs.get("data-requires-python")
|
||||||
|
yanked_reason = anchor_attribs.get("data-yanked")
|
||||||
|
dist_info_metadata = anchor_attribs.get("data-dist-info-metadata")
|
||||||
|
|
||||||
|
return cls(
|
||||||
|
url,
|
||||||
|
comes_from=page_url,
|
||||||
|
requires_python=pyrequire,
|
||||||
|
yanked_reason=yanked_reason,
|
||||||
|
dist_info_metadata=dist_info_metadata,
|
||||||
|
)
|
||||||
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
if self.requires_python:
|
if self.requires_python:
|
||||||
rp = f' (requires-python:{self.requires_python})'
|
rp = f" (requires-python:{self.requires_python})"
|
||||||
else:
|
else:
|
||||||
rp = ''
|
rp = ""
|
||||||
if self.comes_from:
|
if self.comes_from:
|
||||||
return '{} (from {}){}'.format(
|
return "{} (from {}){}".format(
|
||||||
redact_auth_from_url(self._url), self.comes_from, rp)
|
redact_auth_from_url(self._url), self.comes_from, rp
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
return redact_auth_from_url(str(self._url))
|
return redact_auth_from_url(str(self._url))
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return f'<Link {self}>'
|
return f"<Link {self}>"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def url(self) -> str:
|
def url(self) -> str:
|
||||||
@@ -104,7 +329,7 @@ class Link(KeyBasedCompareMixin):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def filename(self) -> str:
|
def filename(self) -> str:
|
||||||
path = self.path.rstrip('/')
|
path = self.path.rstrip("/")
|
||||||
name = posixpath.basename(path)
|
name = posixpath.basename(path)
|
||||||
if not name:
|
if not name:
|
||||||
# Make sure we don't leak auth information if the netloc
|
# Make sure we don't leak auth information if the netloc
|
||||||
@@ -113,7 +338,7 @@ class Link(KeyBasedCompareMixin):
|
|||||||
return netloc
|
return netloc
|
||||||
|
|
||||||
name = urllib.parse.unquote(name)
|
name = urllib.parse.unquote(name)
|
||||||
assert name, f'URL {self._url!r} produced no filename'
|
assert name, f"URL {self._url!r} produced no filename"
|
||||||
return name
|
return name
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -136,7 +361,7 @@ class Link(KeyBasedCompareMixin):
|
|||||||
return urllib.parse.unquote(self._parsed_url.path)
|
return urllib.parse.unquote(self._parsed_url.path)
|
||||||
|
|
||||||
def splitext(self) -> Tuple[str, str]:
|
def splitext(self) -> Tuple[str, str]:
|
||||||
return splitext(posixpath.basename(self.path.rstrip('/')))
|
return splitext(posixpath.basename(self.path.rstrip("/")))
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def ext(self) -> str:
|
def ext(self) -> str:
|
||||||
@@ -145,18 +370,34 @@ class Link(KeyBasedCompareMixin):
|
|||||||
@property
|
@property
|
||||||
def url_without_fragment(self) -> str:
|
def url_without_fragment(self) -> str:
|
||||||
scheme, netloc, path, query, fragment = self._parsed_url
|
scheme, netloc, path, query, fragment = self._parsed_url
|
||||||
return urllib.parse.urlunsplit((scheme, netloc, path, query, ''))
|
return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
|
||||||
|
|
||||||
_egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
|
_egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
|
||||||
|
|
||||||
@property
|
# Per PEP 508.
|
||||||
def egg_fragment(self) -> Optional[str]:
|
_project_name_re = re.compile(
|
||||||
|
r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
|
||||||
|
)
|
||||||
|
|
||||||
|
def _egg_fragment(self) -> Optional[str]:
|
||||||
match = self._egg_fragment_re.search(self._url)
|
match = self._egg_fragment_re.search(self._url)
|
||||||
if not match:
|
if not match:
|
||||||
return None
|
return None
|
||||||
return match.group(1)
|
|
||||||
|
|
||||||
_subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
|
# An egg fragment looks like a PEP 508 project name, along with
|
||||||
|
# an optional extras specifier. Anything else is invalid.
|
||||||
|
project_name = match.group(1)
|
||||||
|
if not self._project_name_re.match(project_name):
|
||||||
|
deprecated(
|
||||||
|
reason=f"{self} contains an egg fragment with a non-PEP 508 name",
|
||||||
|
replacement="to use the req @ url syntax, and remove the egg fragment",
|
||||||
|
gone_in="25.0",
|
||||||
|
issue=11617,
|
||||||
|
)
|
||||||
|
|
||||||
|
return project_name
|
||||||
|
|
||||||
|
_subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def subdirectory_fragment(self) -> Optional[str]:
|
def subdirectory_fragment(self) -> Optional[str]:
|
||||||
@@ -165,31 +406,37 @@ class Link(KeyBasedCompareMixin):
|
|||||||
return None
|
return None
|
||||||
return match.group(1)
|
return match.group(1)
|
||||||
|
|
||||||
_hash_re = re.compile(
|
def metadata_link(self) -> Optional["Link"]:
|
||||||
r'({choices})=([a-f0-9]+)'.format(choices="|".join(_SUPPORTED_HASHES))
|
"""Implementation of PEP 658 parsing."""
|
||||||
)
|
# Note that Link.from_element() parsing the "data-dist-info-metadata" attribute
|
||||||
|
# from an HTML anchor tag is typically how the Link.dist_info_metadata attribute
|
||||||
|
# gets set.
|
||||||
|
if self.dist_info_metadata is None:
|
||||||
|
return None
|
||||||
|
metadata_url = f"{self.url_without_fragment}.metadata"
|
||||||
|
metadata_link_hash = LinkHash.parse_pep658_hash(self.dist_info_metadata)
|
||||||
|
if metadata_link_hash is None:
|
||||||
|
return Link(metadata_url)
|
||||||
|
return Link(metadata_url, hashes=metadata_link_hash.as_dict())
|
||||||
|
|
||||||
|
def as_hashes(self) -> Hashes:
|
||||||
|
return Hashes({k: [v] for k, v in self._hashes.items()})
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def hash(self) -> Optional[str]:
|
def hash(self) -> Optional[str]:
|
||||||
match = self._hash_re.search(self._url)
|
return next(iter(self._hashes.values()), None)
|
||||||
if match:
|
|
||||||
return match.group(2)
|
|
||||||
return None
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def hash_name(self) -> Optional[str]:
|
def hash_name(self) -> Optional[str]:
|
||||||
match = self._hash_re.search(self._url)
|
return next(iter(self._hashes), None)
|
||||||
if match:
|
|
||||||
return match.group(1)
|
|
||||||
return None
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def show_url(self) -> str:
|
def show_url(self) -> str:
|
||||||
return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])
|
return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_file(self) -> bool:
|
def is_file(self) -> bool:
|
||||||
return self.scheme == 'file'
|
return self.scheme == "file"
|
||||||
|
|
||||||
def is_existing_dir(self) -> bool:
|
def is_existing_dir(self) -> bool:
|
||||||
return self.is_file and os.path.isdir(self.file_path)
|
return self.is_file and os.path.isdir(self.file_path)
|
||||||
@@ -210,19 +457,15 @@ class Link(KeyBasedCompareMixin):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def has_hash(self) -> bool:
|
def has_hash(self) -> bool:
|
||||||
return self.hash_name is not None
|
return bool(self._hashes)
|
||||||
|
|
||||||
def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
|
def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
|
||||||
"""
|
"""
|
||||||
Return True if the link has a hash and it is allowed.
|
Return True if the link has a hash and it is allowed by `hashes`.
|
||||||
"""
|
"""
|
||||||
if hashes is None or not self.has_hash:
|
if hashes is None:
|
||||||
return False
|
return False
|
||||||
# Assert non-None so mypy knows self.hash_name and self.hash are str.
|
return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
|
||||||
assert self.hash_name is not None
|
|
||||||
assert self.hash is not None
|
|
||||||
|
|
||||||
return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
|
|
||||||
|
|
||||||
|
|
||||||
class _CleanResult(NamedTuple):
|
class _CleanResult(NamedTuple):
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ https://docs.python.org/3/install/index.html#alternate-installation.
 """
 
 
-SCHEME_KEYS = ['platlib', 'purelib', 'headers', 'scripts', 'data']
+SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]
 
 
 class Scheme:
@@ -20,13 +20,14 @@ class SearchScope:
     Encapsulates the locations that pip is configured to search.
     """
 
-    __slots__ = ["find_links", "index_urls"]
+    __slots__ = ["find_links", "index_urls", "no_index"]
 
     @classmethod
     def create(
         cls,
         find_links: List[str],
         index_urls: List[str],
+        no_index: bool,
     ) -> "SearchScope":
         """
         Create a SearchScope object after normalizing the `find_links`.
@@ -38,7 +39,7 @@ class SearchScope:
         # blindly normalize anything starting with a ~...
         built_find_links: List[str] = []
         for link in find_links:
-            if link.startswith('~'):
+            if link.startswith("~"):
                 new_link = normalize_path(link)
                 if os.path.exists(new_link):
                     link = new_link
@@ -49,33 +50,35 @@ class SearchScope:
         if not has_tls():
             for link in itertools.chain(index_urls, built_find_links):
                 parsed = urllib.parse.urlparse(link)
-                if parsed.scheme == 'https':
+                if parsed.scheme == "https":
                     logger.warning(
-                        'pip is configured with locations that require '
-                        'TLS/SSL, however the ssl module in Python is not '
-                        'available.'
+                        "pip is configured with locations that require "
+                        "TLS/SSL, however the ssl module in Python is not "
+                        "available."
                     )
                     break
 
         return cls(
             find_links=built_find_links,
             index_urls=index_urls,
+            no_index=no_index,
         )
 
     def __init__(
         self,
         find_links: List[str],
         index_urls: List[str],
+        no_index: bool,
     ) -> None:
         self.find_links = find_links
         self.index_urls = index_urls
+        self.no_index = no_index
 
     def get_formatted_locations(self) -> str:
         lines = []
         redacted_index_urls = []
         if self.index_urls and self.index_urls != [PyPI.simple_url]:
             for url in self.index_urls:
-
                 redacted_index_url = redact_auth_from_url(url)
 
                 # Parse the URL
@@ -88,20 +91,23 @@ class SearchScope:
                 # exceptions for malformed URLs
                 if not purl.scheme and not purl.netloc:
                     logger.warning(
-                        'The index url "%s" seems invalid, '
-                        'please provide a scheme.', redacted_index_url)
+                        'The index url "%s" seems invalid, please provide a scheme.',
+                        redacted_index_url,
+                    )
 
                 redacted_index_urls.append(redacted_index_url)
 
-            lines.append('Looking in indexes: {}'.format(
-                ', '.join(redacted_index_urls)))
+            lines.append(
+                "Looking in indexes: {}".format(", ".join(redacted_index_urls))
+            )
 
         if self.find_links:
             lines.append(
-                'Looking in links: {}'.format(', '.join(
-                    redact_auth_from_url(url) for url in self.find_links))
+                "Looking in links: {}".format(
+                    ", ".join(redact_auth_from_url(url) for url in self.find_links)
+                )
             )
-        return '\n'.join(lines)
+        return "\n".join(lines)
 
     def get_index_urls_locations(self, project_name: str) -> List[str]:
         """Returns the locations found via self.index_urls
@@ -112,15 +118,15 @@ class SearchScope:
 
         def mkurl_pypi_url(url: str) -> str:
             loc = posixpath.join(
-                url,
-                urllib.parse.quote(canonicalize_name(project_name)))
+                url, urllib.parse.quote(canonicalize_name(project_name))
+            )
             # For maximum compatibility with easy_install, ensure the path
             # ends in a trailing slash. Although this isn't in the spec
             # (and PyPI can handle it without the slash) some other index
             # implementations might break if they relied on easy_install's
             # behavior.
-            if not loc.endswith('/'):
-                loc = loc + '/'
+            if not loc.endswith("/"):
+                loc = loc + "/"
             return loc
 
         return [mkurl_pypi_url(url) for url in self.index_urls]
@@ -9,8 +9,13 @@ class SelectionPreferences:
     and installing files.
     """
 
-    __slots__ = ['allow_yanked', 'allow_all_prereleases', 'format_control',
-                 'prefer_binary', 'ignore_requires_python']
+    __slots__ = [
+        "allow_yanked",
+        "allow_all_prereleases",
+        "format_control",
+        "prefer_binary",
+        "ignore_requires_python",
+    ]
 
     # Don't include an allow_yanked default value to make sure each call
     # site considers whether yanked releases are allowed. This also causes
|
|||||||
else:
|
else:
|
||||||
py_version_info = normalize_version_info(py_version_info)
|
py_version_info = normalize_version_info(py_version_info)
|
||||||
|
|
||||||
py_version = '.'.join(map(str, py_version_info[:2]))
|
py_version = ".".join(map(str, py_version_info[:2]))
|
||||||
|
|
||||||
self.abis = abis
|
self.abis = abis
|
||||||
self.implementation = implementation
|
self.implementation = implementation
|
||||||
@@ -70,19 +70,18 @@ class TargetPython:
|
|||||||
"""
|
"""
|
||||||
display_version = None
|
display_version = None
|
||||||
if self._given_py_version_info is not None:
|
if self._given_py_version_info is not None:
|
||||||
display_version = '.'.join(
|
display_version = ".".join(
|
||||||
str(part) for part in self._given_py_version_info
|
str(part) for part in self._given_py_version_info
|
||||||
)
|
)
|
||||||
|
|
||||||
key_values = [
|
key_values = [
|
||||||
('platforms', self.platforms),
|
("platforms", self.platforms),
|
||||||
('version_info', display_version),
|
("version_info", display_version),
|
||||||
('abis', self.abis),
|
("abis", self.abis),
|
||||||
('implementation', self.implementation),
|
("implementation", self.implementation),
|
||||||
]
|
]
|
||||||
return ' '.join(
|
return " ".join(
|
||||||
f'{key}={value!r}' for key, value in key_values
|
f"{key}={value!r}" for key, value in key_values if value is not None
|
||||||
if value is not None
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_tags(self) -> List[Tag]:
|
def get_tags(self) -> List[Tag]:
|
||||||
|
|||||||
@@ -13,10 +13,10 @@ class Wheel:
     """A wheel file"""
 
     wheel_file_re = re.compile(
-        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
-        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
+        r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
+        ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
         \.whl|\.dist-info)$""",
-        re.VERBOSE
+        re.VERBOSE,
     )
 
     def __init__(self, filename: str) -> None:
@@ -25,23 +25,20 @@ class Wheel:
        """
         wheel_info = self.wheel_file_re.match(filename)
         if not wheel_info:
-            raise InvalidWheelFilename(
-                f"{filename} is not a valid wheel filename."
-            )
+            raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
         self.filename = filename
-        self.name = wheel_info.group('name').replace('_', '-')
+        self.name = wheel_info.group("name").replace("_", "-")
         # we'll assume "_" means "-" due to wheel naming scheme
         # (https://github.com/pypa/pip/issues/1150)
-        self.version = wheel_info.group('ver').replace('_', '-')
-        self.build_tag = wheel_info.group('build')
-        self.pyversions = wheel_info.group('pyver').split('.')
-        self.abis = wheel_info.group('abi').split('.')
-        self.plats = wheel_info.group('plat').split('.')
+        self.version = wheel_info.group("ver").replace("_", "-")
+        self.build_tag = wheel_info.group("build")
+        self.pyversions = wheel_info.group("pyver").split(".")
+        self.abis = wheel_info.group("abi").split(".")
+        self.plats = wheel_info.group("plat").split(".")
 
         # All the tag combinations from this file
         self.file_tags = {
-            Tag(x, y, z) for x in self.pyversions
-            for y in self.abis for z in self.plats
+            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
         }
 
     def get_formatted_file_tags(self) -> List[str]:
@@ -61,7 +58,10 @@ class Wheel:
         :raises ValueError: If none of the wheel's file tags match one of
             the supported tags.
         """
-        return min(tags.index(tag) for tag in self.file_tags if tag in tags)
+        try:
+            return next(i for i, t in enumerate(tags) if t in self.file_tags)
+        except StopIteration:
+            raise ValueError()
 
     def find_most_preferred_tag(
         self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
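The support_index_min change above replaces the min()-over-indexes scan with a single pass over the supported tags in priority order, returning the index of the first tag the wheel provides and raising ValueError when none match. A small standalone illustration (plain tuples instead of pip's Tag objects; names and sample tags are made up):

from typing import Sequence, Set, Tuple

Tag = Tuple[str, str, str]

def support_index_min(file_tags: Set[Tag], supported: Sequence[Tag]) -> int:
    try:
        # Lower index == more preferred interpreter/ABI/platform tag.
        return next(i for i, tag in enumerate(supported) if tag in file_tags)
    except StopIteration:
        raise ValueError("none of the wheel's tags are supported")

wheel_tags = {("py3", "none", "any")}
supported = [("cp311", "cp311", "manylinux_2_17_x86_64"), ("py3", "none", "any")]
print(support_index_min(wheel_tags, supported))  # 1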
@@ -3,9 +3,18 @@
|
|||||||
Contains interface (MultiDomainBasicAuth) and associated glue code for
|
Contains interface (MultiDomainBasicAuth) and associated glue code for
|
||||||
providing credentials in the context of network requests.
|
providing credentials in the context of network requests.
|
||||||
"""
|
"""
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sysconfig
|
||||||
|
import typing
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
from typing import Any, Dict, List, Optional, Tuple
|
from abc import ABC, abstractmethod
|
||||||
|
from functools import lru_cache
|
||||||
|
from os.path import commonprefix
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, NamedTuple, Optional, Tuple
|
||||||
|
|
||||||
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
|
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
|
||||||
from pip._vendor.requests.models import Request, Response
|
from pip._vendor.requests.models import Request, Response
|
||||||
@@ -23,59 +32,204 @@ from pip._internal.vcs.versioncontrol import AuthInfo
|
|||||||
|
|
||||||
logger = getLogger(__name__)
|
logger = getLogger(__name__)
|
||||||
|
|
||||||
Credentials = Tuple[str, str, str]
|
KEYRING_DISABLED = False
|
||||||
|
|
||||||
try:
|
|
||||||
import keyring
|
|
||||||
except ImportError:
|
|
||||||
keyring = None
|
|
||||||
except Exception as exc:
|
|
||||||
logger.warning(
|
|
||||||
"Keyring is skipped due to an exception: %s",
|
|
||||||
str(exc),
|
|
||||||
)
|
|
||||||
keyring = None
|
|
||||||
|
|
||||||
|
|
||||||
def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]:
|
class Credentials(NamedTuple):
|
||||||
"""Return the tuple auth for a given url from keyring."""
|
url: str
|
||||||
global keyring
|
username: str
|
||||||
if not url or not keyring:
|
password: str
|
||||||
|
|
||||||
|
|
||||||
|
class KeyRingBaseProvider(ABC):
|
||||||
|
"""Keyring base provider interface"""
|
||||||
|
|
||||||
|
has_keyring: bool
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
|
||||||
|
...
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def save_auth_info(self, url: str, username: str, password: str) -> None:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class KeyRingNullProvider(KeyRingBaseProvider):
|
||||||
|
"""Keyring null provider"""
|
||||||
|
|
||||||
|
has_keyring = False
|
||||||
|
|
||||||
|
def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
def save_auth_info(self, url: str, username: str, password: str) -> None:
|
||||||
try:
|
return None
|
||||||
get_credential = keyring.get_credential
|
|
||||||
except AttributeError:
|
|
||||||
pass
|
class KeyRingPythonProvider(KeyRingBaseProvider):
|
||||||
else:
|
"""Keyring interface which uses locally imported `keyring`"""
|
||||||
|
|
||||||
|
has_keyring = True
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
import keyring
|
||||||
|
|
||||||
|
self.keyring = keyring
|
||||||
|
|
||||||
|
def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
|
||||||
|
# Support keyring's get_credential interface which supports getting
|
||||||
|
# credentials without a username. This is only available for
|
||||||
|
# keyring>=15.2.0.
|
||||||
|
if hasattr(self.keyring, "get_credential"):
|
||||||
logger.debug("Getting credentials from keyring for %s", url)
|
logger.debug("Getting credentials from keyring for %s", url)
|
||||||
cred = get_credential(url, username)
|
cred = self.keyring.get_credential(url, username)
|
||||||
if cred is not None:
|
if cred is not None:
|
||||||
return cred.username, cred.password
|
return cred.username, cred.password
|
||||||
return None
|
return None
|
||||||
|
|
||||||
if username:
|
if username is not None:
|
||||||
logger.debug("Getting password from keyring for %s", url)
|
logger.debug("Getting password from keyring for %s", url)
|
||||||
password = keyring.get_password(url, username)
|
password = self.keyring.get_password(url, username)
|
||||||
if password:
|
if password:
|
||||||
return username, password
|
return username, password
|
||||||
|
|
||||||
except Exception as exc:
|
|
||||||
logger.warning(
|
|
||||||
"Keyring is skipped due to an exception: %s",
|
|
||||||
str(exc),
|
|
||||||
)
|
|
||||||
keyring = None
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
def save_auth_info(self, url: str, username: str, password: str) -> None:
|
||||||
|
self.keyring.set_password(url, username, password)
|
||||||
|
|
||||||
|
|
||||||
|
class KeyRingCliProvider(KeyRingBaseProvider):
|
||||||
|
"""Provider which uses `keyring` cli
|
||||||
|
|
||||||
|
Instead of calling the keyring package installed alongside pip
|
||||||
|
we call keyring on the command line which will enable pip to
|
+    use which ever installation of keyring is available first in
+    PATH.
+    """
+
+    has_keyring = True
+
+    def __init__(self, cmd: str) -> None:
+        self.keyring = cmd
+
+    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
+        # This is the default implementation of keyring.get_credential
+        # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139
+        if username is not None:
+            password = self._get_password(url, username)
+            if password is not None:
+                return username, password
+        return None
+
+    def save_auth_info(self, url: str, username: str, password: str) -> None:
+        return self._set_password(url, username, password)
+
+    def _get_password(self, service_name: str, username: str) -> Optional[str]:
+        """Mirror the implementation of keyring.get_password using cli"""
+        if self.keyring is None:
+            return None
+
+        cmd = [self.keyring, "get", service_name, username]
+        env = os.environ.copy()
+        env["PYTHONIOENCODING"] = "utf-8"
+        res = subprocess.run(
+            cmd,
+            stdin=subprocess.DEVNULL,
+            stdout=subprocess.PIPE,
+            env=env,
+        )
+        if res.returncode:
+            return None
+        return res.stdout.decode("utf-8").strip(os.linesep)
+
+    def _set_password(self, service_name: str, username: str, password: str) -> None:
+        """Mirror the implementation of keyring.set_password using cli"""
+        if self.keyring is None:
+            return None
+        env = os.environ.copy()
+        env["PYTHONIOENCODING"] = "utf-8"
+        subprocess.run(
+            [self.keyring, "set", service_name, username],
+            input=f"{password}{os.linesep}".encode("utf-8"),
+            env=env,
+            check=True,
+        )
+        return None
+
+
+@lru_cache(maxsize=None)
+def get_keyring_provider(provider: str) -> KeyRingBaseProvider:
+    logger.verbose("Keyring provider requested: %s", provider)
+
+    # keyring has previously failed and been disabled
+    if KEYRING_DISABLED:
+        provider = "disabled"
+    if provider in ["import", "auto"]:
+        try:
+            impl = KeyRingPythonProvider()
+            logger.verbose("Keyring provider set: import")
+            return impl
+        except ImportError:
+            pass
+        except Exception as exc:
+            # In the event of an unexpected exception
+            # we should warn the user
+            msg = "Installed copy of keyring fails with exception %s"
+            if provider == "auto":
+                msg = msg + ", trying to find a keyring executable as a fallback"
+            logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG))
+    if provider in ["subprocess", "auto"]:
+        cli = shutil.which("keyring")
+        if cli and cli.startswith(sysconfig.get_path("scripts")):
+            # all code within this function is stolen from shutil.which implementation
+            @typing.no_type_check
+            def PATH_as_shutil_which_determines_it() -> str:
+                path = os.environ.get("PATH", None)
+                if path is None:
+                    try:
+                        path = os.confstr("CS_PATH")
+                    except (AttributeError, ValueError):
+                        # os.confstr() or CS_PATH is not available
+                        path = os.defpath
+                # bpo-35755: Don't use os.defpath if the PATH environment variable is
+                # set to an empty string
+
+                return path
+
+            scripts = Path(sysconfig.get_path("scripts"))
+
+            paths = []
+            for path in PATH_as_shutil_which_determines_it().split(os.pathsep):
+                p = Path(path)
+                try:
+                    if not p.samefile(scripts):
+                        paths.append(path)
+                except FileNotFoundError:
+                    pass
+
+            path = os.pathsep.join(paths)
+
+            cli = shutil.which("keyring", path=path)
+
+        if cli:
+            logger.verbose("Keyring provider set: subprocess with executable %s", cli)
+            return KeyRingCliProvider(cli)
+
+    logger.verbose("Keyring provider set: disabled")
+    return KeyRingNullProvider()


 class MultiDomainBasicAuth(AuthBase):
     def __init__(
-        self, prompting: bool = True, index_urls: Optional[List[str]] = None
+        self,
+        prompting: bool = True,
+        index_urls: Optional[List[str]] = None,
+        keyring_provider: str = "auto",
     ) -> None:
         self.prompting = prompting
         self.index_urls = index_urls
+        self.keyring_provider = keyring_provider  # type: ignore[assignment]
         self.passwords: Dict[str, AuthInfo] = {}
         # When the user is prompted to enter credentials and keyring is
         # available, we will offer to save them. If the user accepts,

@@ -84,6 +238,47 @@ class MultiDomainBasicAuth(AuthBase):
         # ``save_credentials`` to save these.
         self._credentials_to_save: Optional[Credentials] = None

+    @property
+    def keyring_provider(self) -> KeyRingBaseProvider:
+        return get_keyring_provider(self._keyring_provider)
+
+    @keyring_provider.setter
+    def keyring_provider(self, provider: str) -> None:
+        # The free function get_keyring_provider has been decorated with
+        # functools.cache. If an exception occurs in get_keyring_auth that
+        # cache will be cleared and keyring disabled, take that into account
+        # if you want to remove this indirection.
+        self._keyring_provider = provider
+
+    @property
+    def use_keyring(self) -> bool:
+        # We won't use keyring when --no-input is passed unless
+        # a specific provider is requested because it might require
+        # user interaction
+        return self.prompting or self._keyring_provider not in ["auto", "disabled"]
+
+    def _get_keyring_auth(
+        self,
+        url: Optional[str],
+        username: Optional[str],
+    ) -> Optional[AuthInfo]:
+        """Return the tuple auth for a given url from keyring."""
+        # Do nothing if no url was provided
+        if not url:
+            return None
+
+        try:
+            return self.keyring_provider.get_auth_info(url, username)
+        except Exception as exc:
+            logger.warning(
+                "Keyring is skipped due to an exception: %s",
+                str(exc),
+            )
+            global KEYRING_DISABLED
+            KEYRING_DISABLED = True
+            get_keyring_provider.cache_clear()
+            return None
+
     def _get_index_url(self, url: str) -> Optional[str]:
         """Return the original index URL matching the requested URL.

@@ -100,15 +295,42 @@ class MultiDomainBasicAuth(AuthBase):
         if not url or not self.index_urls:
             return None

-        for u in self.index_urls:
-            prefix = remove_auth_from_url(u).rstrip("/") + "/"
-            if url.startswith(prefix):
-                return u
+        url = remove_auth_from_url(url).rstrip("/") + "/"
+        parsed_url = urllib.parse.urlsplit(url)
+
+        candidates = []
+
+        for index in self.index_urls:
+            index = index.rstrip("/") + "/"
+            parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index))
+            if parsed_url == parsed_index:
+                return index
+
+            if parsed_url.netloc != parsed_index.netloc:
+                continue
+
+            candidate = urllib.parse.urlsplit(index)
+            candidates.append(candidate)
+
+        if not candidates:
             return None
+
+        candidates.sort(
+            reverse=True,
+            key=lambda candidate: commonprefix(
+                [
+                    parsed_url.path,
+                    candidate.path,
+                ]
+            ).rfind("/"),
+        )
+
+        return urllib.parse.urlunsplit(candidates[0])

     def _get_new_credentials(
         self,
         original_url: str,
+        *,
         allow_netrc: bool = True,
         allow_keyring: bool = False,
     ) -> AuthInfo:

@@ -152,8 +374,8 @@ class MultiDomainBasicAuth(AuthBase):
         # The index url is more specific than the netloc, so try it first
         # fmt: off
         kr_auth = (
-            get_keyring_auth(index_url, username) or
-            get_keyring_auth(netloc, username)
+            self._get_keyring_auth(index_url, username) or
+            self._get_keyring_auth(netloc, username)
         )
         # fmt: on
         if kr_auth:

@@ -179,9 +401,16 @@ class MultiDomainBasicAuth(AuthBase):
         # Try to get credentials from original url
         username, password = self._get_new_credentials(original_url)

-        # If credentials not found, use any stored credentials for this netloc
-        if username is None and password is None:
-            username, password = self.passwords.get(netloc, (None, None))
+        # If credentials not found, use any stored credentials for this netloc.
+        # Do this if either the username or the password is missing.
+        # This accounts for the situation in which the user has specified
+        # the username in the index url, but the password comes from keyring.
+        if (username is None or password is None) and netloc in self.passwords:
+            un, pw = self.passwords[netloc]
+            # It is possible that the cached credentials are for a different username,
+            # in which case the cache should be ignored.
+            if username is None or username == un:
+                username, password = un, pw

         if username is not None or password is not None:
             # Convert the username and password if they're None, so that

@@ -223,10 +452,11 @@ class MultiDomainBasicAuth(AuthBase):
     def _prompt_for_password(
         self, netloc: str
    ) -> Tuple[Optional[str], Optional[str], bool]:
-        username = ask_input(f"User for {netloc}: ")
+        username = ask_input(f"User for {netloc}: ") if self.prompting else None
         if not username:
             return None, None, False
-        auth = get_keyring_auth(netloc, username)
+        if self.use_keyring:
+            auth = self._get_keyring_auth(netloc, username)
             if auth and auth[0] is not None and auth[1] is not None:
                 return auth[0], auth[1], False
         password = ask_password("Password: ")

@@ -234,7 +464,11 @@ class MultiDomainBasicAuth(AuthBase):

     # Factored out to allow for easy patching in tests
     def _should_save_password_to_keyring(self) -> bool:
-        if not keyring:
+        if (
+            not self.prompting
+            or not self.use_keyring
+            or not self.keyring_provider.has_keyring
+        ):
             return False
         return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"

@@ -244,19 +478,22 @@ class MultiDomainBasicAuth(AuthBase):
        if resp.status_code != 401:
            return resp

-        # We are not able to prompt the user so simply return the response
-        if not self.prompting:
-            return resp
-
-        parsed = urllib.parse.urlparse(resp.url)
+        username, password = None, None

         # Query the keyring for credentials:
-        username, password = self._get_new_credentials(
-            resp.url,
-            allow_netrc=False,
-            allow_keyring=True,
-        )
+        if self.use_keyring:
+            username, password = self._get_new_credentials(
+                resp.url,
+                allow_netrc=False,
+                allow_keyring=True,
+            )
+
+        # We are not able to prompt the user so simply return the response
+        if not self.prompting and not username and not password:
+            return resp
+
+        parsed = urllib.parse.urlparse(resp.url)

         # Prompt the user for a new username and password
         save = False
         if not username and not password:

@@ -269,7 +506,11 @@ class MultiDomainBasicAuth(AuthBase):

         # Prompt to save the password to keyring
         if save and self._should_save_password_to_keyring():
-            self._credentials_to_save = (parsed.netloc, username, password)
+            self._credentials_to_save = Credentials(
+                url=parsed.netloc,
+                username=username,
+                password=password,
+            )

         # Consume content and release the original connection to allow our new
         # request to reuse the same one.

@@ -302,15 +543,17 @@ class MultiDomainBasicAuth(AuthBase):

     def save_credentials(self, resp: Response, **kwargs: Any) -> None:
         """Response callback to save credentials on success."""
-        assert keyring is not None, "should never reach here without keyring"
-        if not keyring:
-            return
+        assert (
+            self.keyring_provider.has_keyring
+        ), "should never reach here without keyring"

         creds = self._credentials_to_save
         self._credentials_to_save = None
         if creds and resp.status_code < 400:
             try:
                 logger.info("Saving credentials to keyring")
-                keyring.set_password(*creds)
+                self.keyring_provider.save_auth_info(
+                    creds.url, creds.username, creds.password
+                )
             except Exception:
                 logger.exception("Failed to save credentials")
|
|||||||
|
|
||||||
import os
|
import os
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
from typing import Iterator, Optional
|
from typing import Generator, Optional
|
||||||
|
|
||||||
from pip._vendor.cachecontrol.cache import BaseCache
|
from pip._vendor.cachecontrol.cache import BaseCache
|
||||||
from pip._vendor.cachecontrol.caches import FileCache
|
from pip._vendor.cachecontrol.caches import FileCache
|
||||||
@@ -18,7 +18,7 @@ def is_from_cache(response: Response) -> bool:
|
|||||||
|
|
||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
def suppressed_cache_errors() -> Iterator[None]:
|
def suppressed_cache_errors() -> Generator[None, None, None]:
|
||||||
"""If we can't access the cache then we can just skip caching and process
|
"""If we can't access the cache then we can just skip caching and process
|
||||||
requests as if caching wasn't enabled.
|
requests as if caching wasn't enabled.
|
||||||
"""
|
"""
|
||||||
@@ -53,7 +53,7 @@ class SafeFileCache(BaseCache):
|
|||||||
with open(path, "rb") as f:
|
with open(path, "rb") as f:
|
||||||
return f.read()
|
return f.read()
|
||||||
|
|
||||||
def set(self, key: str, value: bytes) -> None:
|
def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
|
||||||
path = self._get_cache_path(key)
|
path = self._get_cache_path(key)
|
||||||
with suppressed_cache_errors():
|
with suppressed_cache_errors():
|
||||||
ensure_dir(os.path.dirname(path))
|
ensure_dir(os.path.dirname(path))
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
 """Download files with progress indicators.
 """
-import cgi
+import email.message
 import logging
 import mimetypes
 import os

@@ -8,7 +8,7 @@ from typing import Iterable, Optional, Tuple

 from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

-from pip._internal.cli.progress_bars import DownloadProgressProvider
+from pip._internal.cli.progress_bars import get_download_progress_renderer
 from pip._internal.exceptions import NetworkConnectionError
 from pip._internal.models.index import PyPI
 from pip._internal.models.link import Link

@@ -65,7 +65,8 @@ def _prepare_download(
     if not show_progress:
         return chunks

-    return DownloadProgressProvider(progress_bar, max=total_length)(chunks)
+    renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length)
+    return renderer(chunks)


 def sanitize_content_filename(filename: str) -> str:

@@ -80,12 +81,13 @@ def parse_content_disposition(content_disposition: str, default_filename: str) -
     Parse the "filename" value from a Content-Disposition header, and
     return the default filename if the result is empty.
     """
-    _type, params = cgi.parse_header(content_disposition)
-    filename = params.get("filename")
+    m = email.message.Message()
+    m["content-type"] = content_disposition
+    filename = m.get_param("filename")
     if filename:
         # We need to sanitize the filename to prevent directory traversal
         # in case the filename contains ".." path parts.
-        filename = sanitize_content_filename(filename)
+        filename = sanitize_content_filename(str(filename))
     return filename or default_filename
@@ -5,36 +5,36 @@ __all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"]
 from bisect import bisect_left, bisect_right
 from contextlib import contextmanager
 from tempfile import NamedTemporaryFile
-from typing import Any, Dict, Iterator, List, Optional, Tuple
+from typing import Any, Dict, Generator, List, Optional, Tuple
-from zipfile import BadZipfile, ZipFile
+from zipfile import BadZipFile, ZipFile

-from pip._vendor.pkg_resources import Distribution
+from pip._vendor.packaging.utils import canonicalize_name
 from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

+from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
 from pip._internal.network.session import PipSession
 from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
-from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel


 class HTTPRangeRequestUnsupported(Exception):
     pass


-def dist_from_wheel_url(name: str, url: str, session: PipSession) -> Distribution:
+def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
-    """Return a pkg_resources.Distribution from the given wheel URL.
+    """Return a distribution object from the given wheel URL.

-    This uses HTTP range requests to only fetch the potion of the wheel
+    This uses HTTP range requests to only fetch the portion of the wheel
     containing metadata, just enough for the object to be constructed.
     If such requests are not supported, HTTPRangeRequestUnsupported
     is raised.
     """
-    with LazyZipOverHTTP(url, session) as wheel:
+    with LazyZipOverHTTP(url, session) as zf:
         # For read-only ZIP files, ZipFile only needs methods read,
         # seek, seekable and tell, not the whole IO protocol.
-        zip_file = ZipFile(wheel)  # type: ignore
+        wheel = MemoryWheel(zf.name, zf)  # type: ignore
         # After context manager exit, wheel.name
         # is an invalid file by intention.
-        return pkg_resources_distribution_for_wheel(zip_file, name, wheel.name)
+        return get_wheel_distribution(wheel, canonicalize_name(name))


 class LazyZipOverHTTP:

@@ -135,11 +135,11 @@ class LazyZipOverHTTP:
         self._file.__enter__()
         return self

-    def __exit__(self, *exc: Any) -> Optional[bool]:
+    def __exit__(self, *exc: Any) -> None:
-        return self._file.__exit__(*exc)
+        self._file.__exit__(*exc)

     @contextmanager
-    def _stay(self) -> Iterator[None]:
+    def _stay(self) -> Generator[None, None, None]:
         """Return a context manager keeping the position.

         At the end of the block, seek back to original position.

@@ -160,7 +160,7 @@ class LazyZipOverHTTP:
                 # For read-only ZIP files, ZipFile only needs
                 # methods read, seek, seekable and tell.
                 ZipFile(self)  # type: ignore
-            except BadZipfile:
+            except BadZipFile:
                 pass
             else:
                 break

@@ -177,8 +177,8 @@ class LazyZipOverHTTP:

     def _merge(
         self, start: int, end: int, left: int, right: int
-    ) -> Iterator[Tuple[int, int]]:
+    ) -> Generator[Tuple[int, int], None, None]:
-        """Return an iterator of intervals to be fetched.
+        """Return a generator of intervals to be fetched.

         Args:
             start (int): Start of needed interval
@@ -2,17 +2,8 @@
 network request configuration and behavior.
 """

-# When mypy runs on Windows the call to distro.linux_distribution() is skipped
-# resulting in the failure:
-#
-# error: unused 'type: ignore' comment
-#
-# If the upstream module adds typing, this comment should be removed. See
-# https://github.com/nir0s/distro/pull/269
-#
-# mypy: warn-unused-ignores=False
-
 import email.utils
+import io
 import ipaddress
 import json
 import logging

@@ -24,11 +15,23 @@ import subprocess
 import sys
 import urllib.parse
 import warnings
-from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Generator,
+    List,
+    Mapping,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+)

 from pip._vendor import requests, urllib3
-from pip._vendor.cachecontrol import CacheControlAdapter
+from pip._vendor.cachecontrol import CacheControlAdapter as _BaseCacheControlAdapter
-from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
+from pip._vendor.requests.adapters import DEFAULT_POOLBLOCK, BaseAdapter
+from pip._vendor.requests.adapters import HTTPAdapter as _BaseHTTPAdapter
 from pip._vendor.requests.models import PreparedRequest, Response
 from pip._vendor.requests.structures import CaseInsensitiveDict
 from pip._vendor.urllib3.connectionpool import ConnectionPool

@@ -46,6 +49,12 @@ from pip._internal.utils.glibc import libc_ver
 from pip._internal.utils.misc import build_url_from_netloc, parse_netloc
 from pip._internal.utils.urls import url_to_path

+if TYPE_CHECKING:
+    from ssl import SSLContext
+
+    from pip._vendor.urllib3.poolmanager import PoolManager
+
+
 logger = logging.getLogger(__name__)

 SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]

@@ -128,9 +137,8 @@ def user_agent() -> str:
     if sys.platform.startswith("linux"):
         from pip._vendor import distro

-        # https://github.com/nir0s/distro/pull/269
-        linux_distribution = distro.linux_distribution()  # type: ignore
-        distro_infos = dict(
+        linux_distribution = distro.name(), distro.version(), distro.codename()
+        distro_infos: Dict[str, Any] = dict(
             filter(
                 lambda x: x[1],
                 zip(["name", "version", "id"], linux_distribution),

@@ -218,8 +226,11 @@ class LocalFSAdapter(BaseAdapter):
         try:
             stats = os.stat(pathname)
         except OSError as exc:
+            # format the exception raised as a io.BytesIO object,
+            # to return a better error message:
             resp.status_code = 404
-            resp.raw = exc
+            resp.reason = type(exc).__name__
+            resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8"))
         else:
             modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
             content_type = mimetypes.guess_type(pathname)[0] or "text/plain"

@@ -240,6 +251,48 @@ class LocalFSAdapter(BaseAdapter):
             pass


+class _SSLContextAdapterMixin:
+    """Mixin to add the ``ssl_context`` constructor argument to HTTP adapters.
+
+    The additional argument is forwarded directly to the pool manager. This allows us
+    to dynamically decide what SSL store to use at runtime, which is used to implement
+    the optional ``truststore`` backend.
+    """
+
+    def __init__(
+        self,
+        *,
+        ssl_context: Optional["SSLContext"] = None,
+        **kwargs: Any,
+    ) -> None:
+        self._ssl_context = ssl_context
+        super().__init__(**kwargs)
+
+    def init_poolmanager(
+        self,
+        connections: int,
+        maxsize: int,
+        block: bool = DEFAULT_POOLBLOCK,
+        **pool_kwargs: Any,
+    ) -> "PoolManager":
+        if self._ssl_context is not None:
+            pool_kwargs.setdefault("ssl_context", self._ssl_context)
+        return super().init_poolmanager(  # type: ignore[misc]
+            connections=connections,
+            maxsize=maxsize,
+            block=block,
+            **pool_kwargs,
+        )
+
+
+class HTTPAdapter(_SSLContextAdapterMixin, _BaseHTTPAdapter):
+    pass
+
+
+class CacheControlAdapter(_SSLContextAdapterMixin, _BaseCacheControlAdapter):
+    pass
+
+
 class InsecureHTTPAdapter(HTTPAdapter):
     def cert_verify(
         self,

@@ -263,7 +316,6 @@ class InsecureCacheControlAdapter(CacheControlAdapter):


 class PipSession(requests.Session):
-
     timeout: Optional[int] = None

     def __init__(

@@ -273,6 +325,7 @@ class PipSession(requests.Session):
         cache: Optional[str] = None,
         trusted_hosts: Sequence[str] = (),
         index_urls: Optional[List[str]] = None,
+        ssl_context: Optional["SSLContext"] = None,
         **kwargs: Any,
     ) -> None:
         """

@@ -325,13 +378,14 @@ class PipSession(requests.Session):
             secure_adapter = CacheControlAdapter(
                 cache=SafeFileCache(cache),
                 max_retries=retries,
+                ssl_context=ssl_context,
             )
             self._trusted_host_adapter = InsecureCacheControlAdapter(
                 cache=SafeFileCache(cache),
                 max_retries=retries,
             )
         else:
-            secure_adapter = HTTPAdapter(max_retries=retries)
+            secure_adapter = HTTPAdapter(max_retries=retries, ssl_context=ssl_context)
             self._trusted_host_adapter = insecure_adapter

         self.mount("https://", secure_adapter)

@@ -369,12 +423,19 @@ class PipSession(requests.Session):
         if host_port not in self.pip_trusted_origins:
             self.pip_trusted_origins.append(host_port)

+        self.mount(
+            build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
+        )
         self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
         if not host_port[1]:
+            self.mount(
+                build_url_from_netloc(host, scheme="http") + ":",
+                self._trusted_host_adapter,
+            )
             # Mount wildcard ports for the same host.
             self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter)

-    def iter_secure_origins(self) -> Iterator[SecureOrigin]:
+    def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]:
         yield from SECURE_ORIGINS
         for host, port in self.pip_trusted_origins:
             yield ("*", host, "*" if port is None else port)

@@ -403,7 +464,7 @@ class PipSession(requests.Session):
                 continue

             try:
-                addr = ipaddress.ip_address(origin_host)
+                addr = ipaddress.ip_address(origin_host or "")
                 network = ipaddress.ip_network(secure_host)
             except ValueError:
                 # We don't have both a valid address or a valid network, so

@@ -449,6 +510,8 @@ class PipSession(requests.Session):
     def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response:
         # Allow setting a default timeout on a session
         kwargs.setdefault("timeout", self.timeout)
+        # Allow setting a default proxies on a session
+        kwargs.setdefault("proxies", self.proxies)

         # Dispatch the actual request
         return super().request(method, url, *args, **kwargs)
|
|||||||
from typing import Dict, Iterator
|
from typing import Dict, Generator
|
||||||
|
|
||||||
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
|
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
|
||||||
|
|
||||||
@@ -56,7 +56,7 @@ def raise_for_status(resp: Response) -> None:
|
|||||||
|
|
||||||
def response_chunks(
|
def response_chunks(
|
||||||
response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
|
response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
|
||||||
) -> Iterator[bytes]:
|
) -> Generator[bytes, None, None]:
|
||||||
"""Given a requests Response, provide the data chunks."""
|
"""Given a requests Response, provide the data chunks."""
|
||||||
try:
|
try:
|
||||||
# Special case for urllib3.
|
# Special case for urllib3.
|
||||||
|
|||||||
@@ -3,33 +3,37 @@
|
|||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from pip._vendor.pep517.wrappers import Pep517HookCaller
|
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
||||||
|
|
||||||
from pip._internal.build_env import BuildEnvironment
|
from pip._internal.build_env import BuildEnvironment
|
||||||
|
from pip._internal.exceptions import (
|
||||||
|
InstallationSubprocessError,
|
||||||
|
MetadataGenerationFailed,
|
||||||
|
)
|
||||||
from pip._internal.utils.subprocess import runner_with_spinner_message
|
from pip._internal.utils.subprocess import runner_with_spinner_message
|
||||||
from pip._internal.utils.temp_dir import TempDirectory
|
from pip._internal.utils.temp_dir import TempDirectory
|
||||||
|
|
||||||
|
|
||||||
def generate_metadata(build_env, backend):
|
def generate_metadata(
|
||||||
# type: (BuildEnvironment, Pep517HookCaller) -> str
|
build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
|
||||||
|
) -> str:
|
||||||
"""Generate metadata using mechanisms described in PEP 517.
|
"""Generate metadata using mechanisms described in PEP 517.
|
||||||
|
|
||||||
Returns the generated metadata directory.
|
Returns the generated metadata directory.
|
||||||
"""
|
"""
|
||||||
metadata_tmpdir = TempDirectory(
|
metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
|
||||||
kind="modern-metadata", globally_managed=True
|
|
||||||
)
|
|
||||||
|
|
||||||
metadata_dir = metadata_tmpdir.path
|
metadata_dir = metadata_tmpdir.path
|
||||||
|
|
||||||
with build_env:
|
with build_env:
|
||||||
# Note that Pep517HookCaller implements a fallback for
|
# Note that BuildBackendHookCaller implements a fallback for
|
||||||
# prepare_metadata_for_build_wheel, so we don't have to
|
# prepare_metadata_for_build_wheel, so we don't have to
|
||||||
# consider the possibility that this hook doesn't exist.
|
# consider the possibility that this hook doesn't exist.
|
||||||
runner = runner_with_spinner_message("Preparing wheel metadata")
|
runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
|
||||||
with backend.subprocess_runner(runner):
|
with backend.subprocess_runner(runner):
|
||||||
distinfo_dir = backend.prepare_metadata_for_build_wheel(
|
try:
|
||||||
metadata_dir
|
distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
|
||||||
)
|
except InstallationSubprocessError as error:
|
||||||
|
raise MetadataGenerationFailed(package_details=details) from error
|
||||||
|
|
||||||
return os.path.join(metadata_dir, distinfo_dir)
|
return os.path.join(metadata_dir, distinfo_dir)
|
||||||
|
|||||||
@@ -5,7 +5,12 @@ import logging
|
|||||||
import os
|
import os
|
||||||
|
|
||||||
from pip._internal.build_env import BuildEnvironment
|
from pip._internal.build_env import BuildEnvironment
|
||||||
from pip._internal.exceptions import InstallationError
|
from pip._internal.cli.spinners import open_spinner
|
||||||
|
from pip._internal.exceptions import (
|
||||||
|
InstallationError,
|
||||||
|
InstallationSubprocessError,
|
||||||
|
MetadataGenerationFailed,
|
||||||
|
)
|
||||||
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
|
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
|
||||||
from pip._internal.utils.subprocess import call_subprocess
|
from pip._internal.utils.subprocess import call_subprocess
|
||||||
from pip._internal.utils.temp_dir import TempDirectory
|
from pip._internal.utils.temp_dir import TempDirectory
|
||||||
@@ -13,49 +18,39 @@ from pip._internal.utils.temp_dir import TempDirectory
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def _find_egg_info(directory):
|
def _find_egg_info(directory: str) -> str:
|
||||||
# type: (str) -> str
|
"""Find an .egg-info subdirectory in `directory`."""
|
||||||
"""Find an .egg-info subdirectory in `directory`.
|
filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
|
||||||
"""
|
|
||||||
filenames = [
|
|
||||||
f for f in os.listdir(directory) if f.endswith(".egg-info")
|
|
||||||
]
|
|
||||||
|
|
||||||
if not filenames:
|
if not filenames:
|
||||||
raise InstallationError(
|
raise InstallationError(f"No .egg-info directory found in {directory}")
|
||||||
f"No .egg-info directory found in {directory}"
|
|
||||||
)
|
|
||||||
|
|
||||||
if len(filenames) > 1:
|
if len(filenames) > 1:
|
||||||
raise InstallationError(
|
raise InstallationError(
|
||||||
"More than one .egg-info directory found in {}".format(
|
"More than one .egg-info directory found in {}".format(directory)
|
||||||
directory
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
return os.path.join(directory, filenames[0])
|
return os.path.join(directory, filenames[0])
|
||||||
|
|
||||||
|
|
||||||
def generate_metadata(
|
def generate_metadata(
|
||||||
build_env, # type: BuildEnvironment
|
build_env: BuildEnvironment,
|
||||||
setup_py_path, # type: str
|
setup_py_path: str,
|
||||||
source_dir, # type: str
|
source_dir: str,
|
||||||
isolated, # type: bool
|
isolated: bool,
|
||||||
details, # type: str
|
details: str,
|
||||||
):
|
) -> str:
|
||||||
# type: (...) -> str
|
|
||||||
"""Generate metadata using setup.py-based defacto mechanisms.
|
"""Generate metadata using setup.py-based defacto mechanisms.
|
||||||
|
|
||||||
Returns the generated metadata directory.
|
Returns the generated metadata directory.
|
||||||
"""
|
"""
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Running setup.py (path:%s) egg_info for package %s',
|
"Running setup.py (path:%s) egg_info for package %s",
|
||||||
setup_py_path, details,
|
setup_py_path,
|
||||||
|
details,
|
||||||
)
|
)
|
||||||
|
|
||||||
egg_info_dir = TempDirectory(
|
egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path
|
||||||
kind="pip-egg-info", globally_managed=True
|
|
||||||
).path
|
|
||||||
|
|
||||||
args = make_setuptools_egg_info_args(
|
args = make_setuptools_egg_info_args(
|
||||||
setup_py_path,
|
setup_py_path,
|
||||||
@@ -64,11 +59,16 @@ def generate_metadata(
|
|||||||
)
|
)
|
||||||
|
|
||||||
with build_env:
|
with build_env:
|
||||||
|
with open_spinner("Preparing metadata (setup.py)") as spinner:
|
||||||
|
try:
|
||||||
call_subprocess(
|
call_subprocess(
|
||||||
args,
|
args,
|
||||||
cwd=source_dir,
|
cwd=source_dir,
|
||||||
command_desc='python setup.py egg_info',
|
command_desc="python setup.py egg_info",
|
||||||
|
spinner=spinner,
|
||||||
)
|
)
|
||||||
|
except InstallationSubprocessError as error:
|
||||||
|
raise MetadataGenerationFailed(package_details=details) from error
|
||||||
|
|
||||||
# Return the .egg-info directory.
|
# Return the .egg-info directory.
|
||||||
return _find_egg_info(egg_info_dir)
|
return _find_egg_info(egg_info_dir)
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import logging
|
|||||||
import os
|
import os
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from pip._vendor.pep517.wrappers import Pep517HookCaller
|
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
||||||
|
|
||||||
from pip._internal.utils.subprocess import runner_with_spinner_message
|
from pip._internal.utils.subprocess import runner_with_spinner_message
|
||||||
|
|
||||||
@@ -10,22 +10,21 @@ logger = logging.getLogger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
def build_wheel_pep517(
|
def build_wheel_pep517(
|
||||||
name, # type: str
|
name: str,
|
||||||
backend, # type: Pep517HookCaller
|
backend: BuildBackendHookCaller,
|
||||||
metadata_directory, # type: str
|
metadata_directory: str,
|
||||||
tempd, # type: str
|
tempd: str,
|
||||||
):
|
) -> Optional[str]:
|
||||||
# type: (...) -> Optional[str]
|
|
||||||
"""Build one InstallRequirement using the PEP 517 build process.
|
"""Build one InstallRequirement using the PEP 517 build process.
|
||||||
|
|
||||||
Returns path to wheel if successfully built. Otherwise, returns None.
|
Returns path to wheel if successfully built. Otherwise, returns None.
|
||||||
"""
|
"""
|
||||||
assert metadata_directory is not None
|
assert metadata_directory is not None
|
||||||
try:
|
try:
|
||||||
logger.debug('Destination directory: %s', tempd)
|
logger.debug("Destination directory: %s", tempd)
|
||||||
|
|
||||||
runner = runner_with_spinner_message(
|
runner = runner_with_spinner_message(
|
||||||
f'Building wheel for {name} (PEP 517)'
|
f"Building wheel for {name} (pyproject.toml)"
|
||||||
)
|
)
|
||||||
with backend.subprocess_runner(runner):
|
with backend.subprocess_runner(runner):
|
||||||
wheel_name = backend.build_wheel(
|
wheel_name = backend.build_wheel(
|
||||||
@@ -33,6 +32,6 @@ def build_wheel_pep517(
|
|||||||
metadata_directory=metadata_directory,
|
metadata_directory=metadata_directory,
|
||||||
)
|
)
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.error('Failed building wheel for %s', name)
|
logger.error("Failed building wheel for %s", name)
|
||||||
return None
|
return None
|
||||||
return os.path.join(tempd, wheel_name)
|
return os.path.join(tempd, wheel_name)
|
||||||
|
|||||||
@@ -4,59 +4,51 @@ from typing import List, Optional

 from pip._internal.cli.spinners import open_spinner
 from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
-from pip._internal.utils.subprocess import (
-    LOG_DIVIDER,
-    call_subprocess,
-    format_command_args,
-)
+from pip._internal.utils.subprocess import call_subprocess, format_command_args

 logger = logging.getLogger(__name__)


 def format_command_result(
-    command_args,  # type: List[str]
-    command_output,  # type: str
-):
-    # type: (...) -> str
+    command_args: List[str],
+    command_output: str,
+) -> str:
     """Format command information for logging."""
     command_desc = format_command_args(command_args)
-    text = f'Command arguments: {command_desc}\n'
+    text = f"Command arguments: {command_desc}\n"

     if not command_output:
-        text += 'Command output: None'
+        text += "Command output: None"
     elif logger.getEffectiveLevel() > logging.DEBUG:
-        text += 'Command output: [use --verbose to show]'
+        text += "Command output: [use --verbose to show]"
     else:
-        if not command_output.endswith('\n'):
-            command_output += '\n'
+        if not command_output.endswith("\n"):
+            command_output += "\n"
-        text += f'Command output:\n{command_output}{LOG_DIVIDER}'
+        text += f"Command output:\n{command_output}"

     return text


 def get_legacy_build_wheel_path(
-    names,  # type: List[str]
-    temp_dir,  # type: str
-    name,  # type: str
-    command_args,  # type: List[str]
-    command_output,  # type: str
-):
-    # type: (...) -> Optional[str]
+    names: List[str],
+    temp_dir: str,
+    name: str,
+    command_args: List[str],
+    command_output: str,
+) -> Optional[str]:
     """Return the path to the wheel in the temporary build directory."""
     # Sort for determinism.
     names = sorted(names)
     if not names:
-        msg = (
-            'Legacy build of wheel for {!r} created no files.\n'
-        ).format(name)
+        msg = ("Legacy build of wheel for {!r} created no files.\n").format(name)
         msg += format_command_result(command_args, command_output)
         logger.warning(msg)
         return None

     if len(names) > 1:
         msg = (
-            'Legacy build of wheel for {!r} created more than one file.\n'
-            'Filenames (choosing first): {}\n'
+            "Legacy build of wheel for {!r} created more than one file.\n"
+            "Filenames (choosing first): {}\n"
         ).format(name, names)
         msg += format_command_result(command_args, command_output)
         logger.warning(msg)

@@ -65,14 +57,13 @@ def get_legacy_build_wheel_path(


 def build_wheel_legacy(
-    name,  # type: str
-    setup_py_path,  # type: str
-    source_dir,  # type: str
-    global_options,  # type: List[str]
-    build_options,  # type: List[str]
-    tempd,  # type: str
-):
-    # type: (...) -> Optional[str]
+    name: str,
+    setup_py_path: str,
+    source_dir: str,
+    global_options: List[str],
+    build_options: List[str],
+    tempd: str,
+) -> Optional[str]:
     """Build one unpacked package using the "legacy" build process.

     Returns path to wheel if successfully built. Otherwise, returns None.

@@ -84,19 +75,20 @@ def build_wheel_legacy(
         destination_dir=tempd,
     )

-    spin_message = f'Building wheel for {name} (setup.py)'
+    spin_message = f"Building wheel for {name} (setup.py)"
     with open_spinner(spin_message) as spinner:
-        logger.debug('Destination directory: %s', tempd)
+        logger.debug("Destination directory: %s", tempd)

         try:
             output = call_subprocess(
                 wheel_args,
+                command_desc="python setup.py bdist_wheel",
                 cwd=source_dir,
                 spinner=spinner,
             )
         except Exception:
             spinner.finish("error")
-            logger.error('Failed building wheel for %s', name)
+            logger.error("Failed building wheel for %s", name)
             return None

     names = os.listdir(tempd)
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from typing import TYPE_CHECKING, Callable, Dict, List, NamedTuple, Optional, Set, Tuple
|
from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
|
||||||
|
|
||||||
from pip._vendor.packaging.requirements import Requirement
|
from pip._vendor.packaging.requirements import Requirement
|
||||||
from pip._vendor.packaging.utils import canonicalize_name
|
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||||
|
|
||||||
from pip._internal.distributions import make_distribution_for_install_requirement
|
from pip._internal.distributions import make_distribution_for_install_requirement
|
||||||
from pip._internal.metadata import get_default_environment
|
from pip._internal.metadata import get_default_environment
|
||||||
from pip._internal.metadata.base import DistributionVersion
|
from pip._internal.metadata.base import DistributionVersion
|
||||||
from pip._internal.req.req_install import InstallRequirement
|
from pip._internal.req.req_install import InstallRequirement
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from pip._vendor.packaging.utils import NormalizedName
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@@ -24,12 +21,12 @@ class PackageDetails(NamedTuple):
|
|||||||
|
|
||||||
|
|
||||||
# Shorthands
|
# Shorthands
|
||||||
PackageSet = Dict['NormalizedName', PackageDetails]
|
PackageSet = Dict[NormalizedName, PackageDetails]
|
||||||
Missing = Tuple['NormalizedName', Requirement]
|
Missing = Tuple[NormalizedName, Requirement]
|
||||||
Conflicting = Tuple['NormalizedName', DistributionVersion, Requirement]
|
Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]
|
||||||
|
|
||||||
MissingDict = Dict['NormalizedName', List[Missing]]
|
MissingDict = Dict[NormalizedName, List[Missing]]
|
||||||
ConflictingDict = Dict['NormalizedName', List[Conflicting]]
|
ConflictingDict = Dict[NormalizedName, List[Conflicting]]
|
||||||
CheckResult = Tuple[MissingDict, ConflictingDict]
|
CheckResult = Tuple[MissingDict, ConflictingDict]
|
||||||
ConflictDetails = Tuple[PackageSet, CheckResult]
|
ConflictDetails = Tuple[PackageSet, CheckResult]
|
||||||
|
|
||||||
@@ -51,8 +48,9 @@ def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
|
|||||||
return package_set, problems
|
return package_set, problems
|
||||||
|
|
||||||
|
|
||||||
def check_package_set(package_set, should_ignore=None):
|
def check_package_set(
|
||||||
# type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
|
package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
|
||||||
|
) -> CheckResult:
|
||||||
"""Check if a package set is consistent
|
"""Check if a package set is consistent
|
||||||
|
|
||||||
If should_ignore is passed, it should be a callable that takes a
|
If should_ignore is passed, it should be a callable that takes a
|
||||||
@@ -64,8 +62,8 @@ def check_package_set(package_set, should_ignore=None):
|
|||||||
|
|
||||||
for package_name, package_detail in package_set.items():
|
for package_name, package_detail in package_set.items():
|
||||||
# Info about dependencies of package_name
|
# Info about dependencies of package_name
|
||||||
missing_deps = set() # type: Set[Missing]
|
missing_deps: Set[Missing] = set()
|
||||||
conflicting_deps = set() # type: Set[Conflicting]
|
conflicting_deps: Set[Conflicting] = set()
|
||||||
|
|
||||||
if should_ignore and should_ignore(package_name):
|
if should_ignore and should_ignore(package_name):
|
||||||
continue
|
continue
|
||||||
@@ -77,7 +75,7 @@ def check_package_set(package_set, should_ignore=None):
|
|||||||
if name not in package_set:
|
if name not in package_set:
|
||||||
missed = True
|
missed = True
|
||||||
if req.marker is not None:
|
if req.marker is not None:
|
||||||
missed = req.marker.evaluate()
|
missed = req.marker.evaluate({"extra": ""})
|
||||||
if missed:
|
if missed:
|
||||||
missing_deps.add((name, req))
|
missing_deps.add((name, req))
|
||||||
continue
|
continue
|
||||||
@@ -95,8 +93,7 @@ def check_package_set(package_set, should_ignore=None):
|
|||||||
return missing, conflicting
|
return missing, conflicting
|
||||||
|
|
||||||
|
|
||||||
def check_install_conflicts(to_install):
|
def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
|
||||||
# type: (List[InstallRequirement]) -> ConflictDetails
|
|
||||||
"""For checking if the dependency graph would be consistent after \
|
"""For checking if the dependency graph would be consistent after \
|
||||||
installing given requirements
|
installing given requirements
|
||||||
"""
|
"""
|
||||||
@@ -112,33 +109,32 @@ def check_install_conflicts(to_install):
|
|||||||
package_set,
|
package_set,
|
||||||
check_package_set(
|
check_package_set(
|
||||||
package_set, should_ignore=lambda name: name not in whitelist
|
package_set, should_ignore=lambda name: name not in whitelist
|
||||||
)
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _simulate_installation_of(to_install, package_set):
|
def _simulate_installation_of(
|
||||||
# type: (List[InstallRequirement], PackageSet) -> Set[NormalizedName]
|
to_install: List[InstallRequirement], package_set: PackageSet
|
||||||
"""Computes the version of packages after installing to_install.
|
) -> Set[NormalizedName]:
|
||||||
"""
|
"""Computes the version of packages after installing to_install."""
|
||||||
# Keep track of packages that were installed
|
# Keep track of packages that were installed
|
||||||
installed = set()
|
installed = set()
|
||||||
|
|
||||||
# Modify it as installing requirement_set would (assuming no errors)
|
# Modify it as installing requirement_set would (assuming no errors)
|
||||||
for inst_req in to_install:
|
for inst_req in to_install:
|
||||||
abstract_dist = make_distribution_for_install_requirement(inst_req)
|
abstract_dist = make_distribution_for_install_requirement(inst_req)
|
||||||
dist = abstract_dist.get_pkg_resources_distribution()
|
dist = abstract_dist.get_metadata_distribution()
|
||||||
|
name = dist.canonical_name
|
||||||
assert dist is not None
|
package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))
|
||||||
name = canonicalize_name(dist.project_name)
|
|
||||||
package_set[name] = PackageDetails(dist.parsed_version, dist.requires())
|
|
||||||
|
|
||||||
installed.add(name)
|
installed.add(name)
|
||||||
|
|
||||||
return installed
|
return installed
|
||||||
|
|
||||||
|
|
||||||
def _create_whitelist(would_be_installed, package_set):
|
def _create_whitelist(
|
||||||
# type: (Set[NormalizedName], PackageSet) -> Set[NormalizedName]
|
would_be_installed: Set[NormalizedName], package_set: PackageSet
|
||||||
|
) -> Set[NormalizedName]:
|
||||||
packages_affected = set(would_be_installed)
|
packages_affected = set(would_be_installed)
|
||||||
|
|
||||||
for package_name in package_set:
|
for package_name in package_set:
|
||||||
|
|||||||
@@ -1,19 +1,8 @@
|
|||||||
import collections
|
import collections
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
from typing import (
|
from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
|
||||||
Container,
|
|
||||||
Dict,
|
|
||||||
Iterable,
|
|
||||||
Iterator,
|
|
||||||
List,
|
|
||||||
NamedTuple,
|
|
||||||
Optional,
|
|
||||||
Set,
|
|
||||||
Union,
|
|
||||||
)
|
|
||||||
|
|
||||||
from pip._vendor.packaging.requirements import Requirement
|
|
||||||
from pip._vendor.packaging.utils import canonicalize_name
|
from pip._vendor.packaging.utils import canonicalize_name
|
||||||
from pip._vendor.packaging.version import Version
|
from pip._vendor.packaging.version import Version
|
||||||
|
|
||||||
@@ -30,22 +19,20 @@ logger = logging.getLogger(__name__)


 class _EditableInfo(NamedTuple):
-    requirement: Optional[str]
-    editable: bool
+    requirement: str
     comments: List[str]


 def freeze(
-    requirement=None,  # type: Optional[List[str]]
-    local_only=False,  # type: bool
-    user_only=False,  # type: bool
-    paths=None,  # type: Optional[List[str]]
-    isolated=False,  # type: bool
-    exclude_editable=False,  # type: bool
-    skip=()  # type: Container[str]
-):
-    # type: (...) -> Iterator[str]
-    installations = {}  # type: Dict[str, FrozenRequirement]
+    requirement: Optional[List[str]] = None,
+    local_only: bool = False,
+    user_only: bool = False,
+    paths: Optional[List[str]] = None,
+    isolated: bool = False,
+    exclude_editable: bool = False,
+    skip: Container[str] = (),
+) -> Generator[str, None, None]:
+    installations: Dict[str, FrozenRequirement] = {}

     dists = get_environment(paths).iter_installed_distributions(
         local_only=local_only,
@@ -63,42 +50,50 @@ def freeze(
|
|||||||
# should only be emitted once, even if the same option is in multiple
|
# should only be emitted once, even if the same option is in multiple
|
||||||
# requirements files, so we need to keep track of what has been emitted
|
# requirements files, so we need to keep track of what has been emitted
|
||||||
# so that we don't emit it again if it's seen again
|
# so that we don't emit it again if it's seen again
|
||||||
emitted_options = set() # type: Set[str]
|
emitted_options: Set[str] = set()
|
||||||
# keep track of which files a requirement is in so that we can
|
# keep track of which files a requirement is in so that we can
|
||||||
# give an accurate warning if a requirement appears multiple times.
|
# give an accurate warning if a requirement appears multiple times.
|
||||||
req_files = collections.defaultdict(list) # type: Dict[str, List[str]]
|
req_files: Dict[str, List[str]] = collections.defaultdict(list)
|
||||||
for req_file_path in requirement:
|
for req_file_path in requirement:
|
||||||
with open(req_file_path) as req_file:
|
with open(req_file_path) as req_file:
|
||||||
for line in req_file:
|
for line in req_file:
|
||||||
if (not line.strip() or
|
if (
|
||||||
line.strip().startswith('#') or
|
not line.strip()
|
||||||
line.startswith((
|
or line.strip().startswith("#")
|
||||||
'-r', '--requirement',
|
or line.startswith(
|
||||||
'-f', '--find-links',
|
(
|
||||||
'-i', '--index-url',
|
"-r",
|
||||||
'--pre',
|
"--requirement",
|
||||||
'--trusted-host',
|
"-f",
|
||||||
'--process-dependency-links',
|
"--find-links",
|
||||||
'--extra-index-url',
|
"-i",
|
||||||
'--use-feature'))):
|
"--index-url",
|
||||||
|
"--pre",
|
||||||
|
"--trusted-host",
|
||||||
|
"--process-dependency-links",
|
||||||
|
"--extra-index-url",
|
||||||
|
"--use-feature",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
):
|
||||||
line = line.rstrip()
|
line = line.rstrip()
|
||||||
if line not in emitted_options:
|
if line not in emitted_options:
|
||||||
emitted_options.add(line)
|
emitted_options.add(line)
|
||||||
yield line
|
yield line
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if line.startswith('-e') or line.startswith('--editable'):
|
if line.startswith("-e") or line.startswith("--editable"):
|
||||||
if line.startswith('-e'):
|
if line.startswith("-e"):
|
||||||
line = line[2:].strip()
|
line = line[2:].strip()
|
||||||
else:
|
else:
|
||||||
line = line[len('--editable'):].strip().lstrip('=')
|
line = line[len("--editable") :].strip().lstrip("=")
|
||||||
line_req = install_req_from_editable(
|
line_req = install_req_from_editable(
|
||||||
line,
|
line,
|
||||||
isolated=isolated,
|
isolated=isolated,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
line_req = install_req_from_line(
|
line_req = install_req_from_line(
|
||||||
COMMENT_RE.sub('', line).strip(),
|
COMMENT_RE.sub("", line).strip(),
|
||||||
isolated=isolated,
|
isolated=isolated,
|
||||||
)
|
)
|
||||||
|
|
||||||
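In the hunk above, pip freeze -r replays a requirements file: blank lines, comments and global options (-r/--requirement, -f/--find-links, -i/--index-url, --pre, --trusted-host, --process-dependency-links, --extra-index-url, --use-feature) are echoed once each via emitted_options, while everything else is re-parsed as a requirement. A small standalone sketch of that classification; the helper name is hypothetical.

    # Hedged sketch of the line classification used when `pip freeze -r FILE`
    # replays a requirements file; the helper name is made up for illustration.
    OPTION_PREFIXES = (
        "-r", "--requirement",
        "-f", "--find-links",
        "-i", "--index-url",
        "--pre",
        "--trusted-host",
        "--process-dependency-links",
        "--extra-index-url",
        "--use-feature",
    )

    def is_passthrough_line(line: str) -> bool:
        """Blank lines, comments and global options are echoed as-is."""
        stripped = line.strip()
        return (
            not stripped
            or stripped.startswith("#")
            or line.startswith(OPTION_PREFIXES)
        )

    sample_lines = [
        "--index-url https://example.invalid/simple",
        "requests>=2.0",
        "--index-url https://example.invalid/simple",
    ]
    emitted = set()
    for line in sample_lines:
        if is_passthrough_line(line):
            line = line.rstrip()
            if line not in emitted:  # de-duplicate options, as emitted_options does
                emitted.add(line)
                print(line)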
@@ -106,15 +101,15 @@ def freeze(
|
|||||||
logger.info(
|
logger.info(
|
||||||
"Skipping line in requirement file [%s] because "
|
"Skipping line in requirement file [%s] because "
|
||||||
"it's not clear what it would install: %s",
|
"it's not clear what it would install: %s",
|
||||||
req_file_path, line.strip(),
|
req_file_path,
|
||||||
|
line.strip(),
|
||||||
)
|
)
|
||||||
logger.info(
|
logger.info(
|
||||||
" (add #egg=PackageName to the URL to avoid"
|
" (add #egg=PackageName to the URL to avoid"
|
||||||
" this warning)"
|
" this warning)"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
line_req_canonical_name = canonicalize_name(
|
line_req_canonical_name = canonicalize_name(line_req.name)
|
||||||
line_req.name)
|
|
||||||
if line_req_canonical_name not in installations:
|
if line_req_canonical_name not in installations:
|
||||||
# either it's not installed, or it is installed
|
# either it's not installed, or it is installed
|
||||||
# but has been processed already
|
# but has been processed already
|
||||||
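The editable branch at the end of the previous hunk strips the -e / --editable flag from a requirements-file line before handing the remainder to install_req_from_editable. A hedged standalone sketch of just that string handling:

    # Hedged sketch of how an editable flag is stripped from a requirements-file
    # line, mirroring the branch shown in the freeze() hunk above.
    def strip_editable_flag(line: str) -> str:
        if line.startswith("-e"):
            return line[2:].strip()
        if line.startswith("--editable"):
            return line[len("--editable"):].strip().lstrip("=")
        return line

    print(strip_editable_flag("-e ./pkgs/mypkg"))          # ./pkgs/mypkg
    print(strip_editable_flag("--editable=./pkgs/mypkg"))  # ./pkgs/mypkg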
@@ -123,14 +118,13 @@ def freeze(
|
|||||||
"Requirement file [%s] contains %s, but "
|
"Requirement file [%s] contains %s, but "
|
||||||
"package %r is not installed",
|
"package %r is not installed",
|
||||||
req_file_path,
|
req_file_path,
|
||||||
COMMENT_RE.sub('', line).strip(),
|
COMMENT_RE.sub("", line).strip(),
|
||||||
line_req.name
|
line_req.name,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
req_files[line_req.name].append(req_file_path)
|
req_files[line_req.name].append(req_file_path)
|
||||||
else:
|
else:
|
||||||
yield str(installations[
|
yield str(installations[line_req_canonical_name]).rstrip()
|
||||||
line_req_canonical_name]).rstrip()
|
|
||||||
del installations[line_req_canonical_name]
|
del installations[line_req_canonical_name]
|
||||||
req_files[line_req.name].append(req_file_path)
|
req_files[line_req.name].append(req_file_path)
|
||||||
|
|
||||||
@@ -138,42 +132,33 @@ def freeze(
|
|||||||
# single requirements file or in different requirements files).
|
# single requirements file or in different requirements files).
|
||||||
for name, files in req_files.items():
|
for name, files in req_files.items():
|
||||||
if len(files) > 1:
|
if len(files) > 1:
|
||||||
logger.warning("Requirement %s included multiple times [%s]",
|
logger.warning(
|
||||||
name, ', '.join(sorted(set(files))))
|
"Requirement %s included multiple times [%s]",
|
||||||
|
name,
|
||||||
yield(
|
", ".join(sorted(set(files))),
|
||||||
'## The following requirements were added by '
|
|
||||||
'pip freeze:'
|
|
||||||
)
|
)
|
||||||
for installation in sorted(
|
|
||||||
installations.values(), key=lambda x: x.name.lower()):
|
yield ("## The following requirements were added by pip freeze:")
|
||||||
|
for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
|
||||||
if installation.canonical_name not in skip:
|
if installation.canonical_name not in skip:
|
||||||
yield str(installation).rstrip()
|
yield str(installation).rstrip()
|


 def _format_as_name_version(dist: BaseDistribution) -> str:
-    if isinstance(dist.version, Version):
-        return f"{dist.raw_name}=={dist.version}"
-    return f"{dist.raw_name}==={dist.version}"
+    dist_version = dist.version
+    if isinstance(dist_version, Version):
+        return f"{dist.raw_name}=={dist_version}"
+    return f"{dist.raw_name}==={dist_version}"


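As reconstructed above, pip freeze pins with == when the installed version is a valid PEP 440 version and falls back to the arbitrary-equality operator === otherwise. A standalone sketch of the same rule, written against the public packaging library rather than pip's vendored copy (the function name and example versions are illustrative):

    # Hedged sketch of the ==/=== choice made by _format_as_name_version.
    from packaging.version import InvalidVersion, Version

    def format_pin(raw_name: str, raw_version: str) -> str:
        try:
            Version(raw_version)                      # valid PEP 440 version -> normal pin
            return f"{raw_name}=={raw_version}"
        except InvalidVersion:
            return f"{raw_name}==={raw_version}"      # legacy version -> arbitrary equality

    print(format_pin("requests", "2.26.0"))   # requests==2.26.0
    print(format_pin("weird-pkg", "2004d"))   # weird-pkg===2004d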
||||||
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
|
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
|
||||||
"""
|
"""
|
||||||
Compute and return values (req, editable, comments) for use in
|
Compute and return values (req, comments) for use in
|
||||||
FrozenRequirement.from_dist().
|
FrozenRequirement.from_dist().
|
||||||
"""
|
"""
|
||||||
if not dist.editable:
|
editable_project_location = dist.editable_project_location
|
||||||
return _EditableInfo(requirement=None, editable=False, comments=[])
|
assert editable_project_location
|
||||||
if dist.location is None:
|
location = os.path.normcase(os.path.abspath(editable_project_location))
|
||||||
display = _format_as_name_version(dist)
|
|
||||||
logger.warning("Editable requirement not found on disk: %s", display)
|
|
||||||
return _EditableInfo(
|
|
||||||
requirement=None,
|
|
||||||
editable=True,
|
|
||||||
comments=[f"# Editable install not found ({display})"],
|
|
||||||
)
|
|
||||||
|
|
||||||
location = os.path.normcase(os.path.abspath(dist.location))
|
|
||||||
|
|
||||||
from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs
|
from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs
|
||||||
|
|
||||||
@@ -182,13 +167,13 @@ def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
|
|||||||
if vcs_backend is None:
|
if vcs_backend is None:
|
||||||
display = _format_as_name_version(dist)
|
display = _format_as_name_version(dist)
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'No VCS found for editable requirement "%s" in: %r', display,
|
'No VCS found for editable requirement "%s" in: %r',
|
||||||
|
display,
|
||||||
location,
|
location,
|
||||||
)
|
)
|
||||||
return _EditableInfo(
|
return _EditableInfo(
|
||||||
requirement=location,
|
requirement=location,
|
||||||
editable=True,
|
comments=[f"# Editable install with no version control ({display})"],
|
||||||
comments=[f'# Editable install with no version control ({display})'],
|
|
||||||
)
|
)
|
||||||
|
|
||||||
vcs_name = type(vcs_backend).__name__
|
vcs_name = type(vcs_backend).__name__
|
||||||
@@ -199,50 +184,47 @@ def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
|
|||||||
display = _format_as_name_version(dist)
|
display = _format_as_name_version(dist)
|
||||||
return _EditableInfo(
|
return _EditableInfo(
|
||||||
requirement=location,
|
requirement=location,
|
||||||
editable=True,
|
comments=[f"# Editable {vcs_name} install with no remote ({display})"],
|
||||||
comments=[f'# Editable {vcs_name} install with no remote ({display})'],
|
|
||||||
)
|
)
|
||||||
except RemoteNotValidError as ex:
|
except RemoteNotValidError as ex:
|
||||||
display = _format_as_name_version(dist)
|
display = _format_as_name_version(dist)
|
||||||
return _EditableInfo(
|
return _EditableInfo(
|
||||||
requirement=location,
|
requirement=location,
|
||||||
editable=True,
|
|
||||||
comments=[
|
comments=[
|
||||||
f"# Editable {vcs_name} install ({display}) with either a deleted "
|
f"# Editable {vcs_name} install ({display}) with either a deleted "
|
||||||
f"local remote or invalid URI:",
|
f"local remote or invalid URI:",
|
||||||
f"# '{ex.url}'",
|
f"# '{ex.url}'",
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
except BadCommand:
|
except BadCommand:
|
||||||
logger.warning(
|
logger.warning(
|
||||||
'cannot determine version of editable source in %s '
|
"cannot determine version of editable source in %s "
|
||||||
'(%s command not found in path)',
|
"(%s command not found in path)",
|
||||||
location,
|
location,
|
||||||
vcs_backend.name,
|
vcs_backend.name,
|
||||||
)
|
)
|
||||||
return _EditableInfo(requirement=None, editable=True, comments=[])
|
return _EditableInfo(requirement=location, comments=[])
|
||||||
|
|
||||||
except InstallationError as exc:
|
except InstallationError as exc:
|
||||||
logger.warning(
|
logger.warning("Error when trying to get requirement for VCS system %s", exc)
|
||||||
"Error when trying to get requirement for VCS system %s, "
|
|
||||||
"falling back to uneditable format", exc
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
return _EditableInfo(requirement=req, editable=True, comments=[])
|
return _EditableInfo(requirement=req, comments=[])
|
||||||
|
|
||||||
logger.warning('Could not determine repository location of %s', location)
|
logger.warning("Could not determine repository location of %s", location)
|
||||||
|
|
||||||
return _EditableInfo(
|
return _EditableInfo(
|
||||||
requirement=None,
|
requirement=location,
|
||||||
editable=False,
|
comments=["## !! Could not determine repository location"],
|
||||||
comments=['## !! Could not determine repository location'],
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class FrozenRequirement:
|
class FrozenRequirement:
|
||||||
def __init__(self, name, req, editable, comments=()):
|
def __init__(
|
||||||
# type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
|
self,
|
||||||
|
name: str,
|
||||||
|
req: str,
|
||||||
|
editable: bool,
|
||||||
|
comments: Iterable[str] = (),
|
||||||
|
) -> None:
|
||||||
self.name = name
|
self.name = name
|
||||||
self.canonical_name = canonicalize_name(name)
|
self.canonical_name = canonicalize_name(name)
|
||||||
self.req = req
|
self.req = req
|
||||||
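The comments produced by _get_editable_info (for example the "# Editable install with no version control (...)" notes) end up above the requirement line in pip freeze output, and editable requirements get a -e prefix, as FrozenRequirement.__str__ shows in the hunk that follows. A hedged miniature of that formatting; the class name is made up.

    # Hedged mini version of FrozenRequirement.__str__ as shown in the diff:
    # comments are printed above the line, and editable requirements get "-e ".
    from typing import Iterable

    class FrozenLine:
        def __init__(self, req: str, editable: bool, comments: Iterable[str] = ()) -> None:
            self.req = req
            self.editable = editable
            self.comments = comments

        def __str__(self) -> str:
            req = self.req
            if self.editable:
                req = f"-e {req}"
            return "\n".join(list(self.comments) + [str(req)]) + "\n"

    print(FrozenLine("/src/mypkg", editable=True,
                     comments=["# Editable Git install with no remote (mypkg==0.1)"]),
          end="")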
@@ -251,27 +233,23 @@ class FrozenRequirement:
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
|
def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
|
||||||
# TODO `get_requirement_info` is taking care of editable requirements.
|
editable = dist.editable
|
||||||
# TODO This should be refactored when we will add detection of
|
if editable:
|
||||||
# editable that provide .dist-info metadata.
|
req, comments = _get_editable_info(dist)
|
||||||
req, editable, comments = _get_editable_info(dist)
|
else:
|
||||||
if req is None and not editable:
|
comments = []
|
||||||
# if PEP 610 metadata is present, attempt to use it
|
|
||||||
direct_url = dist.direct_url
|
direct_url = dist.direct_url
|
||||||
if direct_url:
|
if direct_url:
|
||||||
req = direct_url_as_pep440_direct_reference(
|
# if PEP 610 metadata is present, use it
|
||||||
direct_url, dist.raw_name
|
req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
|
||||||
)
|
else:
|
||||||
comments = []
|
|
||||||
if req is None:
|
|
||||||
# name==version requirement
|
# name==version requirement
|
||||||
req = _format_as_name_version(dist)
|
req = _format_as_name_version(dist)
|
||||||
|
|
||||||
return cls(dist.raw_name, req, editable, comments=comments)
|
return cls(dist.raw_name, req, editable, comments=comments)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self) -> str:
|
||||||
# type: () -> str
|
|
||||||
req = self.req
|
req = self.req
|
||||||
if self.editable:
|
if self.editable:
|
||||||
req = f'-e {req}'
|
req = f"-e {req}"
|
||||||
return '\n'.join(list(self.comments) + [str(req)]) + '\n'
|
return "\n".join(list(self.comments) + [str(req)]) + "\n"
|
||||||
|
@@ -1,7 +1,7 @@
 """Legacy editable installation process, i.e. `setup.py develop`.
 """
 import logging
-from typing import List, Optional, Sequence
+from typing import Optional, Sequence

 from pip._internal.build_env import BuildEnvironment
 from pip._internal.utils.logging import indent_log
@@ -12,27 +12,25 @@ logger = logging.getLogger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
def install_editable(
|
def install_editable(
|
||||||
install_options, # type: List[str]
|
*,
|
||||||
global_options, # type: Sequence[str]
|
global_options: Sequence[str],
|
||||||
prefix, # type: Optional[str]
|
prefix: Optional[str],
|
||||||
home, # type: Optional[str]
|
home: Optional[str],
|
||||||
use_user_site, # type: bool
|
use_user_site: bool,
|
||||||
name, # type: str
|
name: str,
|
||||||
setup_py_path, # type: str
|
setup_py_path: str,
|
||||||
isolated, # type: bool
|
isolated: bool,
|
||||||
build_env, # type: BuildEnvironment
|
build_env: BuildEnvironment,
|
||||||
unpacked_source_directory, # type: str
|
unpacked_source_directory: str,
|
||||||
):
|
) -> None:
|
||||||
# type: (...) -> None
|
|
||||||
"""Install a package in editable mode. Most arguments are pass-through
|
"""Install a package in editable mode. Most arguments are pass-through
|
||||||
to setuptools.
|
to setuptools.
|
||||||
"""
|
"""
|
||||||
logger.info('Running setup.py develop for %s', name)
|
logger.info("Running setup.py develop for %s", name)
|
||||||
|
|
||||||
args = make_setuptools_develop_args(
|
args = make_setuptools_develop_args(
|
||||||
setup_py_path,
|
setup_py_path,
|
||||||
global_options=global_options,
|
global_options=global_options,
|
||||||
install_options=install_options,
|
|
||||||
no_user_config=isolated,
|
no_user_config=isolated,
|
||||||
prefix=prefix,
|
prefix=prefix,
|
||||||
home=home,
|
home=home,
|
||||||
@@ -43,5 +41,6 @@ def install_editable(
     with build_env:
         call_subprocess(
             args,
+            command_desc="python setup.py develop",
             cwd=unpacked_source_directory,
         )
|||||||
@@ -1,132 +0,0 @@
|
|||||||
"""Legacy installation process, i.e. `setup.py install`.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from distutils.util import change_root
|
|
||||||
from typing import List, Optional, Sequence
|
|
||||||
|
|
||||||
from pip._internal.build_env import BuildEnvironment
|
|
||||||
from pip._internal.exceptions import InstallationError
|
|
||||||
from pip._internal.models.scheme import Scheme
|
|
||||||
from pip._internal.utils.logging import indent_log
|
|
||||||
from pip._internal.utils.misc import ensure_dir
|
|
||||||
from pip._internal.utils.setuptools_build import make_setuptools_install_args
|
|
||||||
from pip._internal.utils.subprocess import runner_with_spinner_message
|
|
||||||
from pip._internal.utils.temp_dir import TempDirectory
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class LegacyInstallFailure(Exception):
|
|
||||||
def __init__(self):
|
|
||||||
# type: () -> None
|
|
||||||
self.parent = sys.exc_info()
|
|
||||||
|
|
||||||
|
|
||||||
def write_installed_files_from_setuptools_record(
|
|
||||||
record_lines: List[str],
|
|
||||||
root: Optional[str],
|
|
||||||
req_description: str,
|
|
||||||
) -> None:
|
|
||||||
def prepend_root(path):
|
|
||||||
# type: (str) -> str
|
|
||||||
if root is None or not os.path.isabs(path):
|
|
||||||
return path
|
|
||||||
else:
|
|
||||||
return change_root(root, path)
|
|
||||||
|
|
||||||
for line in record_lines:
|
|
||||||
directory = os.path.dirname(line)
|
|
||||||
if directory.endswith('.egg-info'):
|
|
||||||
egg_info_dir = prepend_root(directory)
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
message = (
|
|
||||||
"{} did not indicate that it installed an "
|
|
||||||
".egg-info directory. Only setup.py projects "
|
|
||||||
"generating .egg-info directories are supported."
|
|
||||||
).format(req_description)
|
|
||||||
raise InstallationError(message)
|
|
||||||
|
|
||||||
new_lines = []
|
|
||||||
for line in record_lines:
|
|
||||||
filename = line.strip()
|
|
||||||
if os.path.isdir(filename):
|
|
||||||
filename += os.path.sep
|
|
||||||
new_lines.append(
|
|
||||||
os.path.relpath(prepend_root(filename), egg_info_dir)
|
|
||||||
)
|
|
||||||
new_lines.sort()
|
|
||||||
ensure_dir(egg_info_dir)
|
|
||||||
inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
|
|
||||||
with open(inst_files_path, 'w') as f:
|
|
||||||
f.write('\n'.join(new_lines) + '\n')
|
|
||||||
|
|
||||||
|
|
||||||
def install(
|
|
||||||
install_options, # type: List[str]
|
|
||||||
global_options, # type: Sequence[str]
|
|
||||||
root, # type: Optional[str]
|
|
||||||
home, # type: Optional[str]
|
|
||||||
prefix, # type: Optional[str]
|
|
||||||
use_user_site, # type: bool
|
|
||||||
pycompile, # type: bool
|
|
||||||
scheme, # type: Scheme
|
|
||||||
setup_py_path, # type: str
|
|
||||||
isolated, # type: bool
|
|
||||||
req_name, # type: str
|
|
||||||
build_env, # type: BuildEnvironment
|
|
||||||
unpacked_source_directory, # type: str
|
|
||||||
req_description, # type: str
|
|
||||||
):
|
|
||||||
# type: (...) -> bool
|
|
||||||
|
|
||||||
header_dir = scheme.headers
|
|
||||||
|
|
||||||
with TempDirectory(kind="record") as temp_dir:
|
|
||||||
try:
|
|
||||||
record_filename = os.path.join(temp_dir.path, 'install-record.txt')
|
|
||||||
install_args = make_setuptools_install_args(
|
|
||||||
setup_py_path,
|
|
||||||
global_options=global_options,
|
|
||||||
install_options=install_options,
|
|
||||||
record_filename=record_filename,
|
|
||||||
root=root,
|
|
||||||
prefix=prefix,
|
|
||||||
header_dir=header_dir,
|
|
||||||
home=home,
|
|
||||||
use_user_site=use_user_site,
|
|
||||||
no_user_config=isolated,
|
|
||||||
pycompile=pycompile,
|
|
||||||
)
|
|
||||||
|
|
||||||
runner = runner_with_spinner_message(
|
|
||||||
f"Running setup.py install for {req_name}"
|
|
||||||
)
|
|
||||||
with indent_log(), build_env:
|
|
||||||
runner(
|
|
||||||
cmd=install_args,
|
|
||||||
cwd=unpacked_source_directory,
|
|
||||||
)
|
|
||||||
|
|
||||||
if not os.path.exists(record_filename):
|
|
||||||
logger.debug('Record file %s not found', record_filename)
|
|
||||||
# Signal to the caller that we didn't install the new package
|
|
||||||
return False
|
|
||||||
|
|
||||||
except Exception:
|
|
||||||
# Signal to the caller that we didn't install the new package
|
|
||||||
raise LegacyInstallFailure
|
|
||||||
|
|
||||||
# At this point, we have successfully installed the requirement.
|
|
||||||
|
|
||||||
# We intentionally do not use any encoding to read the file because
|
|
||||||
# setuptools writes the file using distutils.file_util.write_file,
|
|
||||||
# which does not specify an encoding.
|
|
||||||
with open(record_filename) as f:
|
|
||||||
record_lines = f.read().splitlines()
|
|
||||||
|
|
||||||
write_installed_files_from_setuptools_record(record_lines, root, req_description)
|
|
||||||
return True
|
|
||||||
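The block above removes the legacy setup.py install code path entirely. Its write_installed_files_from_setuptools_record helper rewrote the paths from the setuptools --record file relative to the .egg-info directory before writing installed-files.txt. A hedged standalone sketch of that path handling; it approximates distutils.util.change_root with a plain join and uses made-up example paths.

    # Hedged sketch of the record rewriting done by the removed
    # write_installed_files_from_setuptools_record helper.
    import os
    from typing import List, Optional

    def installed_files_lines(record_lines: List[str], root: Optional[str] = None) -> List[str]:
        def prepend_root(path: str) -> str:
            if root is None or not os.path.isabs(path):
                return path
            # the original used distutils.util.change_root; a plain join is close enough here
            return os.path.join(root, os.path.splitdrive(path)[1].lstrip(os.sep))

        # find the .egg-info directory mentioned in the record
        for line in record_lines:
            directory = os.path.dirname(line)
            if directory.endswith(".egg-info"):
                egg_info_dir = prepend_root(directory)
                break
        else:
            raise RuntimeError("record did not mention an .egg-info directory")

        new_lines = []
        for line in record_lines:
            filename = line.strip()
            if os.path.isdir(filename):
                filename += os.path.sep
            new_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir))
        return sorted(new_lines)

    example = [
        "/tmp/site-packages/mypkg/__init__.py",
        "/tmp/site-packages/mypkg-1.0.egg-info/PKG-INFO",
    ]
    print(installed_files_lines(example))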
@@ -22,6 +22,7 @@ from typing import (
     BinaryIO,
     Callable,
     Dict,
+    Generator,
     Iterable,
     Iterator,
     List,
@@ -38,11 +39,14 @@ from zipfile import ZipFile, ZipInfo
 from pip._vendor.distlib.scripts import ScriptMaker
 from pip._vendor.distlib.util import get_export_entry
 from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.six import ensure_str, ensure_text, reraise

 from pip._internal.exceptions import InstallationError
 from pip._internal.locations import get_major_minor_version
-from pip._internal.metadata import BaseDistribution, get_wheel_distribution
+from pip._internal.metadata import (
+    BaseDistribution,
+    FilesystemWheel,
+    get_wheel_distribution,
+)
 from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
 from pip._internal.models.scheme import SCHEME_KEYS, Scheme
 from pip._internal.utils.filesystem import adjacent_tmp_file, replace
@@ -59,62 +63,55 @@ if TYPE_CHECKING:
|
|||||||
from typing import Protocol
|
from typing import Protocol
|
||||||
|
|
||||||
class File(Protocol):
|
class File(Protocol):
|
||||||
src_record_path = None # type: RecordPath
|
src_record_path: "RecordPath"
|
||||||
dest_path = None # type: str
|
dest_path: str
|
||||||
changed = None # type: bool
|
changed: bool
|
||||||
|
|
||||||
def save(self):
|
def save(self) -> None:
|
||||||
# type: () -> None
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
RecordPath = NewType('RecordPath', str)
|
RecordPath = NewType("RecordPath", str)
|
||||||
InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
|
InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
|
||||||
|
|
||||||
|
|
||||||
def rehash(path, blocksize=1 << 20):
|
def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
|
||||||
# type: (str, int) -> Tuple[str, str]
|
|
||||||
"""Return (encoded_digest, length) for path using hashlib.sha256()"""
|
"""Return (encoded_digest, length) for path using hashlib.sha256()"""
|
||||||
h, length = hash_file(path, blocksize)
|
h, length = hash_file(path, blocksize)
|
||||||
digest = 'sha256=' + urlsafe_b64encode(
|
digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=")
|
||||||
h.digest()
|
|
||||||
).decode('latin1').rstrip('=')
|
|
||||||
return (digest, str(length))
|
return (digest, str(length))
|
||||||
|
|
||||||
|
|
||||||
def csv_io_kwargs(mode):
|
def csv_io_kwargs(mode: str) -> Dict[str, Any]:
|
||||||
# type: (str) -> Dict[str, Any]
|
|
||||||
"""Return keyword arguments to properly open a CSV file
|
"""Return keyword arguments to properly open a CSV file
|
||||||
in the given mode.
|
in the given mode.
|
||||||
"""
|
"""
|
||||||
return {'mode': mode, 'newline': '', 'encoding': 'utf-8'}
|
return {"mode": mode, "newline": "", "encoding": "utf-8"}
|
||||||
|
|
||||||
|
|
||||||
def fix_script(path):
|
def fix_script(path: str) -> bool:
|
||||||
# type: (str) -> bool
|
|
||||||
"""Replace #!python with #!/path/to/python
|
"""Replace #!python with #!/path/to/python
|
||||||
Return True if file was changed.
|
Return True if file was changed.
|
||||||
"""
|
"""
|
||||||
# XXX RECORD hashes will need to be updated
|
# XXX RECORD hashes will need to be updated
|
||||||
assert os.path.isfile(path)
|
assert os.path.isfile(path)
|
||||||
|
|
||||||
with open(path, 'rb') as script:
|
with open(path, "rb") as script:
|
||||||
firstline = script.readline()
|
firstline = script.readline()
|
||||||
if not firstline.startswith(b'#!python'):
|
if not firstline.startswith(b"#!python"):
|
||||||
return False
|
return False
|
||||||
exename = sys.executable.encode(sys.getfilesystemencoding())
|
exename = sys.executable.encode(sys.getfilesystemencoding())
|
||||||
firstline = b'#!' + exename + os.linesep.encode("ascii")
|
firstline = b"#!" + exename + os.linesep.encode("ascii")
|
||||||
rest = script.read()
|
rest = script.read()
|
||||||
with open(path, 'wb') as script:
|
with open(path, "wb") as script:
|
||||||
script.write(firstline)
|
script.write(firstline)
|
||||||
script.write(rest)
|
script.write(rest)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def wheel_root_is_purelib(metadata):
|
def wheel_root_is_purelib(metadata: Message) -> bool:
|
||||||
# type: (Message) -> bool
|
|
||||||
return metadata.get("Root-Is-Purelib", "").lower() == "true"
|
return metadata.get("Root-Is-Purelib", "").lower() == "true"
|
||||||
|
|
||||||
|
|
||||||
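rehash(), shown above, produces the hash column of a wheel's RECORD file: a SHA-256 digest, urlsafe-base64 encoded with the trailing '=' padding stripped, together with the file size. A standalone sketch, assuming only the standard library:

    # Hedged sketch of the RECORD-style hash produced by rehash():
    # "sha256=<urlsafe-b64 digest without padding>", plus the byte length.
    import hashlib
    from base64 import urlsafe_b64encode
    from typing import Tuple

    def record_hash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
        h = hashlib.sha256()
        length = 0
        with open(path, "rb") as f:
            for block in iter(lambda: f.read(blocksize), b""):
                h.update(block)
                length += len(block)
        digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=")
        return digest, str(length)

    if __name__ == "__main__":
        import tempfile
        with tempfile.NamedTemporaryFile("wb", delete=False) as tmp:
            tmp.write(b"hello wheel\n")
        print(record_hash(tmp.name))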
@@ -129,8 +126,7 @@ def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, s
     return console_scripts, gui_scripts


-def message_about_scripts_not_on_PATH(scripts):
-    # type: (Sequence[str]) -> Optional[str]
+def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
     """Determine if any scripts are not on PATH and format a warning.
     Returns a warning message if one or more scripts are not on PATH,
     otherwise None.
@@ -139,7 +135,7 @@ def message_about_scripts_not_on_PATH(scripts):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
# Group scripts by the path they were installed in
|
# Group scripts by the path they were installed in
|
||||||
grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]]
|
grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set)
|
||||||
for destfile in scripts:
|
for destfile in scripts:
|
||||||
parent_dir = os.path.dirname(destfile)
|
parent_dir = os.path.dirname(destfile)
|
||||||
script_name = os.path.basename(destfile)
|
script_name = os.path.basename(destfile)
|
||||||
@@ -147,23 +143,26 @@ def message_about_scripts_not_on_PATH(scripts):
|
|||||||
|
|
||||||
# We don't want to warn for directories that are on PATH.
|
# We don't want to warn for directories that are on PATH.
|
||||||
not_warn_dirs = [
|
not_warn_dirs = [
|
||||||
os.path.normcase(i).rstrip(os.sep) for i in
|
os.path.normcase(os.path.normpath(i)).rstrip(os.sep)
|
||||||
os.environ.get("PATH", "").split(os.pathsep)
|
for i in os.environ.get("PATH", "").split(os.pathsep)
|
||||||
]
|
]
|
||||||
# If an executable sits with sys.executable, we don't warn for it.
|
# If an executable sits with sys.executable, we don't warn for it.
|
||||||
# This covers the case of venv invocations without activating the venv.
|
# This covers the case of venv invocations without activating the venv.
|
||||||
not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
|
not_warn_dirs.append(
|
||||||
warn_for = {
|
os.path.normcase(os.path.normpath(os.path.dirname(sys.executable)))
|
||||||
parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
|
)
|
||||||
if os.path.normcase(parent_dir) not in not_warn_dirs
|
warn_for: Dict[str, Set[str]] = {
|
||||||
} # type: Dict[str, Set[str]]
|
parent_dir: scripts
|
||||||
|
for parent_dir, scripts in grouped_by_dir.items()
|
||||||
|
if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs
|
||||||
|
}
|
||||||
if not warn_for:
|
if not warn_for:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# Format a message
|
# Format a message
|
||||||
msg_lines = []
|
msg_lines = []
|
||||||
for parent_dir, dir_scripts in warn_for.items():
|
for parent_dir, dir_scripts in warn_for.items():
|
||||||
sorted_scripts = sorted(dir_scripts) # type: List[str]
|
sorted_scripts: List[str] = sorted(dir_scripts)
|
||||||
if len(sorted_scripts) == 1:
|
if len(sorted_scripts) == 1:
|
||||||
start_text = "script {} is".format(sorted_scripts[0])
|
start_text = "script {} is".format(sorted_scripts[0])
|
||||||
else:
|
else:
|
||||||
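The two hunks above tighten message_about_scripts_not_on_PATH: installed scripts are grouped by directory, and directories already on PATH (compared after os.path.normcase plus os.path.normpath) or sitting next to sys.executable are excluded from the warning. A hedged standalone sketch of that filtering; the function name is made up.

    # Hedged sketch of the PATH check in message_about_scripts_not_on_PATH.
    import collections
    import os
    import sys
    from typing import Dict, Sequence, Set

    def dirs_to_warn_about(scripts: Sequence[str]) -> Dict[str, Set[str]]:
        grouped: Dict[str, Set[str]] = collections.defaultdict(set)
        for destfile in scripts:
            grouped[os.path.dirname(destfile)].add(os.path.basename(destfile))

        not_warn = [
            os.path.normcase(os.path.normpath(p)).rstrip(os.sep)
            for p in os.environ.get("PATH", "").split(os.pathsep)
        ]
        # scripts next to the interpreter are fine too (unactivated venv case)
        not_warn.append(os.path.normcase(os.path.normpath(os.path.dirname(sys.executable))))

        return {
            parent: names
            for parent, names in grouped.items()
            if os.path.normcase(os.path.normpath(parent)) not in not_warn
        }

    print(dirs_to_warn_about(["/opt/tools/bin/mycli"]))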
@@ -172,8 +171,9 @@ def message_about_scripts_not_on_PATH(scripts):
|
|||||||
)
|
)
|
||||||
|
|
||||||
msg_lines.append(
|
msg_lines.append(
|
||||||
"The {} installed in '{}' which is not on PATH."
|
"The {} installed in '{}' which is not on PATH.".format(
|
||||||
.format(start_text, parent_dir)
|
start_text, parent_dir
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
last_line_fmt = (
|
last_line_fmt = (
|
||||||
@@ -200,8 +200,9 @@ def message_about_scripts_not_on_PATH(scripts):
|
|||||||
return "\n".join(msg_lines)
|
return "\n".join(msg_lines)
|
||||||
|
|
||||||
|
|
||||||
def _normalized_outrows(outrows):
|
def _normalized_outrows(
|
||||||
# type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]
|
outrows: Iterable[InstalledCSVRow],
|
||||||
|
) -> List[Tuple[str, str, str]]:
|
||||||
"""Normalize the given rows of a RECORD file.
|
"""Normalize the given rows of a RECORD file.
|
||||||
|
|
||||||
Items in each row are converted into str. Rows are then sorted to make
|
Items in each row are converted into str. Rows are then sorted to make
|
||||||
@@ -221,69 +222,57 @@ def _normalized_outrows(outrows):
|
|||||||
# For additional background, see--
|
# For additional background, see--
|
||||||
# https://github.com/pypa/pip/issues/5868
|
# https://github.com/pypa/pip/issues/5868
|
||||||
return sorted(
|
return sorted(
|
||||||
(ensure_str(record_path, encoding='utf-8'), hash_, str(size))
|
(record_path, hash_, str(size)) for record_path, hash_, size in outrows
|
||||||
for record_path, hash_, size in outrows
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _record_to_fs_path(record_path):
|
def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str:
|
||||||
# type: (RecordPath) -> str
|
return os.path.join(lib_dir, record_path)
|
||||||
return record_path
|
|
||||||
|
|
||||||
|
|
||||||
def _fs_to_record_path(path, relative_to=None):
|
def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath:
|
||||||
# type: (str, Optional[str]) -> RecordPath
|
|
||||||
if relative_to is not None:
|
|
||||||
# On Windows, do not handle relative paths if they belong to different
|
# On Windows, do not handle relative paths if they belong to different
|
||||||
# logical disks
|
# logical disks
|
||||||
if os.path.splitdrive(path)[0].lower() == \
|
if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower():
|
||||||
os.path.splitdrive(relative_to)[0].lower():
|
path = os.path.relpath(path, lib_dir)
|
||||||
path = os.path.relpath(path, relative_to)
|
|
||||||
path = path.replace(os.path.sep, '/')
|
|
||||||
return cast('RecordPath', path)
|
|
||||||
|
|
||||||
|
path = path.replace(os.path.sep, "/")
|
||||||
def _parse_record_path(record_column):
|
return cast("RecordPath", path)
|
||||||
# type: (str) -> RecordPath
|
|
||||||
p = ensure_text(record_column, encoding='utf-8')
|
|
||||||
return cast('RecordPath', p)
|
|
||||||
|
|
||||||
|
|
||||||
def get_csv_rows_for_installed(
|
def get_csv_rows_for_installed(
|
||||||
old_csv_rows, # type: List[List[str]]
|
old_csv_rows: List[List[str]],
|
||||||
installed, # type: Dict[RecordPath, RecordPath]
|
installed: Dict[RecordPath, RecordPath],
|
||||||
changed, # type: Set[RecordPath]
|
changed: Set[RecordPath],
|
||||||
generated, # type: List[str]
|
generated: List[str],
|
||||||
lib_dir, # type: str
|
lib_dir: str,
|
||||||
):
|
) -> List[InstalledCSVRow]:
|
||||||
# type: (...) -> List[InstalledCSVRow]
|
|
||||||
"""
|
"""
|
||||||
:param installed: A map from archive RECORD path to installation RECORD
|
:param installed: A map from archive RECORD path to installation RECORD
|
||||||
path.
|
path.
|
||||||
"""
|
"""
|
||||||
installed_rows = [] # type: List[InstalledCSVRow]
|
installed_rows: List[InstalledCSVRow] = []
|
||||||
for row in old_csv_rows:
|
for row in old_csv_rows:
|
||||||
if len(row) > 3:
|
if len(row) > 3:
|
||||||
logger.warning('RECORD line has more than three elements: %s', row)
|
logger.warning("RECORD line has more than three elements: %s", row)
|
||||||
old_record_path = _parse_record_path(row[0])
|
old_record_path = cast("RecordPath", row[0])
|
||||||
new_record_path = installed.pop(old_record_path, old_record_path)
|
new_record_path = installed.pop(old_record_path, old_record_path)
|
||||||
if new_record_path in changed:
|
if new_record_path in changed:
|
||||||
digest, length = rehash(_record_to_fs_path(new_record_path))
|
digest, length = rehash(_record_to_fs_path(new_record_path, lib_dir))
|
||||||
else:
|
else:
|
||||||
digest = row[1] if len(row) > 1 else ''
|
digest = row[1] if len(row) > 1 else ""
|
||||||
length = row[2] if len(row) > 2 else ''
|
length = row[2] if len(row) > 2 else ""
|
||||||
installed_rows.append((new_record_path, digest, length))
|
installed_rows.append((new_record_path, digest, length))
|
||||||
for f in generated:
|
for f in generated:
|
||||||
path = _fs_to_record_path(f, lib_dir)
|
path = _fs_to_record_path(f, lib_dir)
|
||||||
digest, length = rehash(f)
|
digest, length = rehash(f)
|
||||||
installed_rows.append((path, digest, length))
|
installed_rows.append((path, digest, length))
|
||||||
for installed_record_path in installed.values():
|
for installed_record_path in installed.values():
|
||||||
installed_rows.append((installed_record_path, '', ''))
|
installed_rows.append((installed_record_path, "", ""))
|
||||||
return installed_rows
|
return installed_rows
|
||||||
|
|
||||||
|
|
||||||
def get_console_script_specs(console):
|
def get_console_script_specs(console: Dict[str, str]) -> List[str]:
|
||||||
# type: (Dict[str, str]) -> List[str]
|
|
||||||
"""
|
"""
|
||||||
Given the mapping from entrypoint name to callable, return the relevant
|
Given the mapping from entrypoint name to callable, return the relevant
|
||||||
console script specs.
|
console script specs.
|
||||||
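The RECORD-related helpers above (_fs_to_record_path, _normalized_outrows, get_csv_rows_for_installed) turn installed file paths into forward-slash paths relative to the site-packages directory and write them as CSV rows. A hedged standalone sketch with placeholder hash and size values and made-up example paths:

    # Hedged sketch of how installed files end up as RECORD rows.
    import csv
    import io
    import os
    from typing import List, Tuple

    def fs_to_record_path(path: str, lib_dir: str) -> str:
        # only relativize when both paths live on the same drive (Windows caveat)
        if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower():
            path = os.path.relpath(path, lib_dir)
        return path.replace(os.path.sep, "/")

    def write_record(rows: List[Tuple[str, str, str]]) -> str:
        buf = io.StringIO(newline="")
        csv.writer(buf).writerows(sorted(rows))
        return buf.getvalue()

    lib_dir = os.path.join(os.sep, "tmp", "site-packages")
    rows = [
        (fs_to_record_path(os.path.join(lib_dir, "mypkg", "__init__.py"), lib_dir),
         "sha256=abc", "42"),
        (fs_to_record_path(os.path.join(lib_dir, "mypkg-1.0.dist-info", "RECORD"), lib_dir),
         "", ""),
    ]
    print(write_record(rows), end="")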
@@ -326,62 +315,57 @@ def get_console_script_specs(console):
|
|||||||
# DEFAULT
|
# DEFAULT
|
||||||
# - The default behavior is to install pip, pipX, pipX.Y, easy_install
|
# - The default behavior is to install pip, pipX, pipX.Y, easy_install
|
||||||
# and easy_install-X.Y.
|
# and easy_install-X.Y.
|
||||||
pip_script = console.pop('pip', None)
|
pip_script = console.pop("pip", None)
|
||||||
if pip_script:
|
if pip_script:
|
||||||
if "ENSUREPIP_OPTIONS" not in os.environ:
|
if "ENSUREPIP_OPTIONS" not in os.environ:
|
||||||
scripts_to_generate.append('pip = ' + pip_script)
|
scripts_to_generate.append("pip = " + pip_script)
|
||||||
|
|
||||||
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
|
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
|
||||||
scripts_to_generate.append(
|
scripts_to_generate.append(
|
||||||
'pip{} = {}'.format(sys.version_info[0], pip_script)
|
"pip{} = {}".format(sys.version_info[0], pip_script)
|
||||||
)
|
)
|
||||||
|
|
||||||
scripts_to_generate.append(
|
scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
|
||||||
f'pip{get_major_minor_version()} = {pip_script}'
|
|
||||||
)
|
|
||||||
# Delete any other versioned pip entry points
|
# Delete any other versioned pip entry points
|
||||||
pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
|
pip_ep = [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)]
|
||||||
for k in pip_ep:
|
for k in pip_ep:
|
||||||
del console[k]
|
del console[k]
|
||||||
easy_install_script = console.pop('easy_install', None)
|
easy_install_script = console.pop("easy_install", None)
|
||||||
if easy_install_script:
|
if easy_install_script:
|
||||||
if "ENSUREPIP_OPTIONS" not in os.environ:
|
if "ENSUREPIP_OPTIONS" not in os.environ:
|
||||||
scripts_to_generate.append(
|
scripts_to_generate.append("easy_install = " + easy_install_script)
|
||||||
'easy_install = ' + easy_install_script
|
|
||||||
)
|
|
||||||
|
|
||||||
scripts_to_generate.append(
|
scripts_to_generate.append(
|
||||||
'easy_install-{} = {}'.format(
|
"easy_install-{} = {}".format(
|
||||||
get_major_minor_version(), easy_install_script
|
get_major_minor_version(), easy_install_script
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
# Delete any other versioned easy_install entry points
|
# Delete any other versioned easy_install entry points
|
||||||
easy_install_ep = [
|
easy_install_ep = [
|
||||||
k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
|
k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k)
|
||||||
]
|
]
|
||||||
for k in easy_install_ep:
|
for k in easy_install_ep:
|
||||||
del console[k]
|
del console[k]
|
||||||
|
|
||||||
# Generate the console entry points specified in the wheel
|
# Generate the console entry points specified in the wheel
|
||||||
scripts_to_generate.extend(starmap('{} = {}'.format, console.items()))
|
scripts_to_generate.extend(starmap("{} = {}".format, console.items()))
|
||||||
|
|
||||||
return scripts_to_generate
|
return scripts_to_generate
|
||||||
|
|
||||||
|
|
||||||
class ZipBackedFile:
|
class ZipBackedFile:
|
||||||
def __init__(self, src_record_path, dest_path, zip_file):
|
def __init__(
|
||||||
# type: (RecordPath, str, ZipFile) -> None
|
self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile
|
||||||
|
) -> None:
|
||||||
self.src_record_path = src_record_path
|
self.src_record_path = src_record_path
|
||||||
self.dest_path = dest_path
|
self.dest_path = dest_path
|
||||||
self._zip_file = zip_file
|
self._zip_file = zip_file
|
||||||
self.changed = False
|
self.changed = False
|
||||||
|
|
||||||
def _getinfo(self):
|
def _getinfo(self) -> ZipInfo:
|
||||||
# type: () -> ZipInfo
|
|
||||||
return self._zip_file.getinfo(self.src_record_path)
|
return self._zip_file.getinfo(self.src_record_path)
|
||||||
|
|
||||||
def save(self):
|
def save(self) -> None:
|
||||||
# type: () -> None
|
|
||||||
# directory creation is lazy and after file filtering
|
# directory creation is lazy and after file filtering
|
||||||
# to ensure we don't install empty dirs; empty dirs can't be
|
# to ensure we don't install empty dirs; empty dirs can't be
|
||||||
# uninstalled.
|
# uninstalled.
|
||||||
@@ -410,22 +394,19 @@ class ZipBackedFile:
|
|||||||
|
|
||||||
|
|
||||||
class ScriptFile:
|
class ScriptFile:
|
||||||
def __init__(self, file):
|
def __init__(self, file: "File") -> None:
|
||||||
# type: (File) -> None
|
|
||||||
self._file = file
|
self._file = file
|
||||||
self.src_record_path = self._file.src_record_path
|
self.src_record_path = self._file.src_record_path
|
||||||
self.dest_path = self._file.dest_path
|
self.dest_path = self._file.dest_path
|
||||||
self.changed = False
|
self.changed = False
|
||||||
|
|
||||||
def save(self):
|
def save(self) -> None:
|
||||||
# type: () -> None
|
|
||||||
self._file.save()
|
self._file.save()
|
||||||
self.changed = fix_script(self.dest_path)
|
self.changed = fix_script(self.dest_path)
|
||||||
|
|
||||||
|
|
||||||
class MissingCallableSuffix(InstallationError):
|
class MissingCallableSuffix(InstallationError):
|
||||||
def __init__(self, entry_point):
|
def __init__(self, entry_point: str) -> None:
|
||||||
# type: (str) -> None
|
|
||||||
super().__init__(
|
super().__init__(
|
||||||
"Invalid script entry point: {} - A callable "
|
"Invalid script entry point: {} - A callable "
|
||||||
"suffix is required. Cf https://packaging.python.org/"
|
"suffix is required. Cf https://packaging.python.org/"
|
||||||
@@ -434,31 +415,30 @@ class MissingCallableSuffix(InstallationError):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _raise_for_invalid_entrypoint(specification):
|
def _raise_for_invalid_entrypoint(specification: str) -> None:
|
||||||
# type: (str) -> None
|
|
||||||
entry = get_export_entry(specification)
|
entry = get_export_entry(specification)
|
||||||
if entry is not None and entry.suffix is None:
|
if entry is not None and entry.suffix is None:
|
||||||
raise MissingCallableSuffix(str(entry))
|
raise MissingCallableSuffix(str(entry))
|
||||||
|
|
||||||
|
|
||||||
class PipScriptMaker(ScriptMaker):
|
class PipScriptMaker(ScriptMaker):
|
||||||
def make(self, specification, options=None):
|
def make(
|
||||||
# type: (str, Dict[str, Any]) -> List[str]
|
self, specification: str, options: Optional[Dict[str, Any]] = None
|
||||||
|
) -> List[str]:
|
||||||
_raise_for_invalid_entrypoint(specification)
|
_raise_for_invalid_entrypoint(specification)
|
||||||
return super().make(specification, options)
|
return super().make(specification, options)
|
||||||
|
|
||||||
|
|
||||||
def _install_wheel(
|
def _install_wheel(
|
||||||
name, # type: str
|
name: str,
|
||||||
wheel_zip, # type: ZipFile
|
wheel_zip: ZipFile,
|
||||||
wheel_path, # type: str
|
wheel_path: str,
|
||||||
scheme, # type: Scheme
|
scheme: Scheme,
|
||||||
pycompile=True, # type: bool
|
pycompile: bool = True,
|
||||||
warn_script_location=True, # type: bool
|
warn_script_location: bool = True,
|
||||||
direct_url=None, # type: Optional[DirectUrl]
|
direct_url: Optional[DirectUrl] = None,
|
||||||
requested=False, # type: bool
|
requested: bool = False,
|
||||||
):
|
) -> None:
|
||||||
# type: (...) -> None
|
|
||||||
"""Install a wheel.
|
"""Install a wheel.
|
||||||
|
|
||||||
:param name: Name of the project to install
|
:param name: Name of the project to install
|
||||||
@@ -485,33 +465,23 @@ def _install_wheel(
|
|||||||
# installed = files copied from the wheel to the destination
|
# installed = files copied from the wheel to the destination
|
||||||
# changed = files changed while installing (scripts #! line typically)
|
# changed = files changed while installing (scripts #! line typically)
|
||||||
# generated = files newly generated during the install (script wrappers)
|
# generated = files newly generated during the install (script wrappers)
|
||||||
installed = {} # type: Dict[RecordPath, RecordPath]
|
installed: Dict[RecordPath, RecordPath] = {}
|
||||||
changed = set() # type: Set[RecordPath]
|
changed: Set[RecordPath] = set()
|
||||||
generated = [] # type: List[str]
|
generated: List[str] = []
|
||||||
|
|
||||||
def record_installed(srcfile, destfile, modified=False):
|
def record_installed(
|
||||||
# type: (RecordPath, str, bool) -> None
|
srcfile: RecordPath, destfile: str, modified: bool = False
|
||||||
|
) -> None:
|
||||||
"""Map archive RECORD paths to installation RECORD paths."""
|
"""Map archive RECORD paths to installation RECORD paths."""
|
||||||
newpath = _fs_to_record_path(destfile, lib_dir)
|
newpath = _fs_to_record_path(destfile, lib_dir)
|
||||||
installed[srcfile] = newpath
|
installed[srcfile] = newpath
|
||||||
if modified:
|
if modified:
|
||||||
changed.add(_fs_to_record_path(destfile))
|
changed.add(newpath)
|
||||||
|
|
||||||
def all_paths():
|
def is_dir_path(path: RecordPath) -> bool:
|
||||||
# type: () -> Iterable[RecordPath]
|
|
||||||
names = wheel_zip.namelist()
|
|
||||||
# If a flag is set, names may be unicode in Python 2. We convert to
|
|
||||||
# text explicitly so these are valid for lookup in RECORD.
|
|
||||||
decoded_names = map(ensure_text, names)
|
|
||||||
for name in decoded_names:
|
|
||||||
yield cast("RecordPath", name)
|
|
||||||
|
|
||||||
def is_dir_path(path):
|
|
||||||
# type: (RecordPath) -> bool
|
|
||||||
return path.endswith("/")
|
return path.endswith("/")
|
||||||
|
|
||||||
def assert_no_path_traversal(dest_dir_path, target_path):
|
def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
|
||||||
# type: (str, str) -> None
|
|
||||||
if not is_within_directory(dest_dir_path, target_path):
|
if not is_within_directory(dest_dir_path, target_path):
|
||||||
message = (
|
message = (
|
||||||
"The wheel {!r} has a file {!r} trying to install"
|
"The wheel {!r} has a file {!r} trying to install"
|
||||||
@@ -521,10 +491,10 @@ def _install_wheel(
|
|||||||
message.format(wheel_path, target_path, dest_dir_path)
|
message.format(wheel_path, target_path, dest_dir_path)
|
||||||
)
|
)
|
||||||
|
|
||||||
def root_scheme_file_maker(zip_file, dest):
|
def root_scheme_file_maker(
|
||||||
# type: (ZipFile, str) -> Callable[[RecordPath], File]
|
zip_file: ZipFile, dest: str
|
||||||
def make_root_scheme_file(record_path):
|
) -> Callable[[RecordPath], "File"]:
|
||||||
# type: (RecordPath) -> File
|
def make_root_scheme_file(record_path: RecordPath) -> "File":
|
||||||
normed_path = os.path.normpath(record_path)
|
normed_path = os.path.normpath(record_path)
|
||||||
dest_path = os.path.join(dest, normed_path)
|
dest_path = os.path.join(dest, normed_path)
|
||||||
assert_no_path_traversal(dest, dest_path)
|
assert_no_path_traversal(dest, dest_path)
|
||||||
@@ -532,17 +502,12 @@ def _install_wheel(
|
|||||||
|
|
||||||
return make_root_scheme_file
|
return make_root_scheme_file
|
||||||
|
|
||||||
def data_scheme_file_maker(zip_file, scheme):
|
def data_scheme_file_maker(
|
||||||
# type: (ZipFile, Scheme) -> Callable[[RecordPath], File]
|
zip_file: ZipFile, scheme: Scheme
|
||||||
scheme_paths = {}
|
) -> Callable[[RecordPath], "File"]:
|
||||||
for key in SCHEME_KEYS:
|
scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
|
||||||
encoded_key = ensure_text(key)
|
|
||||||
scheme_paths[encoded_key] = ensure_text(
|
|
||||||
getattr(scheme, key), encoding=sys.getfilesystemencoding()
|
|
||||||
)
|
|
||||||
|
|
||||||
def make_data_scheme_file(record_path):
|
def make_data_scheme_file(record_path: RecordPath) -> "File":
|
||||||
# type: (RecordPath) -> File
|
|
||||||
normed_path = os.path.normpath(record_path)
|
normed_path = os.path.normpath(record_path)
|
||||||
try:
|
try:
|
||||||
_, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
|
_, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
|
||||||
@@ -561,9 +526,7 @@ def _install_wheel(
|
|||||||
"Unknown scheme key used in {}: {} (for file {!r}). .data"
|
"Unknown scheme key used in {}: {} (for file {!r}). .data"
|
||||||
" directory contents should be in subdirectories named"
|
" directory contents should be in subdirectories named"
|
||||||
" with a valid scheme key ({})"
|
" with a valid scheme key ({})"
|
||||||
).format(
|
).format(wheel_path, scheme_key, record_path, valid_scheme_keys)
|
||||||
wheel_path, scheme_key, record_path, valid_scheme_keys
|
|
||||||
)
|
|
||||||
raise InstallationError(message)
|
raise InstallationError(message)
|
||||||
|
|
||||||
dest_path = os.path.join(scheme_path, dest_subpath)
|
dest_path = os.path.join(scheme_path, dest_subpath)
|
||||||
@@ -572,30 +535,19 @@ def _install_wheel(
|
|||||||
|
|
||||||
return make_data_scheme_file
|
return make_data_scheme_file
|
||||||
|
|
||||||
def is_data_scheme_path(path):
|
def is_data_scheme_path(path: RecordPath) -> bool:
|
||||||
# type: (RecordPath) -> bool
|
|
||||||
return path.split("/", 1)[0].endswith(".data")
|
return path.split("/", 1)[0].endswith(".data")
|
||||||
|
|
||||||
paths = all_paths()
|
paths = cast(List[RecordPath], wheel_zip.namelist())
|
||||||
file_paths = filterfalse(is_dir_path, paths)
|
file_paths = filterfalse(is_dir_path, paths)
|
||||||
root_scheme_paths, data_scheme_paths = partition(
|
root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths)
|
||||||
is_data_scheme_path, file_paths
|
|
||||||
)
|
|
||||||
|
|
||||||
make_root_scheme_file = root_scheme_file_maker(
|
make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir)
|
||||||
wheel_zip,
|
files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths)
|
||||||
ensure_text(lib_dir, encoding=sys.getfilesystemencoding()),
|
|
||||||
)
|
|
||||||
files = map(make_root_scheme_file, root_scheme_paths)
|
|
||||||
|
|
||||||
def is_script_scheme_path(path):
|
def is_script_scheme_path(path: RecordPath) -> bool:
|
||||||
# type: (RecordPath) -> bool
|
|
||||||
parts = path.split("/", 2)
|
parts = path.split("/", 2)
|
||||||
return (
|
return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts"
|
||||||
len(parts) > 2 and
|
|
||||||
parts[0].endswith(".data") and
|
|
||||||
parts[1] == "scripts"
|
|
||||||
)
|
|
||||||
|
|
||||||
other_scheme_paths, script_scheme_paths = partition(
|
other_scheme_paths, script_scheme_paths = partition(
|
||||||
is_script_scheme_path, data_scheme_paths
|
is_script_scheme_path, data_scheme_paths
|
||||||
@@ -606,30 +558,32 @@ def _install_wheel(
|
|||||||
files = chain(files, other_scheme_files)
|
files = chain(files, other_scheme_files)
|
||||||
|
|
||||||
# Get the defined entry points
|
# Get the defined entry points
|
||||||
distribution = get_wheel_distribution(wheel_path, canonicalize_name(name))
|
distribution = get_wheel_distribution(
|
||||||
|
FilesystemWheel(wheel_path),
|
||||||
|
canonicalize_name(name),
|
||||||
|
)
|
||||||
console, gui = get_entrypoints(distribution)
|
console, gui = get_entrypoints(distribution)
|
||||||
|
|
||||||
def is_entrypoint_wrapper(file):
|
def is_entrypoint_wrapper(file: "File") -> bool:
|
||||||
# type: (File) -> bool
|
|
||||||
# EP, EP.exe and EP-script.py are scripts generated for
|
# EP, EP.exe and EP-script.py are scripts generated for
|
||||||
# entry point EP by setuptools
|
# entry point EP by setuptools
|
||||||
path = file.dest_path
|
path = file.dest_path
|
||||||
name = os.path.basename(path)
|
name = os.path.basename(path)
|
||||||
if name.lower().endswith('.exe'):
|
if name.lower().endswith(".exe"):
|
||||||
matchname = name[:-4]
|
matchname = name[:-4]
|
||||||
elif name.lower().endswith('-script.py'):
|
elif name.lower().endswith("-script.py"):
|
||||||
matchname = name[:-10]
|
matchname = name[:-10]
|
||||||
elif name.lower().endswith(".pya"):
|
elif name.lower().endswith(".pya"):
|
||||||
matchname = name[:-4]
|
matchname = name[:-4]
|
||||||
else:
|
else:
|
||||||
matchname = name
|
matchname = name
|
||||||
# Ignore setuptools-generated scripts
|
# Ignore setuptools-generated scripts
|
||||||
return (matchname in console or matchname in gui)
|
return matchname in console or matchname in gui
|
||||||
|
|
||||||
script_scheme_files = map(make_data_scheme_file, script_scheme_paths)
|
script_scheme_files: Iterator[File] = map(
|
||||||
script_scheme_files = filterfalse(
|
make_data_scheme_file, script_scheme_paths
|
||||||
is_entrypoint_wrapper, script_scheme_files
|
|
||||||
)
|
)
|
||||||
|
script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files)
|
||||||
script_scheme_files = map(ScriptFile, script_scheme_files)
|
script_scheme_files = map(ScriptFile, script_scheme_files)
|
||||||
files = chain(files, script_scheme_files)
|
files = chain(files, script_scheme_files)
|
||||||
|
|
||||||
@@ -637,8 +591,7 @@ def _install_wheel(
|
|||||||
file.save()
|
file.save()
|
||||||
record_installed(file.src_record_path, file.dest_path, file.changed)
|
record_installed(file.src_record_path, file.dest_path, file.changed)
|
||||||
|
|
||||||
def pyc_source_file_paths():
|
def pyc_source_file_paths() -> Generator[str, None, None]:
|
||||||
# type: () -> Iterator[str]
|
|
||||||
# We de-duplicate installation paths, since there can be overlap (e.g.
|
# We de-duplicate installation paths, since there can be overlap (e.g.
|
||||||
# file in .data maps to same location as file in wheel root).
|
# file in .data maps to same location as file in wheel root).
|
||||||
# Sorting installation paths makes it easier to reproduce and debug
|
# Sorting installation paths makes it easier to reproduce and debug
|
||||||
@@ -647,30 +600,21 @@ def _install_wheel(
|
|||||||
full_installed_path = os.path.join(lib_dir, installed_path)
|
full_installed_path = os.path.join(lib_dir, installed_path)
|
||||||
if not os.path.isfile(full_installed_path):
|
if not os.path.isfile(full_installed_path):
|
||||||
continue
|
continue
|
||||||
if not full_installed_path.endswith('.py'):
|
if not full_installed_path.endswith(".py"):
|
||||||
continue
|
continue
|
||||||
yield full_installed_path
|
yield full_installed_path
|
||||||
|
|
||||||
def pyc_output_path(path):
|
def pyc_output_path(path: str) -> str:
|
||||||
# type: (str) -> str
|
"""Return the path the pyc file would have been written to."""
|
||||||
"""Return the path the pyc file would have been written to.
|
|
||||||
"""
|
|
||||||
return importlib.util.cache_from_source(path)
|
return importlib.util.cache_from_source(path)
|
||||||
|
|
||||||
# Compile all of the pyc files for the installed files
|
# Compile all of the pyc files for the installed files
|
||||||
if pycompile:
|
if pycompile:
|
||||||
with captured_stdout() as stdout:
|
with captured_stdout() as stdout:
|
||||||
with warnings.catch_warnings():
|
with warnings.catch_warnings():
|
||||||
warnings.filterwarnings('ignore')
|
warnings.filterwarnings("ignore")
|
||||||
for path in pyc_source_file_paths():
|
for path in pyc_source_file_paths():
|
||||||
# Python 2's `compileall.compile_file` requires a str in
|
success = compileall.compile_file(path, force=True, quiet=True)
|
||||||
# error cases, so we must convert to the native type.
|
|
||||||
path_arg = ensure_str(
|
|
||||||
path, encoding=sys.getfilesystemencoding()
|
|
||||||
)
|
|
||||||
success = compileall.compile_file(
|
|
||||||
path_arg, force=True, quiet=True
|
|
||||||
)
|
|
||||||
if success:
|
if success:
|
||||||
pyc_path = pyc_output_path(path)
|
pyc_path = pyc_output_path(path)
|
||||||
assert os.path.exists(pyc_path)
|
assert os.path.exists(pyc_path)
|
||||||
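The byte-compilation hunk above drops the Python 2 path handling and calls compileall.compile_file directly; pyc_output_path is just importlib.util.cache_from_source. A small standalone sketch of that step, using a throwaway temporary module:

    # Hedged sketch of the post-install byte-compilation step.
    import compileall
    import importlib.util
    import os
    import tempfile
    import warnings

    src_dir = tempfile.mkdtemp()
    src = os.path.join(src_dir, "example.py")
    with open(src, "w") as f:
        f.write("print('hello')\n")

    with warnings.catch_warnings():
        warnings.filterwarnings("ignore")
        success = compileall.compile_file(src, force=True, quiet=True)

    if success:
        pyc_path = importlib.util.cache_from_source(src)  # pyc_output_path equivalent
        print(pyc_path, os.path.exists(pyc_path))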
@@ -689,7 +633,7 @@ def _install_wheel(
|
|||||||
# Ensure we don't generate any variants for scripts because this is almost
|
# Ensure we don't generate any variants for scripts because this is almost
|
||||||
# never what somebody wants.
|
# never what somebody wants.
|
||||||
# See https://bitbucket.org/pypa/distlib/issue/35/
|
# See https://bitbucket.org/pypa/distlib/issue/35/
|
||||||
maker.variants = {''}
|
maker.variants = {""}
|
||||||
|
|
||||||
# This is required because otherwise distlib creates scripts that are not
|
# This is required because otherwise distlib creates scripts that are not
|
||||||
# executable.
|
# executable.
|
||||||
@@ -699,14 +643,12 @@ def _install_wheel(
|
|||||||
# Generate the console and GUI entry points specified in the wheel
|
# Generate the console and GUI entry points specified in the wheel
|
||||||
scripts_to_generate = get_console_script_specs(console)
|
scripts_to_generate = get_console_script_specs(console)
|
||||||
|
|
||||||
gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))
|
gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items()))
|
||||||
|
|
||||||
generated_console_scripts = maker.make_multiple(scripts_to_generate)
|
generated_console_scripts = maker.make_multiple(scripts_to_generate)
|
||||||
generated.extend(generated_console_scripts)
|
generated.extend(generated_console_scripts)
|
||||||
|
|
||||||
generated.extend(
|
generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True}))
|
||||||
maker.make_multiple(gui_scripts_to_generate, {'gui': True})
|
|
||||||
)
|
|
||||||
|
|
||||||
if warn_script_location:
|
if warn_script_location:
|
||||||
msg = message_about_scripts_not_on_PATH(generated_console_scripts)
|
msg = message_about_scripts_not_on_PATH(generated_console_scripts)
|
||||||
@@ -716,8 +658,7 @@ def _install_wheel(
|
|||||||
generated_file_mode = 0o666 & ~current_umask()
|
generated_file_mode = 0o666 & ~current_umask()
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def _generate_file(path, **kwargs):
|
def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
|
||||||
# type: (str, **Any) -> Iterator[BinaryIO]
|
|
||||||
with adjacent_tmp_file(path, **kwargs) as f:
|
with adjacent_tmp_file(path, **kwargs) as f:
|
||||||
yield f
|
yield f
|
||||||
os.chmod(f.name, generated_file_mode)
|
os.chmod(f.name, generated_file_mode)
|
||||||
@@ -726,9 +667,9 @@ def _install_wheel(
|
|||||||
dest_info_dir = os.path.join(lib_dir, info_dir)
|
dest_info_dir = os.path.join(lib_dir, info_dir)
|
||||||
|
|
||||||
# Record pip as the installer
|
# Record pip as the installer
|
||||||
installer_path = os.path.join(dest_info_dir, 'INSTALLER')
|
installer_path = os.path.join(dest_info_dir, "INSTALLER")
|
||||||
with _generate_file(installer_path) as installer_file:
|
with _generate_file(installer_path) as installer_file:
|
||||||
installer_file.write(b'pip\n')
|
installer_file.write(b"pip\n")
|
||||||
generated.append(installer_path)
|
generated.append(installer_path)
|
||||||
|
|
||||||
# Record the PEP 610 direct URL reference
|
# Record the PEP 610 direct URL reference
|
||||||
@@ -740,12 +681,12 @@ def _install_wheel(
|
|||||||
|
|
||||||
# Record the REQUESTED file
|
# Record the REQUESTED file
|
||||||
if requested:
|
if requested:
|
||||||
requested_path = os.path.join(dest_info_dir, 'REQUESTED')
|
requested_path = os.path.join(dest_info_dir, "REQUESTED")
|
||||||
with open(requested_path, "wb"):
|
with open(requested_path, "wb"):
|
||||||
pass
|
pass
|
||||||
generated.append(requested_path)
|
generated.append(requested_path)
|
||||||
|
|
||||||
record_text = distribution.read_text('RECORD')
|
record_text = distribution.read_text("RECORD")
|
||||||
record_rows = list(csv.reader(record_text.splitlines()))
|
record_rows = list(csv.reader(record_text.splitlines()))
|
||||||
|
|
||||||
rows = get_csv_rows_for_installed(
|
rows = get_csv_rows_for_installed(
|
||||||
@@ -753,42 +694,38 @@ def _install_wheel(
|
|||||||
installed=installed,
|
installed=installed,
|
||||||
changed=changed,
|
changed=changed,
|
||||||
generated=generated,
|
generated=generated,
|
||||||
lib_dir=lib_dir)
|
lib_dir=lib_dir,
|
||||||
|
)
|
||||||
|
|
||||||
# Record details of all files installed
|
# Record details of all files installed
|
||||||
record_path = os.path.join(dest_info_dir, 'RECORD')
|
record_path = os.path.join(dest_info_dir, "RECORD")
|
||||||
|
|
||||||
with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
|
with _generate_file(record_path, **csv_io_kwargs("w")) as record_file:
|
||||||
# The type mypy infers for record_file is different for Python 3
|
# Explicitly cast to typing.IO[str] as a workaround for the mypy error:
|
||||||
# (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly
|
# "writer" has incompatible type "BinaryIO"; expected "_Writer"
|
||||||
# cast to typing.IO[str] as a workaround.
|
writer = csv.writer(cast("IO[str]", record_file))
|
||||||
writer = csv.writer(cast('IO[str]', record_file))
|
|
||||||
writer.writerows(_normalized_outrows(rows))
|
writer.writerows(_normalized_outrows(rows))
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def req_error_context(req_description):
|
def req_error_context(req_description: str) -> Generator[None, None, None]:
|
||||||
# type: (str) -> Iterator[None]
|
|
||||||
try:
|
try:
|
||||||
yield
|
yield
|
||||||
except InstallationError as e:
|
except InstallationError as e:
|
||||||
message = "For req: {}. {}".format(req_description, e.args[0])
|
message = "For req: {}. {}".format(req_description, e.args[0])
|
||||||
reraise(
|
raise InstallationError(message) from e
|
||||||
InstallationError, InstallationError(message), sys.exc_info()[2]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def install_wheel(
|
def install_wheel(
|
||||||
name, # type: str
|
name: str,
|
||||||
wheel_path, # type: str
|
wheel_path: str,
|
||||||
scheme, # type: Scheme
|
scheme: Scheme,
|
||||||
req_description, # type: str
|
req_description: str,
|
||||||
pycompile=True, # type: bool
|
pycompile: bool = True,
|
||||||
warn_script_location=True, # type: bool
|
warn_script_location: bool = True,
|
||||||
direct_url=None, # type: Optional[DirectUrl]
|
direct_url: Optional[DirectUrl] = None,
|
||||||
requested=False, # type: bool
|
requested: bool = False,
|
||||||
):
|
) -> None:
|
||||||
# type: (...) -> None
|
|
||||||
with ZipFile(wheel_path, allowZip64=True) as z:
|
with ZipFile(wheel_path, allowZip64=True) as z:
|
||||||
with req_error_context(req_description):
|
with req_error_context(req_description):
|
||||||
_install_wheel(
|
_install_wheel(
|
||||||
|
|||||||
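As an aside on the byte-compilation hunk near the top of this file's diff: the new code calls compileall.compile_file directly and then checks importlib.util.cache_from_source for the result. A minimal standalone sketch of that flow, using only standard-library calls (the file path below is a placeholder, not something taken from this commit):

import compileall
import importlib.util
import os

# Placeholder path; in the diff the real paths come from pyc_source_file_paths().
path = "example_pkg/module.py"

if os.path.isfile(path):
    # force=True recompiles even if an up-to-date cache file already exists;
    # quiet=True suppresses the per-file console output.
    if compileall.compile_file(path, force=True, quiet=True):
        # cache_from_source() maps module.py to __pycache__/module.cpython-XY.pyc,
        # the location the installer asserts on afterwards.
        print(importlib.util.cache_from_source(path))
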
@@ -8,10 +8,9 @@ import logging
 import mimetypes
 import os
 import shutil
-from typing import Dict, Iterable, List, Optional, Tuple
+from typing import Dict, Iterable, List, Optional

 from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.pkg_resources import Distribution

 from pip._internal.distributions import make_distribution_for_install_requirement
 from pip._internal.distributions.installed import InstalledDistribution
@@ -20,11 +19,14 @@ from pip._internal.exceptions import (
 HashMismatch,
 HashUnpinned,
 InstallationError,
+MetadataInconsistent,
 NetworkConnectionError,
 PreviousBuildDirError,
 VcsHashUnsupported,
 )
 from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_metadata_distribution
+from pip._internal.models.direct_url import ArchiveInfo
 from pip._internal.models.link import Link
 from pip._internal.models.wheel import Wheel
 from pip._internal.network.download import BatchDownloader, Downloader
@@ -33,13 +35,20 @@ from pip._internal.network.lazy_wheel import (
 dist_from_wheel_url,
 )
 from pip._internal.network.session import PipSession
+from pip._internal.operations.build.build_tracker import BuildTracker
 from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_tracker import RequirementTracker
+from pip._internal.utils.direct_url_helpers import (
-from pip._internal.utils.deprecation import deprecated
+direct_url_for_editable,
-from pip._internal.utils.filesystem import copy2_fixed
+direct_url_from_link,
+)
 from pip._internal.utils.hashes import Hashes, MissingHashes
 from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import display_path, hide_url, is_installable_dir, rmtree
+from pip._internal.utils.misc import (
+display_path,
+hash_file,
+hide_url,
+is_installable_dir,
+)
 from pip._internal.utils.temp_dir import TempDirectory
 from pip._internal.utils.unpacking import unpack_file
 from pip._internal.vcs import vcs
@@ -48,30 +57,29 @@ logger = logging.getLogger(__name__)


 def _get_prepared_distribution(
-req, # type: InstallRequirement
+req: InstallRequirement,
-req_tracker, # type: RequirementTracker
+build_tracker: BuildTracker,
-finder, # type: PackageFinder
+finder: PackageFinder,
-build_isolation, # type: bool
+build_isolation: bool,
-):
+check_build_deps: bool,
-# type: (...) -> Distribution
+) -> BaseDistribution:
 """Prepare a distribution for installation."""
 abstract_dist = make_distribution_for_install_requirement(req)
-with req_tracker.track(req):
+with build_tracker.track(req):
-abstract_dist.prepare_distribution_metadata(finder, build_isolation)
+abstract_dist.prepare_distribution_metadata(
-return abstract_dist.get_pkg_resources_distribution()
+finder, build_isolation, check_build_deps
+)
+return abstract_dist.get_metadata_distribution()


-def unpack_vcs_link(link, location):
+def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
-# type: (Link, str) -> None
 vcs_backend = vcs.get_backend_for_scheme(link.scheme)
 assert vcs_backend is not None
-vcs_backend.unpack(location, url=hide_url(link.url))
+vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)


 class File:
+def __init__(self, path: str, content_type: Optional[str]) -> None:
-def __init__(self, path, content_type):
-# type: (str, Optional[str]) -> None
 self.path = path
 if content_type is None:
 self.content_type = mimetypes.guess_type(path)[0]
@@ -80,19 +88,16 @@ class File:


 def get_http_url(
-link, # type: Link
+link: Link,
-download, # type: Downloader
+download: Downloader,
-download_dir=None, # type: Optional[str]
+download_dir: Optional[str] = None,
-hashes=None, # type: Optional[Hashes]
+hashes: Optional[Hashes] = None,
-):
+) -> File:
-# type: (...) -> File
 temp_dir = TempDirectory(kind="unpack", globally_managed=True)
 # If a download dir is specified, is the file already downloaded there?
 already_downloaded_path = None
 if download_dir:
-already_downloaded_path = _check_download_dir(
+already_downloaded_path = _check_download_dir(link, download_dir, hashes)
-link, download_dir, hashes
-)

 if already_downloaded_path:
 from_path = already_downloaded_path
@@ -106,72 +111,14 @@ def get_http_url(
 return File(from_path, content_type)


-def _copy2_ignoring_special_files(src, dest):
-# type: (str, str) -> None
-"""Copying special files is not supported, but as a convenience to users
-we skip errors copying them. This supports tools that may create e.g.
-socket files in the project source directory.
-"""
-try:
-copy2_fixed(src, dest)
-except shutil.SpecialFileError as e:
-# SpecialFileError may be raised due to either the source or
-# destination. If the destination was the cause then we would actually
-# care, but since the destination directory is deleted prior to
-# copy we ignore all of them assuming it is caused by the source.
-logger.warning(
-"Ignoring special file error '%s' encountered copying %s to %s.",
-str(e),
-src,
-dest,
-)
-
-
-def _copy_source_tree(source, target):
-# type: (str, str) -> None
-target_abspath = os.path.abspath(target)
-target_basename = os.path.basename(target_abspath)
-target_dirname = os.path.dirname(target_abspath)
-
-def ignore(d, names):
-# type: (str, List[str]) -> List[str]
-skipped = [] # type: List[str]
-if d == source:
-# Pulling in those directories can potentially be very slow,
-# exclude the following directories if they appear in the top
-# level dir (and only it).
-# See discussion at https://github.com/pypa/pip/pull/6770
-skipped += ['.tox', '.nox']
-if os.path.abspath(d) == target_dirname:
-# Prevent an infinite recursion if the target is in source.
-# This can happen when TMPDIR is set to ${PWD}/...
-# and we copy PWD to TMPDIR.
-skipped += [target_basename]
-return skipped
-
-shutil.copytree(
-source,
-target,
-ignore=ignore,
-symlinks=True,
-copy_function=_copy2_ignoring_special_files,
-)
-
-
 def get_file_url(
-link, # type: Link
+link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
-download_dir=None, # type: Optional[str]
+) -> File:
-hashes=None # type: Optional[Hashes]
+"""Get file and optionally check its hash."""
-):
-# type: (...) -> File
-"""Get file and optionally check its hash.
-"""
 # If a download dir is specified, is the file already there and valid?
 already_downloaded_path = None
 if download_dir:
-already_downloaded_path = _check_download_dir(
+already_downloaded_path = _check_download_dir(link, download_dir, hashes)
-link, download_dir, hashes
-)

 if already_downloaded_path:
 from_path = already_downloaded_path
@@ -189,13 +136,13 @@ def get_file_url(


 def unpack_url(
-link, # type: Link
+link: Link,
-location, # type: str
+location: str,
-download, # type: Downloader
+download: Downloader,
-download_dir=None, # type: Optional[str]
+verbosity: int,
-hashes=None, # type: Optional[Hashes]
+download_dir: Optional[str] = None,
-):
+hashes: Optional[Hashes] = None,
-# type: (...) -> Optional[File]
+) -> Optional[File]:
 """Unpack link into location, downloading if required.

 :param hashes: A Hashes object, one of whose embedded hashes must match,
@@ -205,30 +152,10 @@ def unpack_url(
 """
 # non-editable vcs urls
 if link.is_vcs:
-unpack_vcs_link(link, location)
+unpack_vcs_link(link, location, verbosity=verbosity)
 return None

-# Once out-of-tree-builds are no longer supported, could potentially
+assert not link.is_existing_dir()
-# replace the below condition with `assert not link.is_existing_dir`
-# - unpack_url does not need to be called for in-tree-builds.
-#
-# As further cleanup, _copy_source_tree and accompanying tests can
-# be removed.
-if link.is_existing_dir():
-deprecated(
-"A future pip version will change local packages to be built "
-"in-place without first copying to a temporary directory. "
-"We recommend you use --use-feature=in-tree-build to test "
-"your packages with this new behavior before it becomes the "
-"default.\n",
-replacement=None,
-gone_in="21.3",
-issue=7555
-)
-if os.path.isdir(location):
-rmtree(location)
-_copy_source_tree(link.file_path, location)
-return None

 # file urls
 if link.is_file:
@@ -251,9 +178,13 @@ def unpack_url(
 return file


-def _check_download_dir(link, download_dir, hashes):
+def _check_download_dir(
-# type: (Link, str, Optional[Hashes]) -> Optional[str]
+link: Link,
-""" Check download_dir for previously downloaded file with correct hash
+download_dir: str,
+hashes: Optional[Hashes],
+warn_on_hash_mismatch: bool = True,
+) -> Optional[str]:
+"""Check download_dir for previously downloaded file with correct hash
 If a correct file is found return its path else None
 """
 download_path = os.path.join(download_dir, link.filename)
@@ -262,15 +193,15 @@ def _check_download_dir(link, download_dir, hashes):
 return None

 # If already downloaded, does its hash match?
-logger.info('File was already downloaded %s', download_path)
+logger.info("File was already downloaded %s", download_path)
 if hashes:
 try:
 hashes.check_against_path(download_path)
 except HashMismatch:
+if warn_on_hash_mismatch:
 logger.warning(
-'Previously-downloaded file %s has bad hash. '
+"Previously-downloaded file %s has bad hash. Re-downloading.",
-'Re-downloading.',
+download_path,
-download_path
 )
 os.unlink(download_path)
 return None
@@ -278,30 +209,29 @@ def _check_download_dir(link, download_dir, hashes):


 class RequirementPreparer:
-"""Prepares a Requirement
+"""Prepares a Requirement"""
-"""

 def __init__(
 self,
-build_dir, # type: str
+build_dir: str,
-download_dir, # type: Optional[str]
+download_dir: Optional[str],
-src_dir, # type: str
+src_dir: str,
-build_isolation, # type: bool
+build_isolation: bool,
-req_tracker, # type: RequirementTracker
+check_build_deps: bool,
-session, # type: PipSession
+build_tracker: BuildTracker,
-progress_bar, # type: str
+session: PipSession,
-finder, # type: PackageFinder
+progress_bar: str,
-require_hashes, # type: bool
+finder: PackageFinder,
-use_user_site, # type: bool
+require_hashes: bool,
-lazy_wheel, # type: bool
+use_user_site: bool,
-in_tree_build, # type: bool
+lazy_wheel: bool,
-):
+verbosity: int,
-# type: (...) -> None
+) -> None:
 super().__init__()

 self.src_dir = src_dir
 self.build_dir = build_dir
-self.req_tracker = req_tracker
+self.build_tracker = build_tracker
 self._session = session
 self._download = Downloader(session, progress_bar)
 self._batch_download = BatchDownloader(session, progress_bar)
@@ -314,6 +244,9 @@ class RequirementPreparer:
 # Is build isolation allowed?
 self.build_isolation = build_isolation

+# Should check build dependencies?
+self.check_build_deps = check_build_deps
+
 # Should hash-checking be required?
 self.require_hashes = require_hashes

@@ -323,35 +256,45 @@ class RequirementPreparer:
 # Should wheels be downloaded lazily?
 self.use_lazy_wheel = lazy_wheel

-# Should in-tree builds be used for local paths?
+# How verbose should underlying tooling be?
-self.in_tree_build = in_tree_build
+self.verbosity = verbosity

-# Memoized downloaded files, as mapping of url: (path, mime type)
+# Memoized downloaded files, as mapping of url: path.
-self._downloaded = {} # type: Dict[str, Tuple[str, str]]
+self._downloaded: Dict[str, str] = {}

 # Previous "header" printed for a link-based InstallRequirement
 self._previous_requirement_header = ("", "")

-def _log_preparing_link(self, req):
+def _log_preparing_link(self, req: InstallRequirement) -> None:
-# type: (InstallRequirement) -> None
 """Provide context for the requirement being prepared."""
-if req.link.is_file and not req.original_link_is_in_wheel_cache:
+if req.link.is_file and not req.is_wheel_from_cache:
 message = "Processing %s"
 information = str(display_path(req.link.file_path))
 else:
 message = "Collecting %s"
 information = str(req.req or req)

+# If we used req.req, inject requirement source if available (this
+# would already be included if we used req directly)
+if req.req and req.comes_from:
+if isinstance(req.comes_from, str):
+comes_from: Optional[str] = req.comes_from
+else:
+comes_from = req.comes_from.from_path()
+if comes_from:
+information += f" (from {comes_from})"
+
 if (message, information) != self._previous_requirement_header:
 self._previous_requirement_header = (message, information)
 logger.info(message, information)

-if req.original_link_is_in_wheel_cache:
+if req.is_wheel_from_cache:
 with indent_log():
 logger.info("Using cached %s", req.link.filename)

-def _ensure_link_req_src_dir(self, req, parallel_builds):
+def _ensure_link_req_src_dir(
-# type: (InstallRequirement, bool) -> None
+self, req: InstallRequirement, parallel_builds: bool
+) -> None:
 """Ensure source_dir of a linked InstallRequirement."""
 # Since source_dir is only set for editable requirements.
 if req.link.is_wheel:
@@ -359,7 +302,7 @@ class RequirementPreparer:
 # directory.
 return
 assert req.source_dir is None
-if req.link.is_existing_dir() and self.in_tree_build:
+if req.link.is_existing_dir():
 # build local directories in-tree
 req.source_dir = req.link.file_path
 return
@@ -376,6 +319,7 @@ class RequirementPreparer:
 # installation.
 # FIXME: this won't upgrade when there's an existing
 # package unpacked in `req.source_dir`
+# TODO: this check is now probably dead code
 if is_installable_dir(req.source_dir):
 raise PreviousBuildDirError(
 "pip can't proceed with requirements '{}' due to a"
@@ -385,8 +329,7 @@ class RequirementPreparer:
 "Please delete it and try again.".format(req, req.source_dir)
 )

-def _get_linked_req_hashes(self, req):
+def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
-# type: (InstallRequirement) -> Hashes
 # By the time this is called, the requirement's link should have
 # been checked so we can tell what kind of requirements req is
 # and raise some more informative errors than otherwise.
@@ -418,18 +361,72 @@ class RequirementPreparer:
 # showing the user what the hash should be.
 return req.hashes(trust_internet=False) or MissingHashes()

-def _fetch_metadata_using_lazy_wheel(self, link):
+def _fetch_metadata_only(
-# type: (Link) -> Optional[Distribution]
+self,
-"""Fetch metadata using lazy wheel, if possible."""
+req: InstallRequirement,
-if not self.use_lazy_wheel:
+) -> Optional[BaseDistribution]:
-return None
 if self.require_hashes:
-logger.debug('Lazy wheel is not used as hash checking is required')
+logger.debug(
+"Metadata-only fetching is not used as hash checking is required",
+)
+return None
+# Try PEP 658 metadata first, then fall back to lazy wheel if unavailable.
+return self._fetch_metadata_using_link_data_attr(
+req
+) or self._fetch_metadata_using_lazy_wheel(req.link)
+
+def _fetch_metadata_using_link_data_attr(
+self,
+req: InstallRequirement,
+) -> Optional[BaseDistribution]:
+"""Fetch metadata from the data-dist-info-metadata attribute, if possible."""
+# (1) Get the link to the metadata file, if provided by the backend.
+metadata_link = req.link.metadata_link()
+if metadata_link is None:
+return None
+assert req.req is not None
+logger.info(
+"Obtaining dependency information for %s from %s",
+req.req,
+metadata_link,
+)
+# (2) Download the contents of the METADATA file, separate from the dist itself.
+metadata_file = get_http_url(
+metadata_link,
+self._download,
+hashes=metadata_link.as_hashes(),
+)
+with open(metadata_file.path, "rb") as f:
+metadata_contents = f.read()
+# (3) Generate a dist just from those file contents.
+metadata_dist = get_metadata_distribution(
+metadata_contents,
+req.link.filename,
+req.req.name,
+)
+# (4) Ensure the Name: field from the METADATA file matches the name from the
+# install requirement.
+#
+# NB: raw_name will fall back to the name from the install requirement if
+# the Name: field is not present, but it's noted in the raw_name docstring
+# that that should NEVER happen anyway.
+if metadata_dist.raw_name != req.req.name:
+raise MetadataInconsistent(
+req, "Name", req.req.name, metadata_dist.raw_name
+)
+return metadata_dist
+
+def _fetch_metadata_using_lazy_wheel(
+self,
+link: Link,
+) -> Optional[BaseDistribution]:
+"""Fetch metadata using lazy wheel, if possible."""
+# --use-feature=fast-deps must be provided.
+if not self.use_lazy_wheel:
 return None
 if link.is_file or not link.is_wheel:
 logger.debug(
-'Lazy wheel is not used as '
+"Lazy wheel is not used as %r does not point to a remote wheel",
-'%r does not points to a remote wheel',
 link,
 )
 return None
@@ -437,22 +434,22 @@ class RequirementPreparer:
 wheel = Wheel(link.filename)
 name = canonicalize_name(wheel.name)
 logger.info(
-'Obtaining dependency information from %s %s',
+"Obtaining dependency information from %s %s",
-name, wheel.version,
+name,
+wheel.version,
 )
-url = link.url.split('#', 1)[0]
+url = link.url.split("#", 1)[0]
 try:
 return dist_from_wheel_url(name, url, self._session)
 except HTTPRangeRequestUnsupported:
-logger.debug('%s does not support range requests', url)
+logger.debug("%s does not support range requests", url)
 return None

 def _complete_partial_requirements(
 self,
-partially_downloaded_reqs, # type: Iterable[InstallRequirement]
+partially_downloaded_reqs: Iterable[InstallRequirement],
-parallel_builds=False, # type: bool
+parallel_builds: bool = False,
-):
+) -> None:
-# type: (...) -> None
 """Download any requirements which were only fetched by metadata."""
 # Download to a temporary directory. These will be copied over as
 # needed for downstream 'download', 'wheel', and 'install' commands.
@@ -461,7 +458,7 @@ class RequirementPreparer:
 # Map each link to the requirement that owns it. This allows us to set
 # `req.local_file_path` on the appropriate requirement after passing
 # all the links at once into BatchDownloader.
-links_to_fully_download = {} # type: Dict[Link, InstallRequirement]
+links_to_fully_download: Dict[Link, InstallRequirement] = {}
 for req in partially_downloaded_reqs:
 assert req.link
 links_to_fully_download[req.link] = req
@@ -480,35 +477,47 @@ class RequirementPreparer:
 for req in partially_downloaded_reqs:
 self._prepare_linked_requirement(req, parallel_builds)

-def prepare_linked_requirement(self, req, parallel_builds=False):
+def prepare_linked_requirement(
-# type: (InstallRequirement, bool) -> Distribution
+self, req: InstallRequirement, parallel_builds: bool = False
+) -> BaseDistribution:
 """Prepare a requirement to be obtained from req.link."""
 assert req.link
-link = req.link
 self._log_preparing_link(req)
 with indent_log():
 # Check if the relevant file is already available
 # in the download directory
 file_path = None
-if self.download_dir is not None and link.is_wheel:
+if self.download_dir is not None and req.link.is_wheel:
 hashes = self._get_linked_req_hashes(req)
-file_path = _check_download_dir(req.link, self.download_dir, hashes)
+file_path = _check_download_dir(
+req.link,
+self.download_dir,
+hashes,
+# When a locally built wheel has been found in cache, we don't warn
+# about re-downloading when the already downloaded wheel hash does
+# not match. This is because the hash must be checked against the
+# original link, not the cached link. It that case the already
+# downloaded file will be removed and re-fetched from cache (which
+# implies a hash check against the cache entry's origin.json).
+warn_on_hash_mismatch=not req.is_wheel_from_cache,
+)

 if file_path is not None:
 # The file is already available, so mark it as downloaded
-self._downloaded[req.link.url] = file_path, None
+self._downloaded[req.link.url] = file_path
 else:
 # The file is not available, attempt to fetch only metadata
-wheel_dist = self._fetch_metadata_using_lazy_wheel(link)
+metadata_dist = self._fetch_metadata_only(req)
-if wheel_dist is not None:
+if metadata_dist is not None:
 req.needs_more_preparation = True
-return wheel_dist
+return metadata_dist

 # None of the optimizations worked, fully prepare the requirement
 return self._prepare_linked_requirement(req, parallel_builds)

-def prepare_linked_requirements_more(self, reqs, parallel_builds=False):
+def prepare_linked_requirements_more(
-# type: (Iterable[InstallRequirement], bool) -> None
+self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
+) -> None:
 """Prepare linked requirements more, if needed."""
 reqs = [req for req in reqs if req.needs_more_preparation]
 for req in reqs:
@@ -517,12 +526,12 @@ class RequirementPreparer:
 hashes = self._get_linked_req_hashes(req)
 file_path = _check_download_dir(req.link, self.download_dir, hashes)
 if file_path is not None:
-self._downloaded[req.link.url] = file_path, None
+self._downloaded[req.link.url] = file_path
 req.needs_more_preparation = False

 # Prepare requirements we found were already downloaded for some
 # reason. The other downloads will be completed separately.
-partially_downloaded_reqs = [] # type: List[InstallRequirement]
+partially_downloaded_reqs: List[InstallRequirement] = []
 for req in reqs:
 if req.needs_more_preparation:
 partially_downloaded_reqs.append(req)
@@ -532,35 +541,87 @@ class RequirementPreparer:
 # TODO: separate this part out from RequirementPreparer when the v1
 # resolver can be removed!
 self._complete_partial_requirements(
-partially_downloaded_reqs, parallel_builds=parallel_builds,
+partially_downloaded_reqs,
+parallel_builds=parallel_builds,
 )

-def _prepare_linked_requirement(self, req, parallel_builds):
+def _prepare_linked_requirement(
-# type: (InstallRequirement, bool) -> Distribution
+self, req: InstallRequirement, parallel_builds: bool
+) -> BaseDistribution:
 assert req.link
 link = req.link

-self._ensure_link_req_src_dir(req, parallel_builds)
 hashes = self._get_linked_req_hashes(req)

-if link.is_existing_dir() and self.in_tree_build:
+if hashes and req.is_wheel_from_cache:
+assert req.download_info is not None
+assert link.is_wheel
+assert link.is_file
+# We need to verify hashes, and we have found the requirement in the cache
+# of locally built wheels.
+if (
+isinstance(req.download_info.info, ArchiveInfo)
+and req.download_info.info.hashes
+and hashes.has_one_of(req.download_info.info.hashes)
+):
+# At this point we know the requirement was built from a hashable source
+# artifact, and we verified that the cache entry's hash of the original
+# artifact matches one of the hashes we expect. We don't verify hashes
+# against the cached wheel, because the wheel is not the original.
+hashes = None
+else:
+logger.warning(
+"The hashes of the source archive found in cache entry "
+"don't match, ignoring cached built wheel "
+"and re-downloading source."
+)
+req.link = req.cached_wheel_source_link
+link = req.link
+
+self._ensure_link_req_src_dir(req, parallel_builds)
+
+if link.is_existing_dir():
 local_file = None
 elif link.url not in self._downloaded:
 try:
 local_file = unpack_url(
-link, req.source_dir, self._download,
+link,
-self.download_dir, hashes
+req.source_dir,
+self._download,
+self.verbosity,
+self.download_dir,
+hashes,
 )
 except NetworkConnectionError as exc:
 raise InstallationError(
-'Could not install requirement {} because of HTTP '
+"Could not install requirement {} because of HTTP "
-'error {} for URL {}'.format(req, exc, link)
+"error {} for URL {}".format(req, exc, link)
 )
 else:
-file_path, content_type = self._downloaded[link.url]
+file_path = self._downloaded[link.url]
 if hashes:
 hashes.check_against_path(file_path)
-local_file = File(file_path, content_type)
+local_file = File(file_path, content_type=None)

+# If download_info is set, we got it from the wheel cache.
+if req.download_info is None:
+# Editables don't go through this function (see
+# prepare_editable_requirement).
+assert not req.editable
+req.download_info = direct_url_from_link(link, req.source_dir)
+# Make sure we have a hash in download_info. If we got it as part of the
+# URL, it will have been verified and we can rely on it. Otherwise we
+# compute it from the downloaded file.
+# FIXME: https://github.com/pypa/pip/issues/11943
+if (
+isinstance(req.download_info.info, ArchiveInfo)
+and not req.download_info.info.hashes
+and local_file
+):
+hash = hash_file(local_file.path)[0].hexdigest()
+# We populate info.hash for backward compatibility.
+# This will automatically populate info.hashes.
+req.download_info.info.hash = f"sha256={hash}"
+
 # For use in later processing,
 # preserve the file path on the requirement.
@@ -568,12 +629,15 @@ class RequirementPreparer:
 req.local_file_path = local_file.path

 dist = _get_prepared_distribution(
-req, self.req_tracker, self.finder, self.build_isolation,
+req,
+self.build_tracker,
+self.finder,
+self.build_isolation,
+self.check_build_deps,
 )
 return dist

-def save_linked_requirement(self, req):
+def save_linked_requirement(self, req: InstallRequirement) -> None:
-# type: (InstallRequirement) -> None
 assert self.download_dir is not None
 assert req.link is not None
 link = req.link
@@ -584,8 +648,9 @@ class RequirementPreparer:

 if link.is_existing_dir():
 logger.debug(
-'Not copying link to destination directory '
+"Not copying link to destination directory "
-'since it is a directory: %s', link,
+"since it is a directory: %s",
+link,
 )
 return
 if req.local_file_path is None:
@@ -596,31 +661,35 @@ class RequirementPreparer:
 if not os.path.exists(download_location):
 shutil.copy(req.local_file_path, download_location)
 download_path = display_path(download_location)
-logger.info('Saved %s', download_path)
+logger.info("Saved %s", download_path)

 def prepare_editable_requirement(
 self,
-req, # type: InstallRequirement
+req: InstallRequirement,
-):
+) -> BaseDistribution:
-# type: (...) -> Distribution
+"""Prepare an editable requirement."""
-"""Prepare an editable requirement
-"""
 assert req.editable, "cannot prepare a non-editable req as editable"

-logger.info('Obtaining %s', req)
+logger.info("Obtaining %s", req)

 with indent_log():
 if self.require_hashes:
 raise InstallationError(
-'The editable requirement {} cannot be installed when '
+"The editable requirement {} cannot be installed when "
-'requiring hashes, because there is no single file to '
+"requiring hashes, because there is no single file to "
-'hash.'.format(req)
+"hash.".format(req)
 )
 req.ensure_has_source_dir(self.src_dir)
 req.update_editable()
+assert req.source_dir
+req.download_info = direct_url_for_editable(req.unpacked_source_directory)

 dist = _get_prepared_distribution(
-req, self.req_tracker, self.finder, self.build_isolation,
+req,
+self.build_tracker,
+self.finder,
+self.build_isolation,
+self.check_build_deps,
 )

 req.check_if_exists(self.use_user_site)
@@ -629,27 +698,24 @@ class RequirementPreparer:

 def prepare_installed_requirement(
 self,
-req, # type: InstallRequirement
+req: InstallRequirement,
-skip_reason # type: str
+skip_reason: str,
-):
+) -> BaseDistribution:
-# type: (...) -> Distribution
+"""Prepare an already-installed requirement."""
-"""Prepare an already-installed requirement
-"""
 assert req.satisfied_by, "req should have been satisfied but isn't"
 assert skip_reason is not None, (
 "did not get skip reason skipped but req.satisfied_by "
 "is set to {}".format(req.satisfied_by)
 )
 logger.info(
-'Requirement %s: %s (%s)',
+"Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
-skip_reason, req, req.satisfied_by.version
 )
 with indent_log():
 if self.require_hashes:
 logger.debug(
-'Since it is already installed, we are trusting this '
+"Since it is already installed, we are trusting this "
-'package without checking its hash. To ensure a '
+"package without checking its hash. To ensure a "
-'completely repeatable environment, install into an '
+"completely repeatable environment, install into an "
-'empty virtualenv.'
+"empty virtualenv."
 )
-return InstalledDistribution(req).get_pkg_resources_distribution()
+return InstalledDistribution(req).get_metadata_distribution()

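The _fetch_metadata_only path added above prefers a PEP 658 metadata file and only falls back to the lazy-wheel trick. A rough standalone sketch of the same idea, assuming an index that follows PEP 658, where the METADATA document is served at the wheel URL with ".metadata" appended (the URL in the comment is illustrative, not taken from this commit):

from typing import Optional
from urllib.error import HTTPError
from urllib.request import urlopen

def fetch_pep658_metadata(wheel_url: str) -> Optional[bytes]:
    """Return the raw METADATA document for a wheel, if the index serves it."""
    try:
        with urlopen(wheel_url + ".metadata") as resp:
            return resp.read()
    except HTTPError:
        # No separate metadata file; the caller falls back to fetching the
        # wheel itself (lazily or in full) to read its dependencies.
        return None

# Illustrative URL only:
# fetch_pep658_metadata("https://files.example.org/demo-1.0-py3-none-any.whl")
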
@@ -1,3 +1,4 @@
+import importlib.util
 import os
 from collections import namedtuple
 from typing import Any, List, Optional
@@ -5,34 +6,29 @@ from typing import Any, List, Optional
 from pip._vendor import tomli
 from pip._vendor.packaging.requirements import InvalidRequirement, Requirement

-from pip._internal.exceptions import InstallationError
+from pip._internal.exceptions import (
+InstallationError,
+InvalidPyProjectBuildRequires,
+MissingPyProjectBuildRequires,
+)


-def _is_list_of_str(obj):
+def _is_list_of_str(obj: Any) -> bool:
-# type: (Any) -> bool
+return isinstance(obj, list) and all(isinstance(item, str) for item in obj)
-return (
-isinstance(obj, list) and
-all(isinstance(item, str) for item in obj)
-)


-def make_pyproject_path(unpacked_source_directory):
+def make_pyproject_path(unpacked_source_directory: str) -> str:
-# type: (str) -> str
+return os.path.join(unpacked_source_directory, "pyproject.toml")
-return os.path.join(unpacked_source_directory, 'pyproject.toml')


-BuildSystemDetails = namedtuple('BuildSystemDetails', [
+BuildSystemDetails = namedtuple(
-'requires', 'backend', 'check', 'backend_path'
+"BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
-])
+)


 def load_pyproject_toml(
-use_pep517, # type: Optional[bool]
+use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
-pyproject_toml, # type: str
+) -> Optional[BuildSystemDetails]:
-setup_py, # type: str
-req_name # type: str
-):
-# type: (...) -> Optional[BuildSystemDetails]
 """Load the pyproject.toml file.

 Parameters:
@@ -57,9 +53,15 @@ def load_pyproject_toml(
 has_pyproject = os.path.isfile(pyproject_toml)
 has_setup = os.path.isfile(setup_py)

+if not has_pyproject and not has_setup:
+raise InstallationError(
+f"{req_name} does not appear to be a Python project: "
+f"neither 'setup.py' nor 'pyproject.toml' found."
+)
+
 if has_pyproject:
 with open(pyproject_toml, encoding="utf-8") as f:
-pp_toml = tomli.load(f)
+pp_toml = tomli.loads(f.read())
 build_system = pp_toml.get("build-system")
 else:
 build_system = None
@@ -82,17 +84,26 @@ def load_pyproject_toml(
 raise InstallationError(
 "Disabling PEP 517 processing is invalid: "
 "project specifies a build backend of {} "
-"in pyproject.toml".format(
+"in pyproject.toml".format(build_system["build-backend"])
-build_system["build-backend"]
-)
 )
 use_pep517 = True

 # If we haven't worked out whether to use PEP 517 yet,
 # and the user hasn't explicitly stated a preference,
-# we do so if the project has a pyproject.toml file.
+# we do so if the project has a pyproject.toml file
+# or if we cannot import setuptools or wheels.
+
+# We fallback to PEP 517 when without setuptools or without the wheel package,
+# so setuptools can be installed as a default build backend.
+# For more info see:
+# https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
+# https://github.com/pypa/pip/issues/8559
 elif use_pep517 is None:
-use_pep517 = has_pyproject
+use_pep517 = (
+has_pyproject
+or not importlib.util.find_spec("setuptools")
+or not importlib.util.find_spec("wheel")
+)

 # At this point, we know whether we're going to use PEP 517.
 assert use_pep517 is not None
@@ -124,52 +135,37 @@ def load_pyproject_toml(

 # Ensure that the build-system section in pyproject.toml conforms
 # to PEP 518.
-error_template = (
-"{package} has a pyproject.toml file that does not comply "
-"with PEP 518: {reason}"
-)

 # Specifying the build-system table but not the requires key is invalid
 if "requires" not in build_system:
-raise InstallationError(
+raise MissingPyProjectBuildRequires(package=req_name)
-error_template.format(package=req_name, reason=(
-"it has a 'build-system' table but not "
-"'build-system.requires' which is mandatory in the table"
-))
-)

 # Error out if requires is not a list of strings
 requires = build_system["requires"]
 if not _is_list_of_str(requires):
-raise InstallationError(error_template.format(
+raise InvalidPyProjectBuildRequires(
 package=req_name,
-reason="'build-system.requires' is not a list of strings.",
+reason="It is not a list of strings.",
-))
+)

 # Each requirement must be valid as per PEP 508
 for requirement in requires:
 try:
 Requirement(requirement)
-except InvalidRequirement:
+except InvalidRequirement as error:
-raise InstallationError(
+raise InvalidPyProjectBuildRequires(
-error_template.format(
 package=req_name,
-reason=(
+reason=f"It contains an invalid requirement: {requirement!r}",
-"'build-system.requires' contains an invalid "
+) from error
-"requirement: {!r}".format(requirement)
-),
-)
-)

 backend = build_system.get("build-backend")
 backend_path = build_system.get("backend-path", [])
-check = [] # type: List[str]
+check: List[str] = []
 if backend is None:
 # If the user didn't specify a backend, we assume they want to use
 # the setuptools backend. But we can't be sure they have included
-# a version of setuptools which supplies the backend, or wheel
+# a version of setuptools which supplies the backend. So we
-# (which is needed by the backend) in their requirements. So we
+# make a note to check that this requirement is present once
-# make a note to check that those requirements are present once
 # we have set up the environment.
 # This is quite a lot of work to check for a very specific case. But
 # the problem is, that case is potentially quite common - projects that
@@ -178,6 +174,6 @@ def load_pyproject_toml(
 # tools themselves. The original PEP 518 code had a similar check (but
 # implemented in a different way).
 backend = "setuptools.build_meta:__legacy__"
-check = ["setuptools>=40.8.0", "wheel"]
+check = ["setuptools>=40.8.0"]

 return BuildSystemDetails(requires, backend, check, backend_path)

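The rewritten pyproject checks above boil down to three rules: build-system.requires must exist, must be a list of strings, and each entry must parse as a PEP 508 requirement. A small sketch of the same validation outside pip, assuming Python 3.11+ for tomllib and the packaging library being installed:

import tomllib  # stdlib on Python 3.11+; the tomli backport has the same API

from packaging.requirements import InvalidRequirement, Requirement

def validate_build_system(pyproject_text: str) -> list:
    """Apply the same three checks to [build-system] and return 'requires'."""
    build_system = tomllib.loads(pyproject_text).get("build-system")
    if build_system is None:
        return []
    if "requires" not in build_system:
        raise ValueError("the build-system table is missing the mandatory 'requires' key")
    requires = build_system["requires"]
    if not isinstance(requires, list) or not all(isinstance(r, str) for r in requires):
        raise ValueError("'build-system.requires' is not a list of strings")
    for entry in requires:
        try:
            Requirement(entry)
        except InvalidRequirement as err:
            raise ValueError(f"invalid requirement in 'build-system.requires': {entry!r}") from err
    return requires

# Example: a minimal conforming table.
print(validate_build_system('[build-system]\nrequires = ["setuptools>=40.8.0"]\n'))
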
@@ -1,6 +1,6 @@
 import collections
 import logging
-from typing import Iterator, List, Optional, Sequence, Tuple
+from typing import Generator, List, Optional, Sequence, Tuple

 from pip._internal.utils.logging import indent_log

@@ -9,8 +9,10 @@ from .req_install import InstallRequirement
 from .req_set import RequirementSet

 __all__ = [
-"RequirementSet", "InstallRequirement",
+"RequirementSet",
-"parse_requirements", "install_given_reqs",
+"InstallRequirement",
+"parse_requirements",
+"install_given_reqs",
 ]

 logger = logging.getLogger(__name__)
@@ -26,7 +28,7 @@ class InstallationResult:

 def _validate_requirements(
 requirements: List[InstallRequirement],
-) -> Iterator[Tuple[str, InstallRequirement]]:
+) -> Generator[Tuple[str, InstallRequirement], None, None]:
 for req in requirements:
 assert req.name, f"invalid to-be-installed requirement: {req}"
 yield req.name, req
@@ -34,7 +36,6 @@ def _validate_requirements(

 def install_given_reqs(
 requirements: List[InstallRequirement],
-install_options: List[str],
 global_options: Sequence[str],
 root: Optional[str],
 home: Optional[str],
@@ -52,8 +53,8 @@ def install_given_reqs(

 if to_install:
 logger.info(
-'Installing collected packages: %s',
+"Installing collected packages: %s",
-', '.join(to_install.keys()),
+", ".join(to_install.keys()),
 )

 installed = []
@@ -61,17 +62,14 @@ def install_given_reqs(
 with indent_log():
 for req_name, requirement in to_install.items():
 if requirement.should_reinstall:
-logger.info('Attempting uninstall: %s', req_name)
+logger.info("Attempting uninstall: %s", req_name)
 with indent_log():
-uninstalled_pathset = requirement.uninstall(
+uninstalled_pathset = requirement.uninstall(auto_confirm=True)
-auto_confirm=True
-)
 else:
 uninstalled_pathset = None

 try:
 requirement.install(
-install_options,
 global_options,
 root=root,
 home=home,

@@ -11,28 +11,28 @@ InstallRequirement.
 import logging
 import os
 import re
-from typing import Any, Dict, Optional, Set, Tuple, Union
+from typing import Dict, List, Optional, Set, Tuple, Union

 from pip._vendor.packaging.markers import Marker
 from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
from pip._vendor.packaging.specifiers import Specifier
|
from pip._vendor.packaging.specifiers import Specifier
|
||||||
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
|
|
||||||
|
|
||||||
from pip._internal.exceptions import InstallationError
|
from pip._internal.exceptions import InstallationError
|
||||||
from pip._internal.models.index import PyPI, TestPyPI
|
from pip._internal.models.index import PyPI, TestPyPI
|
||||||
from pip._internal.models.link import Link
|
from pip._internal.models.link import Link
|
||||||
from pip._internal.models.wheel import Wheel
|
from pip._internal.models.wheel import Wheel
|
||||||
from pip._internal.pyproject import make_pyproject_path
|
|
||||||
from pip._internal.req.req_file import ParsedRequirement
|
from pip._internal.req.req_file import ParsedRequirement
|
||||||
from pip._internal.req.req_install import InstallRequirement
|
from pip._internal.req.req_install import InstallRequirement
|
||||||
from pip._internal.utils.filetypes import is_archive_file
|
from pip._internal.utils.filetypes import is_archive_file
|
||||||
from pip._internal.utils.misc import is_installable_dir
|
from pip._internal.utils.misc import is_installable_dir
|
||||||
|
from pip._internal.utils.packaging import get_requirement
|
||||||
from pip._internal.utils.urls import path_to_url
|
from pip._internal.utils.urls import path_to_url
|
||||||
from pip._internal.vcs import is_url, vcs
|
from pip._internal.vcs import is_url, vcs
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"install_req_from_editable", "install_req_from_line",
|
"install_req_from_editable",
|
||||||
"parse_editable"
|
"install_req_from_line",
|
||||||
|
"parse_editable",
|
||||||
]
|
]
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@@ -40,7 +40,7 @@ operators = Specifier._operators.keys()
|
|||||||
|
|
||||||
|
|
||||||
def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
|
def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
|
||||||
m = re.match(r'^(.+)(\[[^\]]+\])$', path)
|
m = re.match(r"^(.+)(\[[^\]]+\])$", path)
|
||||||
extras = None
|
extras = None
|
||||||
if m:
|
if m:
|
||||||
path_no_extras = m.group(1)
|
path_no_extras = m.group(1)
|
||||||
@@ -54,7 +54,7 @@ def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
|
|||||||
def convert_extras(extras: Optional[str]) -> Set[str]:
|
def convert_extras(extras: Optional[str]) -> Set[str]:
|
||||||
if not extras:
|
if not extras:
|
||||||
return set()
|
return set()
|
||||||
return Requirement("placeholder" + extras.lower()).extras
|
return get_requirement("placeholder" + extras.lower()).extras
|
||||||
|
|
||||||
|
|
||||||
def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
|
def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
|
||||||
@@ -74,39 +74,23 @@ def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
|
|||||||
url_no_extras, extras = _strip_extras(url)
|
url_no_extras, extras = _strip_extras(url)
|
||||||
|
|
||||||
if os.path.isdir(url_no_extras):
|
if os.path.isdir(url_no_extras):
|
||||||
setup_py = os.path.join(url_no_extras, 'setup.py')
|
|
||||||
setup_cfg = os.path.join(url_no_extras, 'setup.cfg')
|
|
||||||
if not os.path.exists(setup_py) and not os.path.exists(setup_cfg):
|
|
||||||
msg = (
|
|
||||||
'File "setup.py" or "setup.cfg" not found. Directory cannot be '
|
|
||||||
'installed in editable mode: {}'
|
|
||||||
.format(os.path.abspath(url_no_extras))
|
|
||||||
)
|
|
||||||
pyproject_path = make_pyproject_path(url_no_extras)
|
|
||||||
if os.path.isfile(pyproject_path):
|
|
||||||
msg += (
|
|
||||||
'\n(A "pyproject.toml" file was found, but editable '
|
|
||||||
'mode currently requires a setuptools-based build.)'
|
|
||||||
)
|
|
||||||
raise InstallationError(msg)
|
|
||||||
|
|
||||||
# Treating it as code that has already been checked out
|
# Treating it as code that has already been checked out
|
||||||
url_no_extras = path_to_url(url_no_extras)
|
url_no_extras = path_to_url(url_no_extras)
|
||||||
|
|
||||||
if url_no_extras.lower().startswith('file:'):
|
if url_no_extras.lower().startswith("file:"):
|
||||||
package_name = Link(url_no_extras).egg_fragment
|
package_name = Link(url_no_extras).egg_fragment
|
||||||
if extras:
|
if extras:
|
||||||
return (
|
return (
|
||||||
package_name,
|
package_name,
|
||||||
url_no_extras,
|
url_no_extras,
|
||||||
Requirement("placeholder" + extras.lower()).extras,
|
get_requirement("placeholder" + extras.lower()).extras,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
return package_name, url_no_extras, set()
|
return package_name, url_no_extras, set()
|
||||||
|
|
||||||
for version_control in vcs:
|
for version_control in vcs:
|
||||||
if url.lower().startswith(f'{version_control}:'):
|
if url.lower().startswith(f"{version_control}:"):
|
||||||
url = f'{version_control}+{url}'
|
url = f"{version_control}+{url}"
|
||||||
break
|
break
|
||||||
|
|
||||||
link = Link(url)
|
link = Link(url)
|
||||||
@@ -114,9 +98,9 @@ def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
|
|||||||
if not link.is_vcs:
|
if not link.is_vcs:
|
||||||
backends = ", ".join(vcs.all_schemes)
|
backends = ", ".join(vcs.all_schemes)
|
||||||
raise InstallationError(
|
raise InstallationError(
|
||||||
f'{editable_req} is not a valid editable requirement. '
|
f"{editable_req} is not a valid editable requirement. "
|
||||||
f'It should either be a path to a local project or a VCS URL '
|
f"It should either be a path to a local project or a VCS URL "
|
||||||
f'(beginning with {backends}).'
|
f"(beginning with {backends})."
|
||||||
)
|
)
|
||||||
|
|
||||||
package_name = link.egg_fragment
|
package_name = link.egg_fragment
|
||||||
@@ -128,33 +112,56 @@ def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
|
|||||||
return package_name, url, set()
|
return package_name, url, set()
|
||||||
|
|
||||||
|
|
||||||
|
def check_first_requirement_in_file(filename: str) -> None:
|
||||||
|
"""Check if file is parsable as a requirements file.
|
||||||
|
|
||||||
|
This is heavily based on ``pkg_resources.parse_requirements``, but
|
||||||
|
simplified to just check the first meaningful line.
|
||||||
|
|
||||||
|
:raises InvalidRequirement: If the first meaningful line cannot be parsed
|
||||||
|
as a requirement.
|
||||||
|
"""
|
||||||
|
with open(filename, encoding="utf-8", errors="ignore") as f:
|
||||||
|
# Create a steppable iterator, so we can handle \-continuations.
|
||||||
|
lines = (
|
||||||
|
line
|
||||||
|
for line in (line.strip() for line in f)
|
||||||
|
if line and not line.startswith("#") # Skip blank lines/comments.
|
||||||
|
)
|
||||||
|
|
||||||
|
for line in lines:
|
||||||
|
# Drop comments -- a hash without a space may be in a URL.
|
||||||
|
if " #" in line:
|
||||||
|
line = line[: line.find(" #")]
|
||||||
|
# If there is a line continuation, drop it, and append the next line.
|
||||||
|
if line.endswith("\\"):
|
||||||
|
line = line[:-2].strip() + next(lines, "")
|
||||||
|
Requirement(line)
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
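Editor's note: the new check_first_requirement_in_file above replaces the earlier reliance on pkg_resources.parse_requirements; it only has to look at the first meaningful line to decide whether a path was probably meant as a requirements file. A rough standalone equivalent, simplified (no backslash continuations) and using the public packaging library:

from packaging.requirements import InvalidRequirement, Requirement

def looks_like_requirements_file(path: str) -> bool:
    with open(path, encoding="utf-8", errors="ignore") as f:
        for raw in f:
            line = raw.strip()
            if not line or line.startswith("#"):   # skip blank lines and comments
                continue
            if " #" in line:                        # drop a trailing comment
                line = line[: line.find(" #")]
            try:
                Requirement(line)
            except InvalidRequirement:
                return False
            return True
    return False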
def deduce_helpful_msg(req: str) -> str:
|
def deduce_helpful_msg(req: str) -> str:
|
||||||
"""Returns helpful msg in case requirements file does not exist,
|
"""Returns helpful msg in case requirements file does not exist,
|
||||||
or cannot be parsed.
|
or cannot be parsed.
|
||||||
|
|
||||||
:params req: Requirements file path
|
:params req: Requirements file path
|
||||||
"""
|
"""
|
||||||
msg = ""
|
if not os.path.exists(req):
|
||||||
if os.path.exists(req):
|
return f" File '{req}' does not exist."
|
||||||
msg = " The path does exist. "
|
msg = " The path does exist. "
|
||||||
# Try to parse and check if it is a requirements file.
|
# Try to parse and check if it is a requirements file.
|
||||||
try:
|
try:
|
||||||
with open(req) as fp:
|
check_first_requirement_in_file(req)
|
||||||
# parse first line only
|
except InvalidRequirement:
|
||||||
next(parse_requirements(fp.read()))
|
logger.debug("Cannot parse '%s' as requirements file", req)
|
||||||
msg += (
|
|
||||||
"The argument you provided "
|
|
||||||
"({}) appears to be a"
|
|
||||||
" requirements file. If that is the"
|
|
||||||
" case, use the '-r' flag to install"
|
|
||||||
" the packages specified within it."
|
|
||||||
).format(req)
|
|
||||||
except RequirementParseError:
|
|
||||||
logger.debug(
|
|
||||||
"Cannot parse '%s' as requirements file", req, exc_info=True
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
msg += f" File '{req}' does not exist."
|
msg += (
|
||||||
|
f"The argument you provided "
|
||||||
|
f"({req}) appears to be a"
|
||||||
|
f" requirements file. If that is the"
|
||||||
|
f" case, use the '-r' flag to install"
|
||||||
|
f" the packages specified within it."
|
||||||
|
)
|
||||||
return msg
|
return msg
|
||||||
|
|
||||||
|
|
||||||
@@ -194,13 +201,16 @@ def parse_req_from_editable(editable_req: str) -> RequirementParts:
|
|||||||
def install_req_from_editable(
|
def install_req_from_editable(
|
||||||
editable_req: str,
|
editable_req: str,
|
||||||
comes_from: Optional[Union[InstallRequirement, str]] = None,
|
comes_from: Optional[Union[InstallRequirement, str]] = None,
|
||||||
|
*,
|
||||||
use_pep517: Optional[bool] = None,
|
use_pep517: Optional[bool] = None,
|
||||||
isolated: bool = False,
|
isolated: bool = False,
|
||||||
options: Optional[Dict[str, Any]] = None,
|
global_options: Optional[List[str]] = None,
|
||||||
|
hash_options: Optional[Dict[str, List[str]]] = None,
|
||||||
constraint: bool = False,
|
constraint: bool = False,
|
||||||
user_supplied: bool = False,
|
user_supplied: bool = False,
|
||||||
|
permit_editable_wheels: bool = False,
|
||||||
|
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
|
||||||
) -> InstallRequirement:
|
) -> InstallRequirement:
|
||||||
|
|
||||||
parts = parse_req_from_editable(editable_req)
|
parts = parse_req_from_editable(editable_req)
|
||||||
|
|
||||||
return InstallRequirement(
|
return InstallRequirement(
|
||||||
@@ -208,13 +218,14 @@ def install_req_from_editable(
|
|||||||
comes_from=comes_from,
|
comes_from=comes_from,
|
||||||
user_supplied=user_supplied,
|
user_supplied=user_supplied,
|
||||||
editable=True,
|
editable=True,
|
||||||
|
permit_editable_wheels=permit_editable_wheels,
|
||||||
link=parts.link,
|
link=parts.link,
|
||||||
constraint=constraint,
|
constraint=constraint,
|
||||||
use_pep517=use_pep517,
|
use_pep517=use_pep517,
|
||||||
isolated=isolated,
|
isolated=isolated,
|
||||||
install_options=options.get("install_options", []) if options else [],
|
global_options=global_options,
|
||||||
global_options=options.get("global_options", []) if options else [],
|
hash_options=hash_options,
|
||||||
hash_options=options.get("hashes", {}) if options else {},
|
config_settings=config_settings,
|
||||||
extras=parts.extras,
|
extras=parts.extras,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -250,6 +261,8 @@ def _get_url_from_path(path: str, name: str) -> Optional[str]:
|
|||||||
if _looks_like_path(name) and os.path.isdir(path):
|
if _looks_like_path(name) and os.path.isdir(path):
|
||||||
if is_installable_dir(path):
|
if is_installable_dir(path):
|
||||||
return path_to_url(path)
|
return path_to_url(path)
|
||||||
|
# TODO: The is_installable_dir test here might not be necessary
|
||||||
|
# now that it is done in load_pyproject_toml too.
|
||||||
raise InstallationError(
|
raise InstallationError(
|
||||||
f"Directory {name!r} is not installable. Neither 'setup.py' "
|
f"Directory {name!r} is not installable. Neither 'setup.py' "
|
||||||
"nor 'pyproject.toml' found."
|
"nor 'pyproject.toml' found."
|
||||||
@@ -258,24 +271,23 @@ def _get_url_from_path(path: str, name: str) -> Optional[str]:
|
|||||||
return None
|
return None
|
||||||
if os.path.isfile(path):
|
if os.path.isfile(path):
|
||||||
return path_to_url(path)
|
return path_to_url(path)
|
||||||
urlreq_parts = name.split('@', 1)
|
urlreq_parts = name.split("@", 1)
|
||||||
if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
|
if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
|
||||||
# If the path contains '@' and the part before it does not look
|
# If the path contains '@' and the part before it does not look
|
||||||
# like a path, try to treat it as a PEP 440 URL req instead.
|
# like a path, try to treat it as a PEP 440 URL req instead.
|
||||||
return None
|
return None
|
||||||
logger.warning(
|
logger.warning(
|
||||||
'Requirement %r looks like a filename, but the '
|
"Requirement %r looks like a filename, but the file does not exist",
|
||||||
'file does not exist',
|
name,
|
||||||
name
|
|
||||||
)
|
)
|
||||||
return path_to_url(path)
|
return path_to_url(path)
|
||||||
|
|
||||||
|
|
||||||
def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
|
def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
|
||||||
if is_url(name):
|
if is_url(name):
|
||||||
marker_sep = '; '
|
marker_sep = "; "
|
||||||
else:
|
else:
|
||||||
marker_sep = ';'
|
marker_sep = ";"
|
||||||
if marker_sep in name:
|
if marker_sep in name:
|
||||||
name, markers_as_string = name.split(marker_sep, 1)
|
name, markers_as_string = name.split(marker_sep, 1)
|
||||||
markers_as_string = markers_as_string.strip()
|
markers_as_string = markers_as_string.strip()
|
||||||
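Editor's note: parse_req_from_line splits any environment marker off the requirement text before it reaches the packaging parser; on a URL line the separator is "; " (with a space) so that a ";" inside the URL is not mistaken for a marker. A small sketch of the idea, not pip's exact code:

from packaging.markers import Marker

def split_marker(line: str, is_url: bool = False):
    sep = "; " if is_url else ";"
    if sep in line:
        name, markers_as_string = line.split(sep, 1)
        return name.strip(), Marker(markers_as_string.strip())
    return line.strip(), None

name, marker = split_marker('requests>=2.0; python_version < "3.8"')
print(name, marker.evaluate())   # the marker evaluates against the running interpreter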
@@ -302,9 +314,8 @@ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementPar
|
|||||||
# it's a local file, dir, or url
|
# it's a local file, dir, or url
|
||||||
if link:
|
if link:
|
||||||
# Handle relative file URLs
|
# Handle relative file URLs
|
||||||
if link.scheme == 'file' and re.search(r'\.\./', link.url):
|
if link.scheme == "file" and re.search(r"\.\./", link.url):
|
||||||
link = Link(
|
link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
|
||||||
path_to_url(os.path.normpath(os.path.abspath(link.path))))
|
|
||||||
# wheel file
|
# wheel file
|
||||||
if link.is_wheel:
|
if link.is_wheel:
|
||||||
wheel = Wheel(link.filename) # can raise InvalidWheelFilename
|
wheel = Wheel(link.filename) # can raise InvalidWheelFilename
|
||||||
@@ -323,25 +334,24 @@ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementPar
|
|||||||
def with_source(text: str) -> str:
|
def with_source(text: str) -> str:
|
||||||
if not line_source:
|
if not line_source:
|
||||||
return text
|
return text
|
||||||
return f'{text} (from {line_source})'
|
return f"{text} (from {line_source})"
|
||||||
|
|
||||||
def _parse_req_string(req_as_string: str) -> Requirement:
|
def _parse_req_string(req_as_string: str) -> Requirement:
|
||||||
try:
|
try:
|
||||||
req = Requirement(req_as_string)
|
req = get_requirement(req_as_string)
|
||||||
except InvalidRequirement:
|
except InvalidRequirement:
|
||||||
if os.path.sep in req_as_string:
|
if os.path.sep in req_as_string:
|
||||||
add_msg = "It looks like a path."
|
add_msg = "It looks like a path."
|
||||||
add_msg += deduce_helpful_msg(req_as_string)
|
add_msg += deduce_helpful_msg(req_as_string)
|
||||||
elif ('=' in req_as_string and
|
elif "=" in req_as_string and not any(
|
||||||
not any(op in req_as_string for op in operators)):
|
op in req_as_string for op in operators
|
||||||
|
):
|
||||||
add_msg = "= is not a valid operator. Did you mean == ?"
|
add_msg = "= is not a valid operator. Did you mean == ?"
|
||||||
else:
|
else:
|
||||||
add_msg = ''
|
add_msg = ""
|
||||||
msg = with_source(
|
msg = with_source(f"Invalid requirement: {req_as_string!r}")
|
||||||
f'Invalid requirement: {req_as_string!r}'
|
|
||||||
)
|
|
||||||
if add_msg:
|
if add_msg:
|
||||||
msg += f'\nHint: {add_msg}'
|
msg += f"\nHint: {add_msg}"
|
||||||
raise InstallationError(msg)
|
raise InstallationError(msg)
|
||||||
else:
|
else:
|
||||||
# Deprecate extras after specifiers: "name>=1.0[extras]"
|
# Deprecate extras after specifiers: "name>=1.0[extras]"
|
||||||
@@ -350,7 +360,7 @@ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementPar
|
|||||||
# RequirementParts
|
# RequirementParts
|
||||||
for spec in req.specifier:
|
for spec in req.specifier:
|
||||||
spec_str = str(spec)
|
spec_str = str(spec)
|
||||||
if spec_str.endswith(']'):
|
if spec_str.endswith("]"):
|
||||||
msg = f"Extras after version '{spec_str}'."
|
msg = f"Extras after version '{spec_str}'."
|
||||||
raise InstallationError(msg)
|
raise InstallationError(msg)
|
||||||
return req
|
return req
|
||||||
@@ -366,12 +376,15 @@ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementPar
|
|||||||
def install_req_from_line(
|
def install_req_from_line(
|
||||||
name: str,
|
name: str,
|
||||||
comes_from: Optional[Union[str, InstallRequirement]] = None,
|
comes_from: Optional[Union[str, InstallRequirement]] = None,
|
||||||
|
*,
|
||||||
use_pep517: Optional[bool] = None,
|
use_pep517: Optional[bool] = None,
|
||||||
isolated: bool = False,
|
isolated: bool = False,
|
||||||
options: Optional[Dict[str, Any]] = None,
|
global_options: Optional[List[str]] = None,
|
||||||
|
hash_options: Optional[Dict[str, List[str]]] = None,
|
||||||
constraint: bool = False,
|
constraint: bool = False,
|
||||||
line_source: Optional[str] = None,
|
line_source: Optional[str] = None,
|
||||||
user_supplied: bool = False,
|
user_supplied: bool = False,
|
||||||
|
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
|
||||||
) -> InstallRequirement:
|
) -> InstallRequirement:
|
||||||
"""Creates an InstallRequirement from a name, which might be a
|
"""Creates an InstallRequirement from a name, which might be a
|
||||||
requirement, directory containing 'setup.py', filename, or URL.
|
requirement, directory containing 'setup.py', filename, or URL.
|
||||||
@@ -382,11 +395,15 @@ def install_req_from_line(
|
|||||||
parts = parse_req_from_line(name, line_source)
|
parts = parse_req_from_line(name, line_source)
|
||||||
|
|
||||||
return InstallRequirement(
|
return InstallRequirement(
|
||||||
parts.requirement, comes_from, link=parts.link, markers=parts.markers,
|
parts.requirement,
|
||||||
use_pep517=use_pep517, isolated=isolated,
|
comes_from,
|
||||||
install_options=options.get("install_options", []) if options else [],
|
link=parts.link,
|
||||||
global_options=options.get("global_options", []) if options else [],
|
markers=parts.markers,
|
||||||
hash_options=options.get("hashes", {}) if options else {},
|
use_pep517=use_pep517,
|
||||||
|
isolated=isolated,
|
||||||
|
global_options=global_options,
|
||||||
|
hash_options=hash_options,
|
||||||
|
config_settings=config_settings,
|
||||||
constraint=constraint,
|
constraint=constraint,
|
||||||
extras=parts.extras,
|
extras=parts.extras,
|
||||||
user_supplied=user_supplied,
|
user_supplied=user_supplied,
|
||||||
@@ -401,7 +418,7 @@ def install_req_from_req_string(
|
|||||||
user_supplied: bool = False,
|
user_supplied: bool = False,
|
||||||
) -> InstallRequirement:
|
) -> InstallRequirement:
|
||||||
try:
|
try:
|
||||||
req = Requirement(req_string)
|
req = get_requirement(req_string)
|
||||||
except InvalidRequirement:
|
except InvalidRequirement:
|
||||||
raise InstallationError(f"Invalid requirement: '{req_string}'")
|
raise InstallationError(f"Invalid requirement: '{req_string}'")
|
||||||
|
|
||||||
@@ -409,8 +426,12 @@ def install_req_from_req_string(
|
|||||||
PyPI.file_storage_domain,
|
PyPI.file_storage_domain,
|
||||||
TestPyPI.file_storage_domain,
|
TestPyPI.file_storage_domain,
|
||||||
]
|
]
|
||||||
if (req.url and comes_from and comes_from.link and
|
if (
|
||||||
comes_from.link.netloc in domains_not_allowed):
|
req.url
|
||||||
|
and comes_from
|
||||||
|
and comes_from.link
|
||||||
|
and comes_from.link.netloc in domains_not_allowed
|
||||||
|
):
|
||||||
# Explicitly disallow pypi packages that depend on external urls
|
# Explicitly disallow pypi packages that depend on external urls
|
||||||
raise InstallationError(
|
raise InstallationError(
|
||||||
"Packages installed from PyPI cannot depend on packages "
|
"Packages installed from PyPI cannot depend on packages "
|
||||||
@@ -432,6 +453,7 @@ def install_req_from_parsed_requirement(
|
|||||||
isolated: bool = False,
|
isolated: bool = False,
|
||||||
use_pep517: Optional[bool] = None,
|
use_pep517: Optional[bool] = None,
|
||||||
user_supplied: bool = False,
|
user_supplied: bool = False,
|
||||||
|
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
|
||||||
) -> InstallRequirement:
|
) -> InstallRequirement:
|
||||||
if parsed_req.is_editable:
|
if parsed_req.is_editable:
|
||||||
req = install_req_from_editable(
|
req = install_req_from_editable(
|
||||||
@@ -441,6 +463,7 @@ def install_req_from_parsed_requirement(
|
|||||||
constraint=parsed_req.constraint,
|
constraint=parsed_req.constraint,
|
||||||
isolated=isolated,
|
isolated=isolated,
|
||||||
user_supplied=user_supplied,
|
user_supplied=user_supplied,
|
||||||
|
config_settings=config_settings,
|
||||||
)
|
)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -449,10 +472,18 @@ def install_req_from_parsed_requirement(
|
|||||||
comes_from=parsed_req.comes_from,
|
comes_from=parsed_req.comes_from,
|
||||||
use_pep517=use_pep517,
|
use_pep517=use_pep517,
|
||||||
isolated=isolated,
|
isolated=isolated,
|
||||||
options=parsed_req.options,
|
global_options=(
|
||||||
|
parsed_req.options.get("global_options", [])
|
||||||
|
if parsed_req.options
|
||||||
|
else []
|
||||||
|
),
|
||||||
|
hash_options=(
|
||||||
|
parsed_req.options.get("hashes", {}) if parsed_req.options else {}
|
||||||
|
),
|
||||||
constraint=parsed_req.constraint,
|
constraint=parsed_req.constraint,
|
||||||
line_source=parsed_req.line_source,
|
line_source=parsed_req.line_source,
|
||||||
user_supplied=user_supplied,
|
user_supplied=user_supplied,
|
||||||
|
config_settings=config_settings,
|
||||||
)
|
)
|
||||||
return req
|
return req
|
||||||
|
|
||||||
@@ -468,7 +499,8 @@ def install_req_from_link_and_ireq(
|
|||||||
markers=ireq.markers,
|
markers=ireq.markers,
|
||||||
use_pep517=ireq.use_pep517,
|
use_pep517=ireq.use_pep517,
|
||||||
isolated=ireq.isolated,
|
isolated=ireq.isolated,
|
||||||
install_options=ireq.install_options,
|
|
||||||
global_options=ireq.global_options,
|
global_options=ireq.global_options,
|
||||||
hash_options=ireq.hash_options,
|
hash_options=ireq.hash_options,
|
||||||
|
config_settings=ireq.config_settings,
|
||||||
|
user_supplied=ireq.user_supplied,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -2,13 +2,24 @@
|
|||||||
Requirements file parsing
|
Requirements file parsing
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
import optparse
|
import optparse
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import shlex
|
import shlex
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
from optparse import Values
|
from optparse import Values
|
||||||
from typing import TYPE_CHECKING, Any, Callable, Dict, Iterator, List, Optional, Tuple
|
from typing import (
|
||||||
|
TYPE_CHECKING,
|
||||||
|
Any,
|
||||||
|
Callable,
|
||||||
|
Dict,
|
||||||
|
Generator,
|
||||||
|
Iterable,
|
||||||
|
List,
|
||||||
|
Optional,
|
||||||
|
Tuple,
|
||||||
|
)
|
||||||
|
|
||||||
from pip._internal.cli import cmdoptions
|
from pip._internal.cli import cmdoptions
|
||||||
from pip._internal.exceptions import InstallationError, RequirementsFileParseError
|
from pip._internal.exceptions import InstallationError, RequirementsFileParseError
|
||||||
@@ -25,20 +36,20 @@ if TYPE_CHECKING:
|
|||||||
|
|
||||||
from pip._internal.index.package_finder import PackageFinder
|
from pip._internal.index.package_finder import PackageFinder
|
||||||
|
|
||||||
__all__ = ['parse_requirements']
|
__all__ = ["parse_requirements"]
|
||||||
|
|
||||||
ReqFileLines = Iterator[Tuple[int, str]]
|
ReqFileLines = Iterable[Tuple[int, str]]
|
||||||
|
|
||||||
LineParser = Callable[[str], Tuple[str, Values]]
|
LineParser = Callable[[str], Tuple[str, Values]]
|
||||||
|
|
||||||
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
|
SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
|
||||||
COMMENT_RE = re.compile(r'(^|\s+)#.*$')
|
COMMENT_RE = re.compile(r"(^|\s+)#.*$")
|
||||||
|
|
||||||
# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
|
# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
|
||||||
# variable name consisting of only uppercase letters, digits or the '_'
|
# variable name consisting of only uppercase letters, digits or the '_'
|
||||||
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
|
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
|
||||||
# 2013 Edition.
|
# 2013 Edition.
|
||||||
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
|
ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")
|
||||||
|
|
||||||
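Editor's note: ENV_VAR_RE above is what lets requirements files reference ${UPPERCASE_VARIABLES}; expansion replaces only placeholders that are actually set in the environment. A hedged sketch of that expansion step (pip ships an equivalent helper; the demo variable below is made up):

import os
import re

ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")

def expand_env_variables(line: str) -> str:
    # findall returns (placeholder, variable-name) pairs for the two groups
    for var, name in ENV_VAR_RE.findall(line):
        value = os.environ.get(name)
        if value is not None:
            line = line.replace(var, value)
    return line

os.environ["INDEX_HOST"] = "pypi.example.org"   # assumption, for the demo only
print(expand_env_variables("--index-url https://${INDEX_HOST}/simple"))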
SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
|
SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
|
||||||
cmdoptions.index_url,
|
cmdoptions.index_url,
|
||||||
@@ -59,14 +70,16 @@ SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
|
|||||||
|
|
||||||
# options to be passed to requirements
|
# options to be passed to requirements
|
||||||
SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
|
SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
|
||||||
cmdoptions.install_options,
|
|
||||||
cmdoptions.global_options,
|
cmdoptions.global_options,
|
||||||
cmdoptions.hash,
|
cmdoptions.hash,
|
||||||
|
cmdoptions.config_settings,
|
||||||
]
|
]
|
||||||
|
|
||||||
# the 'dest' string values
|
# the 'dest' string values
|
||||||
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
|
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ParsedRequirement:
|
class ParsedRequirement:
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -119,7 +132,7 @@ def parse_requirements(
|
|||||||
finder: Optional["PackageFinder"] = None,
|
finder: Optional["PackageFinder"] = None,
|
||||||
options: Optional[optparse.Values] = None,
|
options: Optional[optparse.Values] = None,
|
||||||
constraint: bool = False,
|
constraint: bool = False,
|
||||||
) -> Iterator[ParsedRequirement]:
|
) -> Generator[ParsedRequirement, None, None]:
|
||||||
"""Parse a requirements file and yield ParsedRequirement instances.
|
"""Parse a requirements file and yield ParsedRequirement instances.
|
||||||
|
|
||||||
:param filename: Path or url of requirements file.
|
:param filename: Path or url of requirements file.
|
||||||
@@ -134,10 +147,7 @@ def parse_requirements(
|
|||||||
|
|
||||||
for parsed_line in parser.parse(filename, constraint):
|
for parsed_line in parser.parse(filename, constraint):
|
||||||
parsed_req = handle_line(
|
parsed_req = handle_line(
|
||||||
parsed_line,
|
parsed_line, options=options, finder=finder, session=session
|
||||||
options=options,
|
|
||||||
finder=finder,
|
|
||||||
session=session
|
|
||||||
)
|
)
|
||||||
if parsed_req is not None:
|
if parsed_req is not None:
|
||||||
yield parsed_req
|
yield parsed_req
|
||||||
@@ -159,10 +169,11 @@ def handle_requirement_line(
|
|||||||
line: ParsedLine,
|
line: ParsedLine,
|
||||||
options: Optional[optparse.Values] = None,
|
options: Optional[optparse.Values] = None,
|
||||||
) -> ParsedRequirement:
|
) -> ParsedRequirement:
|
||||||
|
|
||||||
# preserve for the nested code path
|
# preserve for the nested code path
|
||||||
line_comes_from = '{} {} (line {})'.format(
|
line_comes_from = "{} {} (line {})".format(
|
||||||
'-c' if line.constraint else '-r', line.filename, line.lineno,
|
"-c" if line.constraint else "-r",
|
||||||
|
line.filename,
|
||||||
|
line.lineno,
|
||||||
)
|
)
|
||||||
|
|
||||||
assert line.is_requirement
|
assert line.is_requirement
|
||||||
@@ -177,17 +188,13 @@ def handle_requirement_line(
|
|||||||
constraint=line.constraint,
|
constraint=line.constraint,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
if options:
|
|
||||||
# Disable wheels if the user has specified build options
|
|
||||||
cmdoptions.check_install_build_global(options, line.opts)
|
|
||||||
|
|
||||||
# get the options that apply to requirements
|
# get the options that apply to requirements
|
||||||
req_options = {}
|
req_options = {}
|
||||||
for dest in SUPPORTED_OPTIONS_REQ_DEST:
|
for dest in SUPPORTED_OPTIONS_REQ_DEST:
|
||||||
if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
|
if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
|
||||||
req_options[dest] = line.opts.__dict__[dest]
|
req_options[dest] = line.opts.__dict__[dest]
|
||||||
|
|
||||||
line_source = f'line {line.lineno} of {line.filename}'
|
line_source = f"line {line.lineno} of {line.filename}"
|
||||||
return ParsedRequirement(
|
return ParsedRequirement(
|
||||||
requirement=line.requirement,
|
requirement=line.requirement,
|
||||||
is_editable=line.is_editable,
|
is_editable=line.is_editable,
|
||||||
@@ -206,6 +213,12 @@ def handle_option_line(
|
|||||||
options: Optional[optparse.Values] = None,
|
options: Optional[optparse.Values] = None,
|
||||||
session: Optional[PipSession] = None,
|
session: Optional[PipSession] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
if opts.hashes:
|
||||||
|
logger.warning(
|
||||||
|
"%s line %s has --hash but no requirement, and will be ignored.",
|
||||||
|
filename,
|
||||||
|
lineno,
|
||||||
|
)
|
||||||
|
|
||||||
if options:
|
if options:
|
||||||
# percolate options upward
|
# percolate options upward
|
||||||
@@ -213,19 +226,20 @@ def handle_option_line(
|
|||||||
options.require_hashes = opts.require_hashes
|
options.require_hashes = opts.require_hashes
|
||||||
if opts.features_enabled:
|
if opts.features_enabled:
|
||||||
options.features_enabled.extend(
|
options.features_enabled.extend(
|
||||||
f for f in opts.features_enabled
|
f for f in opts.features_enabled if f not in options.features_enabled
|
||||||
if f not in options.features_enabled
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# set finder options
|
# set finder options
|
||||||
if finder:
|
if finder:
|
||||||
find_links = finder.find_links
|
find_links = finder.find_links
|
||||||
index_urls = finder.index_urls
|
index_urls = finder.index_urls
|
||||||
if opts.index_url:
|
no_index = finder.search_scope.no_index
|
||||||
index_urls = [opts.index_url]
|
|
||||||
if opts.no_index is True:
|
if opts.no_index is True:
|
||||||
|
no_index = True
|
||||||
index_urls = []
|
index_urls = []
|
||||||
if opts.extra_index_urls:
|
if opts.index_url and not no_index:
|
||||||
|
index_urls = [opts.index_url]
|
||||||
|
if opts.extra_index_urls and not no_index:
|
||||||
index_urls.extend(opts.extra_index_urls)
|
index_urls.extend(opts.extra_index_urls)
|
||||||
if opts.find_links:
|
if opts.find_links:
|
||||||
# FIXME: it would be nice to keep track of the source
|
# FIXME: it would be nice to keep track of the source
|
||||||
@@ -245,6 +259,7 @@ def handle_option_line(
|
|||||||
search_scope = SearchScope(
|
search_scope = SearchScope(
|
||||||
find_links=find_links,
|
find_links=find_links,
|
||||||
index_urls=index_urls,
|
index_urls=index_urls,
|
||||||
|
no_index=no_index,
|
||||||
)
|
)
|
||||||
finder.search_scope = search_scope
|
finder.search_scope = search_scope
|
||||||
|
|
||||||
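Editor's note: the reworked block above makes --no-index authoritative: once it is seen, any --index-url or --extra-index-url on the same requirements-file line is ignored instead of re-populating the index list. A simplified sketch of that precedence, not pip's exact control flow:

from typing import List, Optional

def effective_index_urls(
    current: List[str],
    index_url: Optional[str],
    extra_index_urls: List[str],
    no_index: bool,
) -> List[str]:
    if no_index:
        return []                    # --no-index wins over everything else
    urls = list(current)
    if index_url:
        urls = [index_url]           # an explicit index replaces the current one
    if extra_index_urls:
        urls.extend(extra_index_urls)
    return urls

print(effective_index_urls(["https://pypi.org/simple"], None, [], no_index=True))   # []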
@@ -256,7 +271,7 @@ def handle_option_line(
|
|||||||
|
|
||||||
if session:
|
if session:
|
||||||
for host in opts.trusted_hosts or []:
|
for host in opts.trusted_hosts or []:
|
||||||
source = f'line {lineno} of {filename}'
|
source = f"line {lineno} of {filename}"
|
||||||
session.add_trusted_host(host, source=source)
|
session.add_trusted_host(host, source=source)
|
||||||
|
|
||||||
|
|
||||||
@@ -313,18 +328,18 @@ class RequirementsFileParser:
|
|||||||
self._session = session
|
self._session = session
|
||||||
self._line_parser = line_parser
|
self._line_parser = line_parser
|
||||||
|
|
||||||
def parse(self, filename: str, constraint: bool) -> Iterator[ParsedLine]:
|
def parse(
|
||||||
"""Parse a given file, yielding parsed lines.
|
self, filename: str, constraint: bool
|
||||||
"""
|
) -> Generator[ParsedLine, None, None]:
|
||||||
|
"""Parse a given file, yielding parsed lines."""
|
||||||
yield from self._parse_and_recurse(filename, constraint)
|
yield from self._parse_and_recurse(filename, constraint)
|
||||||
|
|
||||||
def _parse_and_recurse(
|
def _parse_and_recurse(
|
||||||
self, filename: str, constraint: bool
|
self, filename: str, constraint: bool
|
||||||
) -> Iterator[ParsedLine]:
|
) -> Generator[ParsedLine, None, None]:
|
||||||
for line in self._parse_file(filename, constraint):
|
for line in self._parse_file(filename, constraint):
|
||||||
if (
|
if not line.is_requirement and (
|
||||||
not line.is_requirement and
|
line.opts.requirements or line.opts.constraints
|
||||||
(line.opts.requirements or line.opts.constraints)
|
|
||||||
):
|
):
|
||||||
# parse a nested requirements file
|
# parse a nested requirements file
|
||||||
if line.opts.requirements:
|
if line.opts.requirements:
|
||||||
@@ -342,14 +357,17 @@ class RequirementsFileParser:
|
|||||||
elif not SCHEME_RE.search(req_path):
|
elif not SCHEME_RE.search(req_path):
|
||||||
# do a join so relative paths work
|
# do a join so relative paths work
|
||||||
req_path = os.path.join(
|
req_path = os.path.join(
|
||||||
os.path.dirname(filename), req_path,
|
os.path.dirname(filename),
|
||||||
|
req_path,
|
||||||
)
|
)
|
||||||
|
|
||||||
yield from self._parse_and_recurse(req_path, nested_constraint)
|
yield from self._parse_and_recurse(req_path, nested_constraint)
|
||||||
else:
|
else:
|
||||||
yield line
|
yield line
|
||||||
|
|
||||||
def _parse_file(self, filename: str, constraint: bool) -> Iterator[ParsedLine]:
|
def _parse_file(
|
||||||
|
self, filename: str, constraint: bool
|
||||||
|
) -> Generator[ParsedLine, None, None]:
|
||||||
_, content = get_file_content(filename, self._session)
|
_, content = get_file_content(filename, self._session)
|
||||||
|
|
||||||
lines_enum = preprocess(content)
|
lines_enum = preprocess(content)
|
||||||
@@ -359,7 +377,7 @@ class RequirementsFileParser:
|
|||||||
args_str, opts = self._line_parser(line)
|
args_str, opts = self._line_parser(line)
|
||||||
except OptionParsingError as e:
|
except OptionParsingError as e:
|
||||||
# add offending line
|
# add offending line
|
||||||
msg = f'Invalid requirement: {line}\n{e.msg}'
|
msg = f"Invalid requirement: {line}\n{e.msg}"
|
||||||
raise RequirementsFileParseError(msg)
|
raise RequirementsFileParseError(msg)
|
||||||
|
|
||||||
yield ParsedLine(
|
yield ParsedLine(
|
||||||
@@ -383,7 +401,12 @@ def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:
|
|||||||
|
|
||||||
args_str, options_str = break_args_options(line)
|
args_str, options_str = break_args_options(line)
|
||||||
|
|
||||||
opts, _ = parser.parse_args(shlex.split(options_str), defaults)
|
try:
|
||||||
|
options = shlex.split(options_str)
|
||||||
|
except ValueError as e:
|
||||||
|
raise OptionParsingError(f"Could not split options: {options_str}") from e
|
||||||
|
|
||||||
|
opts, _ = parser.parse_args(options, defaults)
|
||||||
|
|
||||||
return args_str, opts
|
return args_str, opts
|
||||||
|
|
||||||
@@ -395,16 +418,16 @@ def break_args_options(line: str) -> Tuple[str, str]:
|
|||||||
(and then optparse) the options, not the args. args can contain markers
|
(and then optparse) the options, not the args. args can contain markers
|
||||||
which are corrupted by shlex.
|
which are corrupted by shlex.
|
||||||
"""
|
"""
|
||||||
tokens = line.split(' ')
|
tokens = line.split(" ")
|
||||||
args = []
|
args = []
|
||||||
options = tokens[:]
|
options = tokens[:]
|
||||||
for token in tokens:
|
for token in tokens:
|
||||||
if token.startswith('-') or token.startswith('--'):
|
if token.startswith("-") or token.startswith("--"):
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
args.append(token)
|
args.append(token)
|
||||||
options.pop(0)
|
options.pop(0)
|
||||||
return ' '.join(args), ' '.join(options)
|
return " ".join(args), " ".join(options)
|
||||||
|
|
||||||
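Editor's note: two related changes above. The requirement text is separated from its trailing options before shlex ever sees it (environment markers would otherwise be mangled by shell-style splitting), and a malformed option tail (for example an unbalanced quote) is now reported as a parse error instead of escaping as a bare ValueError. A rough combined sketch:

import shlex

def split_line(line: str):
    tokens = line.split(" ")
    args, options = [], tokens[:]
    for token in tokens:
        if token.startswith("-"):       # first option marks the end of the requirement
            break
        args.append(token)
        options.pop(0)
    try:
        opts = shlex.split(" ".join(options))
    except ValueError as exc:
        raise RuntimeError(f"Could not split options: {' '.join(options)}") from exc
    return " ".join(args), opts

print(split_line('requests>=2.0 --hash=sha256:abc --global-option="--quiet"'))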
|
|
||||||
class OptionParsingError(Exception):
|
class OptionParsingError(Exception):
|
||||||
@@ -427,6 +450,7 @@ def build_parser() -> optparse.OptionParser:
|
|||||||
# that in our own exception.
|
# that in our own exception.
|
||||||
def parser_exit(self: Any, msg: str) -> "NoReturn":
|
def parser_exit(self: Any, msg: str) -> "NoReturn":
|
||||||
raise OptionParsingError(msg)
|
raise OptionParsingError(msg)
|
||||||
|
|
||||||
# NOTE: mypy disallows assigning to a method
|
# NOTE: mypy disallows assigning to a method
|
||||||
# https://github.com/python/mypy/issues/2427
|
# https://github.com/python/mypy/issues/2427
|
||||||
parser.exit = parser_exit # type: ignore
|
parser.exit = parser_exit # type: ignore
|
||||||
@@ -441,26 +465,26 @@ def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
|
|||||||
primary_line_number = None
|
primary_line_number = None
|
||||||
new_line: List[str] = []
|
new_line: List[str] = []
|
||||||
for line_number, line in lines_enum:
|
for line_number, line in lines_enum:
|
||||||
if not line.endswith('\\') or COMMENT_RE.match(line):
|
if not line.endswith("\\") or COMMENT_RE.match(line):
|
||||||
if COMMENT_RE.match(line):
|
if COMMENT_RE.match(line):
|
||||||
# this ensures comments are always matched later
|
# this ensures comments are always matched later
|
||||||
line = ' ' + line
|
line = " " + line
|
||||||
if new_line:
|
if new_line:
|
||||||
new_line.append(line)
|
new_line.append(line)
|
||||||
assert primary_line_number is not None
|
assert primary_line_number is not None
|
||||||
yield primary_line_number, ''.join(new_line)
|
yield primary_line_number, "".join(new_line)
|
||||||
new_line = []
|
new_line = []
|
||||||
else:
|
else:
|
||||||
yield line_number, line
|
yield line_number, line
|
||||||
else:
|
else:
|
||||||
if not new_line:
|
if not new_line:
|
||||||
primary_line_number = line_number
|
primary_line_number = line_number
|
||||||
new_line.append(line.strip('\\'))
|
new_line.append(line.strip("\\"))
|
||||||
|
|
||||||
# last line contains \
|
# last line contains \
|
||||||
if new_line:
|
if new_line:
|
||||||
assert primary_line_number is not None
|
assert primary_line_number is not None
|
||||||
yield primary_line_number, ''.join(new_line)
|
yield primary_line_number, "".join(new_line)
|
||||||
|
|
||||||
# TODO: handle space after '\'.
|
# TODO: handle space after '\'.
|
||||||
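Editor's note: join_lines above merges backslash-continued physical lines into one logical line while remembering the line number of the first piece. A simplified standalone sketch of that behaviour (it omits the special handling of comment lines):

from typing import Iterable, Iterator, List, Optional, Tuple

def join_lines(lines: Iterable[Tuple[int, str]]) -> Iterator[Tuple[int, str]]:
    buffer: List[str] = []
    first_lineno: Optional[int] = None
    for lineno, line in lines:
        if line.endswith("\\"):
            if not buffer:
                first_lineno = lineno
            buffer.append(line.rstrip("\\"))
            continue
        if buffer:
            buffer.append(line)
            yield first_lineno, "".join(buffer)   # joined line keeps the first number
            buffer = []
        else:
            yield lineno, line
    if buffer:                                    # file ended on a continuation
        yield first_lineno, "".join(buffer)

print(list(join_lines(enumerate(["requests \\", "    >=2.0", "flask"], start=1))))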
|
|
||||||
@@ -470,7 +494,7 @@ def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
|
|||||||
Strips comments and filter empty lines.
|
Strips comments and filter empty lines.
|
||||||
"""
|
"""
|
||||||
for line_number, line in lines_enum:
|
for line_number, line in lines_enum:
|
||||||
line = COMMENT_RE.sub('', line)
|
line = COMMENT_RE.sub("", line)
|
||||||
line = line.strip()
|
line = line.strip()
|
||||||
if line:
|
if line:
|
||||||
yield line_number, line
|
yield line_number, line
|
||||||
@@ -514,15 +538,15 @@ def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
|
|||||||
scheme = get_url_scheme(url)
|
scheme = get_url_scheme(url)
|
||||||
|
|
||||||
# Pip has special support for file:// URLs (LocalFSAdapter).
|
# Pip has special support for file:// URLs (LocalFSAdapter).
|
||||||
if scheme in ['http', 'https', 'file']:
|
if scheme in ["http", "https", "file"]:
|
||||||
resp = session.get(url)
|
resp = session.get(url)
|
||||||
raise_for_status(resp)
|
raise_for_status(resp)
|
||||||
return resp.url, resp.text
|
return resp.url, resp.text
|
||||||
|
|
||||||
# Assume this is a bare path.
|
# Assume this is a bare path.
|
||||||
try:
|
try:
|
||||||
with open(url, 'rb') as f:
|
with open(url, "rb") as f:
|
||||||
content = auto_decode(f.read())
|
content = auto_decode(f.read())
|
||||||
except OSError as exc:
|
except OSError as exc:
|
||||||
raise InstallationError(f'Could not open requirements file: {exc}')
|
raise InstallationError(f"Could not open requirements file: {exc}")
|
||||||
return url, content
|
return url, content
|
||||||
|
|||||||
@@ -1,55 +1,59 @@
|
|||||||
# The following comment should be removed at some point in the future.
|
# The following comment should be removed at some point in the future.
|
||||||
# mypy: strict-optional=False
|
# mypy: strict-optional=False
|
||||||
|
|
||||||
|
import functools
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import sys
|
import sys
|
||||||
import uuid
|
import uuid
|
||||||
import zipfile
|
import zipfile
|
||||||
from typing import Any, Dict, Iterable, List, Optional, Sequence, Union
|
from optparse import Values
|
||||||
|
from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
|
||||||
|
|
||||||
from pip._vendor import pkg_resources, six
|
|
||||||
from pip._vendor.packaging.markers import Marker
|
from pip._vendor.packaging.markers import Marker
|
||||||
from pip._vendor.packaging.requirements import Requirement
|
from pip._vendor.packaging.requirements import Requirement
|
||||||
from pip._vendor.packaging.specifiers import SpecifierSet
|
from pip._vendor.packaging.specifiers import SpecifierSet
|
||||||
from pip._vendor.packaging.utils import canonicalize_name
|
from pip._vendor.packaging.utils import canonicalize_name
|
||||||
from pip._vendor.packaging.version import Version
|
from pip._vendor.packaging.version import Version
|
||||||
from pip._vendor.packaging.version import parse as parse_version
|
from pip._vendor.packaging.version import parse as parse_version
|
||||||
from pip._vendor.pep517.wrappers import Pep517HookCaller
|
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
||||||
from pip._vendor.pkg_resources import Distribution
|
|
||||||
|
|
||||||
from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
|
from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
|
||||||
from pip._internal.exceptions import InstallationError
|
from pip._internal.exceptions import InstallationError
|
||||||
from pip._internal.locations import get_scheme
|
from pip._internal.locations import get_scheme
|
||||||
|
from pip._internal.metadata import (
|
||||||
|
BaseDistribution,
|
||||||
|
get_default_environment,
|
||||||
|
get_directory_distribution,
|
||||||
|
get_wheel_distribution,
|
||||||
|
)
|
||||||
|
from pip._internal.metadata.base import FilesystemWheel
|
||||||
|
from pip._internal.models.direct_url import DirectUrl
|
||||||
from pip._internal.models.link import Link
|
from pip._internal.models.link import Link
|
||||||
from pip._internal.operations.build.metadata import generate_metadata
|
from pip._internal.operations.build.metadata import generate_metadata
|
||||||
|
from pip._internal.operations.build.metadata_editable import generate_editable_metadata
|
||||||
from pip._internal.operations.build.metadata_legacy import (
|
from pip._internal.operations.build.metadata_legacy import (
|
||||||
generate_metadata as generate_metadata_legacy,
|
generate_metadata as generate_metadata_legacy,
|
||||||
)
|
)
|
||||||
from pip._internal.operations.install.editable_legacy import (
|
from pip._internal.operations.install.editable_legacy import (
|
||||||
install_editable as install_editable_legacy,
|
install_editable as install_editable_legacy,
|
||||||
)
|
)
|
||||||
from pip._internal.operations.install.legacy import LegacyInstallFailure
|
|
||||||
from pip._internal.operations.install.legacy import install as install_legacy
|
|
||||||
from pip._internal.operations.install.wheel import install_wheel
|
from pip._internal.operations.install.wheel import install_wheel
|
||||||
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
|
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
|
||||||
from pip._internal.req.req_uninstall import UninstallPathSet
|
from pip._internal.req.req_uninstall import UninstallPathSet
|
||||||
from pip._internal.utils.deprecation import deprecated
|
from pip._internal.utils.deprecation import deprecated
|
||||||
from pip._internal.utils.direct_url_helpers import direct_url_from_link
|
|
||||||
from pip._internal.utils.hashes import Hashes
|
from pip._internal.utils.hashes import Hashes
|
||||||
from pip._internal.utils.logging import indent_log
|
|
||||||
from pip._internal.utils.misc import (
|
from pip._internal.utils.misc import (
|
||||||
|
ConfiguredBuildBackendHookCaller,
|
||||||
ask_path_exists,
|
ask_path_exists,
|
||||||
backup_dir,
|
backup_dir,
|
||||||
display_path,
|
display_path,
|
||||||
dist_in_site_packages,
|
|
||||||
dist_in_usersite,
|
|
||||||
get_distribution,
|
|
||||||
hide_url,
|
hide_url,
|
||||||
redact_auth_from_url,
|
redact_auth_from_url,
|
||||||
)
|
)
|
||||||
from pip._internal.utils.packaging import get_metadata
|
from pip._internal.utils.packaging import safe_extra
|
||||||
|
from pip._internal.utils.subprocess import runner_with_spinner_message
|
||||||
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
||||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||||
from pip._internal.vcs import vcs
|
from pip._internal.vcs import vcs
|
||||||
@@ -57,32 +61,6 @@ from pip._internal.vcs import vcs
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def _get_dist(metadata_directory: str) -> Distribution:
|
|
||||||
"""Return a pkg_resources.Distribution for the provided
|
|
||||||
metadata directory.
|
|
||||||
"""
|
|
||||||
dist_dir = metadata_directory.rstrip(os.sep)
|
|
||||||
|
|
||||||
# Build a PathMetadata object, from path to metadata. :wink:
|
|
||||||
base_dir, dist_dir_name = os.path.split(dist_dir)
|
|
||||||
metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
|
|
||||||
|
|
||||||
# Determine the correct Distribution object type.
|
|
||||||
if dist_dir.endswith(".egg-info"):
|
|
||||||
dist_cls = pkg_resources.Distribution
|
|
||||||
dist_name = os.path.splitext(dist_dir_name)[0]
|
|
||||||
else:
|
|
||||||
assert dist_dir.endswith(".dist-info")
|
|
||||||
dist_cls = pkg_resources.DistInfoDistribution
|
|
||||||
dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
|
|
||||||
|
|
||||||
return dist_cls(
|
|
||||||
base_dir,
|
|
||||||
project_name=dist_name,
|
|
||||||
metadata=metadata,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class InstallRequirement:
|
class InstallRequirement:
|
||||||
"""
|
"""
|
||||||
Represents something that may be installed later on, may have information
|
Represents something that may be installed later on, may have information
|
||||||
@@ -99,19 +77,21 @@ class InstallRequirement:
|
|||||||
markers: Optional[Marker] = None,
|
markers: Optional[Marker] = None,
|
||||||
use_pep517: Optional[bool] = None,
|
use_pep517: Optional[bool] = None,
|
||||||
isolated: bool = False,
|
isolated: bool = False,
|
||||||
install_options: Optional[List[str]] = None,
|
*,
|
||||||
global_options: Optional[List[str]] = None,
|
global_options: Optional[List[str]] = None,
|
||||||
hash_options: Optional[Dict[str, List[str]]] = None,
|
hash_options: Optional[Dict[str, List[str]]] = None,
|
||||||
|
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
|
||||||
constraint: bool = False,
|
constraint: bool = False,
|
||||||
extras: Iterable[str] = (),
|
extras: Collection[str] = (),
|
||||||
user_supplied: bool = False,
|
user_supplied: bool = False,
|
||||||
|
permit_editable_wheels: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
assert req is None or isinstance(req, Requirement), req
|
assert req is None or isinstance(req, Requirement), req
|
||||||
self.req = req
|
self.req = req
|
||||||
self.comes_from = comes_from
|
self.comes_from = comes_from
|
||||||
self.constraint = constraint
|
self.constraint = constraint
|
||||||
self.editable = editable
|
self.editable = editable
|
||||||
self.legacy_install_reason: Optional[int] = None
|
self.permit_editable_wheels = permit_editable_wheels
|
||||||
|
|
||||||
# source_dir is the local directory where the linked requirement is
|
# source_dir is the local directory where the linked requirement is
|
||||||
# located, or unpacked. In case unpacking is needed, creating and
|
# located, or unpacked. In case unpacking is needed, creating and
|
||||||
@@ -122,15 +102,21 @@ class InstallRequirement:
|
|||||||
if self.editable:
|
if self.editable:
|
||||||
assert link
|
assert link
|
||||||
if link.is_file:
|
if link.is_file:
|
||||||
self.source_dir = os.path.normpath(
|
self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
|
||||||
os.path.abspath(link.file_path)
|
|
||||||
)
|
|
||||||
|
|
||||||
if link is None and req and req.url:
|
if link is None and req and req.url:
|
||||||
# PEP 508 URL requirement
|
# PEP 508 URL requirement
|
||||||
link = Link(req.url)
|
link = Link(req.url)
|
||||||
self.link = self.original_link = link
|
self.link = self.original_link = link
|
||||||
self.original_link_is_in_wheel_cache = False
|
|
||||||
|
# When this InstallRequirement is a wheel obtained from the cache of locally
|
||||||
|
# built wheels, this is the source link corresponding to the cache entry, which
|
||||||
|
# was used to download and build the cached wheel.
|
||||||
|
self.cached_wheel_source_link: Optional[Link] = None
|
||||||
|
|
||||||
|
# Information about the location of the artifact that was downloaded. This
|
||||||
|
# property is guaranteed to be set in resolver results.
|
||||||
|
self.download_info: Optional[DirectUrl] = None
|
||||||
|
|
||||||
# Path to any downloaded or already-existing package.
|
# Path to any downloaded or already-existing package.
|
||||||
self.local_file_path: Optional[str] = None
|
self.local_file_path: Optional[str] = None
|
||||||
@@ -140,18 +126,15 @@ class InstallRequirement:
|
|||||||
if extras:
|
if extras:
|
||||||
self.extras = extras
|
self.extras = extras
|
||||||
elif req:
|
elif req:
|
||||||
self.extras = {
|
self.extras = {safe_extra(extra) for extra in req.extras}
|
||||||
pkg_resources.safe_extra(extra) for extra in req.extras
|
|
||||||
}
|
|
||||||
else:
|
else:
|
||||||
self.extras = set()
|
self.extras = set()
|
||||||
if markers is None and req:
|
if markers is None and req:
|
||||||
markers = req.marker
|
markers = req.marker
|
||||||
self.markers = markers
|
self.markers = markers
|
||||||
|
|
||||||
# This holds the pkg_resources.Distribution object if this requirement
|
# This holds the Distribution object if this requirement is already installed.
|
||||||
# is already available:
|
self.satisfied_by: Optional[BaseDistribution] = None
|
||||||
self.satisfied_by: Optional[Distribution] = None
|
|
||||||
# Whether the installation process should try to uninstall an existing
|
# Whether the installation process should try to uninstall an existing
|
||||||
# distribution before installing this requirement.
|
# distribution before installing this requirement.
|
||||||
self.should_reinstall = False
|
self.should_reinstall = False
|
||||||
@@ -160,9 +143,9 @@ class InstallRequirement:
|
|||||||
# Set to True after successful installation
|
# Set to True after successful installation
|
||||||
self.install_succeeded: Optional[bool] = None
|
self.install_succeeded: Optional[bool] = None
|
||||||
# Supplied options
|
# Supplied options
|
||||||
self.install_options = install_options if install_options else []
|
|
||||||
self.global_options = global_options if global_options else []
|
self.global_options = global_options if global_options else []
|
||||||
self.hash_options = hash_options if hash_options else {}
|
self.hash_options = hash_options if hash_options else {}
|
||||||
|
self.config_settings = config_settings
|
||||||
# Set to True after successful preparation of this requirement
|
# Set to True after successful preparation of this requirement
|
||||||
self.prepared = False
|
self.prepared = False
|
||||||
# User supplied requirements are explicitly requested for installation
|
# User supplied requirements are explicitly requested for installation
|
||||||
@@ -186,7 +169,7 @@ class InstallRequirement:
|
|||||||
self.requirements_to_check: List[str] = []
|
self.requirements_to_check: List[str] = []
|
||||||
|
|
||||||
# The PEP 517 backend we should use to build the project
|
# The PEP 517 backend we should use to build the project
|
||||||
self.pep517_backend: Optional[Pep517HookCaller] = None
|
self.pep517_backend: Optional[BuildBackendHookCaller] = None
|
||||||
|
|
||||||
# Are we using PEP 517 for this requirement?
|
# Are we using PEP 517 for this requirement?
|
||||||
# After pyproject.toml has been loaded, the only valid values are True
|
# After pyproject.toml has been loaded, the only valid values are True
|
||||||
@@ -202,36 +185,38 @@ class InstallRequirement:
|
|||||||
if self.req:
|
if self.req:
|
||||||
s = str(self.req)
|
s = str(self.req)
|
||||||
if self.link:
|
if self.link:
|
||||||
s += ' from {}'.format(redact_auth_from_url(self.link.url))
|
s += " from {}".format(redact_auth_from_url(self.link.url))
|
||||||
elif self.link:
|
elif self.link:
|
||||||
s = redact_auth_from_url(self.link.url)
|
s = redact_auth_from_url(self.link.url)
|
||||||
else:
|
else:
|
||||||
s = '<InstallRequirement>'
|
s = "<InstallRequirement>"
|
||||||
if self.satisfied_by is not None:
|
if self.satisfied_by is not None:
|
||||||
s += ' in {}'.format(display_path(self.satisfied_by.location))
|
if self.satisfied_by.location is not None:
|
||||||
|
location = display_path(self.satisfied_by.location)
|
||||||
|
else:
|
||||||
|
location = "<memory>"
|
||||||
|
s += f" in {location}"
|
||||||
if self.comes_from:
|
if self.comes_from:
|
||||||
if isinstance(self.comes_from, str):
|
if isinstance(self.comes_from, str):
|
||||||
comes_from: Optional[str] = self.comes_from
|
comes_from: Optional[str] = self.comes_from
|
||||||
else:
|
else:
|
||||||
comes_from = self.comes_from.from_path()
|
comes_from = self.comes_from.from_path()
|
||||||
if comes_from:
|
if comes_from:
|
||||||
s += f' (from {comes_from})'
|
s += f" (from {comes_from})"
|
||||||
return s
|
return s
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return '<{} object: {} editable={!r}>'.format(
|
return "<{} object: {} editable={!r}>".format(
|
||||||
self.__class__.__name__, str(self), self.editable)
|
self.__class__.__name__, str(self), self.editable
|
||||||
|
)
|
||||||
|
|
||||||
def format_debug(self) -> str:
|
def format_debug(self) -> str:
|
||||||
"""An un-tested helper for getting state, for debugging.
|
"""An un-tested helper for getting state, for debugging."""
|
||||||
"""
|
|
||||||
attributes = vars(self)
|
attributes = vars(self)
|
||||||
names = sorted(attributes)
|
names = sorted(attributes)
|
||||||
|
|
||||||
state = (
|
state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
|
||||||
"{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
|
return "<{name} object: {{{state}}}>".format(
|
||||||
)
|
|
||||||
return '<{name} object: {{{state}}}>'.format(
|
|
||||||
name=self.__class__.__name__,
|
name=self.__class__.__name__,
|
||||||
state=", ".join(state),
|
state=", ".join(state),
|
||||||
)
|
)
|
||||||
@@ -241,7 +226,19 @@ class InstallRequirement:
|
|||||||
def name(self) -> Optional[str]:
|
def name(self) -> Optional[str]:
|
||||||
if self.req is None:
|
if self.req is None:
|
||||||
return None
|
return None
|
||||||
return pkg_resources.safe_name(self.req.name)
|
return self.req.name
|
||||||
|
|
||||||
|
@functools.lru_cache() # use cached_property in python 3.8+
|
||||||
|
def supports_pyproject_editable(self) -> bool:
|
||||||
|
if not self.use_pep517:
|
||||||
|
return False
|
||||||
|
assert self.pep517_backend
|
||||||
|
with self.build_env:
|
||||||
|
runner = runner_with_spinner_message(
|
||||||
|
"Checking if build backend supports build_editable"
|
||||||
|
)
|
||||||
|
with self.pep517_backend.subprocess_runner(runner):
|
||||||
|
return "build_editable" in self.pep517_backend._supported_features()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def specifier(self) -> SpecifierSet:
|
def specifier(self) -> SpecifierSet:
|
||||||
@@ -254,18 +251,17 @@ class InstallRequirement:
|
|||||||
For example, some-package==1.2 is pinned; some-package>1.2 is not.
|
For example, some-package==1.2 is pinned; some-package>1.2 is not.
|
||||||
"""
|
"""
|
||||||
specifiers = self.specifier
|
specifiers = self.specifier
|
||||||
return (len(specifiers) == 1 and
|
return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
|
||||||
next(iter(specifiers)).operator in {'==', '==='})
|
|
||||||
|
|
||||||
def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
|
def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
|
||||||
if not extras_requested:
|
if not extras_requested:
|
||||||
# Provide an extra to safely evaluate the markers
|
# Provide an extra to safely evaluate the markers
|
||||||
# without matching any extra
|
# without matching any extra
|
||||||
extras_requested = ('',)
|
extras_requested = ("",)
|
||||||
if self.markers is not None:
|
if self.markers is not None:
|
||||||
return any(
|
return any(
|
||||||
self.markers.evaluate({'extra': extra})
|
self.markers.evaluate({"extra": extra}) for extra in extras_requested
|
||||||
for extra in extras_requested)
|
)
|
||||||
else:
|
else:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -295,14 +291,18 @@ class InstallRequirement:
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
good_hashes = self.hash_options.copy()
|
good_hashes = self.hash_options.copy()
|
||||||
link = self.link if trust_internet else self.original_link
|
if trust_internet:
|
||||||
|
link = self.link
|
||||||
|
elif self.original_link and self.user_supplied:
|
||||||
|
link = self.original_link
|
||||||
|
else:
|
||||||
|
link = None
|
||||||
if link and link.hash:
|
if link and link.hash:
|
||||||
good_hashes.setdefault(link.hash_name, []).append(link.hash)
|
good_hashes.setdefault(link.hash_name, []).append(link.hash)
|
||||||
return Hashes(good_hashes)
|
return Hashes(good_hashes)
|
||||||
|
|
||||||
def from_path(self) -> Optional[str]:
|
def from_path(self) -> Optional[str]:
|
||||||
"""Format a nice indicator to show where this "comes from"
|
"""Format a nice indicator to show where this "comes from" """
|
||||||
"""
|
|
||||||
if self.req is None:
|
if self.req is None:
|
||||||
return None
|
return None
|
||||||
s = str(self.req)
|
s = str(self.req)
|
||||||
@@ -312,7 +312,7 @@ class InstallRequirement:
|
|||||||
else:
|
else:
|
||||||
comes_from = self.comes_from.from_path()
|
comes_from = self.comes_from.from_path()
|
||||||
if comes_from:
|
if comes_from:
|
||||||
s += '->' + comes_from
|
s += "->" + comes_from
|
||||||
return s
|
return s
|
||||||
|
|
||||||
def ensure_build_location(
|
def ensure_build_location(
|
||||||
@@ -345,7 +345,7 @@ class InstallRequirement:
|
|||||||
# FIXME: Is there a better place to create the build_dir? (hg and bzr
|
# FIXME: Is there a better place to create the build_dir? (hg and bzr
|
||||||
# need this)
|
# need this)
|
||||||
if not os.path.exists(build_dir):
|
if not os.path.exists(build_dir):
|
||||||
logger.debug('Creating directory %s', build_dir)
|
logger.debug("Creating directory %s", build_dir)
|
||||||
os.makedirs(build_dir)
|
os.makedirs(build_dir)
|
||||||
actual_build_dir = os.path.join(build_dir, dir_name)
|
actual_build_dir = os.path.join(build_dir, dir_name)
|
||||||
# `None` indicates that we respect the globally-configured deletion
|
# `None` indicates that we respect the globally-configured deletion
|
||||||
@@ -359,8 +359,7 @@ class InstallRequirement:
|
|||||||
).path
|
).path
|
||||||
|
|
||||||
def _set_requirement(self) -> None:
|
def _set_requirement(self) -> None:
|
||||||
"""Set requirement after generating metadata.
|
"""Set requirement after generating metadata."""
|
||||||
"""
|
|
||||||
assert self.req is None
|
assert self.req is None
|
||||||
assert self.metadata is not None
|
assert self.metadata is not None
|
||||||
assert self.source_dir is not None
|
assert self.source_dir is not None
|
||||||
@@ -372,11 +371,13 @@ class InstallRequirement:
|
|||||||
op = "==="
|
op = "==="
|
||||||
|
|
||||||
self.req = Requirement(
|
self.req = Requirement(
|
||||||
"".join([
|
"".join(
|
||||||
|
[
|
||||||
self.metadata["Name"],
|
self.metadata["Name"],
|
||||||
op,
|
op,
|
||||||
self.metadata["Version"],
|
self.metadata["Version"],
|
||||||
])
|
]
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
def warn_on_mismatching_name(self) -> None:
|
def warn_on_mismatching_name(self) -> None:
|
||||||
@@ -387,10 +388,12 @@ class InstallRequirement:
|
|||||||
|
|
||||||
# If we're here, there's a mismatch. Log a warning about it.
|
# If we're here, there's a mismatch. Log a warning about it.
|
||||||
logger.warning(
|
logger.warning(
|
||||||
'Generating metadata for package %s '
|
"Generating metadata for package %s "
|
||||||
'produced metadata for project name %s. Fix your '
|
"produced metadata for project name %s. Fix your "
|
||||||
'#egg=%s fragments.',
|
"#egg=%s fragments.",
|
||||||
self.name, metadata_name, self.name
|
self.name,
|
||||||
|
metadata_name,
|
||||||
|
self.name,
|
||||||
)
|
)
|
||||||
self.req = Requirement(metadata_name)
|
self.req = Requirement(metadata_name)
|
||||||
|
|
||||||
@@ -401,30 +404,24 @@ class InstallRequirement:
|
|||||||
"""
|
"""
|
||||||
if self.req is None:
|
if self.req is None:
|
||||||
return
|
return
|
||||||
existing_dist = get_distribution(self.req.name)
|
existing_dist = get_default_environment().get_distribution(self.req.name)
|
||||||
if not existing_dist:
|
if not existing_dist:
|
||||||
return
|
return
|
||||||
|
|
||||||
# pkg_resouces may contain a different copy of packaging.version from
|
version_compatible = self.req.specifier.contains(
|
||||||
# pip in if the downstream distributor does a poor job debundling pip.
|
existing_dist.version,
|
||||||
# We avoid existing_dist.parsed_version and let SpecifierSet.contains
|
prereleases=True,
|
||||||
# parses the version instead.
|
|
||||||
existing_version = existing_dist.version
|
|
||||||
version_compatible = (
|
|
||||||
existing_version is not None and
|
|
||||||
self.req.specifier.contains(existing_version, prereleases=True)
|
|
||||||
)
|
)
|
||||||
if not version_compatible:
|
if not version_compatible:
|
||||||
self.satisfied_by = None
|
self.satisfied_by = None
|
||||||
if use_user_site:
|
if use_user_site:
|
||||||
if dist_in_usersite(existing_dist):
|
if existing_dist.in_usersite:
|
||||||
self.should_reinstall = True
|
self.should_reinstall = True
|
||||||
elif (running_under_virtualenv() and
|
elif running_under_virtualenv() and existing_dist.in_site_packages:
|
||||||
dist_in_site_packages(existing_dist)):
|
|
||||||
raise InstallationError(
|
raise InstallationError(
|
||||||
"Will not install to the user site because it will "
|
f"Will not install to the user site because it will "
|
||||||
"lack sys.path precedence to {} in {}".format(
|
f"lack sys.path precedence to {existing_dist.raw_name} "
|
||||||
existing_dist.project_name, existing_dist.location)
|
f"in {existing_dist.location}"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
self.should_reinstall = True
|
self.should_reinstall = True
|
||||||
@@ -444,20 +441,33 @@ class InstallRequirement:
|
|||||||
return False
|
return False
|
||||||
return self.link.is_wheel
|
return self.link.is_wheel
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_wheel_from_cache(self) -> bool:
|
||||||
|
# When True, it means that this InstallRequirement is a local wheel file in the
|
||||||
|
# cache of locally built wheels.
|
||||||
|
return self.cached_wheel_source_link is not None
|
||||||
|
|
||||||
# Things valid for sdists
|
# Things valid for sdists
|
||||||
@property
|
@property
|
||||||
def unpacked_source_directory(self) -> str:
|
def unpacked_source_directory(self) -> str:
|
||||||
return os.path.join(
|
return os.path.join(
|
||||||
self.source_dir,
|
self.source_dir, self.link and self.link.subdirectory_fragment or ""
|
||||||
self.link and self.link.subdirectory_fragment or '')
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def setup_py_path(self) -> str:
|
def setup_py_path(self) -> str:
|
||||||
assert self.source_dir, f"No source dir for {self}"
|
assert self.source_dir, f"No source dir for {self}"
|
||||||
setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')
|
setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
|
||||||
|
|
||||||
return setup_py
|
return setup_py
|
||||||
|
|
||||||
|
@property
|
||||||
|
def setup_cfg_path(self) -> str:
|
||||||
|
assert self.source_dir, f"No source dir for {self}"
|
||||||
|
setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
|
||||||
|
|
||||||
|
return setup_cfg
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def pyproject_toml_path(self) -> str:
|
def pyproject_toml_path(self) -> str:
|
||||||
assert self.source_dir, f"No source dir for {self}"
|
assert self.source_dir, f"No source dir for {self}"
|
||||||
@@ -472,13 +482,19 @@ class InstallRequirement:
         follow the PEP 517 or legacy (setup.py) code path.
         """
         pyproject_toml_data = load_pyproject_toml(
-            self.use_pep517,
-            self.pyproject_toml_path,
-            self.setup_py_path,
-            str(self)
+            self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
         )
 
         if pyproject_toml_data is None:
+            if self.config_settings:
+                deprecated(
+                    reason=f"Config settings are ignored for project {self}.",
+                    replacement=(
+                        "to use --use-pep517 or add a "
+                        "pyproject.toml file to the project"
+                    ),
+                    gone_in="23.3",
+                )
             self.use_pep517 = False
             return
 
@@ -486,46 +502,69 @@ class InstallRequirement:
         requires, backend, check, backend_path = pyproject_toml_data
         self.requirements_to_check = check
         self.pyproject_requires = requires
-        self.pep517_backend = Pep517HookCaller(
-            self.unpacked_source_directory, backend, backend_path=backend_path,
+        self.pep517_backend = ConfiguredBuildBackendHookCaller(
+            self,
+            self.unpacked_source_directory,
+            backend,
+            backend_path=backend_path,
         )
 
-    def _generate_metadata(self) -> str:
-        """Invokes metadata generator functions, with the required arguments.
+    def isolated_editable_sanity_check(self) -> None:
+        """Check that an editable requirement if valid for use with PEP 517/518.
+
+        This verifies that an editable that has a pyproject.toml either supports PEP 660
+        or as a setup.py or a setup.cfg
         """
-        if not self.use_pep517:
-            assert self.unpacked_source_directory
-
-            if not os.path.exists(self.setup_py_path):
-                raise InstallationError(
-                    f'File "setup.py" not found for legacy project {self}.'
-                )
-
-            return generate_metadata_legacy(
-                build_env=self.build_env,
-                setup_py_path=self.setup_py_path,
-                source_dir=self.unpacked_source_directory,
-                isolated=self.isolated,
-                details=self.name or f"from {self.link}"
-            )
-
-        assert self.pep517_backend is not None
-
-        return generate_metadata(
-            build_env=self.build_env,
-            backend=self.pep517_backend,
-        )
+        if (
+            self.editable
+            and self.use_pep517
+            and not self.supports_pyproject_editable()
+            and not os.path.isfile(self.setup_py_path)
+            and not os.path.isfile(self.setup_cfg_path)
+        ):
+            raise InstallationError(
+                f"Project {self} has a 'pyproject.toml' and its build "
+                f"backend is missing the 'build_editable' hook. Since it does not "
+                f"have a 'setup.py' nor a 'setup.cfg', "
+                f"it cannot be installed in editable mode. "
+                f"Consider using a build backend that supports PEP 660."
+            )
 
     def prepare_metadata(self) -> None:
         """Ensure that project metadata is available.
 
-        Under PEP 517, call the backend hook to prepare the metadata.
+        Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
         Under legacy processing, call setup.py egg-info.
         """
         assert self.source_dir
+        details = self.name or f"from {self.link}"
 
-        with indent_log():
-            self.metadata_directory = self._generate_metadata()
+        if self.use_pep517:
+            assert self.pep517_backend is not None
+            if (
+                self.editable
+                and self.permit_editable_wheels
+                and self.supports_pyproject_editable()
+            ):
+                self.metadata_directory = generate_editable_metadata(
+                    build_env=self.build_env,
+                    backend=self.pep517_backend,
+                    details=details,
+                )
+            else:
+                self.metadata_directory = generate_metadata(
+                    build_env=self.build_env,
+                    backend=self.pep517_backend,
+                    details=details,
+                )
+        else:
+            self.metadata_directory = generate_metadata_legacy(
+                build_env=self.build_env,
+                setup_py_path=self.setup_py_path,
+                source_dir=self.unpacked_source_directory,
+                isolated=self.isolated,
+                details=details,
+            )
 
         # Act on the newly generated metadata, based on the name and version.
         if not self.name:
@@ -537,26 +576,35 @@ class InstallRequirement:
 
     @property
     def metadata(self) -> Any:
-        if not hasattr(self, '_metadata'):
-            self._metadata = get_metadata(self.get_dist())
+        if not hasattr(self, "_metadata"):
+            self._metadata = self.get_dist().metadata
 
         return self._metadata
 
-    def get_dist(self) -> Distribution:
-        return _get_dist(self.metadata_directory)
+    def get_dist(self) -> BaseDistribution:
+        if self.metadata_directory:
+            return get_directory_distribution(self.metadata_directory)
+        elif self.local_file_path and self.is_wheel:
+            return get_wheel_distribution(
+                FilesystemWheel(self.local_file_path), canonicalize_name(self.name)
+            )
+        raise AssertionError(
+            f"InstallRequirement {self} has no metadata directory and no wheel: "
+            f"can't make a distribution."
+        )
 
     def assert_source_matches_version(self) -> None:
         assert self.source_dir
-        version = self.metadata['version']
+        version = self.metadata["version"]
         if self.req.specifier and version not in self.req.specifier:
             logger.warning(
-                'Requested %s, but installing version %s',
+                "Requested %s, but installing version %s",
                 self,
                 version,
             )
         else:
             logger.debug(
-                'Source in %s has version %s, which satisfies requirement %s',
+                "Source in %s has version %s, which satisfies requirement %s",
                 display_path(self.source_dir),
                 version,
                 self,
@@ -589,14 +637,13 @@ class InstallRequirement:
     def update_editable(self) -> None:
         if not self.link:
             logger.debug(
-                "Cannot update repository at %s; repository location is "
-                "unknown",
+                "Cannot update repository at %s; repository location is unknown",
                 self.source_dir,
             )
             return
         assert self.editable
         assert self.source_dir
-        if self.link.scheme == 'file':
+        if self.link.scheme == "file":
             # Static paths don't get updated
             return
         vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
@@ -604,7 +651,7 @@ class InstallRequirement:
         # So here, if it's neither a path nor a valid VCS URL, it's a bug.
         assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
         hidden_url = hide_url(self.link.url)
-        vcs_backend.obtain(self.source_dir, url=hidden_url)
+        vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
 
     # Top-level Actions
     def uninstall(
@@ -623,29 +670,28 @@ class InstallRequirement:
 
         """
         assert self.req
-        dist = get_distribution(self.req.name)
+        dist = get_default_environment().get_distribution(self.req.name)
         if not dist:
             logger.warning("Skipping %s as it is not installed.", self.name)
             return None
-        logger.info('Found existing installation: %s', dist)
+        logger.info("Found existing installation: %s", dist)
 
         uninstalled_pathset = UninstallPathSet.from_dist(dist)
         uninstalled_pathset.remove(auto_confirm, verbose)
         return uninstalled_pathset
 
     def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
-
         def _clean_zip_name(name: str, prefix: str) -> str:
-            assert name.startswith(prefix + os.path.sep), (
-                f"name {name!r} doesn't start with prefix {prefix!r}"
-            )
-            name = name[len(prefix) + 1:]
-            name = name.replace(os.path.sep, '/')
+            assert name.startswith(
+                prefix + os.path.sep
+            ), f"name {name!r} doesn't start with prefix {prefix!r}"
+            name = name[len(prefix) + 1 :]
+            name = name.replace(os.path.sep, "/")
             return name
 
         path = os.path.join(parentdir, path)
         name = _clean_zip_name(path, rootdir)
-        return self.name + '/' + name
+        return self.name + "/" + name
 
     def archive(self, build_dir: Optional[str]) -> None:
         """Saves archive to provided build_dir.
@@ -657,68 +703,72 @@ class InstallRequirement:
             return
 
         create_archive = True
-        archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
+        archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
         archive_path = os.path.join(build_dir, archive_name)
 
         if os.path.exists(archive_path):
             response = ask_path_exists(
-                'The file {} exists. (i)gnore, (w)ipe, '
-                '(b)ackup, (a)bort '.format(
-                    display_path(archive_path)),
-                ('i', 'w', 'b', 'a'))
-            if response == 'i':
+                "The file {} exists. (i)gnore, (w)ipe, "
+                "(b)ackup, (a)bort ".format(display_path(archive_path)),
+                ("i", "w", "b", "a"),
+            )
+            if response == "i":
                 create_archive = False
-            elif response == 'w':
-                logger.warning('Deleting %s', display_path(archive_path))
+            elif response == "w":
+                logger.warning("Deleting %s", display_path(archive_path))
                 os.remove(archive_path)
-            elif response == 'b':
+            elif response == "b":
                 dest_file = backup_dir(archive_path)
                 logger.warning(
-                    'Backing up %s to %s',
+                    "Backing up %s to %s",
                     display_path(archive_path),
                     display_path(dest_file),
                 )
                 shutil.move(archive_path, dest_file)
-            elif response == 'a':
+            elif response == "a":
                 sys.exit(-1)
 
         if not create_archive:
             return
 
         zip_output = zipfile.ZipFile(
-            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
+            archive_path,
+            "w",
+            zipfile.ZIP_DEFLATED,
+            allowZip64=True,
         )
         with zip_output:
-            dir = os.path.normcase(
-                os.path.abspath(self.unpacked_source_directory)
-            )
+            dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
             for dirpath, dirnames, filenames in os.walk(dir):
                 for dirname in dirnames:
                     dir_arcname = self._get_archive_name(
-                        dirname, parentdir=dirpath, rootdir=dir,
+                        dirname,
+                        parentdir=dirpath,
+                        rootdir=dir,
                     )
-                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
+                    zipdir = zipfile.ZipInfo(dir_arcname + "/")
                     zipdir.external_attr = 0x1ED << 16  # 0o755
-                    zip_output.writestr(zipdir, '')
+                    zip_output.writestr(zipdir, "")
                 for filename in filenames:
                     file_arcname = self._get_archive_name(
-                        filename, parentdir=dirpath, rootdir=dir,
+                        filename,
+                        parentdir=dirpath,
+                        rootdir=dir,
                     )
                     filename = os.path.join(dirpath, filename)
                     zip_output.write(filename, file_arcname)
 
-        logger.info('Saved %s', display_path(archive_path))
+        logger.info("Saved %s", display_path(archive_path))
 
     def install(
         self,
-        install_options: List[str],
         global_options: Optional[Sequence[str]] = None,
         root: Optional[str] = None,
         home: Optional[str] = None,
         prefix: Optional[str] = None,
         warn_script_location: bool = True,
         use_user_site: bool = False,
-        pycompile: bool = True
+        pycompile: bool = True,
     ) -> None:
         scheme = get_scheme(
             self.name,
@@ -729,11 +779,9 @@ class InstallRequirement:
             prefix=prefix,
         )
 
-        global_options = global_options if global_options is not None else []
-        if self.editable:
+        if self.editable and not self.is_wheel:
             install_editable_legacy(
-                install_options,
-                global_options,
+                global_options=global_options if global_options is not None else [],
                 prefix=prefix,
                 home=home,
                 use_user_site=use_user_site,
@@ -746,15 +794,9 @@ class InstallRequirement:
             self.install_succeeded = True
             return
 
-        if self.is_wheel:
-            assert self.local_file_path
-            direct_url = None
-            if self.original_link:
-                direct_url = direct_url_from_link(
-                    self.original_link,
-                    self.source_dir,
-                    self.original_link_is_in_wheel_cache,
-                )
-            install_wheel(
-                self.name,
-                self.local_file_path,
+        assert self.is_wheel
+        assert self.local_file_path
+
+        install_wheel(
+            self.name,
+            self.local_file_path,
@@ -762,63 +804,13 @@ class InstallRequirement:
             req_description=str(self.req),
             pycompile=pycompile,
             warn_script_location=warn_script_location,
-            direct_url=direct_url,
+            direct_url=self.download_info if self.original_link else None,
             requested=self.user_supplied,
         )
         self.install_succeeded = True
-        return
-
-        # TODO: Why don't we do this for editable installs?
-
-        # Extend the list of global and install options passed on to
-        # the setup.py call with the ones from the requirements file.
-        # Options specified in requirements file override those
-        # specified on the command line, since the last option given
-        # to setup.py is the one that is used.
-        global_options = list(global_options) + self.global_options
-        install_options = list(install_options) + self.install_options
-
-        try:
-            success = install_legacy(
-                install_options=install_options,
-                global_options=global_options,
-                root=root,
-                home=home,
-                prefix=prefix,
-                use_user_site=use_user_site,
-                pycompile=pycompile,
-                scheme=scheme,
-                setup_py_path=self.setup_py_path,
-                isolated=self.isolated,
-                req_name=self.name,
-                build_env=self.build_env,
-                unpacked_source_directory=self.unpacked_source_directory,
-                req_description=str(self.req),
-            )
-        except LegacyInstallFailure as exc:
-            self.install_succeeded = False
-            six.reraise(*exc.parent)
-        except Exception:
-            self.install_succeeded = True
-            raise
-
-        self.install_succeeded = success
-
-        if success and self.legacy_install_reason == 8368:
-            deprecated(
-                reason=(
-                    "{} was installed using the legacy 'setup.py install' "
-                    "method, because a wheel could not be built for it.".
-                    format(self.name)
-                ),
-                replacement="to fix the wheel build issue reported above",
-                gone_in=None,
-                issue=8368,
-            )
 
 
 def check_invalid_constraint_type(req: InstallRequirement) -> str:
 
     # Check for unsupported forms
     problem = ""
     if not req.name:
@@ -844,3 +836,32 @@ def check_invalid_constraint_type(req: InstallRequirement) -> str:
         )
 
     return problem
+
+
+def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool:
+    if getattr(options, option, None):
+        return True
+    for req in reqs:
+        if getattr(req, option, None):
+            return True
+    return False
+
+
+def check_legacy_setup_py_options(
+    options: Values,
+    reqs: List[InstallRequirement],
+) -> None:
+    has_build_options = _has_option(options, reqs, "build_options")
+    has_global_options = _has_option(options, reqs, "global_options")
+    if has_build_options or has_global_options:
+        deprecated(
+            reason="--build-option and --global-option are deprecated.",
+            issue=11859,
+            replacement="to use --config-settings",
+            gone_in="23.3",
+        )
+        logger.warning(
+            "Implying --no-binary=:all: due to the presence of "
+            "--build-option / --global-option. "
+        )
+        options.format_control.disallow_binaries()
@@ -1,22 +1,17 @@
 import logging
 from collections import OrderedDict
-from typing import Dict, Iterable, List, Optional, Tuple
+from typing import Dict, List
 
 from pip._vendor.packaging.utils import canonicalize_name
 
-from pip._internal.exceptions import InstallationError
-from pip._internal.models.wheel import Wheel
 from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils import compatibility_tags
 
 logger = logging.getLogger(__name__)
 
 
 class RequirementSet:
-
     def __init__(self, check_supported_wheels: bool = True) -> None:
-        """Create a RequirementSet.
-        """
+        """Create a RequirementSet."""
 
         self.requirements: Dict[str, InstallRequirement] = OrderedDict()
         self.check_supported_wheels = check_supported_wheels
@@ -28,7 +23,7 @@ class RequirementSet:
             (req for req in self.requirements.values() if not req.comes_from),
             key=lambda req: canonicalize_name(req.name or ""),
         )
-        return ' '.join(str(req.req) for req in requirements)
+        return " ".join(str(req.req) for req in requirements)
 
     def __repr__(self) -> str:
         requirements = sorted(
@@ -36,11 +31,11 @@ class RequirementSet:
             key=lambda req: canonicalize_name(req.name or ""),
         )
 
-        format_string = '<{classname} object; {count} requirement(s): {reqs}>'
+        format_string = "<{classname} object; {count} requirement(s): {reqs}>"
         return format_string.format(
             classname=self.__class__.__name__,
             count=len(requirements),
-            reqs=', '.join(str(req.req) for req in requirements),
+            reqs=", ".join(str(req.req) for req in requirements),
         )
 
     def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
@@ -53,128 +48,12 @@ class RequirementSet:
         project_name = canonicalize_name(install_req.name)
         self.requirements[project_name] = install_req
 
-    def add_requirement(
-        self,
-        install_req: InstallRequirement,
-        parent_req_name: Optional[str] = None,
-        extras_requested: Optional[Iterable[str]] = None
-    ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
-        """Add install_req as a requirement to install.
-
-        :param parent_req_name: The name of the requirement that needed this
-            added. The name is used because when multiple unnamed requirements
-            resolve to the same name, we could otherwise end up with dependency
-            links that point outside the Requirements set. parent_req must
-            already be added. Note that None implies that this is a user
-            supplied requirement, vs an inferred one.
-        :param extras_requested: an iterable of extras used to evaluate the
-            environment markers.
-        :return: Additional requirements to scan. That is either [] if
-            the requirement is not applicable, or [install_req] if the
-            requirement is applicable and has just been added.
-        """
-        # If the markers do not match, ignore this requirement.
-        if not install_req.match_markers(extras_requested):
-            logger.info(
-                "Ignoring %s: markers '%s' don't match your environment",
-                install_req.name, install_req.markers,
-            )
-            return [], None
-
-        # If the wheel is not supported, raise an error.
-        # Should check this after filtering out based on environment markers to
-        # allow specifying different wheels based on the environment/OS, in a
-        # single requirements file.
-        if install_req.link and install_req.link.is_wheel:
-            wheel = Wheel(install_req.link.filename)
-            tags = compatibility_tags.get_supported()
-            if (self.check_supported_wheels and not wheel.supported(tags)):
-                raise InstallationError(
-                    "{} is not a supported wheel on this platform.".format(
-                        wheel.filename)
-                )
-
-        # This next bit is really a sanity check.
-        assert not install_req.user_supplied or parent_req_name is None, (
-            "a user supplied req shouldn't have a parent"
-        )
-
-        # Unnamed requirements are scanned again and the requirement won't be
-        # added as a dependency until after scanning.
-        if not install_req.name:
-            self.add_unnamed_requirement(install_req)
-            return [install_req], None
-
-        try:
-            existing_req: Optional[InstallRequirement] = self.get_requirement(
-                install_req.name)
-        except KeyError:
-            existing_req = None
-
-        has_conflicting_requirement = (
-            parent_req_name is None and
-            existing_req and
-            not existing_req.constraint and
-            existing_req.extras == install_req.extras and
-            existing_req.req and
-            install_req.req and
-            existing_req.req.specifier != install_req.req.specifier
-        )
-        if has_conflicting_requirement:
-            raise InstallationError(
-                "Double requirement given: {} (already in {}, name={!r})"
-                .format(install_req, existing_req, install_req.name)
-            )
-
-        # When no existing requirement exists, add the requirement as a
-        # dependency and it will be scanned again after.
-        if not existing_req:
-            self.add_named_requirement(install_req)
-            # We'd want to rescan this requirement later
-            return [install_req], install_req
-
-        # Assume there's no need to scan, and that we've already
-        # encountered this for scanning.
-        if install_req.constraint or not existing_req.constraint:
-            return [], existing_req
-
-        does_not_satisfy_constraint = (
-            install_req.link and
-            not (
-                existing_req.link and
-                install_req.link.path == existing_req.link.path
-            )
-        )
-        if does_not_satisfy_constraint:
-            raise InstallationError(
-                "Could not satisfy constraints for '{}': "
-                "installation from path or url cannot be "
-                "constrained to a version".format(install_req.name)
-            )
-        # If we're now installing a constraint, mark the existing
-        # object for real installation.
-        existing_req.constraint = False
-        # If we're now installing a user supplied requirement,
-        # mark the existing object as such.
-        if install_req.user_supplied:
-            existing_req.user_supplied = True
-        existing_req.extras = tuple(sorted(
-            set(existing_req.extras) | set(install_req.extras)
-        ))
-        logger.debug(
-            "Setting %s extras to: %s",
-            existing_req, existing_req.extras,
-        )
-        # Return the existing requirement for addition to the parent and
-        # scanning again.
-        return [existing_req], existing_req
-
     def has_requirement(self, name: str) -> bool:
         project_name = canonicalize_name(name)
 
         return (
-            project_name in self.requirements and
-            not self.requirements[project_name].constraint
+            project_name in self.requirements
+            and not self.requirements[project_name].constraint
         )
 
     def get_requirement(self, name: str) -> InstallRequirement:
@@ -188,3 +67,16 @@ class RequirementSet:
     @property
     def all_requirements(self) -> List[InstallRequirement]:
         return self.unnamed_requirements + list(self.requirements.values())
+
+    @property
+    def requirements_to_install(self) -> List[InstallRequirement]:
+        """Return the list of requirements that need to be installed.
+
+        TODO remove this property together with the legacy resolver, since the new
+        resolver only returns requirements that need to be installed.
+        """
+        return [
+            install_req
+            for install_req in self.all_requirements
+            if not install_req.constraint and not install_req.satisfied_by
+        ]
@@ -1,130 +0,0 @@
-import contextlib
-import hashlib
-import logging
-import os
-from types import TracebackType
-from typing import Dict, Iterator, Optional, Set, Type, Union
-
-from pip._internal.models.link import Link
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.temp_dir import TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
-@contextlib.contextmanager
-def update_env_context_manager(**changes: str) -> Iterator[None]:
-    target = os.environ
-
-    # Save values from the target and change them.
-    non_existent_marker = object()
-    saved_values: Dict[str, Union[object, str]] = {}
-    for name, new_value in changes.items():
-        try:
-            saved_values[name] = target[name]
-        except KeyError:
-            saved_values[name] = non_existent_marker
-        target[name] = new_value
-
-    try:
-        yield
-    finally:
-        # Restore original values in the target.
-        for name, original_value in saved_values.items():
-            if original_value is non_existent_marker:
-                del target[name]
-            else:
-                assert isinstance(original_value, str)  # for mypy
-                target[name] = original_value
-
-
-@contextlib.contextmanager
-def get_requirement_tracker() -> Iterator["RequirementTracker"]:
-    root = os.environ.get('PIP_REQ_TRACKER')
-    with contextlib.ExitStack() as ctx:
-        if root is None:
-            root = ctx.enter_context(
-                TempDirectory(kind='req-tracker')
-            ).path
-            ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root))
-            logger.debug("Initialized build tracking at %s", root)
-
-        with RequirementTracker(root) as tracker:
-            yield tracker
-
-
-class RequirementTracker:
-
-    def __init__(self, root: str) -> None:
-        self._root = root
-        self._entries: Set[InstallRequirement] = set()
-        logger.debug("Created build tracker: %s", self._root)
-
-    def __enter__(self) -> "RequirementTracker":
-        logger.debug("Entered build tracker: %s", self._root)
-        return self
-
-    def __exit__(
-        self,
-        exc_type: Optional[Type[BaseException]],
-        exc_val: Optional[BaseException],
-        exc_tb: Optional[TracebackType]
-    ) -> None:
-        self.cleanup()
-
-    def _entry_path(self, link: Link) -> str:
-        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
-        return os.path.join(self._root, hashed)
-
-    def add(self, req: InstallRequirement) -> None:
-        """Add an InstallRequirement to build tracking.
-        """
-
-        assert req.link
-        # Get the file to write information about this requirement.
-        entry_path = self._entry_path(req.link)
-
-        # Try reading from the file. If it exists and can be read from, a build
-        # is already in progress, so a LookupError is raised.
-        try:
-            with open(entry_path) as fp:
-                contents = fp.read()
-        except FileNotFoundError:
-            pass
-        else:
-            message = '{} is already being built: {}'.format(
-                req.link, contents)
-            raise LookupError(message)
-
-        # If we're here, req should really not be building already.
-        assert req not in self._entries
-
-        # Start tracking this requirement.
-        with open(entry_path, 'w', encoding="utf-8") as fp:
-            fp.write(str(req))
-        self._entries.add(req)
-
-        logger.debug('Added %s to build tracker %r', req, self._root)
-
-    def remove(self, req: InstallRequirement) -> None:
-        """Remove an InstallRequirement from build tracking.
-        """
-
-        assert req.link
-        # Delete the created file and the corresponding entries.
-        os.unlink(self._entry_path(req.link))
-        self._entries.remove(req)
-
-        logger.debug('Removed %s from build tracker %r', req, self._root)
-
-    def cleanup(self) -> None:
-        for req in set(self._entries):
-            self.remove(req)
-
-        logger.debug("Removed build tracker: %r", self._root)
-
-    @contextlib.contextmanager
-    def track(self, req: InstallRequirement) -> Iterator[None]:
-        self.add(req)
-        yield
-        self.remove(req)
@@ -1,67 +1,58 @@
|
|||||||
import csv
|
|
||||||
import functools
|
import functools
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import sysconfig
|
import sysconfig
|
||||||
from importlib.util import cache_from_source
|
from importlib.util import cache_from_source
|
||||||
from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple
|
from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Set, Tuple
|
||||||
|
|
||||||
from pip._vendor import pkg_resources
|
|
||||||
from pip._vendor.pkg_resources import Distribution
|
|
||||||
|
|
||||||
from pip._internal.exceptions import UninstallationError
|
from pip._internal.exceptions import UninstallationError
|
||||||
from pip._internal.locations import get_bin_prefix, get_bin_user
|
from pip._internal.locations import get_bin_prefix, get_bin_user
|
||||||
|
from pip._internal.metadata import BaseDistribution
|
||||||
from pip._internal.utils.compat import WINDOWS
|
from pip._internal.utils.compat import WINDOWS
|
||||||
|
from pip._internal.utils.egg_link import egg_link_path_from_location
|
||||||
from pip._internal.utils.logging import getLogger, indent_log
|
from pip._internal.utils.logging import getLogger, indent_log
|
||||||
from pip._internal.utils.misc import (
|
from pip._internal.utils.misc import ask, normalize_path, renames, rmtree
|
||||||
ask,
|
|
||||||
dist_in_usersite,
|
|
||||||
dist_is_local,
|
|
||||||
egg_link_path,
|
|
||||||
is_local,
|
|
||||||
normalize_path,
|
|
||||||
renames,
|
|
||||||
rmtree,
|
|
||||||
)
|
|
||||||
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
|
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
|
||||||
|
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||||
|
|
||||||
logger = getLogger(__name__)
|
logger = getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
def _script_names(dist: Distribution, script_name: str, is_gui: bool) -> List[str]:
|
def _script_names(
|
||||||
|
bin_dir: str, script_name: str, is_gui: bool
|
||||||
|
) -> Generator[str, None, None]:
|
||||||
"""Create the fully qualified name of the files created by
|
"""Create the fully qualified name of the files created by
|
||||||
{console,gui}_scripts for the given ``dist``.
|
{console,gui}_scripts for the given ``dist``.
|
||||||
Returns the list of file names
|
Returns the list of file names
|
||||||
"""
|
"""
|
||||||
if dist_in_usersite(dist):
|
|
||||||
bin_dir = get_bin_user()
|
|
||||||
else:
|
|
||||||
bin_dir = get_bin_prefix()
|
|
||||||
exe_name = os.path.join(bin_dir, script_name)
|
exe_name = os.path.join(bin_dir, script_name)
|
||||||
paths_to_remove = [exe_name]
|
yield exe_name
|
||||||
if WINDOWS:
|
if not WINDOWS:
|
||||||
paths_to_remove.append(exe_name + '.exe')
|
return
|
||||||
paths_to_remove.append(exe_name + '.exe.manifest')
|
yield f"{exe_name}.exe"
|
||||||
|
yield f"{exe_name}.exe.manifest"
|
||||||
if is_gui:
|
if is_gui:
|
||||||
paths_to_remove.append(exe_name + '-script.pyw')
|
yield f"{exe_name}-script.pyw"
|
||||||
else:
|
else:
|
||||||
paths_to_remove.append(exe_name + '-script.py')
|
yield f"{exe_name}-script.py"
|
||||||
return paths_to_remove
|
|
||||||
|
|
||||||
|
|
||||||
def _unique(fn: Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]:
|
def _unique(
|
||||||
|
fn: Callable[..., Generator[Any, None, None]]
|
||||||
|
) -> Callable[..., Generator[Any, None, None]]:
|
||||||
@functools.wraps(fn)
|
@functools.wraps(fn)
|
||||||
def unique(*args: Any, **kw: Any) -> Iterator[Any]:
|
def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]:
|
||||||
seen: Set[Any] = set()
|
seen: Set[Any] = set()
|
||||||
for item in fn(*args, **kw):
|
for item in fn(*args, **kw):
|
||||||
if item not in seen:
|
if item not in seen:
|
||||||
seen.add(item)
|
seen.add(item)
|
||||||
yield item
|
yield item
|
||||||
|
|
||||||
return unique
|
return unique
|
||||||
|
|
||||||
|
|
||||||
@_unique
|
@_unique
|
||||||
def uninstallation_paths(dist: Distribution) -> Iterator[str]:
|
def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]:
|
||||||
"""
|
"""
|
||||||
Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
|
Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
|
||||||
|
|
||||||
@@ -75,30 +66,32 @@ def uninstallation_paths(dist: Distribution) -> Iterator[str]:
|
|||||||
|
|
||||||
https://packaging.python.org/specifications/recording-installed-packages/
|
https://packaging.python.org/specifications/recording-installed-packages/
|
||||||
"""
|
"""
|
||||||
try:
|
location = dist.location
|
||||||
r = csv.reader(dist.get_metadata_lines('RECORD'))
|
assert location is not None, "not installed"
|
||||||
except FileNotFoundError as missing_record_exception:
|
|
||||||
msg = 'Cannot uninstall {dist}, RECORD file not found.'.format(dist=dist)
|
entries = dist.iter_declared_entries()
|
||||||
try:
|
if entries is None:
|
||||||
installer = next(dist.get_metadata_lines('INSTALLER'))
|
msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
|
||||||
if not installer or installer == 'pip':
|
installer = dist.installer
|
||||||
raise ValueError()
|
if not installer or installer == "pip":
|
||||||
except (OSError, StopIteration, ValueError):
|
dep = "{}=={}".format(dist.raw_name, dist.version)
|
||||||
dep = '{}=={}'.format(dist.project_name, dist.version)
|
msg += (
|
||||||
msg += (" You might be able to recover from this via: "
|
" You might be able to recover from this via: "
|
||||||
"'pip install --force-reinstall --no-deps {}'.".format(dep))
|
"'pip install --force-reinstall --no-deps {}'.".format(dep)
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
msg += ' Hint: The package was installed by {}.'.format(installer)
|
msg += " Hint: The package was installed by {}.".format(installer)
|
||||||
raise UninstallationError(msg) from missing_record_exception
|
raise UninstallationError(msg)
|
||||||
for row in r:
|
|
||||||
path = os.path.join(dist.location, row[0])
|
for entry in entries:
|
||||||
|
path = os.path.join(location, entry)
|
||||||
yield path
|
yield path
|
||||||
if path.endswith('.py'):
|
if path.endswith(".py"):
|
||||||
dn, fn = os.path.split(path)
|
dn, fn = os.path.split(path)
|
||||||
base = fn[:-3]
|
base = fn[:-3]
|
||||||
path = os.path.join(dn, base + '.pyc')
|
path = os.path.join(dn, base + ".pyc")
|
||||||
yield path
|
yield path
|
||||||
path = os.path.join(dn, base + '.pyo')
|
path = os.path.join(dn, base + ".pyo")
|
||||||
yield path
|
yield path
|
||||||
|
|
||||||
|
|
||||||
@@ -112,8 +105,8 @@ def compact(paths: Iterable[str]) -> Set[str]:
|
|||||||
short_paths: Set[str] = set()
|
short_paths: Set[str] = set()
|
||||||
for path in sorted(paths, key=len):
|
for path in sorted(paths, key=len):
|
||||||
should_skip = any(
|
should_skip = any(
|
||||||
path.startswith(shortpath.rstrip("*")) and
|
path.startswith(shortpath.rstrip("*"))
|
||||||
path[len(shortpath.rstrip("*").rstrip(sep))] == sep
|
and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
|
||||||
for shortpath in short_paths
|
for shortpath in short_paths
|
||||||
)
|
)
|
||||||
if not should_skip:
|
if not should_skip:
|
||||||
@@ -136,18 +129,15 @@ def compress_for_rename(paths: Iterable[str]) -> Set[str]:
|
|||||||
return os.path.normcase(os.path.join(*a))
|
return os.path.normcase(os.path.join(*a))
|
||||||
|
|
||||||
for root in unchecked:
|
for root in unchecked:
|
||||||
if any(os.path.normcase(root).startswith(w)
|
if any(os.path.normcase(root).startswith(w) for w in wildcards):
|
||||||
for w in wildcards):
|
|
||||||
# This directory has already been handled.
|
# This directory has already been handled.
|
||||||
continue
|
continue
|
||||||
|
|
||||||
all_files: Set[str] = set()
|
all_files: Set[str] = set()
|
||||||
all_subdirs: Set[str] = set()
|
all_subdirs: Set[str] = set()
|
||||||
for dirname, subdirs, files in os.walk(root):
|
for dirname, subdirs, files in os.walk(root):
|
||||||
all_subdirs.update(norm_join(root, dirname, d)
|
all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
|
||||||
for d in subdirs)
|
all_files.update(norm_join(root, dirname, f) for f in files)
|
||||||
all_files.update(norm_join(root, dirname, f)
|
|
||||||
for f in files)
|
|
||||||
# If all the files we found are in our remaining set of files to
|
# If all the files we found are in our remaining set of files to
|
||||||
# remove, then remove them from the latter set and add a wildcard
|
# remove, then remove them from the latter set and add a wildcard
|
||||||
# for the directory.
|
# for the directory.
|
||||||
@@ -196,14 +186,14 @@ def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
file_ = os.path.join(dirpath, fname)
|
file_ = os.path.join(dirpath, fname)
|
||||||
if (os.path.isfile(file_) and
|
if (
|
||||||
os.path.normcase(file_) not in _normcased_files):
|
os.path.isfile(file_)
|
||||||
|
and os.path.normcase(file_) not in _normcased_files
|
||||||
|
):
|
||||||
# We are skipping this file. Add it to the set.
|
# We are skipping this file. Add it to the set.
|
||||||
will_skip.add(file_)
|
will_skip.add(file_)
|
||||||
|
|
||||||
will_remove = files | {
|
will_remove = files | {os.path.join(folder, "*") for folder in folders}
|
||||||
os.path.join(folder, "*") for folder in folders
|
|
||||||
}
|
|
||||||
|
|
||||||
return will_remove, will_skip
|
return will_remove, will_skip
|
||||||
|
|
||||||
@@ -211,6 +201,7 @@ def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str
 class StashedUninstallPathSet:
     """A set of file rename operations to stash files while
     tentatively uninstalling them."""
+
     def __init__(self) -> None:
         # Mapping from source file root to [Adjacent]TempDirectory
         # for files under that directory.
@@ -252,7 +243,7 @@ class StashedUninstallPathSet:
         else:
             # Did not find any suitable root
             head = os.path.dirname(path)
-            save_dir = TempDirectory(kind='uninstall')
+            save_dir = TempDirectory(kind="uninstall")
         self._save_dirs[head] = save_dir
 
         relpath = os.path.relpath(path, head)
@@ -271,7 +262,7 @@ class StashedUninstallPathSet:
         new_path = self._get_file_stash(path)
 
         self._moves.append((path, new_path))
-        if (path_is_dir and os.path.isdir(new_path)):
+        if path_is_dir and os.path.isdir(new_path):
             # If we're moving a directory, we need to
             # remove the destination first or else it will be
             # moved to inside the existing directory.
@@ -295,7 +286,7 @@ class StashedUninstallPathSet:
 
         for new_path, path in self._moves:
             try:
-                logger.debug('Replacing %s from %s', new_path, path)
+                logger.debug("Replacing %s from %s", new_path, path)
                 if os.path.isfile(new_path) or os.path.islink(new_path):
                     os.unlink(new_path)
                 elif os.path.isdir(new_path):
@@ -315,12 +306,17 @@ class StashedUninstallPathSet:
 class UninstallPathSet:
     """A set of file paths to be removed in the uninstallation of a
     requirement."""
-    def __init__(self, dist: Distribution) -> None:
-        self.paths: Set[str] = set()
+
+    def __init__(self, dist: BaseDistribution) -> None:
+        self._paths: Set[str] = set()
         self._refuse: Set[str] = set()
-        self.pth: Dict[str, UninstallPthEntries] = {}
-        self.dist = dist
+        self._pth: Dict[str, UninstallPthEntries] = {}
+        self._dist = dist
         self._moved_paths = StashedUninstallPathSet()
+        # Create local cache of normalize_path results. Creating an UninstallPathSet
+        # can result in hundreds/thousands of redundant calls to normalize_path with
+        # the same args, which hurts performance.
+        self._normalize_path_cached = functools.lru_cache()(normalize_path)
 
     def _permitted(self, path: str) -> bool:
         """
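The hunk above swaps repeated normalize_path() calls for a per-instance memoized wrapper built with functools.lru_cache(). A minimal standalone sketch of that pattern follows; slow_normalize and PathSet are invented names for illustration, not pip code:

    import functools
    import os.path

    def slow_normalize(path: str) -> str:
        # Stand-in for an expensive, pure path-normalization helper.
        return os.path.normcase(os.path.realpath(path))

    class PathSet:
        def __init__(self) -> None:
            # Each instance gets its own cache, freed when the instance goes away;
            # repeated calls with the same argument hit the cache instead of the OS.
            self._normalize_cached = functools.lru_cache()(slow_normalize)

        def add(self, path: str) -> str:
            return self._normalize_cached(path)

Wrapping the function at construction time (rather than decorating it at module level) keeps the cache scoped to one uninstall operation.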
@@ -328,70 +324,70 @@ class UninstallPathSet:
         remove/modify, False otherwise.
 
         """
-        return is_local(path)
+        # aka is_local, but caching normalized sys.prefix
+        if not running_under_virtualenv():
+            return True
+        return path.startswith(self._normalize_path_cached(sys.prefix))
 
     def add(self, path: str) -> None:
         head, tail = os.path.split(path)
 
         # we normalize the head to resolve parent directory symlinks, but not
         # the tail, since we only want to uninstall symlinks, not their targets
-        path = os.path.join(normalize_path(head), os.path.normcase(tail))
+        path = os.path.join(self._normalize_path_cached(head), os.path.normcase(tail))
 
         if not os.path.exists(path):
             return
         if self._permitted(path):
-            self.paths.add(path)
+            self._paths.add(path)
         else:
             self._refuse.add(path)
 
         # __pycache__ files can show up after 'installed-files.txt' is created,
         # due to imports
-        if os.path.splitext(path)[1] == '.py':
+        if os.path.splitext(path)[1] == ".py":
             self.add(cache_from_source(path))
 
     def add_pth(self, pth_file: str, entry: str) -> None:
-        pth_file = normalize_path(pth_file)
+        pth_file = self._normalize_path_cached(pth_file)
         if self._permitted(pth_file):
-            if pth_file not in self.pth:
-                self.pth[pth_file] = UninstallPthEntries(pth_file)
-            self.pth[pth_file].add(entry)
+            if pth_file not in self._pth:
+                self._pth[pth_file] = UninstallPthEntries(pth_file)
+            self._pth[pth_file].add(entry)
         else:
             self._refuse.add(pth_file)
 
     def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None:
-        """Remove paths in ``self.paths`` with confirmation (unless
+        """Remove paths in ``self._paths`` with confirmation (unless
         ``auto_confirm`` is True)."""
 
-        if not self.paths:
+        if not self._paths:
             logger.info(
                 "Can't uninstall '%s'. No files were found to uninstall.",
-                self.dist.project_name,
+                self._dist.raw_name,
             )
             return
 
-        dist_name_version = (
-            self.dist.project_name + "-" + self.dist.version
-        )
-        logger.info('Uninstalling %s:', dist_name_version)
+        dist_name_version = f"{self._dist.raw_name}-{self._dist.version}"
+        logger.info("Uninstalling %s:", dist_name_version)
 
         with indent_log():
             if auto_confirm or self._allowed_to_proceed(verbose):
                 moved = self._moved_paths
 
-                for_rename = compress_for_rename(self.paths)
+                for_rename = compress_for_rename(self._paths)
 
                 for path in sorted(compact(for_rename)):
                     moved.stash(path)
-                    logger.verbose('Removing file or directory %s', path)
+                    logger.verbose("Removing file or directory %s", path)
 
-                for pth in self.pth.values():
+                for pth in self._pth.values():
                     pth.remove()
 
-                logger.info('Successfully uninstalled %s', dist_name_version)
+                logger.info("Successfully uninstalled %s", dist_name_version)
 
     def _allowed_to_proceed(self, verbose: bool) -> bool:
-        """Display which files would be deleted and prompt for confirmation
-        """
+        """Display which files would be deleted and prompt for confirmation"""
 
         def _display(msg: str, paths: Iterable[str]) -> None:
             if not paths:
@@ -403,32 +399,32 @@ class UninstallPathSet:
                 logger.info(path)
 
         if not verbose:
-            will_remove, will_skip = compress_for_output_listing(self.paths)
+            will_remove, will_skip = compress_for_output_listing(self._paths)
         else:
             # In verbose mode, display all the files that are going to be
             # deleted.
-            will_remove = set(self.paths)
+            will_remove = set(self._paths)
             will_skip = set()
 
-        _display('Would remove:', will_remove)
-        _display('Would not remove (might be manually added):', will_skip)
-        _display('Would not remove (outside of prefix):', self._refuse)
+        _display("Would remove:", will_remove)
+        _display("Would not remove (might be manually added):", will_skip)
+        _display("Would not remove (outside of prefix):", self._refuse)
         if verbose:
-            _display('Will actually move:', compress_for_rename(self.paths))
+            _display("Will actually move:", compress_for_rename(self._paths))
 
-        return ask('Proceed (Y/n)? ', ('y', 'n', '')) != 'n'
+        return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n"
 
     def rollback(self) -> None:
         """Rollback the changes previously made by remove()."""
         if not self._moved_paths.can_rollback:
             logger.error(
                 "Can't roll back %s; was not uninstalled",
-                self.dist.project_name,
+                self._dist.raw_name,
             )
             return
-        logger.info('Rolling back uninstall of %s', self.dist.project_name)
+        logger.info("Rolling back uninstall of %s", self._dist.raw_name)
         self._moved_paths.rollback()
-        for pth in self.pth.values():
+        for pth in self._pth.values():
             pth.rollback()
 
     def commit(self) -> None:
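remove(), rollback() and commit() above form a two-phase delete: files are first stashed out of the way, then either restored or discarded. A rough sketch of that idea follows, with an invented FileStash class that makes no claim to match pip's real StashedUninstallPathSet:

    import os
    import shutil
    import tempfile
    from typing import List, Tuple

    class FileStash:
        """Move files aside so a failed operation can still be undone."""

        def __init__(self) -> None:
            self._save_dir = tempfile.mkdtemp(prefix="stash-")
            self._moves: List[Tuple[str, str]] = []

        def stash(self, path: str) -> None:
            new_path = os.path.join(self._save_dir, os.path.basename(path))
            shutil.move(path, new_path)           # take the file out of the way
            self._moves.append((path, new_path))  # remember how to put it back

        def rollback(self) -> None:
            for original, stashed in reversed(self._moves):
                shutil.move(stashed, original)    # restore in reverse order

        def commit(self) -> None:
            shutil.rmtree(self._save_dir)         # changes are final; drop the stash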
@@ -436,132 +432,161 @@ class UninstallPathSet:
|
|||||||
self._moved_paths.commit()
|
self._moved_paths.commit()
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_dist(cls, dist: Distribution) -> "UninstallPathSet":
|
def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet":
|
||||||
dist_path = normalize_path(dist.location)
|
dist_location = dist.location
|
||||||
if not dist_is_local(dist):
|
info_location = dist.info_location
|
||||||
|
if dist_location is None:
|
||||||
|
logger.info(
|
||||||
|
"Not uninstalling %s since it is not installed",
|
||||||
|
dist.canonical_name,
|
||||||
|
)
|
||||||
|
return cls(dist)
|
||||||
|
|
||||||
|
normalized_dist_location = normalize_path(dist_location)
|
||||||
|
if not dist.local:
|
||||||
logger.info(
|
logger.info(
|
||||||
"Not uninstalling %s at %s, outside environment %s",
|
"Not uninstalling %s at %s, outside environment %s",
|
||||||
dist.key,
|
dist.canonical_name,
|
||||||
dist_path,
|
normalized_dist_location,
|
||||||
sys.prefix,
|
sys.prefix,
|
||||||
)
|
)
|
||||||
return cls(dist)
|
return cls(dist)
|
||||||
|
|
||||||
if dist_path in {p for p in {sysconfig.get_path("stdlib"),
|
if normalized_dist_location in {
|
||||||
sysconfig.get_path("platstdlib")}
|
p
|
||||||
if p}:
|
for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")}
|
||||||
|
if p
|
||||||
|
}:
|
||||||
logger.info(
|
logger.info(
|
||||||
"Not uninstalling %s at %s, as it is in the standard library.",
|
"Not uninstalling %s at %s, as it is in the standard library.",
|
||||||
dist.key,
|
dist.canonical_name,
|
||||||
dist_path,
|
normalized_dist_location,
|
||||||
)
|
)
|
||||||
return cls(dist)
|
return cls(dist)
|
||||||
|
|
||||||
paths_to_remove = cls(dist)
|
paths_to_remove = cls(dist)
|
||||||
develop_egg_link = egg_link_path(dist)
|
develop_egg_link = egg_link_path_from_location(dist.raw_name)
|
||||||
develop_egg_link_egg_info = '{}.egg-info'.format(
|
|
||||||
pkg_resources.to_filename(dist.project_name))
|
# Distribution is installed with metadata in a "flat" .egg-info
|
||||||
egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
|
# directory. This means it is not a modern .dist-info installation, an
|
||||||
# Special case for distutils installed package
|
# egg, or legacy editable.
|
||||||
distutils_egg_info = getattr(dist._provider, 'path', None)
|
setuptools_flat_installation = (
|
||||||
|
dist.installed_with_setuptools_egg_info
|
||||||
|
and info_location is not None
|
||||||
|
and os.path.exists(info_location)
|
||||||
|
# If dist is editable and the location points to a ``.egg-info``,
|
||||||
|
# we are in fact in the legacy editable case.
|
||||||
|
and not info_location.endswith(f"{dist.setuptools_filename}.egg-info")
|
||||||
|
)
|
||||||
|
|
||||||
# Uninstall cases order do matter as in the case of 2 installs of the
|
# Uninstall cases order do matter as in the case of 2 installs of the
|
||||||
# same package, pip needs to uninstall the currently detected version
|
# same package, pip needs to uninstall the currently detected version
|
||||||
if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
|
if setuptools_flat_installation:
|
||||||
not dist.egg_info.endswith(develop_egg_link_egg_info)):
|
if info_location is not None:
|
||||||
# if dist.egg_info.endswith(develop_egg_link_egg_info), we
|
paths_to_remove.add(info_location)
|
||||||
# are in fact in the develop_egg_link case
|
installed_files = dist.iter_declared_entries()
|
||||||
paths_to_remove.add(dist.egg_info)
|
if installed_files is not None:
|
||||||
if dist.has_metadata('installed-files.txt'):
|
for installed_file in installed_files:
|
||||||
for installed_file in dist.get_metadata(
|
paths_to_remove.add(os.path.join(dist_location, installed_file))
|
||||||
'installed-files.txt').splitlines():
|
|
||||||
path = os.path.normpath(
|
|
||||||
os.path.join(dist.egg_info, installed_file)
|
|
||||||
)
|
|
||||||
paths_to_remove.add(path)
|
|
||||||
# FIXME: need a test for this elif block
|
# FIXME: need a test for this elif block
|
||||||
# occurs with --single-version-externally-managed/--record outside
|
# occurs with --single-version-externally-managed/--record outside
|
||||||
# of pip
|
# of pip
|
||||||
elif dist.has_metadata('top_level.txt'):
|
elif dist.is_file("top_level.txt"):
|
||||||
if dist.has_metadata('namespace_packages.txt'):
|
try:
|
||||||
namespaces = dist.get_metadata('namespace_packages.txt')
|
namespace_packages = dist.read_text("namespace_packages.txt")
|
||||||
else:
|
except FileNotFoundError:
|
||||||
namespaces = []
|
namespaces = []
|
||||||
|
else:
|
||||||
|
namespaces = namespace_packages.splitlines(keepends=False)
|
||||||
for top_level_pkg in [
|
for top_level_pkg in [
|
||||||
p for p
|
p
|
||||||
in dist.get_metadata('top_level.txt').splitlines()
|
for p in dist.read_text("top_level.txt").splitlines()
|
||||||
if p and p not in namespaces]:
|
if p and p not in namespaces
|
||||||
path = os.path.join(dist.location, top_level_pkg)
|
]:
|
||||||
|
path = os.path.join(dist_location, top_level_pkg)
|
||||||
paths_to_remove.add(path)
|
paths_to_remove.add(path)
|
||||||
paths_to_remove.add(path + '.py')
|
paths_to_remove.add(f"{path}.py")
|
||||||
paths_to_remove.add(path + '.pyc')
|
paths_to_remove.add(f"{path}.pyc")
|
||||||
paths_to_remove.add(path + '.pyo')
|
paths_to_remove.add(f"{path}.pyo")
|
||||||
|
|
||||||
elif distutils_egg_info:
|
elif dist.installed_by_distutils:
|
||||||
raise UninstallationError(
|
raise UninstallationError(
|
||||||
"Cannot uninstall {!r}. It is a distutils installed project "
|
"Cannot uninstall {!r}. It is a distutils installed project "
|
||||||
"and thus we cannot accurately determine which files belong "
|
"and thus we cannot accurately determine which files belong "
|
||||||
"to it which would lead to only a partial uninstall.".format(
|
"to it which would lead to only a partial uninstall.".format(
|
||||||
dist.project_name,
|
dist.raw_name,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
elif dist.location.endswith('.egg'):
|
elif dist.installed_as_egg:
|
||||||
# package installed by easy_install
|
# package installed by easy_install
|
||||||
# We cannot match on dist.egg_name because it can slightly vary
|
# We cannot match on dist.egg_name because it can slightly vary
|
||||||
# i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
|
# i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
|
||||||
paths_to_remove.add(dist.location)
|
paths_to_remove.add(dist_location)
|
||||||
easy_install_egg = os.path.split(dist.location)[1]
|
easy_install_egg = os.path.split(dist_location)[1]
|
||||||
easy_install_pth = os.path.join(os.path.dirname(dist.location),
|
easy_install_pth = os.path.join(
|
||||||
'easy-install.pth')
|
os.path.dirname(dist_location),
|
||||||
paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
|
"easy-install.pth",
|
||||||
|
)
|
||||||
|
paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
|
||||||
|
|
||||||
elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
|
elif dist.installed_with_dist_info:
|
||||||
for path in uninstallation_paths(dist):
|
for path in uninstallation_paths(dist):
|
||||||
paths_to_remove.add(path)
|
paths_to_remove.add(path)
|
||||||
|
|
||||||
elif develop_egg_link:
|
elif develop_egg_link:
|
||||||
# develop egg
|
# PEP 660 modern editable is handled in the ``.dist-info`` case
|
||||||
|
# above, so this only covers the setuptools-style editable.
|
||||||
with open(develop_egg_link) as fh:
|
with open(develop_egg_link) as fh:
|
||||||
link_pointer = os.path.normcase(fh.readline().strip())
|
link_pointer = os.path.normcase(fh.readline().strip())
|
||||||
assert (link_pointer == dist.location), (
|
normalized_link_pointer = paths_to_remove._normalize_path_cached(
|
||||||
'Egg-link {} does not match installed location of {} '
|
link_pointer
|
||||||
'(at {})'.format(
|
)
|
||||||
link_pointer, dist.project_name, dist.location)
|
assert os.path.samefile(
|
||||||
|
normalized_link_pointer, normalized_dist_location
|
||||||
|
), (
|
||||||
|
f"Egg-link {develop_egg_link} (to {link_pointer}) does not match "
|
||||||
|
f"installed location of {dist.raw_name} (at {dist_location})"
|
||||||
)
|
)
|
||||||
paths_to_remove.add(develop_egg_link)
|
paths_to_remove.add(develop_egg_link)
|
||||||
easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
|
easy_install_pth = os.path.join(
|
||||||
'easy-install.pth')
|
os.path.dirname(develop_egg_link), "easy-install.pth"
|
||||||
paths_to_remove.add_pth(easy_install_pth, dist.location)
|
)
|
||||||
|
paths_to_remove.add_pth(easy_install_pth, dist_location)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
logger.debug(
|
logger.debug(
|
||||||
'Not sure how to uninstall: %s - Check: %s',
|
"Not sure how to uninstall: %s - Check: %s",
|
||||||
dist, dist.location,
|
dist,
|
||||||
|
dist_location,
|
||||||
)
|
)
|
||||||
|
|
||||||
# find distutils scripts= scripts
|
if dist.in_usersite:
|
||||||
if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
|
|
||||||
for script in dist.metadata_listdir('scripts'):
|
|
||||||
if dist_in_usersite(dist):
|
|
||||||
bin_dir = get_bin_user()
|
bin_dir = get_bin_user()
|
||||||
else:
|
else:
|
||||||
bin_dir = get_bin_prefix()
|
bin_dir = get_bin_prefix()
|
||||||
|
|
||||||
|
# find distutils scripts= scripts
|
||||||
|
try:
|
||||||
|
for script in dist.iter_distutils_script_names():
|
||||||
paths_to_remove.add(os.path.join(bin_dir, script))
|
paths_to_remove.add(os.path.join(bin_dir, script))
|
||||||
if WINDOWS:
|
if WINDOWS:
|
||||||
paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
|
paths_to_remove.add(os.path.join(bin_dir, f"{script}.bat"))
|
||||||
|
except (FileNotFoundError, NotADirectoryError):
|
||||||
|
pass
|
||||||
|
|
||||||
# find console_scripts
|
# find console_scripts and gui_scripts
|
||||||
_scripts_to_remove = []
|
def iter_scripts_to_remove(
|
||||||
console_scripts = dist.get_entry_map(group='console_scripts')
|
dist: BaseDistribution,
|
||||||
for name in console_scripts.keys():
|
bin_dir: str,
|
||||||
_scripts_to_remove.extend(_script_names(dist, name, False))
|
) -> Generator[str, None, None]:
|
||||||
# find gui_scripts
|
for entry_point in dist.iter_entry_points():
|
||||||
gui_scripts = dist.get_entry_map(group='gui_scripts')
|
if entry_point.group == "console_scripts":
|
||||||
for name in gui_scripts.keys():
|
yield from _script_names(bin_dir, entry_point.name, False)
|
||||||
_scripts_to_remove.extend(_script_names(dist, name, True))
|
elif entry_point.group == "gui_scripts":
|
||||||
|
yield from _script_names(bin_dir, entry_point.name, True)
|
||||||
|
|
||||||
for s in _scripts_to_remove:
|
for s in iter_scripts_to_remove(dist, bin_dir):
|
||||||
paths_to_remove.add(s)
|
paths_to_remove.add(s)
|
||||||
|
|
||||||
return paths_to_remove
|
return paths_to_remove
|
||||||
@@ -585,45 +610,41 @@ class UninstallPthEntries:
         # have more than "\\sever\share". Valid examples: "\\server\share\" or
         # "\\server\share\folder".
         if WINDOWS and not os.path.splitdrive(entry)[0]:
-            entry = entry.replace('\\', '/')
+            entry = entry.replace("\\", "/")
         self.entries.add(entry)
 
     def remove(self) -> None:
-        logger.verbose('Removing pth entries from %s:', self.file)
+        logger.verbose("Removing pth entries from %s:", self.file)
 
         # If the file doesn't exist, log a warning and return
         if not os.path.isfile(self.file):
-            logger.warning(
-                "Cannot remove entries from nonexistent file %s", self.file
-            )
+            logger.warning("Cannot remove entries from nonexistent file %s", self.file)
             return
-        with open(self.file, 'rb') as fh:
+        with open(self.file, "rb") as fh:
             # windows uses '\r\n' with py3k, but uses '\n' with py2.x
             lines = fh.readlines()
             self._saved_lines = lines
-        if any(b'\r\n' in line for line in lines):
-            endline = '\r\n'
+        if any(b"\r\n" in line for line in lines):
+            endline = "\r\n"
         else:
-            endline = '\n'
+            endline = "\n"
         # handle missing trailing newline
         if lines and not lines[-1].endswith(endline.encode("utf-8")):
             lines[-1] = lines[-1] + endline.encode("utf-8")
         for entry in self.entries:
             try:
-                logger.verbose('Removing entry: %s', entry)
+                logger.verbose("Removing entry: %s", entry)
                 lines.remove((entry + endline).encode("utf-8"))
             except ValueError:
                 pass
-        with open(self.file, 'wb') as fh:
+        with open(self.file, "wb") as fh:
             fh.writelines(lines)
 
     def rollback(self) -> bool:
         if self._saved_lines is None:
-            logger.error(
-                'Cannot roll back changes to %s, none were made', self.file
-            )
+            logger.error("Cannot roll back changes to %s, none were made", self.file)
             return False
-        logger.debug('Rolling %s back to previous state', self.file)
-        with open(self.file, 'wb') as fh:
+        logger.debug("Rolling %s back to previous state", self.file)
+        with open(self.file, "wb") as fh:
             fh.writelines(self._saved_lines)
         return True
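The remove() hunk above is careful to keep whatever newline style an existing .pth file already uses. A self-contained sketch of the same trick (remove_pth_entry is a made-up helper for illustration, not part of pip's API):

    def remove_pth_entry(pth_path: str, entry: str) -> None:
        # Read as bytes so the existing newline style is preserved exactly.
        with open(pth_path, "rb") as fh:
            lines = fh.readlines()
        endline = "\r\n" if any(b"\r\n" in line for line in lines) else "\n"
        # Ensure the last line is terminated before comparing against entries.
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] += endline.encode("utf-8")
        try:
            lines.remove((entry + endline).encode("utf-8"))
        except ValueError:
            pass  # entry was not present; nothing to remove
        with open(pth_path, "wb") as fh:
            fh.writelines(lines)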
@@ -1,9 +1,11 @@
-from typing import Callable, List
+from typing import Callable, List, Optional
 
 from pip._internal.req.req_install import InstallRequirement
 from pip._internal.req.req_set import RequirementSet
 
-InstallRequirementProvider = Callable[[str, InstallRequirement], InstallRequirement]
+InstallRequirementProvider = Callable[
+    [str, Optional[InstallRequirement]], InstallRequirement
+]
 
 
 class BaseResolver:
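The widened alias now accepts None for the parent requirement. A hedged sketch of a callable satisfying it, using a minimal stand-in class rather than pip's real InstallRequirement:

    from typing import Callable, Optional

    class InstallRequirement:
        """Minimal stand-in for pip's InstallRequirement, for illustration only."""
        def __init__(self, spec: str, comes_from: Optional["InstallRequirement"]) -> None:
            self.spec = spec
            self.comes_from = comes_from

    # Mirrors the widened alias: the parent requirement may now be None.
    InstallRequirementProvider = Callable[[str, Optional[InstallRequirement]], InstallRequirement]

    def make_req(spec: str, parent: Optional[InstallRequirement]) -> InstallRequirement:
        # Root (user-supplied) requirements pass parent=None.
        return InstallRequirement(spec, parent)

    provider: InstallRequirementProvider = make_req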
@@ -20,7 +20,7 @@ from itertools import chain
 from typing import DefaultDict, Iterable, List, Optional, Set, Tuple
 
 from pip._vendor.packaging import specifiers
-from pip._vendor.pkg_resources import Distribution
+from pip._vendor.packaging.requirements import Requirement
 
 from pip._internal.cache import WheelCache
 from pip._internal.exceptions import (
@@ -28,10 +28,14 @@ from pip._internal.exceptions import (
     DistributionNotFound,
     HashError,
     HashErrors,
+    InstallationError,
+    NoneMetadataError,
     UnsupportedPythonVersion,
 )
 from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
 from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
 from pip._internal.operations.prepare import RequirementPreparer
 from pip._internal.req.req_install import (
     InstallRequirement,
@@ -39,10 +43,12 @@ from pip._internal.req.req_install import (
 )
 from pip._internal.req.req_set import RequirementSet
 from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
+from pip._internal.utils import compatibility_tags
 from pip._internal.utils.compatibility_tags import get_supported
+from pip._internal.utils.direct_url_helpers import direct_url_from_link
 from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import dist_in_usersite, normalize_version_info
-from pip._internal.utils.packaging import check_requires_python, get_requires_python
+from pip._internal.utils.misc import normalize_version_info
+from pip._internal.utils.packaging import check_requires_python
 
 logger = logging.getLogger(__name__)
 
@@ -50,7 +56,7 @@ DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]
 
 
 def _check_dist_requires_python(
-    dist: Distribution,
+    dist: BaseDistribution,
     version_info: Tuple[int, int, int],
     ignore_requires_python: bool = False,
 ) -> None:
@@ -66,14 +72,21 @@ def _check_dist_requires_python(
     :raises UnsupportedPythonVersion: When the given Python version isn't
         compatible.
     """
-    requires_python = get_requires_python(dist)
+    # This idiosyncratically converts the SpecifierSet to str and let
+    # check_requires_python then parse it again into SpecifierSet. But this
+    # is the legacy resolver so I'm just not going to bother refactoring.
+    try:
+        requires_python = str(dist.requires_python)
+    except FileNotFoundError as e:
+        raise NoneMetadataError(dist, str(e))
     try:
         is_compatible = check_requires_python(
-            requires_python, version_info=version_info
+            requires_python,
+            version_info=version_info,
         )
     except specifiers.InvalidSpecifier as exc:
         logger.warning(
-            "Package %r has an invalid Requires-Python: %s", dist.project_name, exc
+            "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc
         )
         return
 
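_check_dist_requires_python() ultimately reduces to a SpecifierSet membership test. A standalone sketch using the public packaging library (pip vendors its own copy; python_is_compatible is an invented helper name):

    from packaging.specifiers import InvalidSpecifier, SpecifierSet

    def python_is_compatible(requires_python: str, version_info: tuple) -> bool:
        """Return True if the running interpreter satisfies a Requires-Python string."""
        if not requires_python:
            return True  # no restriction declared
        try:
            spec = SpecifierSet(requires_python)
        except InvalidSpecifier:
            return True  # mirror the lenient warn-and-continue behaviour above
        version = ".".join(str(part) for part in version_info[:3])
        return version in spec

    # e.g. python_is_compatible(">=3.7,<4", (3, 10, 8)) -> True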
@@ -84,7 +97,7 @@ def _check_dist_requires_python(
     if ignore_requires_python:
         logger.debug(
             "Ignoring failed Requires-Python check for package %r: %s not in %r",
-            dist.project_name,
+            dist.raw_name,
             version,
             requires_python,
         )
@@ -92,7 +105,7 @@ def _check_dist_requires_python(
 
     raise UnsupportedPythonVersion(
         "Package {!r} requires a different Python: {} not in {!r}".format(
-            dist.project_name, version, requires_python
+            dist.raw_name, version, requires_python
         )
     )
 
@@ -159,7 +172,7 @@ class Resolver(BaseResolver):
         for req in root_reqs:
             if req.constraint:
                 check_invalid_constraint_type(req)
-            requirement_set.add_requirement(req)
+            self._add_requirement_to_set(requirement_set, req)
 
         # Actually prepare the files, and collect any exceptions. Most hash
         # exceptions cannot be checked ahead of time, because
@@ -179,6 +192,124 @@ class Resolver(BaseResolver):
|
|||||||
|
|
||||||
return requirement_set
|
return requirement_set
|
||||||
|
|
||||||
|
def _add_requirement_to_set(
|
||||||
|
self,
|
||||||
|
requirement_set: RequirementSet,
|
||||||
|
install_req: InstallRequirement,
|
||||||
|
parent_req_name: Optional[str] = None,
|
||||||
|
extras_requested: Optional[Iterable[str]] = None,
|
||||||
|
) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
|
||||||
|
"""Add install_req as a requirement to install.
|
||||||
|
|
||||||
|
:param parent_req_name: The name of the requirement that needed this
|
||||||
|
added. The name is used because when multiple unnamed requirements
|
||||||
|
resolve to the same name, we could otherwise end up with dependency
|
||||||
|
links that point outside the Requirements set. parent_req must
|
||||||
|
already be added. Note that None implies that this is a user
|
||||||
|
supplied requirement, vs an inferred one.
|
||||||
|
:param extras_requested: an iterable of extras used to evaluate the
|
||||||
|
environment markers.
|
||||||
|
:return: Additional requirements to scan. That is either [] if
|
||||||
|
the requirement is not applicable, or [install_req] if the
|
||||||
|
requirement is applicable and has just been added.
|
||||||
|
"""
|
||||||
|
# If the markers do not match, ignore this requirement.
|
||||||
|
if not install_req.match_markers(extras_requested):
|
||||||
|
logger.info(
|
||||||
|
"Ignoring %s: markers '%s' don't match your environment",
|
||||||
|
install_req.name,
|
||||||
|
install_req.markers,
|
||||||
|
)
|
||||||
|
return [], None
|
||||||
|
|
||||||
|
# If the wheel is not supported, raise an error.
|
||||||
|
# Should check this after filtering out based on environment markers to
|
||||||
|
# allow specifying different wheels based on the environment/OS, in a
|
||||||
|
# single requirements file.
|
||||||
|
if install_req.link and install_req.link.is_wheel:
|
||||||
|
wheel = Wheel(install_req.link.filename)
|
||||||
|
tags = compatibility_tags.get_supported()
|
||||||
|
if requirement_set.check_supported_wheels and not wheel.supported(tags):
|
||||||
|
raise InstallationError(
|
||||||
|
"{} is not a supported wheel on this platform.".format(
|
||||||
|
wheel.filename
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# This next bit is really a sanity check.
|
||||||
|
assert (
|
||||||
|
not install_req.user_supplied or parent_req_name is None
|
||||||
|
), "a user supplied req shouldn't have a parent"
|
||||||
|
|
||||||
|
# Unnamed requirements are scanned again and the requirement won't be
|
||||||
|
# added as a dependency until after scanning.
|
||||||
|
if not install_req.name:
|
||||||
|
requirement_set.add_unnamed_requirement(install_req)
|
||||||
|
return [install_req], None
|
||||||
|
|
||||||
|
try:
|
||||||
|
existing_req: Optional[
|
||||||
|
InstallRequirement
|
||||||
|
] = requirement_set.get_requirement(install_req.name)
|
||||||
|
except KeyError:
|
||||||
|
existing_req = None
|
||||||
|
|
||||||
|
has_conflicting_requirement = (
|
||||||
|
parent_req_name is None
|
||||||
|
and existing_req
|
||||||
|
and not existing_req.constraint
|
||||||
|
and existing_req.extras == install_req.extras
|
||||||
|
and existing_req.req
|
||||||
|
and install_req.req
|
||||||
|
and existing_req.req.specifier != install_req.req.specifier
|
||||||
|
)
|
||||||
|
if has_conflicting_requirement:
|
||||||
|
raise InstallationError(
|
||||||
|
"Double requirement given: {} (already in {}, name={!r})".format(
|
||||||
|
install_req, existing_req, install_req.name
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# When no existing requirement exists, add the requirement as a
|
||||||
|
# dependency and it will be scanned again after.
|
||||||
|
if not existing_req:
|
||||||
|
requirement_set.add_named_requirement(install_req)
|
||||||
|
# We'd want to rescan this requirement later
|
||||||
|
return [install_req], install_req
|
||||||
|
|
||||||
|
# Assume there's no need to scan, and that we've already
|
||||||
|
# encountered this for scanning.
|
||||||
|
if install_req.constraint or not existing_req.constraint:
|
||||||
|
return [], existing_req
|
||||||
|
|
||||||
|
does_not_satisfy_constraint = install_req.link and not (
|
||||||
|
existing_req.link and install_req.link.path == existing_req.link.path
|
||||||
|
)
|
||||||
|
if does_not_satisfy_constraint:
|
||||||
|
raise InstallationError(
|
||||||
|
"Could not satisfy constraints for '{}': "
|
||||||
|
"installation from path or url cannot be "
|
||||||
|
"constrained to a version".format(install_req.name)
|
||||||
|
)
|
||||||
|
# If we're now installing a constraint, mark the existing
|
||||||
|
# object for real installation.
|
||||||
|
existing_req.constraint = False
|
||||||
|
# If we're now installing a user supplied requirement,
|
||||||
|
# mark the existing object as such.
|
||||||
|
if install_req.user_supplied:
|
||||||
|
existing_req.user_supplied = True
|
||||||
|
existing_req.extras = tuple(
|
||||||
|
sorted(set(existing_req.extras) | set(install_req.extras))
|
||||||
|
)
|
||||||
|
logger.debug(
|
||||||
|
"Setting %s extras to: %s",
|
||||||
|
existing_req,
|
||||||
|
existing_req.extras,
|
||||||
|
)
|
||||||
|
# Return the existing requirement for addition to the parent and
|
||||||
|
# scanning again.
|
||||||
|
return [existing_req], existing_req
|
||||||
|
|
||||||
def _is_upgrade_allowed(self, req: InstallRequirement) -> bool:
|
def _is_upgrade_allowed(self, req: InstallRequirement) -> bool:
|
||||||
if self.upgrade_strategy == "to-satisfy-only":
|
if self.upgrade_strategy == "to-satisfy-only":
|
||||||
return False
|
return False
|
||||||
@@ -194,7 +325,7 @@ class Resolver(BaseResolver):
         """
         # Don't uninstall the conflict if doing a user install and the
         # conflict is not a user install.
-        if not self.use_user_site or dist_in_usersite(req.satisfied_by):
+        if not self.use_user_site or req.satisfied_by.in_usersite:
             req.should_reinstall = True
             req.satisfied_by = None
 
@@ -300,10 +431,18 @@ class Resolver(BaseResolver):
|
|||||||
if cache_entry is not None:
|
if cache_entry is not None:
|
||||||
logger.debug("Using cached wheel link: %s", cache_entry.link)
|
logger.debug("Using cached wheel link: %s", cache_entry.link)
|
||||||
if req.link is req.original_link and cache_entry.persistent:
|
if req.link is req.original_link and cache_entry.persistent:
|
||||||
req.original_link_is_in_wheel_cache = True
|
req.cached_wheel_source_link = req.link
|
||||||
|
if cache_entry.origin is not None:
|
||||||
|
req.download_info = cache_entry.origin
|
||||||
|
else:
|
||||||
|
# Legacy cache entry that does not have origin.json.
|
||||||
|
# download_info may miss the archive_info.hashes field.
|
||||||
|
req.download_info = direct_url_from_link(
|
||||||
|
req.link, link_is_in_wheel_cache=cache_entry.persistent
|
||||||
|
)
|
||||||
req.link = cache_entry.link
|
req.link = cache_entry.link
|
||||||
|
|
||||||
def _get_dist_for(self, req: InstallRequirement) -> Distribution:
|
def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
|
||||||
"""Takes a InstallRequirement and returns a single AbstractDist \
|
"""Takes a InstallRequirement and returns a single AbstractDist \
|
||||||
representing a prepared variant of the same.
|
representing a prepared variant of the same.
|
||||||
"""
|
"""
|
||||||
@@ -378,13 +517,14 @@ class Resolver(BaseResolver):
|
|||||||
|
|
||||||
more_reqs: List[InstallRequirement] = []
|
more_reqs: List[InstallRequirement] = []
|
||||||
|
|
||||||
def add_req(subreq: Distribution, extras_requested: Iterable[str]) -> None:
|
def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None:
|
||||||
sub_install_req = self._make_install_req(
|
# This idiosyncratically converts the Requirement to str and let
|
||||||
str(subreq),
|
# make_install_req then parse it again into Requirement. But this is
|
||||||
req_to_install,
|
# the legacy resolver so I'm just not going to bother refactoring.
|
||||||
)
|
sub_install_req = self._make_install_req(str(subreq), req_to_install)
|
||||||
parent_req_name = req_to_install.name
|
parent_req_name = req_to_install.name
|
||||||
to_scan_again, add_to_parent = requirement_set.add_requirement(
|
to_scan_again, add_to_parent = self._add_requirement_to_set(
|
||||||
|
requirement_set,
|
||||||
sub_install_req,
|
sub_install_req,
|
||||||
parent_req_name=parent_req_name,
|
parent_req_name=parent_req_name,
|
||||||
extras_requested=extras_requested,
|
extras_requested=extras_requested,
|
||||||
@@ -401,7 +541,9 @@ class Resolver(BaseResolver):
|
|||||||
# 'unnamed' requirements can only come from being directly
|
# 'unnamed' requirements can only come from being directly
|
||||||
# provided by the user.
|
# provided by the user.
|
||||||
assert req_to_install.user_supplied
|
assert req_to_install.user_supplied
|
||||||
requirement_set.add_requirement(req_to_install, parent_req_name=None)
|
self._add_requirement_to_set(
|
||||||
|
requirement_set, req_to_install, parent_req_name=None
|
||||||
|
)
|
||||||
|
|
||||||
if not self.ignore_dependencies:
|
if not self.ignore_dependencies:
|
||||||
if req_to_install.extras:
|
if req_to_install.extras:
|
||||||
@@ -410,15 +552,20 @@ class Resolver(BaseResolver):
|
|||||||
",".join(req_to_install.extras),
|
",".join(req_to_install.extras),
|
||||||
)
|
)
|
||||||
missing_requested = sorted(
|
missing_requested = sorted(
|
||||||
set(req_to_install.extras) - set(dist.extras)
|
set(req_to_install.extras) - set(dist.iter_provided_extras())
|
||||||
)
|
)
|
||||||
for missing in missing_requested:
|
for missing in missing_requested:
|
||||||
logger.warning("%s does not provide the extra '%s'", dist, missing)
|
logger.warning(
|
||||||
|
"%s %s does not provide the extra '%s'",
|
||||||
|
dist.raw_name,
|
||||||
|
dist.version,
|
||||||
|
missing,
|
||||||
|
)
|
||||||
|
|
||||||
available_requested = sorted(
|
available_requested = sorted(
|
||||||
set(dist.extras) & set(req_to_install.extras)
|
set(dist.iter_provided_extras()) & set(req_to_install.extras)
|
||||||
)
|
)
|
||||||
for subreq in dist.requires(available_requested):
|
for subreq in dist.iter_dependencies(available_requested):
|
||||||
add_req(subreq, extras_requested=available_requested)
|
add_req(subreq, extras_requested=available_requested)
|
||||||
|
|
||||||
return more_reqs
|
return more_reqs
|
||||||
|
|||||||
@@ -36,11 +36,8 @@ class Constraint:
         links = frozenset([ireq.link]) if ireq.link else frozenset()
         return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)
 
-    def __nonzero__(self) -> bool:
-        return bool(self.specifier) or bool(self.hashes) or bool(self.links)
-
     def __bool__(self) -> bool:
-        return self.__nonzero__()
+        return bool(self.specifier) or bool(self.hashes) or bool(self.links)
 
     def __and__(self, other: InstallRequirement) -> "Constraint":
         if not isinstance(other, InstallRequirement):
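The Constraint hunk above drops the Python 2-era __nonzero__ shim and keeps only __bool__. A small sketch of why that is enough, using a simplified Constraint rather than the real one:

    class Constraint:
        def __init__(self, specifier: str = "", links: frozenset = frozenset()) -> None:
            self.specifier = specifier
            self.links = links

        def __bool__(self) -> bool:
            # Truthy only when the constraint actually restricts something.
            return bool(self.specifier) or bool(self.links)

    empty = Constraint()
    pinned = Constraint(specifier="==1.2.3")
    assert not empty   # falls through `if constraint:` checks
    assert pinned

On Python 3, __bool__ is the only hook the interpreter consults, so the extra indirection added nothing.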
|
|||||||
@@ -2,13 +2,15 @@ import logging
|
|||||||
import sys
|
import sys
|
||||||
from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast
|
from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast
|
||||||
|
|
||||||
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
|
|
||||||
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
||||||
from pip._vendor.packaging.version import Version
|
from pip._vendor.packaging.version import Version
|
||||||
from pip._vendor.packaging.version import parse as parse_version
|
|
||||||
from pip._vendor.pkg_resources import Distribution
|
|
||||||
|
|
||||||
from pip._internal.exceptions import HashError, MetadataInconsistent
|
from pip._internal.exceptions import (
|
||||||
|
HashError,
|
||||||
|
InstallationSubprocessError,
|
||||||
|
MetadataInconsistent,
|
||||||
|
)
|
||||||
|
from pip._internal.metadata import BaseDistribution
|
||||||
from pip._internal.models.link import Link, links_equivalent
|
from pip._internal.models.link import Link, links_equivalent
|
||||||
from pip._internal.models.wheel import Wheel
|
from pip._internal.models.wheel import Wheel
|
||||||
from pip._internal.req.constructors import (
|
from pip._internal.req.constructors import (
|
||||||
@@ -16,8 +18,8 @@ from pip._internal.req.constructors import (
|
|||||||
install_req_from_line,
|
install_req_from_line,
|
||||||
)
|
)
|
||||||
from pip._internal.req.req_install import InstallRequirement
|
from pip._internal.req.req_install import InstallRequirement
|
||||||
from pip._internal.utils.misc import dist_is_editable, normalize_version_info
|
from pip._internal.utils.direct_url_helpers import direct_url_from_link
|
||||||
from pip._internal.utils.packaging import get_requires_python
|
from pip._internal.utils.misc import normalize_version_info
|
||||||
|
|
||||||
from .base import Candidate, CandidateVersion, Requirement, format_name
|
from .base import Candidate, CandidateVersion, Requirement, format_name
|
||||||
|
|
||||||
@@ -63,14 +65,13 @@ def make_install_req_from_link(
|
|||||||
use_pep517=template.use_pep517,
|
use_pep517=template.use_pep517,
|
||||||
isolated=template.isolated,
|
isolated=template.isolated,
|
||||||
constraint=template.constraint,
|
constraint=template.constraint,
|
||||||
options=dict(
|
|
||||||
install_options=template.install_options,
|
|
||||||
global_options=template.global_options,
|
global_options=template.global_options,
|
||||||
hashes=template.hash_options,
|
hash_options=template.hash_options,
|
||||||
),
|
config_settings=template.config_settings,
|
||||||
)
|
)
|
||||||
ireq.original_link = template.original_link
|
ireq.original_link = template.original_link
|
||||||
ireq.link = link
|
ireq.link = link
|
||||||
|
ireq.extras = template.extras
|
||||||
return ireq
|
return ireq
|
||||||
|
|
||||||
|
|
||||||
@@ -78,31 +79,31 @@ def make_install_req_from_editable(
|
|||||||
link: Link, template: InstallRequirement
|
link: Link, template: InstallRequirement
|
||||||
) -> InstallRequirement:
|
) -> InstallRequirement:
|
||||||
assert template.editable, "template not editable"
|
assert template.editable, "template not editable"
|
||||||
return install_req_from_editable(
|
ireq = install_req_from_editable(
|
||||||
link.url,
|
link.url,
|
||||||
user_supplied=template.user_supplied,
|
user_supplied=template.user_supplied,
|
||||||
comes_from=template.comes_from,
|
comes_from=template.comes_from,
|
||||||
use_pep517=template.use_pep517,
|
use_pep517=template.use_pep517,
|
||||||
isolated=template.isolated,
|
isolated=template.isolated,
|
||||||
constraint=template.constraint,
|
constraint=template.constraint,
|
||||||
options=dict(
|
permit_editable_wheels=template.permit_editable_wheels,
|
||||||
install_options=template.install_options,
|
|
||||||
global_options=template.global_options,
|
global_options=template.global_options,
|
||||||
hashes=template.hash_options,
|
hash_options=template.hash_options,
|
||||||
),
|
config_settings=template.config_settings,
|
||||||
)
|
)
|
||||||
|
ireq.extras = template.extras
|
||||||
|
return ireq
|
||||||
|
|
||||||
|
|
||||||
def make_install_req_from_dist(
|
def _make_install_req_from_dist(
|
||||||
dist: Distribution, template: InstallRequirement
|
dist: BaseDistribution, template: InstallRequirement
|
||||||
) -> InstallRequirement:
|
) -> InstallRequirement:
|
||||||
project_name = canonicalize_name(dist.project_name)
|
|
||||||
if template.req:
|
if template.req:
|
||||||
line = str(template.req)
|
line = str(template.req)
|
||||||
elif template.link:
|
elif template.link:
|
||||||
line = f"{project_name} @ {template.link.url}"
|
line = f"{dist.canonical_name} @ {template.link.url}"
|
||||||
else:
|
else:
|
||||||
line = f"{project_name}=={dist.parsed_version}"
|
line = f"{dist.canonical_name}=={dist.version}"
|
||||||
ireq = install_req_from_line(
|
ireq = install_req_from_line(
|
||||||
line,
|
line,
|
||||||
user_supplied=template.user_supplied,
|
user_supplied=template.user_supplied,
|
||||||
@@ -110,11 +111,9 @@ def make_install_req_from_dist(
|
|||||||
use_pep517=template.use_pep517,
|
use_pep517=template.use_pep517,
|
||||||
isolated=template.isolated,
|
isolated=template.isolated,
|
||||||
constraint=template.constraint,
|
constraint=template.constraint,
|
||||||
options=dict(
|
|
||||||
install_options=template.install_options,
|
|
||||||
global_options=template.global_options,
|
global_options=template.global_options,
|
||||||
hashes=template.hash_options,
|
hash_options=template.hash_options,
|
||||||
),
|
config_settings=template.config_settings,
|
||||||
)
|
)
|
||||||
ireq.satisfied_by = dist
|
ireq.satisfied_by = dist
|
||||||
return ireq
|
return ireq
|
||||||
@@ -136,6 +135,7 @@ class _InstallRequirementBackedCandidate(Candidate):
|
|||||||
found remote link (e.g. from pypi.org).
|
found remote link (e.g. from pypi.org).
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
dist: BaseDistribution
|
||||||
is_installed = False
|
is_installed = False
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@@ -180,7 +180,7 @@ class _InstallRequirementBackedCandidate(Candidate):
|
|||||||
def project_name(self) -> NormalizedName:
|
def project_name(self) -> NormalizedName:
|
||||||
"""The normalised name of the project the candidate refers to"""
|
"""The normalised name of the project the candidate refers to"""
|
||||||
if self._name is None:
|
if self._name is None:
|
||||||
self._name = canonicalize_name(self.dist.project_name)
|
self._name = self.dist.canonical_name
|
||||||
return self._name
|
return self._name
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -190,7 +190,7 @@ class _InstallRequirementBackedCandidate(Candidate):
|
|||||||
@property
|
@property
|
||||||
def version(self) -> CandidateVersion:
|
def version(self) -> CandidateVersion:
|
||||||
if self._version is None:
|
if self._version is None:
|
||||||
self._version = parse_version(self.dist.version)
|
self._version = self.dist.version
|
||||||
return self._version
|
return self._version
|
||||||
|
|
||||||
def format_for_error(self) -> str:
|
def format_for_error(self) -> str:
|
||||||
@@ -200,29 +200,27 @@ class _InstallRequirementBackedCandidate(Candidate):
|
|||||||
self._link.file_path if self._link.is_file else self._link,
|
self._link.file_path if self._link.is_file else self._link,
|
||||||
)
|
)
|
||||||
|
|
||||||
def _prepare_distribution(self) -> Distribution:
|
def _prepare_distribution(self) -> BaseDistribution:
|
||||||
raise NotImplementedError("Override in subclass")
|
raise NotImplementedError("Override in subclass")
|
||||||
|
|
||||||
def _check_metadata_consistency(self, dist: Distribution) -> None:
|
def _check_metadata_consistency(self, dist: BaseDistribution) -> None:
|
||||||
"""Check for consistency of project name and version of dist."""
|
"""Check for consistency of project name and version of dist."""
|
||||||
canonical_name = canonicalize_name(dist.project_name)
|
if self._name is not None and self._name != dist.canonical_name:
|
||||||
if self._name is not None and self._name != canonical_name:
|
|
||||||
raise MetadataInconsistent(
|
raise MetadataInconsistent(
|
||||||
self._ireq,
|
self._ireq,
|
||||||
"name",
|
"name",
|
||||||
self._name,
|
self._name,
|
||||||
dist.project_name,
|
dist.canonical_name,
|
||||||
)
|
)
|
||||||
parsed_version = parse_version(dist.version)
|
if self._version is not None and self._version != dist.version:
|
||||||
if self._version is not None and self._version != parsed_version:
|
|
||||||
raise MetadataInconsistent(
|
raise MetadataInconsistent(
|
||||||
self._ireq,
|
self._ireq,
|
||||||
"version",
|
"version",
|
||||||
str(self._version),
|
str(self._version),
|
||||||
dist.version,
|
str(dist.version),
|
||||||
)
|
)
|
||||||
|
|
||||||
def _prepare(self) -> Distribution:
|
def _prepare(self) -> BaseDistribution:
|
||||||
try:
|
try:
|
||||||
dist = self._prepare_distribution()
|
dist = self._prepare_distribution()
|
||||||
except HashError as e:
|
except HashError as e:
|
||||||
@@ -231,26 +229,19 @@ class _InstallRequirementBackedCandidate(Candidate):
|
|||||||
# offending line to the user.
|
# offending line to the user.
|
||||||
e.req = self._ireq
|
e.req = self._ireq
|
||||||
raise
|
raise
|
||||||
|
except InstallationSubprocessError as exc:
|
||||||
|
# The output has been presented already, so don't duplicate it.
|
||||||
|
exc.context = "See above for output."
|
||||||
|
raise
|
||||||
|
|
||||||
self._check_metadata_consistency(dist)
|
self._check_metadata_consistency(dist)
|
||||||
return dist
|
return dist
|
||||||
|
|
||||||
def _get_requires_python_dependency(self) -> Optional[Requirement]:
|
|
||||||
requires_python = get_requires_python(self.dist)
|
|
||||||
if requires_python is None:
|
|
||||||
return None
|
|
||||||
try:
|
|
||||||
spec = SpecifierSet(requires_python)
|
|
||||||
except InvalidSpecifier as e:
|
|
||||||
message = "Package %r has an invalid Requires-Python: %s"
|
|
||||||
logger.warning(message, self.name, e)
|
|
||||||
return None
|
|
||||||
return self._factory.make_requires_python_requirement(spec)
|
|
||||||
|
|
||||||
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
|
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
|
||||||
-        requires = self.dist.requires() if with_requires else ()
+        requires = self.dist.iter_dependencies() if with_requires else ()
         for r in requires:
             yield self._factory.make_requirement_from_spec(str(r), self._ireq)
-        yield self._get_requires_python_dependency()
+        yield self._factory.make_requires_python_requirement(self.dist.requires_python)

     def get_install_requirement(self) -> Optional[InstallRequirement]:
         return self._ireq
@@ -268,7 +259,7 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
         version: Optional[CandidateVersion] = None,
     ) -> None:
         source_link = link
-        cache_entry = factory.get_wheel_cache_entry(link, name)
+        cache_entry = factory.get_wheel_cache_entry(source_link, name)
         if cache_entry is not None:
             logger.debug("Using cached wheel link: %s", cache_entry.link)
             link = cache_entry.link
@@ -285,12 +276,19 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
                     version, wheel_version, name
                 )

-        if (
-            cache_entry is not None
-            and cache_entry.persistent
-            and template.link is template.original_link
-        ):
-            ireq.original_link_is_in_wheel_cache = True
+        if cache_entry is not None:
+            assert ireq.link.is_wheel
+            assert ireq.link.is_file
+            if cache_entry.persistent and template.link is template.original_link:
+                ireq.cached_wheel_source_link = source_link
+            if cache_entry.origin is not None:
+                ireq.download_info = cache_entry.origin
+            else:
+                # Legacy cache entry that does not have origin.json.
+                # download_info may miss the archive_info.hashes field.
+                ireq.download_info = direct_url_from_link(
+                    source_link, link_is_in_wheel_cache=cache_entry.persistent
+                )

         super().__init__(
             link=link,
@@ -301,10 +299,9 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
             version=version,
         )

-    def _prepare_distribution(self) -> Distribution:
-        return self._factory.preparer.prepare_linked_requirement(
-            self._ireq, parallel_builds=True
-        )
+    def _prepare_distribution(self) -> BaseDistribution:
+        preparer = self._factory.preparer
+        return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True)


 class EditableCandidate(_InstallRequirementBackedCandidate):
@@ -327,7 +324,7 @@ class EditableCandidate(_InstallRequirementBackedCandidate):
             version=version,
         )

-    def _prepare_distribution(self) -> Distribution:
+    def _prepare_distribution(self) -> BaseDistribution:
         return self._factory.preparer.prepare_editable_requirement(self._ireq)


@@ -337,17 +334,17 @@ class AlreadyInstalledCandidate(Candidate):

     def __init__(
         self,
-        dist: Distribution,
+        dist: BaseDistribution,
         template: InstallRequirement,
         factory: "Factory",
     ) -> None:
         self.dist = dist
-        self._ireq = make_install_req_from_dist(dist, template)
+        self._ireq = _make_install_req_from_dist(dist, template)
         self._factory = factory

         # This is just logging some messages, so we can do it eagerly.
         # The returned dist would be exactly the same as self.dist because we
-        # set satisfied_by in make_install_req_from_dist.
+        # set satisfied_by in _make_install_req_from_dist.
         # TODO: Supply reason based on force_reinstall and upgrade_strategy.
         skip_reason = "already satisfied"
         factory.preparer.prepare_installed_requirement(self._ireq, skip_reason)
@@ -371,7 +368,7 @@ class AlreadyInstalledCandidate(Candidate):

     @property
     def project_name(self) -> NormalizedName:
-        return canonicalize_name(self.dist.project_name)
+        return self.dist.canonical_name

     @property
     def name(self) -> str:
@@ -379,11 +376,11 @@ class AlreadyInstalledCandidate(Candidate):

     @property
     def version(self) -> CandidateVersion:
-        return parse_version(self.dist.version)
+        return self.dist.version

     @property
     def is_editable(self) -> bool:
-        return dist_is_editable(self.dist)
+        return self.dist.editable

     def format_for_error(self) -> str:
         return f"{self.name} {self.version} (Installed)"
@@ -391,7 +388,7 @@ class AlreadyInstalledCandidate(Candidate):
     def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
         if not with_requires:
             return
-        for r in self.dist.requires():
+        for r in self.dist.iter_dependencies():
             yield self._factory.make_requirement_from_spec(str(r), self._ireq)

     def get_install_requirement(self) -> Optional[InstallRequirement]:
@@ -491,8 +488,8 @@ class ExtrasCandidate(Candidate):

         # The user may have specified extras that the candidate doesn't
         # support. We ignore any unsupported extras here.
-        valid_extras = self.extras.intersection(self.base.dist.extras)
-        invalid_extras = self.extras.difference(self.base.dist.extras)
+        valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
+        invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
         for extra in sorted(invalid_extras):
             logger.warning(
                 "%s %s does not provide the extra '%s'",
@@ -501,7 +498,7 @@ class ExtrasCandidate(Candidate):
                 extra,
             )

-        for r in self.base.dist.requires(valid_extras):
+        for r in self.base.dist.iter_dependencies(valid_extras):
             requirement = factory.make_requirement_from_spec(
                 str(r), self.base._ireq, valid_extras
             )

@@ -19,7 +19,6 @@ from typing import (
 )

 from pip._vendor.packaging.requirements import InvalidRequirement
-from pip._vendor.packaging.requirements import Requirement as PackagingRequirement
 from pip._vendor.packaging.specifiers import SpecifierSet
 from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
 from pip._vendor.resolvelib import ResolutionImpossible
@@ -28,7 +27,6 @@ from pip._internal.cache import CacheEntry, WheelCache
 from pip._internal.exceptions import (
     DistributionNotFound,
     InstallationError,
-    InstallationSubprocessError,
     MetadataInconsistent,
     UnsupportedPythonVersion,
     UnsupportedWheel,
@@ -46,6 +44,7 @@ from pip._internal.req.req_install import (
 from pip._internal.resolution.base import InstallRequirementProvider
 from pip._internal.utils.compatibility_tags import get_supported
 from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.packaging import get_requirement
 from pip._internal.utils.virtualenv import running_under_virtualenv

 from .base import Candidate, CandidateVersion, Constraint, Requirement
@@ -158,10 +157,7 @@ class Factory:
         try:
             base = self._installed_candidate_cache[dist.canonical_name]
         except KeyError:
-            from pip._internal.metadata.pkg_resources import Distribution as _Dist
-
-            compat_dist = cast(_Dist, dist)._dist
-            base = AlreadyInstalledCandidate(compat_dist, template, factory=self)
+            base = AlreadyInstalledCandidate(dist, template, factory=self)
             self._installed_candidate_cache[dist.canonical_name] = base
         if not extras:
             return base
@@ -193,10 +189,16 @@ class Factory:
                         name=name,
                         version=version,
                     )
-                except (InstallationSubprocessError, MetadataInconsistent) as e:
-                    logger.warning("Discarding %s. %s", link, e)
+                except MetadataInconsistent as e:
+                    logger.info(
+                        "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
+                        link,
+                        e,
+                        extra={"markup": True},
+                    )
                     self._build_failures[link] = e
                     return None
+
             base: BaseCandidate = self._editable_candidate_cache[link]
         else:
             if link not in self._link_candidate_cache:
@@ -208,8 +210,13 @@ class Factory:
                         name=name,
                         version=version,
                     )
-                except (InstallationSubprocessError, MetadataInconsistent) as e:
-                    logger.warning("Discarding %s. %s", link, e)
+                except MetadataInconsistent as e:
+                    logger.info(
+                        "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
+                        link,
+                        e,
+                        extra={"markup": True},
+                    )
                     self._build_failures[link] = e
                     return None
             base = self._link_candidate_cache[link]
@@ -263,7 +270,7 @@ class Factory:
                 extras=extras,
                 template=template,
             )
-            # The candidate is a known incompatiblity. Don't use it.
+            # The candidate is a known incompatibility. Don't use it.
             if id(candidate) in incompatible_ids:
                 return None
             return candidate
@@ -276,14 +283,27 @@ class Factory:
             )
             icans = list(result.iter_applicable())

-            # PEP 592: Yanked releases must be ignored unless only yanked
-            # releases can satisfy the version range. So if this is false,
-            # all yanked icans need to be skipped.
+            # PEP 592: Yanked releases are ignored unless the specifier
+            # explicitly pins a version (via '==' or '===') that can be
+            # solely satisfied by a yanked release.
             all_yanked = all(ican.link.is_yanked for ican in icans)

+            def is_pinned(specifier: SpecifierSet) -> bool:
+                for sp in specifier:
+                    if sp.operator == "===":
+                        return True
+                    if sp.operator != "==":
+                        continue
+                    if sp.version.endswith(".*"):
+                        continue
+                    return True
+                return False
+
+            pinned = is_pinned(specifier)
+
             # PackageFinder returns earlier versions first, so we reverse.
             for ican in reversed(icans):
-                if not all_yanked and ican.link.is_yanked:
+                if not (all_yanked and pinned) and ican.link.is_yanked:
                     continue
                 func = functools.partial(
                     self._make_candidate_from_link,
@@ -350,7 +370,7 @@ class Factory:
     def find_candidates(
         self,
         identifier: str,
-        requirements: Mapping[str, Iterator[Requirement]],
+        requirements: Mapping[str, Iterable[Requirement]],
         incompatibilities: Mapping[str, Iterator[Candidate]],
         constraint: Constraint,
         prefers_installed: bool,
@@ -368,7 +388,7 @@ class Factory:
         # If the current identifier contains extras, add explicit candidates
         # from entries from extra-less identifier.
         with contextlib.suppress(InvalidRequirement):
-            parsed_requirement = PackagingRequirement(identifier)
+            parsed_requirement = get_requirement(identifier)
             explicit_candidates.update(
                 self._iter_explicit_candidates_from_base(
                     requirements.get(parsed_requirement.name, ()),
@@ -377,7 +397,7 @@ class Factory:
             )

         # Add explicit candidates from constraints. We only do this if there are
-        # kown ireqs, which represent requirements not already explicit. If
+        # known ireqs, which represent requirements not already explicit. If
         # there are no ireqs, we're constraining already-explicit requirements,
         # which is handled later when we return the explicit candidates.
         if ireqs:
@@ -487,16 +507,20 @@ class Factory:
     def make_requirement_from_spec(
         self,
         specifier: str,
-        comes_from: InstallRequirement,
+        comes_from: Optional[InstallRequirement],
         requested_extras: Iterable[str] = (),
     ) -> Optional[Requirement]:
         ireq = self._make_install_req_from_spec(specifier, comes_from)
         return self._make_requirement_from_install_req(ireq, requested_extras)

     def make_requires_python_requirement(
-        self, specifier: Optional[SpecifierSet]
+        self,
+        specifier: SpecifierSet,
     ) -> Optional[Requirement]:
-        if self._ignore_requires_python or specifier is None:
+        if self._ignore_requires_python:
+            return None
+        # Don't bother creating a dependency for an empty Requires-Python.
+        if not str(specifier):
             return None
         return RequiresPythonRequirement(specifier, self._python_candidate)

@@ -511,7 +535,7 @@ class Factory:
         hash mismatches. Furthermore, cached wheels at present have
         nondeterministic contents due to file modification times.
         """
-        if self._wheel_cache is None or self.preparer.require_hashes:
+        if self._wheel_cache is None:
             return None
         return self._wheel_cache.get_cache_entry(
             link=link,
@@ -578,8 +602,15 @@ class Factory:
             req_disp = f"{req} (from {parent.name})"

         cands = self._finder.find_all_candidates(req.project_name)
+        skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
         versions = [str(v) for v in sorted({c.version for c in cands})]

+        if skipped_by_requires_python:
+            logger.critical(
+                "Ignored the following versions that require a different python "
+                "version: %s",
+                "; ".join(skipped_by_requires_python) or "none",
+            )
         logger.critical(
             "Could not find a version that satisfies the requirement %s "
             "(from versions: %s)",
@@ -601,7 +632,6 @@ class Factory:
         e: "ResolutionImpossible[Requirement, Candidate]",
         constraints: Dict[str, Constraint],
     ) -> InstallationError:
-
         assert e.causes, "Installation error reported with no cause"

         # If one of the things we can't solve is "we need Python X.Y",
@@ -614,7 +644,7 @@ class Factory:
         ]
         if requires_python_causes:
             # The comprehension above makes sure all Requirement instances are
-            # RequiresPythonRequirement, so let's cast for convinience.
+            # RequiresPythonRequirement, so let's cast for convenience.
             return self._report_requires_python_error(
                 cast("Sequence[ConflictCause]", requires_python_causes),
             )
@@ -695,6 +725,6 @@ class Factory:

         return DistributionNotFound(
             "ResolutionImpossible: for help visit "
-            "https://pip.pypa.io/en/latest/user_guide/"
-            "#fixing-conflicting-dependencies"
+            "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
+            "#dealing-with-dependency-conflicts"
         )

@@ -9,15 +9,30 @@ something.
 """

 import functools
-from typing import Callable, Iterator, Optional, Set, Tuple
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple

 from pip._vendor.packaging.version import _BaseVersion
-from pip._vendor.six.moves import collections_abc  # type: ignore

 from .base import Candidate

 IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]

+if TYPE_CHECKING:
+    SequenceCandidate = Sequence[Candidate]
+else:
+    # For compatibility: Python before 3.9 does not support using [] on the
+    # Sequence class.
+    #
+    # >>> from collections.abc import Sequence
+    # >>> Sequence[str]
+    # Traceback (most recent call last):
+    #   File "<stdin>", line 1, in <module>
+    # TypeError: 'ABCMeta' object is not subscriptable
+    #
+    # TODO: Remove this block after dropping Python 3.8 support.
+    SequenceCandidate = Sequence
+

 def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
     """Iterator for ``FoundCandidates``.
@@ -90,7 +105,7 @@ def _iter_built_with_inserted(
         yield installed


-class FoundCandidates(collections_abc.Sequence):
+class FoundCandidates(SequenceCandidate):
     """A lazy sequence to provide candidates to the resolver.

     The intended usage is to return this from `find_matches()` so the resolver
@@ -111,7 +126,7 @@ class FoundCandidates(collections_abc.Sequence):
         self._prefers_installed = prefers_installed
         self._incompatible_ids = incompatible_ids

-    def __getitem__(self, index: int) -> Candidate:
+    def __getitem__(self, index: Any) -> Any:
         # Implemented to satisfy the ABC check. This is not needed by the
         # resolver, and should not be used by the provider either (for
         # performance reasons).
@@ -138,5 +153,3 @@ class FoundCandidates(collections_abc.Sequence):
         if self._prefers_installed and self._installed:
             return True
         return any(self)
-
-    __nonzero__ = __bool__  # XXX: Python 2.
Some files were not shown because too many files have changed in this diff.
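For context, the yanked-release handling added in the resolver factory hunk above only allows a yanked file to be used when the user's specifier pins an exact version. The snippet below is a minimal standalone sketch, not part of this commit, exercising the same is_pinned logic shown in the diff; it assumes the third-party packaging library is importable (pip itself uses its vendored copy under pip._vendor.packaging).

# Standalone sketch of the pinned-specifier check from the diff above.
# Assumption: the `packaging` package is installed in the current environment.
from packaging.specifiers import SpecifierSet


def is_pinned(specifier: SpecifierSet) -> bool:
    # A specifier counts as a pin when it uses '===' or an exact '=='
    # without a trailing '.*' wildcard.
    for sp in specifier:
        if sp.operator == "===":
            return True
        if sp.operator != "==":
            continue
        if sp.version.endswith(".*"):
            continue
        return True
    return False


print(is_pinned(SpecifierSet("==1.2.3")))   # True: exact pin, a yanked release may satisfy it
print(is_pinned(SpecifierSet("==1.2.*")))   # False: wildcard, not a pin
print(is_pinned(SpecifierSet(">=1.0,<2")))  # False: a range, not a pin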