From dc9efc856f180d755c618573050cf13266168127 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 8 Feb 2021 17:54:41 +0800 Subject: [PATCH 01/60] Always open tar files with UTF-8 The UTF-8 encoding was assumed in an sdist, but without explicitly specifying it, extraction may fail on obscure systems where the default encoding is not UTF-8. Co-Authored-By: Chris Hunt --- news/7667.bugfix.rst | 1 + src/pip/_internal/utils/unpacking.py | 2 +- tests/unit/test_utils_unpacking.py | 19 +++++++++++++++++++ 3 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 news/7667.bugfix.rst diff --git a/news/7667.bugfix.rst b/news/7667.bugfix.rst new file mode 100644 index 00000000000..e42e128e97e --- /dev/null +++ b/news/7667.bugfix.rst @@ -0,0 +1 @@ +Fix extraction of files with utf-8 encoded paths from tars. diff --git a/src/pip/_internal/utils/unpacking.py b/src/pip/_internal/utils/unpacking.py index 44ac475357d..bffb3cd653e 100644 --- a/src/pip/_internal/utils/unpacking.py +++ b/src/pip/_internal/utils/unpacking.py @@ -178,7 +178,7 @@ def untar_file(filename, location): filename, ) mode = "r:*" - tar = tarfile.open(filename, mode) + tar = tarfile.open(filename, mode, encoding="utf-8") try: leading = has_leading_dir([member.name for member in tar.getmembers()]) for member in tar.getmembers(): diff --git a/tests/unit/test_utils_unpacking.py b/tests/unit/test_utils_unpacking.py index aea70efbc07..760b09cf1f8 100644 --- a/tests/unit/test_utils_unpacking.py +++ b/tests/unit/test_utils_unpacking.py @@ -168,6 +168,25 @@ def test_unpack_tar_success(self): untar_file(test_tar, self.tempdir) +def test_unpack_tar_unicode(tmpdir): + test_tar = tmpdir / "test.tar" + # tarfile tries to decode incoming + with tarfile.open( + test_tar, "w", format=tarfile.PAX_FORMAT, encoding="utf-8" + ) as f: + metadata = tarfile.TarInfo("dir/åäö_日本語.py") + f.addfile(metadata, "hello world") + + output_dir = tmpdir / "output" + output_dir.mkdir() + + untar_file(test_tar, str(output_dir)) + + 
output_dir_name = str(output_dir) + contents = os.listdir(output_dir_name) + assert u"åäö_日本語.py" in contents + + @pytest.mark.parametrize('args, expected', [ # Test the second containing the first. (('parent/sub', 'parent/'), False), From 6123570b5e0b713e9985309a1e9f6701180e8b96 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 26 Apr 2021 14:56:07 +0800 Subject: [PATCH 02/60] Cache location calculation functions The practical difference is the mismatch detection is now performed at most once for every invocation, thus only warned once if there are any mismatches. --- src/pip/_internal/locations/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 18bf0319f3d..fbe79ea9f0e 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -1,3 +1,4 @@ +import functools import logging import pathlib import sys @@ -42,6 +43,7 @@ def _default_base(*, user: bool) -> str: return base +@functools.lru_cache(maxsize=None) def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool: if old == new: return False @@ -55,6 +57,7 @@ def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool return True +@functools.lru_cache(maxsize=None) def _log_context( *, user: bool = False, From 9e220c6f3da355378f8088220787844883eb85f7 Mon Sep 17 00:00:00 2001 From: Inada Naoki Date: Wed, 16 Jun 2021 11:17:28 +0900 Subject: [PATCH 03/60] Use UTF-8 for log file --- src/pip/_internal/utils/logging.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pip/_internal/utils/logging.py b/src/pip/_internal/utils/logging.py index 0569b9248a8..39a18fd6cb0 100644 --- a/src/pip/_internal/utils/logging.py +++ b/src/pip/_internal/utils/logging.py @@ -375,6 +375,7 @@ def setup_logging(verbosity, no_color, user_log_file): "level": "DEBUG", "class": handler_classes["file"], "filename": additional_log_file, + "encoding": 
"utf-8", "delay": True, "formatter": "indent_with_timestamp", }, From f6a63eb082c6de07bc27d2281860a49006015f12 Mon Sep 17 00:00:00 2001 From: Inada Naoki Date: Wed, 16 Jun 2021 12:22:36 +0900 Subject: [PATCH 04/60] Add NEWS fragment --- news/10071.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/10071.feature.rst diff --git a/news/10071.feature.rst b/news/10071.feature.rst new file mode 100644 index 00000000000..d731f4c1317 --- /dev/null +++ b/news/10071.feature.rst @@ -0,0 +1 @@ +Change the encoding of log file from default text encoding to UTF-8. From 62f481679a614f7d9a1e43e88a425fce2b7d92bf Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 18 Jun 2021 18:52:40 +0800 Subject: [PATCH 05/60] Move "Link requires a different Python" to verbose --- src/pip/_internal/index/package_finder.py | 5 +++-- tests/unit/test_index.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/index/package_finder.py b/src/pip/_internal/index/package_finder.py index 7f2e04e7c37..8fa310ee30b 100644 --- a/src/pip/_internal/index/package_finder.py +++ b/src/pip/_internal/index/package_finder.py @@ -30,6 +30,7 @@ from pip._internal.models.target_python import TargetPython from pip._internal.models.wheel import Wheel from pip._internal.req import InstallRequirement +from pip._internal.utils._log import getLogger from pip._internal.utils.filetypes import WHEEL_EXTENSION from pip._internal.utils.hashes import Hashes from pip._internal.utils.logging import indent_log @@ -41,7 +42,7 @@ __all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder'] -logger = logging.getLogger(__name__) +logger = getLogger(__name__) BuildTag = Union[Tuple[()], Tuple[int, str]] CandidateSortingKey = ( @@ -77,7 +78,7 @@ def _check_link_requires_python( if not is_compatible: version = '.'.join(map(str, version_info)) if not ignore_requires_python: - logger.debug( + logger.verbose( 'Link requires a different Python (%s not in: %r): %s', version, 
link.requires_python, link, ) diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index 8b56d7854af..7859b4ac814 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -48,7 +48,7 @@ def check_caplog(caplog, expected_level, expected_message): @pytest.mark.parametrize('ignore_requires_python, expected', [ (None, ( - False, 'DEBUG', + False, 'VERBOSE', "Link requires a different Python (3.6.5 not in: '== 3.6.4'): " "https://example.com" )), From 8316051299246224349fb20fe9021bdd84635120 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sun, 20 Jun 2021 06:06:37 +0800 Subject: [PATCH 06/60] Upgrade resolvelib to 0.7.1 --- src/pip/_vendor/resolvelib/__init__.py | 2 +- src/pip/_vendor/resolvelib/resolvers.py | 29 ++++++++++++------------- src/pip/_vendor/resolvelib/structs.py | 14 +++++++++++- src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 29 insertions(+), 18 deletions(-) diff --git a/src/pip/_vendor/resolvelib/__init__.py b/src/pip/_vendor/resolvelib/__init__.py index 184874d45cd..1bddc2fd4e0 100644 --- a/src/pip/_vendor/resolvelib/__init__.py +++ b/src/pip/_vendor/resolvelib/__init__.py @@ -11,7 +11,7 @@ "ResolutionTooDeep", ] -__version__ = "0.7.0" +__version__ = "0.7.1" from .providers import AbstractProvider, AbstractResolver diff --git a/src/pip/_vendor/resolvelib/resolvers.py b/src/pip/_vendor/resolvelib/resolvers.py index 99ee10516b8..42484423c9e 100644 --- a/src/pip/_vendor/resolvelib/resolvers.py +++ b/src/pip/_vendor/resolvelib/resolvers.py @@ -134,11 +134,11 @@ def _push_new_state(self): ) self._states.append(state) - def _merge_into_criterion(self, requirement, parent): + def _add_to_criteria(self, criteria, requirement, parent): self._r.adding_requirement(requirement=requirement, parent=parent) identifier = self._p.identify(requirement_or_candidate=requirement) - criterion = self.state.criteria.get(identifier) + criterion = criteria.get(identifier) if criterion: incompatibilities = list(criterion.incompatibilities) else: 
@@ -147,12 +147,12 @@ def _merge_into_criterion(self, requirement, parent): matches = self._p.find_matches( identifier=identifier, requirements=IteratorMapping( - self.state.criteria, + criteria, operator.methodcaller("iter_requirement"), {identifier: [requirement]}, ), incompatibilities=IteratorMapping( - self.state.criteria, + criteria, operator.attrgetter("incompatibilities"), {identifier: incompatibilities}, ), @@ -171,7 +171,7 @@ def _merge_into_criterion(self, requirement, parent): ) if not criterion.candidates: raise RequirementsConflicted(criterion) - return identifier, criterion + criteria[identifier] = criterion def _get_preference(self, name): return self._p.get_preference( @@ -197,11 +197,10 @@ def _is_current_pin_satisfying(self, name, criterion): for r in criterion.iter_requirement() ) - def _get_criteria_to_update(self, candidate): - criteria = {} - for r in self._p.get_dependencies(candidate=candidate): - name, crit = self._merge_into_criterion(r, parent=candidate) - criteria[name] = crit + def _get_updated_criteria(self, candidate): + criteria = self.state.criteria.copy() + for requirement in self._p.get_dependencies(candidate=candidate): + self._add_to_criteria(criteria, requirement, parent=candidate) return criteria def _attempt_to_pin_criterion(self, name): @@ -210,7 +209,7 @@ def _attempt_to_pin_criterion(self, name): causes = [] for candidate in criterion.candidates: try: - criteria = self._get_criteria_to_update(candidate) + criteria = self._get_updated_criteria(candidate) except RequirementsConflicted as e: causes.append(e.criterion) continue @@ -226,12 +225,13 @@ def _attempt_to_pin_criterion(self, name): if not satisfied: raise InconsistentCandidate(candidate, criterion) + self._r.pinning(candidate=candidate) + self.state.criteria.update(criteria) + # Put newly-pinned candidate at the end. This is essential because # backtracking looks at this mapping to get the last pin. 
- self._r.pinning(candidate=candidate) self.state.mapping.pop(name, None) self.state.mapping[name] = candidate - self.state.criteria.update(criteria) return [] @@ -338,10 +338,9 @@ def resolve(self, requirements, max_rounds): self._states = [State(mapping=collections.OrderedDict(), criteria={})] for r in requirements: try: - name, crit = self._merge_into_criterion(r, parent=None) + self._add_to_criteria(self.state.criteria, r, parent=None) except RequirementsConflicted as e: raise ResolutionImpossible(e.criterion.information) - self.state.criteria[name] = crit # The root state is saved as a sentinel so the first ever pin can have # something to backtrack to if it fails. The root state is basically diff --git a/src/pip/_vendor/resolvelib/structs.py b/src/pip/_vendor/resolvelib/structs.py index e1e7aa429e3..93d1568bd4d 100644 --- a/src/pip/_vendor/resolvelib/structs.py +++ b/src/pip/_vendor/resolvelib/structs.py @@ -75,6 +75,18 @@ def __init__(self, mapping, accessor, appends=None): self._accessor = accessor self._appends = appends or {} + def __repr__(self): + return "IteratorMapping({!r}, {!r}, {!r})".format( + self._mapping, + self._accessor, + self._appends, + ) + + def __bool__(self): + return bool(self._mapping or self._appends) + + __nonzero__ = __bool__ # XXX: Python 2. 
+ def __contains__(self, key): return key in self._mapping or key in self._appends @@ -90,7 +102,7 @@ def __iter__(self): return itertools.chain(self._mapping, more) def __len__(self): - more = len(k for k in self._appends if k not in self._mapping) + more = sum(1 for k in self._appends if k not in self._mapping) return len(self._mapping) + more diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 8eb8a5d20da..13c21dfd484 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -14,7 +14,7 @@ requests==2.25.1 chardet==4.0.0 idna==3.1 urllib3==1.26.5 -resolvelib==0.7.0 +resolvelib==0.7.1 setuptools==44.0.0 six==1.16.0 tenacity==7.0.0 From 7a31c61d62f75f754c423cc401b9952f8e696aa5 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 23 Jun 2021 06:02:15 +0800 Subject: [PATCH 07/60] News for resolvelib 0.7.1 --- news/resolvelib.vendor.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/resolvelib.vendor.rst diff --git a/news/resolvelib.vendor.rst b/news/resolvelib.vendor.rst new file mode 100644 index 00000000000..0e114c45e56 --- /dev/null +++ b/news/resolvelib.vendor.rst @@ -0,0 +1 @@ +Upgrade resolvelib to 0.7.1. From 189f63ac122b7d002039c041ae356f0de03fe7ea Mon Sep 17 00:00:00 2001 From: Diego Ramirez Date: Wed, 23 Jun 2021 12:20:20 -0500 Subject: [PATCH 08/60] Fix the annotations for `setup.py` I have been converting type commentaries into annotations. For now, I'll fix some annotations from `setup.py`. 
--- setup.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/setup.py b/setup.py index 91f537a40f0..a233bd2532f 100644 --- a/setup.py +++ b/setup.py @@ -4,8 +4,7 @@ from setuptools import find_packages, setup -def read(rel_path): - # type: (str) -> str +def read(rel_path: str) -> str: here = os.path.abspath(os.path.dirname(__file__)) # intentionally *not* adding an encoding option to open, See: # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690 @@ -13,8 +12,7 @@ def read(rel_path): return fp.read() -def get_version(rel_path): - # type: (str) -> str +def get_version(rel_path: str) -> str: for line in read(rel_path).splitlines(): if line.startswith("__version__"): # __version__ = "0.9" From b37f3abb9e5597808ba177d0dc4d9788885d3fa5 Mon Sep 17 00:00:00 2001 From: Diego Ramirez Date: Wed, 23 Jun 2021 12:23:00 -0500 Subject: [PATCH 09/60] Create 10094.trivial.rst This is the news entry for my pull request. --- news/10094.trivial.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/10094.trivial.rst diff --git a/news/10094.trivial.rst b/news/10094.trivial.rst new file mode 100644 index 00000000000..df1799f5f99 --- /dev/null +++ b/news/10094.trivial.rst @@ -0,0 +1 @@ +Convert type hint commentaries into annotations on ``setup.py``. From ad8a1b8da07542a2b926ddb472cad80369937b20 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 25 Jun 2021 11:13:03 +0100 Subject: [PATCH 10/60] Add topic guide: VCS Support --- docs/html/topics/index.md | 1 + docs/html/topics/vcs-support.md | 163 ++++++++++++++++++++++++++++++++ 2 files changed, 164 insertions(+) create mode 100644 docs/html/topics/vcs-support.md diff --git a/docs/html/topics/index.md b/docs/html/topics/index.md index 478aacf2aa6..6b2db606aa3 100644 --- a/docs/html/topics/index.md +++ b/docs/html/topics/index.md @@ -13,4 +13,5 @@ This section of the documentation is currently being fleshed out. 
See authentication caching configuration +vcs-support ``` diff --git a/docs/html/topics/vcs-support.md b/docs/html/topics/vcs-support.md new file mode 100644 index 00000000000..4ccde54cbd9 --- /dev/null +++ b/docs/html/topics/vcs-support.md @@ -0,0 +1,163 @@ +# VCS Support + +pip supports installing from various version control systems (VCS). +This support requires a working executable to be available (for the version +control system being used). It is used through URL prefixes: + +- Git -- `git+` +- Mercurial -- `hg+` +- Subversion -- `svn+` +- Bazaar -- `bzr+` + +## Supported VCS + +### Git + +The supported schemes are `git+file`, `git+https`, `git+ssh`, `git+http`, +`git+git` and `git`. Here are some of the supported forms: + +```none +git+ssh://git.example.com/MyProject#egg=MyProject +git+file:///home/user/projects/MyProject#egg=MyProject +git+https://git.example.com/MyProject#egg=MyProject +``` + +```{warning} +The use of `git`, `git+git`, and `git+http` schemes is discouraged. +The former two use [the Git Protocol], which lacks authentication, and HTTP is +insecure due to lack of TLS based encryption. +``` + +[the Git Protocol]: https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols + +It is also possible to specify a "git ref" such as branch name, a commit hash or +a tag name: + +```none +git+https://git.example.com/MyProject.git@master#egg=MyProject +git+https://git.example.com/MyProject.git@v1.0#egg=MyProject +git+https://git.example.com/MyProject.git@da39a3ee5e6b4b0d3255bfef95601890afd80709#egg=MyProject +git+https://git.example.com/MyProject.git@refs/pull/123/head#egg=MyProject +``` + +When passing a commit hash, specifying a full hash is preferable to a partial +hash because a full hash allows pip to operate more efficiently (e.g. by +making fewer network calls). + +### Mercurial + +The supported schemes are `hg+file`, `hg+http`, `hg+https`, `hg+ssh` +and `hg+static-http`. 
Here are some of the supported forms: + +``` +hg+http://hg.myproject.org/MyProject#egg=MyProject +hg+https://hg.myproject.org/MyProject#egg=MyProject +hg+ssh://hg.myproject.org/MyProject#egg=MyProject +hg+file:///home/user/projects/MyProject#egg=MyProject +``` + +It is also possible to specify a revision number, a revision hash, a tag name +or a local branch name: + +```none +hg+http://hg.example.com/MyProject@da39a3ee5e6b#egg=MyProject +hg+http://hg.example.com/MyProject@2019#egg=MyProject +hg+http://hg.example.com/MyProject@v1.0#egg=MyProject +hg+http://hg.example.com/MyProject@special_feature#egg=MyProject +``` + +### Subversion + +The supported schemes are `svn`, `svn+svn`, `svn+http`, `svn+https` and +`svn+ssh`. Here are some of the supported forms: + +```none +svn+https://svn.example.com/MyProject#egg=MyProject +svn+ssh://svn.example.com/MyProject#egg=MyProject +svn+ssh://user@svn.example.com/MyProject#egg=MyProject +``` + +You can also give specific revisions to an SVN URL, like so: + +```none +-e svn+http://svn.example.com/svn/MyProject/trunk@2019#egg=MyProject +-e svn+http://svn.example.com/svn/MyProject/trunk@{20080101}#egg=MyProject +``` + +Note that you need to use [Editable VCS installs](#editable-vcs-installs) for +using specific revisions. + +### Bazaar + +The supported schemes are `bzr+http`, `bzr+https`, `bzr+ssh`, `bzr+sftp`, +`bzr+ftp` and `bzr+lp`. 
Here are the supported forms: + +```none +bzr+http://bzr.example.com/MyProject/trunk#egg=MyProject +bzr+sftp://user@example.com/MyProject/trunk#egg=MyProject +bzr+ssh://user@example.com/MyProject/trunk#egg=MyProject +bzr+ftp://user@example.com/MyProject/trunk#egg=MyProject +bzr+lp:MyProject#egg=MyProject +``` + +Tags or revisions can be installed like so: + +```none +bzr+https://bzr.example.com/MyProject/trunk@2019#egg=MyProject +bzr+http://bzr.example.com/MyProject/trunk@v1.0#egg=MyProject +``` + +(editable-vcs-installs)= + +## Editable VCS installs + +VCS projects can be installed in {ref}`editable mode ` (using +the {ref}`--editable ` option) or not. + +- The default clone location (for editable installs) is: + + - `/src/SomeProject` in virtual environments + - `/src/SomeProject` for global Python installs + + The {ref}`--src ` option can be used to modify this location. + +- For non-editable installs, the project is built locally in a temp dir and then + installed normally. + +Note that if a satisfactory version of the package is already installed, the +VCS source will not overwrite it without an `--upgrade` flag. Further, pip +looks at the package version (specified in the `setup.py` file) of the target +commit to determine what action to take on the VCS requirement (not the commit +itself). + +The {ref}`pip freeze` subcommand will record the VCS requirement specifier +(referencing a specific commit) only if the install is done with the editable +option. + +## URL fragments + +pip looks at 2 fragments for VCS URLs: + +- `egg`: For specifying the "project name" for use in pip's dependency + resolution logic. eg: `egg=project_name` +- `subdirectory`: For specifying the path to the Python package, when it is not + in the root of the VCS directory. 
eg: `pkg_dir` + +````{admonition} Example +If your repository layout is: + +``` +pkg_dir +├── setup.py # setup.py for package "pkg" +└── some_module.py +other_dir +└── some_file +some_other_file +``` + +Then, to install from this repository, the syntax would be: + +```{pip-cli} +$ pip install -e "vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir" +``` +```` From 9cd3d5d547afb71aaff9094cd82a8101de774415 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 25 Jun 2021 11:14:12 +0100 Subject: [PATCH 11/60] Drop old VCS support section and update links --- docs/html/cli/pip_install.rst | 169 +--------------------------------- docs/html/getting-started.md | 2 +- docs/html/installation.md | 4 +- docs/html/user_guide.rst | 2 +- 4 files changed, 8 insertions(+), 169 deletions(-) diff --git a/docs/html/cli/pip_install.rst b/docs/html/cli/pip_install.rst index 6bc15349d71..e7851b5e948 100644 --- a/docs/html/cli/pip_install.rst +++ b/docs/html/cli/pip_install.rst @@ -66,7 +66,7 @@ for the name and project version (this is in theory slightly less reliable than using the ``egg_info`` command, but avoids downloading and processing unnecessary numbers of files). -Any URL may use the ``#egg=name`` syntax (see :ref:`VCS Support`) to +Any URL may use the ``#egg=name`` syntax (see :doc:`../topics/vcs-support`) to explicitly state the project name. Satisfying Requirements @@ -372,168 +372,7 @@ that enables installation of pre-releases and development releases. VCS Support ----------- -pip supports installing from Git, Mercurial, Subversion and Bazaar, and detects -the type of VCS using URL prefixes: ``git+``, ``hg+``, ``svn+``, and ``bzr+``. - -pip requires a working VCS command on your path: ``git``, ``hg``, ``svn``, or -``bzr``. - -VCS projects can be installed in :ref:`editable mode ` (using -the :ref:`--editable ` option) or not. 
- -* For editable installs, the clone location by default is ``/src/SomeProject`` in virtual environments, and - ``/src/SomeProject`` - for global installs. The :ref:`--src ` option can be used to - modify this location. -* For non-editable installs, the project is built locally in a temp dir and then - installed normally. Note that if a satisfactory version of the package is - already installed, the VCS source will not overwrite it without an - ``--upgrade`` flag. VCS requirements pin the package version (specified - in the ``setup.py`` file) of the target commit, not necessarily the commit - itself. -* The :ref:`pip freeze` subcommand will record the VCS requirement specifier - (referencing a specific commit) if and only if the install is done using the - editable option. - -The "project name" component of the URL suffix ``egg=`` -is used by pip in its dependency logic to identify the project prior -to pip downloading and analyzing the metadata. For projects -where ``setup.py`` is not in the root of project, the "subdirectory" component -is used. The value of the "subdirectory" component should be a path starting -from the root of the project to where ``setup.py`` is located. - -If your repository layout is:: - - pkg_dir - ├── setup.py # setup.py for package "pkg" - └── some_module.py - other_dir - └── some_file - some_other_file - -Then, to install from this repository, the syntax would be: - -.. tab:: Unix/macOS - - .. code-block:: shell - - python -m pip install -e "vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir" - -.. tab:: Windows - - .. code-block:: shell - - py -m pip install -e "vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir" - - -Git -^^^ - -pip currently supports cloning over ``git``, ``git+http``, ``git+https``, -``git+ssh``, ``git+git`` and ``git+file``. - -.. warning:: - - Note that the use of ``git``, ``git+git``, and ``git+http`` is discouraged. 
- The former two use `the Git Protocol`_, which lacks authentication, and HTTP is - insecure due to lack of TLS based encryption. - -Here are the supported forms:: - - [-e] git+http://git.example.com/MyProject#egg=MyProject - [-e] git+https://git.example.com/MyProject#egg=MyProject - [-e] git+ssh://git.example.com/MyProject#egg=MyProject - [-e] git+file:///home/user/projects/MyProject#egg=MyProject - -Passing a branch name, a commit hash, a tag name or a git ref is possible like so:: - - [-e] git+https://git.example.com/MyProject.git@main#egg=MyProject - [-e] git+https://git.example.com/MyProject.git@v1.0#egg=MyProject - [-e] git+https://git.example.com/MyProject.git@da39a3ee5e6b4b0d3255bfef95601890afd80709#egg=MyProject - [-e] git+https://git.example.com/MyProject.git@refs/pull/123/head#egg=MyProject - -When passing a commit hash, specifying a full hash is preferable to a partial -hash because a full hash allows pip to operate more efficiently (e.g. by -making fewer network calls). - -.. _`the Git Protocol`: https://git-scm.com/book/en/v2/Git-on-the-Server-The-Protocols - -Mercurial -^^^^^^^^^ - -The supported schemes are: ``hg+file``, ``hg+http``, ``hg+https``, -``hg+static-http``, and ``hg+ssh``. - -Here are the supported forms:: - - [-e] hg+http://hg.myproject.org/MyProject#egg=MyProject - [-e] hg+https://hg.myproject.org/MyProject#egg=MyProject - [-e] hg+ssh://hg.myproject.org/MyProject#egg=MyProject - [-e] hg+file:///home/user/projects/MyProject#egg=MyProject - -You can also specify a revision number, a revision hash, a tag name or a local -branch name like so:: - - [-e] hg+http://hg.example.com/MyProject@da39a3ee5e6b#egg=MyProject - [-e] hg+http://hg.example.com/MyProject@2019#egg=MyProject - [-e] hg+http://hg.example.com/MyProject@v1.0#egg=MyProject - [-e] hg+http://hg.example.com/MyProject@special_feature#egg=MyProject - -Subversion -^^^^^^^^^^ - -pip supports the URL schemes ``svn``, ``svn+svn``, ``svn+http``, ``svn+https``, ``svn+ssh``. 
- -Here are some of the supported forms:: - - [-e] svn+https://svn.example.com/MyProject#egg=MyProject - [-e] svn+ssh://svn.example.com/MyProject#egg=MyProject - [-e] svn+ssh://user@svn.example.com/MyProject#egg=MyProject - -You can also give specific revisions to an SVN URL, like so:: - - [-e] svn+svn://svn.example.com/svn/MyProject#egg=MyProject - [-e] svn+http://svn.example.com/svn/MyProject/trunk@2019#egg=MyProject - -which will check out revision 2019. ``@{20080101}`` would also check -out the revision from 2008-01-01. You can only check out specific -revisions using ``-e svn+...``. - -Bazaar -^^^^^^ - -pip supports Bazaar using the ``bzr+http``, ``bzr+https``, ``bzr+ssh``, -``bzr+sftp``, ``bzr+ftp`` and ``bzr+lp`` schemes. - -Here are the supported forms:: - - [-e] bzr+http://bzr.example.com/MyProject/trunk#egg=MyProject - [-e] bzr+sftp://user@example.com/MyProject/trunk#egg=MyProject - [-e] bzr+ssh://user@example.com/MyProject/trunk#egg=MyProject - [-e] bzr+ftp://user@example.com/MyProject/trunk#egg=MyProject - [-e] bzr+lp:MyProject#egg=MyProject - -Tags or revisions can be installed like so:: - - [-e] bzr+https://bzr.example.com/MyProject/trunk@2019#egg=MyProject - [-e] bzr+http://bzr.example.com/MyProject/trunk@v1.0#egg=MyProject - -Using Environment Variables -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Since version 10, pip also makes it possible to use environment variables which -makes it possible to reference private repositories without having to store -access tokens in the requirements file. For example, a private git repository -allowing Basic Auth for authentication can be refenced like this:: - - [-e] git+http://${AUTH_USER}:${AUTH_PASSWORD}@git.example.com/MyProject#egg=MyProject - [-e] git+https://${AUTH_USER}:${AUTH_PASSWORD}@git.example.com/MyProject#egg=MyProject - -.. note:: - - Only ``${VARIABLE}`` is supported, other formats like ``$VARIABLE`` or - ``%VARIABLE%`` won't work. +This is now covered in :doc:`../topics/vcs-support`. 
Finding Packages ---------------- @@ -801,7 +640,7 @@ You can install local projects or VCS projects in "editable" mode: py -m pip install -e git+http://repo/my_project.git#egg=SomeProject -(See the :ref:`VCS Support` section above for more information on VCS-related syntax.) +(See the :doc:`../topics/vcs-support` section above for more information on VCS-related syntax.) For local projects, the "SomeProject.egg-info" directory is created relative to the project path. This is one advantage over just using ``setup.py develop``, @@ -992,7 +831,7 @@ Examples py -m pip install SomeProject@git+https://git.repo/some_pkg.git@1.3.1 -#. Install a project from VCS in "editable" mode. See the sections on :ref:`VCS Support ` and :ref:`Editable Installs `. +#. Install a project from VCS in "editable" mode. See the sections on :doc:`../topics/vcs-support` and :ref:`Editable Installs `. .. tab:: Unix/macOS diff --git a/docs/html/getting-started.md b/docs/html/getting-started.md index 42ac2c93400..5c22d1abebc 100644 --- a/docs/html/getting-started.md +++ b/docs/html/getting-started.md @@ -47,7 +47,7 @@ $ pip install git+https://github.com/pypa/sampleproject.git@main Successfully installed sampleproject ``` -See {ref}`VCS Support` for more information about this syntax. +See {doc}`topics/vcs-support` for more information about this syntax. 
### Install a package from a distribution file diff --git a/docs/html/installation.md b/docs/html/installation.md index e389a8fa4d4..ecb71a4aa16 100644 --- a/docs/html/installation.md +++ b/docs/html/installation.md @@ -13,8 +13,8 @@ Usually, pip is automatically installed if you are: If your Python environment does not have pip installed, there are 2 mechanisms to install pip supported directly by pip's maintainers: -- [`ensurepip`](#using-ensurepip) -- [`get-pip.py`](#using-get-pip-py) +- [`ensurepip`](#ensurepip) +- [`get-pip.py`](#get-pip-py) ### `ensurepip` diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index e86fdb48c88..78f5f073095 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -1528,7 +1528,7 @@ Setups to test with special attention * Continuous integration/continuous deployment setups -* Installing from any kind of version control systems (i.e., Git, Subversion, Mercurial, or CVS), per :ref:`VCS Support` +* Installing from any kind of version control systems (i.e., Git, Subversion, Mercurial, or CVS), per :doc:`topics/vcs-support` * Installing from source code held in local directories From 80285c686f76350a219ad6d9133f734640c45215 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 25 Jun 2021 11:14:29 +0100 Subject: [PATCH 12/60] Correctly look up `os.devnull` reference --- docs/html/topics/configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/topics/configuration.md b/docs/html/topics/configuration.md index 90799d574c0..eb392ecd493 100644 --- a/docs/html/topics/configuration.md +++ b/docs/html/topics/configuration.md @@ -82,7 +82,7 @@ Site Additionally, the environment variable `PIP_CONFIG_FILE` can be used to specify a configuration file that's loaded first, and whose values are overridden by -the values set in the aforementioned files. Setting this to {ref}`os.devnull` +the values set in the aforementioned files. 
Setting this to {any}`os.devnull` disables the loading of _all_ configuration files. ### Loading order From b9d27d6d284d97d80cddabdf2ad6d32cff942ab4 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 25 Jun 2021 11:58:02 +0100 Subject: [PATCH 13/60] Add topic guide: Repeatable Installs --- docs/html/cli/pip_install.rst | 6 +- docs/html/topics/index.md | 1 + docs/html/topics/repeatable-installs.md | 98 +++++++++++++++++++++++++ docs/html/user_guide.rst | 83 +-------------------- 4 files changed, 104 insertions(+), 84 deletions(-) create mode 100644 docs/html/topics/repeatable-installs.md diff --git a/docs/html/cli/pip_install.rst b/docs/html/cli/pip_install.rst index 6bc15349d71..6c07ef29fad 100644 --- a/docs/html/cli/pip_install.rst +++ b/docs/html/cli/pip_install.rst @@ -680,9 +680,9 @@ of having the wheel cache disabled is thus extra build time for sdists, and this can be solved by making sure pre-built wheels are available from the index server. -Hash-checking mode also works with :ref:`pip download` and :ref:`pip wheel`. A -:ref:`comparison of hash-checking mode with other repeatability strategies -` is available in the User Guide. +Hash-checking mode also works with :ref:`pip download` and :ref:`pip wheel`. +See :doc:`../topics/repeatable-installs` for a comparison of hash-checking mode +with other repeatability strategies. .. warning:: diff --git a/docs/html/topics/index.md b/docs/html/topics/index.md index 478aacf2aa6..57d3e99860e 100644 --- a/docs/html/topics/index.md +++ b/docs/html/topics/index.md @@ -13,4 +13,5 @@ This section of the documentation is currently being fleshed out. 
See
 authentication
 caching
 configuration
+repeatable-installs
 ```
diff --git a/docs/html/topics/repeatable-installs.md b/docs/html/topics/repeatable-installs.md
new file mode 100644
index 00000000000..eca633d4ade
--- /dev/null
+++ b/docs/html/topics/repeatable-installs.md
@@ -0,0 +1,98 @@
+# Repeatable Installs
+
+pip can be used to achieve various levels of repeatable environments. This page
+walks through increasingly stricter definitions of what "repeatable" means.
+
+## Pinning the package versions
+
+Pinning package versions of your dependencies in the requirements file
+protects you from bugs or incompatibilities in newly released versions:
+
+```
+SomePackage == 1.2.3
+DependencyOfSomePackage == 4.5.6
+```
+
+```{note}
+Pinning refers to using the `==` operator to require the package to be a
+specific version.
+```
+
+A requirements file, containing pinned package versions, can be generated using
+{ref}`pip freeze`. This would include not only the top-level packages, but also
+all of their transitive dependencies. Performing the installation using
+{ref}`--no-deps ` would provide an extra dose of insurance
+against installing anything not explicitly listed.
+
+This strategy is easy to implement and works across OSes and architectures.
+However, it trusts the locations you're fetching the packages from (like PyPI)
+and the certificate authority chain. It also relies on those locations not
+allowing packages to change without a version increase. (PyPI does protect
+against this.)
+
+## Hash-checking
+
+Beyond pinning version numbers, you can add hashes against which to verify
+downloaded packages:
+
+```none
+FooProject == 1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
+```
+
+This protects against a compromise of PyPI or the HTTPS certificate chain. It
+also guards against a package changing without its version number changing (on
+indexes that allow this). This approach is a good fit for automated server
+deployments.
+ +Hash-checking mode is a labour-saving alternative to running a private index +server containing approved packages: it removes the need to upload packages, +maintain ACLs, and keep an audit trail (which a VCS gives you on the +requirements file for free). It can also substitute for a vendored library, +providing easier upgrades and less VCS noise. It does not, of course, +provide the availability benefits of a private index or a vendored library. + +[pip-tools] is a package that builds upon pip, and provides a good workflow for +managing and generating requirements files. + +[pip-tools]: https://github.com/jazzband/pip-tools#readme + +## Using a wheelhouse (AKA Installation Bundles) + +{ref}`pip wheel` can be used to generate and package all of a project's +dependencies, with all the compilation performed, into a single directory that +can be converted into a single archive. This archive then allows installation +when index servers are unavailable and avoids time-consuming recompilation. + +````{admonition} Example +Creating the bundle, on a modern Unix system: + +``` +$ tempdir=$(mktemp -d /tmp/wheelhouse-XXXXX) +$ python -m pip wheel -r requirements.txt --wheel-dir=$tempdir +$ cwd=`pwd` +$ (cd "$tempdir"; tar -cjvf "$cwd/bundled.tar.bz2" *) +``` + +Installing from the bundle, on a modern Unix system: + +``` +$ tempdir=$(mktemp -d /tmp/wheelhouse-XXXXX) +$ (cd $tempdir; tar -xvf /path/to/bundled.tar.bz2) +$ python -m pip install --force-reinstall --no-index --no-deps $tempdir/* +``` +```` + +Note that such a wheelhouse contains compiled packages, which are typically +OS and architecture-specific, so these archives are not necessarily portable +across machines. + +Hash-checking mode can also be used along with this method (since this uses a +requirements file as well), to ensure that future archives are built with +identical packages. + +```{warning} +Beware of the `setup_requires` keyword arg in {file}`setup.py`. 
The (rare) +packages that use it will cause those dependencies to be downloaded by +setuptools directly, skipping pip's protections. If you need to use such a +package, see {ref}`Controlling setup_requires `. +``` diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index e86fdb48c88..10518940b55 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -122,7 +122,7 @@ installed by pip in any particular order. In practice, there are 4 common uses of Requirements files: 1. Requirements files are used to hold the result from :ref:`pip freeze` for the - purpose of achieving :ref:`repeatable installations `. In + purpose of achieving :doc:`topics/repeatable-installs`. In this case, your requirement file contains a pinned version of everything that was installed when ``pip freeze`` was run. @@ -762,86 +762,7 @@ is the latest version: Ensuring Repeatability ====================== -pip can achieve various levels of repeatability: - -Pinned Version Numbers ----------------------- - -Pinning the versions of your dependencies in the requirements file -protects you from bugs or incompatibilities in newly released versions:: - - SomePackage == 1.2.3 - DependencyOfSomePackage == 4.5.6 - -Using :ref:`pip freeze` to generate the requirements file will ensure that not -only the top-level dependencies are included but their sub-dependencies as -well, and so on. Perform the installation using :ref:`--no-deps -` for an extra dose of insurance against installing -anything not explicitly listed. - -This strategy is easy to implement and works across OSes and architectures. -However, it trusts PyPI and the certificate authority chain. It -also relies on indices and find-links locations not allowing -packages to change without a version increase. (PyPI does protect -against this.) 
- -Hash-checking Mode ------------------- - -Beyond pinning version numbers, you can add hashes against which to verify -downloaded packages:: - - FooProject == 1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824 - -This protects against a compromise of PyPI or the HTTPS -certificate chain. It also guards against a package changing -without its version number changing (on indexes that allow this). -This approach is a good fit for automated server deployments. - -Hash-checking mode is a labor-saving alternative to running a private index -server containing approved packages: it removes the need to upload packages, -maintain ACLs, and keep an audit trail (which a VCS gives you on the -requirements file for free). It can also substitute for a vendor library, -providing easier upgrades and less VCS noise. It does not, of course, -provide the availability benefits of a private index or a vendor library. - -For more, see -:ref:`pip install\'s discussion of hash-checking mode `. - -.. _`Installation Bundle`: - -Installation Bundles --------------------- - -Using :ref:`pip wheel`, you can bundle up all of a project's dependencies, with -any compilation done, into a single archive. This allows installation when -index servers are unavailable and avoids time-consuming recompilation. Create -an archive like this:: - - $ tempdir=$(mktemp -d /tmp/wheelhouse-XXXXX) - $ python -m pip wheel -r requirements.txt --wheel-dir=$tempdir - $ cwd=`pwd` - $ (cd "$tempdir"; tar -cjvf "$cwd/bundled.tar.bz2" *) - -You can then install from the archive like this:: - - $ tempdir=$(mktemp -d /tmp/wheelhouse-XXXXX) - $ (cd $tempdir; tar -xvf /path/to/bundled.tar.bz2) - $ python -m pip install --force-reinstall --ignore-installed --upgrade --no-index --no-deps $tempdir/* - -Note that compiled packages are typically OS- and architecture-specific, so -these archives are not necessarily portable across machines. 
- -Hash-checking mode can be used along with this method to ensure that future -archives are built with identical packages. - -.. warning:: - - Finally, beware of the ``setup_requires`` keyword arg in :file:`setup.py`. - The (rare) packages that use it will cause those dependencies to be - downloaded by setuptools directly, skipping pip's protections. If you need - to use such a package, see :ref:`Controlling - setup_requires`. +This is now covered in :doc:`../topics/repeatable-installs`. .. _`Fixing conflicting dependencies`: From 4fe94078d58b272c61503081db636e01c09eda40 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 25 Jun 2021 15:45:45 +0100 Subject: [PATCH 14/60] Update IRC links to libera.chat --- .github/ISSUE_TEMPLATE/config.yml | 4 ++-- .github/ISSUE_TEMPLATE/~good-first-issue.md | 2 +- README.rst | 4 ++-- docs/html/development/index.rst | 4 ++-- docs/html/index.md | 4 ++-- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 8e5c268c114..98a92d60ee1 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -6,6 +6,6 @@ contact_links: about: | Please ask typical Q&A here: general ideas for Python packaging, questions about structuring projects and so on -- name: '💬 IRC: #pypa @ Freenode' - url: https://webchat.freenode.net/#pypa +- name: '💬 IRC: #pypa' + url: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa about: Chat with devs diff --git a/.github/ISSUE_TEMPLATE/~good-first-issue.md b/.github/ISSUE_TEMPLATE/~good-first-issue.md index 885198b63a7..912201676c8 100644 --- a/.github/ISSUE_TEMPLATE/~good-first-issue.md +++ b/.github/ISSUE_TEMPLATE/~good-first-issue.md @@ -12,4 +12,4 @@ labels: ["good first issue"] --- -**Good First Issue**: This issue is a good starting point for first time contributors -- the process of fixing this should be a good introduction to pip's development workflow. 
If you've already contributed to pip, work on [another issue without this label](https://github.com/pypa/pip/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://pip.pypa.io/en/latest/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, [`#pypa-dev` on Freenode](https://webchat.freenode.net/?channels=%23pypa-dev), or the [distutils-sig mailing list](https://mail.python.org/mailman3/lists/distutils-sig.python.org/). +**Good First Issue**: This issue is a good starting point for first time contributors -- the process of fixing this should be a good introduction to pip's development workflow. If you've already contributed to pip, work on [another issue without this label](https://github.com/pypa/pip/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen+-label%3A%22good+first+issue%22) instead. If there is not a corresponding pull request for this issue, it is up for grabs. For directions for getting set up, see our [Getting Started Guide](https://pip.pypa.io/en/latest/development/getting-started/). If you are working on this issue and have questions, feel free to ask them here, [`#pypa-dev` on Libera.chat](https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev), or the [distutils-sig mailing list](https://mail.python.org/mailman3/lists/distutils-sig.python.org/). diff --git a/README.rst b/README.rst index 222b055eb4c..43ad925be21 100644 --- a/README.rst +++ b/README.rst @@ -57,6 +57,6 @@ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. .. _Issue tracking: https://github.com/pypa/pip/issues .. _Discourse channel: https://discuss.python.org/c/packaging .. _Development mailing list: https://mail.python.org/mailman3/lists/distutils-sig.python.org/ -.. 
_User IRC: https://webchat.freenode.net/?channels=%23pypa -.. _Development IRC: https://webchat.freenode.net/?channels=%23pypa-dev +.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa +.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev .. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md diff --git a/docs/html/development/index.rst b/docs/html/development/index.rst index 47907584919..31df114ae1c 100644 --- a/docs/html/development/index.rst +++ b/docs/html/development/index.rst @@ -7,7 +7,7 @@ of all forms. The sections below will help you get started with development, testing, and documentation. You can also join ``#pypa`` (general packaging discussion and user support) and -``#pypa-dev`` (discussion about development of packaging tools) `on Freenode`_, +``#pypa-dev`` (discussion about development of packaging tools) `on Libera.chat`_, or the `distutils-sig mailing list`_, to ask questions or get involved. .. toctree:: @@ -26,5 +26,5 @@ or the `distutils-sig mailing list`_, to ask questions or get involved. pip's development documentation has been rearranged and some older references might be broken. -.. _`on Freenode`: https://webchat.freenode.net/?channels=%23pypa-dev,pypa +.. _`on Libera.chat`: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev .. 
_`distutils-sig mailing list`: https://mail.python.org/mailman3/lists/distutils-sig.python.org/ diff --git a/docs/html/index.md b/docs/html/index.md index 9ab5df298be..4b565b9a38c 100644 --- a/docs/html/index.md +++ b/docs/html/index.md @@ -45,5 +45,5 @@ lists or chat rooms: [pypi]: https://pypi.org/ [issue-tracker]: https://github.com/pypa/pip/issues/ [packaging-discourse]: https://discuss.python.org/c/packaging/14 -[irc-pypa]: https://webchat.freenode.net/#pypa -[irc-pypa-dev]: https://webchat.freenode.net/#pypa-dev +[irc-pypa]: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa +[irc-pypa-dev]: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev From 4cab55f48ef2d05da40dd603910a68f81523ddd4 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 28 May 2021 11:22:39 +0100 Subject: [PATCH 15/60] Rephrase the warning printed when run as root on Unix The earlier warning phrasing has some awkwardness and doesn't clearly explain why this action is potentially harmful. The change from "you should" to "it is recommended" is also intentional, to take a different tone. --- src/pip/_internal/cli/req_command.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 3fc00d4f47b..7ed623880ac 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -177,8 +177,9 @@ def warn_if_run_as_root(): if os.getuid() != 0: return logger.warning( - "Running pip as root will break packages and permissions. " - "You should install packages reliably by using venv: " + "Running pip as the 'root' user can result in broken permissions and " + "conflicting behaviour with the system package manager. 
" + "It is recommended to use a virtual environment instead: " "https://pip.pypa.io/warnings/venv" ) From 363e90b62c3bfff14a4684545d54300007bb4d78 Mon Sep 17 00:00:00 2001 From: Ben Darnell <> Date: Fri, 28 May 2021 16:01:41 +0000 Subject: [PATCH 16/60] Avoid importing a non-vendored version of Tornado Code depending on this conditional import could break if an old version of Tornado is present in the environment, rendering pip unusable. --- news/10020.bugfix.rst | 1 + src/pip/_vendor/tenacity/__init__.py | 10 ++++++---- tools/vendoring/patches/tenacity.patch | 21 +++++++++++++++++++++ 3 files changed, 28 insertions(+), 4 deletions(-) create mode 100644 news/10020.bugfix.rst create mode 100644 tools/vendoring/patches/tenacity.patch diff --git a/news/10020.bugfix.rst b/news/10020.bugfix.rst new file mode 100644 index 00000000000..9425626fb07 --- /dev/null +++ b/news/10020.bugfix.rst @@ -0,0 +1 @@ +Remove unused optional ``tornado`` import in vendored ``tenacity`` to prevent old versions of Tornado from breaking pip. diff --git a/src/pip/_vendor/tenacity/__init__.py b/src/pip/_vendor/tenacity/__init__.py index 5f8cb505896..42e9d8940b1 100644 --- a/src/pip/_vendor/tenacity/__init__.py +++ b/src/pip/_vendor/tenacity/__init__.py @@ -22,10 +22,12 @@ except ImportError: iscoroutinefunction = None -try: - import tornado -except ImportError: - tornado = None +# Replace a conditional import with a hard-coded None so that pip does +# not attempt to use tornado even if it is present in the environment. +# If tornado is non-None, tenacity will attempt to execute some code +# that is sensitive to the version of tornado, which could break pip +# if an old version is found. 
+tornado = None import sys import threading diff --git a/tools/vendoring/patches/tenacity.patch b/tools/vendoring/patches/tenacity.patch new file mode 100644 index 00000000000..006588b3653 --- /dev/null +++ b/tools/vendoring/patches/tenacity.patch @@ -0,0 +1,21 @@ +diff --git a/src/pip/_vendor/tenacity/__init__.py b/src/pip/_vendor/tenacity/__init__.py +index 5f8cb5058..42e9d8940 100644 +--- a/src/pip/_vendor/tenacity/__init__.py ++++ b/src/pip/_vendor/tenacity/__init__.py +@@ -22,10 +22,12 @@ try: + except ImportError: + iscoroutinefunction = None + +-try: +- import tornado +-except ImportError: +- tornado = None ++# Replace a conditional import with a hard-coded None so that pip does ++# not attempt to use tornado even if it is present in the environment. ++# If tornado is non-None, tenacity will attempt to execute some code ++# that is sensitive to the version of tornado, which could break pip ++# if an old version is found. ++tornado = None + + import sys + import threading From c141edc5c90d0e3c2f40635516f151e2d02b5d95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Filipe=20La=C3=ADns?= Date: Sat, 29 May 2021 16:11:31 +0100 Subject: [PATCH 17/60] remove support for setup.cfg only projects MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Per the discussion in #9945. Signed-off-by: Filipe Laíns --- news/10031.bugfix.rst | 1 + src/pip/_internal/req/req_install.py | 18 ++---------------- 2 files changed, 3 insertions(+), 16 deletions(-) create mode 100644 news/10031.bugfix.rst diff --git a/news/10031.bugfix.rst b/news/10031.bugfix.rst new file mode 100644 index 00000000000..8b5332bb06a --- /dev/null +++ b/news/10031.bugfix.rst @@ -0,0 +1 @@ +Require ``setup.cfg``-only projects to be built via PEP 517, by requiring an explicit dependency on setuptools declared in pyproject.toml. 
diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index c2eea37123e..f4d6251412b 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -509,19 +509,6 @@ def load_pyproject_toml(self): self.unpacked_source_directory, backend, backend_path=backend_path, ) - def _check_setup_py_or_cfg_exists(self) -> bool: - """Check if the requirement actually has a setuptools build file. - - If setup.py does not exist, we also check setup.cfg in the same - directory and allow the directory if that exists. - """ - if os.path.exists(self.setup_py_path): - return True - stem, ext = os.path.splitext(self.setup_py_path) - if ext == ".py" and os.path.exists(f"{stem}.cfg"): - return True - return False - def _generate_metadata(self): # type: () -> str """Invokes metadata generator functions, with the required arguments. @@ -529,10 +516,9 @@ def _generate_metadata(self): if not self.use_pep517: assert self.unpacked_source_directory - if not self._check_setup_py_or_cfg_exists(): + if not os.path.exists(self.setup_py_path): raise InstallationError( - f'File "setup.py" or "setup.cfg" not found for legacy ' - f'project {self}.' + f'File "setup.py" not found for legacy project {self}.' ) return generate_metadata_legacy( From e69a8f3987f44178dec8b9137158b4a6ed778ca3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 26 Jun 2021 10:48:30 +0200 Subject: [PATCH 18/60] Bump for release --- NEWS.rst | 10 ++++++++++ news/10020.bugfix.rst | 1 - news/10031.bugfix.rst | 1 - src/pip/__init__.py | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) delete mode 100644 news/10020.bugfix.rst delete mode 100644 news/10031.bugfix.rst diff --git a/NEWS.rst b/NEWS.rst index 69655db57e6..e4f63f0e8cc 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,16 @@ .. 
towncrier release notes start +21.1.3 (2021-06-26) +=================== + +Bug Fixes +--------- + +- Remove unused optional ``tornado`` import in vendored ``tenacity`` to prevent old versions of Tornado from breaking pip. (`#10020 `_) +- Require ``setup.cfg``-only projects to be built via PEP 517, by requiring an explicit dependency on setuptools declared in pyproject.toml. (`#10031 `_) + + 21.1.2 (2021-05-23) =================== diff --git a/news/10020.bugfix.rst b/news/10020.bugfix.rst deleted file mode 100644 index 9425626fb07..00000000000 --- a/news/10020.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Remove unused optional ``tornado`` import in vendored ``tenacity`` to prevent old versions of Tornado from breaking pip. diff --git a/news/10031.bugfix.rst b/news/10031.bugfix.rst deleted file mode 100644 index 8b5332bb06a..00000000000 --- a/news/10031.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Require ``setup.cfg``-only projects to be built via PEP 517, by requiring an explicit dependency on setuptools declared in pyproject.toml. 
diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 82f53c38cab..75815d8bb6a 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "21.1.2" +__version__ = "21.1.3" def main(args=None): From 5cf74b23cfb3afea408ab69304a02702a67e5878 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 26 Jun 2021 10:48:35 +0200 Subject: [PATCH 19/60] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 75815d8bb6a..c35872ca9db 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "21.1.3" +__version__ = "21.2.dev0" def main(args=None): From c06bb2ac979971aee835f7bfbd2355a011175f24 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 26 Jun 2021 10:52:38 +0200 Subject: [PATCH 20/60] Remove 21.1.3 new fragments --- news/10020.bugfix.rst | 1 - news/10031.bugfix.rst | 1 - 2 files changed, 2 deletions(-) delete mode 100644 news/10020.bugfix.rst delete mode 100644 news/10031.bugfix.rst diff --git a/news/10020.bugfix.rst b/news/10020.bugfix.rst deleted file mode 100644 index 9425626fb07..00000000000 --- a/news/10020.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Remove unused optional ``tornado`` import in vendored ``tenacity`` to prevent old versions of Tornado from breaking pip. diff --git a/news/10031.bugfix.rst b/news/10031.bugfix.rst deleted file mode 100644 index 8b5332bb06a..00000000000 --- a/news/10031.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Require ``setup.cfg``-only projects to be built via PEP 517, by requiring an explicit dependency on setuptools declared in pyproject.toml. 
From 44b3c90bfd8cd584b5de4acbddd608d6343dd1a4 Mon Sep 17 00:00:00 2001 From: Diego Ramirez Date: Sat, 26 Jun 2021 19:36:35 -0500 Subject: [PATCH 21/60] Complete type annotations in pip/_internal/cli Co-authored-by: Tzu-ping Chung --- news/10065.trivial.rst | 1 + src/pip/_internal/cli/progress_bars.py | 35 +++----- src/pip/_internal/cli/req_command.py | 109 ++++++++++++------------- src/pip/_internal/cli/spinners.py | 51 ++++-------- 4 files changed, 81 insertions(+), 115 deletions(-) create mode 100644 news/10065.trivial.rst diff --git a/news/10065.trivial.rst b/news/10065.trivial.rst new file mode 100644 index 00000000000..d6bbe71c21a --- /dev/null +++ b/news/10065.trivial.rst @@ -0,0 +1 @@ +Fixed all the annotations from ``pip/_internal/cli``. diff --git a/src/pip/_internal/cli/progress_bars.py b/src/pip/_internal/cli/progress_bars.py index 3064c85697b..4d1413af33f 100644 --- a/src/pip/_internal/cli/progress_bars.py +++ b/src/pip/_internal/cli/progress_bars.py @@ -1,7 +1,7 @@ import itertools import sys from signal import SIGINT, default_int_handler, signal -from typing import Any, Dict, List +from typing import Any from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar from pip._vendor.progress.spinner import Spinner @@ -18,8 +18,7 @@ colorama = None -def _select_progress_class(preferred, fallback): - # type: (Bar, Bar) -> Bar +def _select_progress_class(preferred: Bar, fallback: Bar) -> Bar: encoding = getattr(preferred.file, "encoding", None) # If we don't know what encoding this file is in, then we'll just assume @@ -67,8 +66,7 @@ class InterruptibleMixin: download has already completed, for example. """ - def __init__(self, *args, **kwargs): - # type: (List[Any], Dict[Any, Any]) -> None + def __init__(self, *args: Any, **kwargs: Any) -> None: """ Save the original SIGINT handler for later. 
""" @@ -85,8 +83,7 @@ def __init__(self, *args, **kwargs): if self.original_handler is None: self.original_handler = default_int_handler - def finish(self): - # type: () -> None + def finish(self) -> None: """ Restore the original SIGINT handler after finishing. @@ -108,8 +105,7 @@ def handle_sigint(self, signum, frame): # type: ignore class SilentBar(Bar): - def update(self): - # type: () -> None + def update(self) -> None: pass @@ -122,28 +118,24 @@ class BlueEmojiBar(IncrementalBar): class DownloadProgressMixin: - def __init__(self, *args, **kwargs): - # type: (List[Any], Dict[Any, Any]) -> None + def __init__(self, *args: Any, **kwargs: Any) -> None: # https://github.com/python/mypy/issues/5887 super().__init__(*args, **kwargs) # type: ignore self.message = (" " * (get_indentation() + 2)) + self.message # type: str @property - def downloaded(self): - # type: () -> str + def downloaded(self) -> str: return format_size(self.index) # type: ignore @property - def download_speed(self): - # type: () -> str + def download_speed(self) -> str: # Avoid zero division errors... if self.avg == 0.0: # type: ignore return "..." return format_size(1 / self.avg) + "/s" # type: ignore @property - def pretty_eta(self): - # type: () -> str + def pretty_eta(self) -> str: if self.eta: # type: ignore return f"eta {self.eta_td}" # type: ignore return "" @@ -158,8 +150,7 @@ def iter(self, it): # type: ignore class WindowsMixin: - def __init__(self, *args, **kwargs): - # type: (List[Any], Dict[Any, Any]) -> None + def __init__(self, *args: Any, **kwargs: Any) -> None: # The Windows terminal does not support the hide/show cursor ANSI codes # even with colorama. So we'll ensure that hide_cursor is False on # Windows. 
@@ -221,14 +212,12 @@ class DownloadProgressSpinner( file = sys.stdout suffix = "%(downloaded)s %(download_speed)s" - def next_phase(self): - # type: () -> str + def next_phase(self) -> str: if not hasattr(self, "_phaser"): self._phaser = itertools.cycle(self.phases) return next(self._phaser) - def update(self): - # type: () -> None + def update(self) -> None: message = self.message % self phase = self.next_phase() suffix = self.suffix % self diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 7ed623880ac..377351e20b9 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -50,14 +50,12 @@ class SessionCommandMixin(CommandContextMixIn): A class mixin for command classes needing _build_session(). """ - def __init__(self): - # type: () -> None + def __init__(self) -> None: super().__init__() - self._session = None # Optional[PipSession] + self._session: Optional[PipSession] = None @classmethod - def _get_index_urls(cls, options): - # type: (Values) -> Optional[List[str]] + def _get_index_urls(cls, options: Values) -> Optional[List[str]]: """Return a list of index urls from user-provided options.""" index_urls = [] if not getattr(options, "no_index", False): @@ -70,8 +68,7 @@ def _get_index_urls(cls, options): # Return None rather than an empty list return index_urls or None - def get_default_session(self, options): - # type: (Values) -> PipSession + def get_default_session(self, options: Values) -> PipSession: """Get a default-managed session.""" if self._session is None: self._session = self.enter_context(self._build_session(options)) @@ -81,8 +78,12 @@ def get_default_session(self, options): assert self._session is not None return self._session - def _build_session(self, options, retries=None, timeout=None): - # type: (Values, Optional[int], Optional[int]) -> PipSession + def _build_session( + self, + options: Values, + retries: Optional[int] = None, + timeout: Optional[int] = None, 
+ ) -> PipSession: assert not options.cache_dir or os.path.isabs(options.cache_dir) session = PipSession( cache=( @@ -126,8 +127,7 @@ class IndexGroupCommand(Command, SessionCommandMixin): This also corresponds to the commands that permit the pip version check. """ - def handle_pip_version_check(self, options): - # type: (Values) -> None + def handle_pip_version_check(self, options: Values) -> None: """ Do the pip version check if not disabled. @@ -154,8 +154,7 @@ def handle_pip_version_check(self, options): ] -def warn_if_run_as_root(): - # type: () -> None +def warn_if_run_as_root() -> None: """Output a warning for sudo users on Unix. In a virtual environment, sudo pip still writes to virtualenv. @@ -184,19 +183,18 @@ def warn_if_run_as_root(): ) -def with_cleanup(func): - # type: (Any) -> Any +def with_cleanup(func: Any) -> Any: """Decorator for common logic related to managing temporary directories. """ - def configure_tempdir_registry(registry): - # type: (TempDirectoryTypeRegistry) -> None + def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None: for t in KEEPABLE_TEMPDIR_TYPES: registry.set_delete(t, False) - def wrapper(self, options, args): - # type: (RequirementCommand, Values, List[Any]) -> Optional[int] + def wrapper( + self: RequirementCommand, options: Values, args: List[Any] + ) -> Optional[int]: assert self.tempdir_registry is not None if options.no_clean: configure_tempdir_registry(self.tempdir_registry) @@ -214,15 +212,13 @@ def wrapper(self, options, args): class RequirementCommand(IndexGroupCommand): - def __init__(self, *args, **kw): - # type: (Any, Any) -> None + def __init__(self, *args: Any, **kw: Any) -> None: super().__init__(*args, **kw) self.cmd_opts.add_option(cmdoptions.no_clean()) @staticmethod - def determine_resolver_variant(options): - # type: (Values) -> str + def determine_resolver_variant(options: Values) -> str: """Determines which resolver should be used, based on the given options.""" if "legacy-resolver" 
in options.deprecated_features_enabled: return "legacy" @@ -232,15 +228,14 @@ def determine_resolver_variant(options): @classmethod def make_requirement_preparer( cls, - temp_build_dir, # type: TempDirectory - options, # type: Values - req_tracker, # type: RequirementTracker - session, # type: PipSession - finder, # type: PackageFinder - use_user_site, # type: bool - download_dir=None, # type: str - ): - # type: (...) -> RequirementPreparer + temp_build_dir: TempDirectory, + options: Values, + req_tracker: RequirementTracker, + session: PipSession, + finder: PackageFinder, + use_user_site: bool, + download_dir: Optional[str] = None, + ) -> RequirementPreparer: """ Create a RequirementPreparer instance for the given parameters. """ @@ -283,19 +278,18 @@ def make_requirement_preparer( @classmethod def make_resolver( cls, - preparer, # type: RequirementPreparer - finder, # type: PackageFinder - options, # type: Values - wheel_cache=None, # type: Optional[WheelCache] - use_user_site=False, # type: bool - ignore_installed=True, # type: bool - ignore_requires_python=False, # type: bool - force_reinstall=False, # type: bool - upgrade_strategy="to-satisfy-only", # type: str - use_pep517=None, # type: Optional[bool] - py_version_info=None, # type: Optional[Tuple[int, ...]] - ): - # type: (...) -> BaseResolver + preparer: RequirementPreparer, + finder: PackageFinder, + options: Values, + wheel_cache: Optional[WheelCache] = None, + use_user_site: bool = False, + ignore_installed: bool = True, + ignore_requires_python: bool = False, + force_reinstall: bool = False, + upgrade_strategy: str = "to-satisfy-only", + use_pep517: Optional[bool] = None, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> BaseResolver: """ Create a Resolver instance for the given parameters. 
""" @@ -342,12 +336,11 @@ def make_resolver( def get_requirements( self, - args, # type: List[str] - options, # type: Values - finder, # type: PackageFinder - session, # type: PipSession - ): - # type: (...) -> List[InstallRequirement] + args: List[str], + options: Values, + finder: PackageFinder, + session: PipSession, + ) -> List[InstallRequirement]: """ Parse command-line arguments into the corresponding requirements. """ @@ -421,8 +414,7 @@ def get_requirements( return requirements @staticmethod - def trace_basic_info(finder): - # type: (PackageFinder) -> None + def trace_basic_info(finder: PackageFinder) -> None: """ Trace basic information about the provided objects. """ @@ -434,12 +426,11 @@ def trace_basic_info(finder): def _build_package_finder( self, - options, # type: Values - session, # type: PipSession - target_python=None, # type: Optional[TargetPython] - ignore_requires_python=None, # type: Optional[bool] - ): - # type: (...) -> PackageFinder + options: Values, + session: PipSession, + target_python: Optional[TargetPython] = None, + ignore_requires_python: Optional[bool] = None, + ) -> PackageFinder: """ Create a package finder appropriate to this requirement command. 
diff --git a/src/pip/_internal/cli/spinners.py b/src/pip/_internal/cli/spinners.py index 08e156617c4..4d3ae6e2997 100644 --- a/src/pip/_internal/cli/spinners.py +++ b/src/pip/_internal/cli/spinners.py @@ -14,25 +14,22 @@ class SpinnerInterface: - def spin(self): - # type: () -> None + def spin(self) -> None: raise NotImplementedError() - def finish(self, final_status): - # type: (str) -> None + def finish(self, final_status: str) -> None: raise NotImplementedError() class InteractiveSpinner(SpinnerInterface): def __init__( self, - message, - file=None, - spin_chars="-\\|/", + message: str, + file: IO[str] = None, + spin_chars: str = "-\\|/", # Empirically, 8 updates/second looks nice - min_update_interval_seconds=0.125, + min_update_interval_seconds: float = 0.125, ): - # type: (str, IO[str], str, float) -> None self._message = message if file is None: file = sys.stdout @@ -45,8 +42,7 @@ def __init__( self._file.write(" " * get_indentation() + self._message + " ... ") self._width = 0 - def _write(self, status): - # type: (str) -> None + def _write(self, status: str) -> None: assert not self._finished # Erase what we wrote before by backspacing to the beginning, writing # spaces to overwrite the old text, and then backspacing again @@ -58,16 +54,14 @@ def _write(self, status): self._file.flush() self._rate_limiter.reset() - def spin(self): - # type: () -> None + def spin(self) -> None: if self._finished: return if not self._rate_limiter.ready(): return self._write(next(self._spin_cycle)) - def finish(self, final_status): - # type: (str) -> None + def finish(self, final_status: str) -> None: if self._finished: return self._write(final_status) @@ -81,29 +75,25 @@ def finish(self, final_status): # act as a keep-alive for systems like Travis-CI that take lack-of-output as # an indication that a task has frozen. 
class NonInteractiveSpinner(SpinnerInterface): - def __init__(self, message, min_update_interval_seconds=60): - # type: (str, float) -> None + def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None: self._message = message self._finished = False self._rate_limiter = RateLimiter(min_update_interval_seconds) self._update("started") - def _update(self, status): - # type: (str) -> None + def _update(self, status: str) -> None: assert not self._finished self._rate_limiter.reset() logger.info("%s: %s", self._message, status) - def spin(self): - # type: () -> None + def spin(self) -> None: if self._finished: return if not self._rate_limiter.ready(): return self._update("still running...") - def finish(self, final_status): - # type: (str) -> None + def finish(self, final_status: str) -> None: if self._finished: return self._update(f"finished with status '{final_status}'") @@ -111,25 +101,21 @@ def finish(self, final_status): class RateLimiter: - def __init__(self, min_update_interval_seconds): - # type: (float) -> None + def __init__(self, min_update_interval_seconds: float) -> None: self._min_update_interval_seconds = min_update_interval_seconds self._last_update = 0 # type: float - def ready(self): - # type: () -> bool + def ready(self) -> bool: now = time.time() delta = now - self._last_update return delta >= self._min_update_interval_seconds - def reset(self): - # type: () -> None + def reset(self) -> None: self._last_update = time.time() @contextlib.contextmanager -def open_spinner(message): - # type: (str) -> Iterator[SpinnerInterface] +def open_spinner(message: str) -> Iterator[SpinnerInterface]: # Interactive spinner goes directly to sys.stdout rather than being routed # through the logging system, but it acts like it has level INFO, # i.e. it's only displayed if we're at level INFO or better. 
@@ -153,8 +139,7 @@ def open_spinner(message): @contextlib.contextmanager -def hidden_cursor(file): - # type: (IO[str]) -> Iterator[None] +def hidden_cursor(file: IO[str]) -> Iterator[None]: # The Windows terminal does not support the hide/show cursor ANSI codes, # even via colorama. So don't even try. if WINDOWS: From 6a346a49484d1a445f32a322154d1c4d72c76766 Mon Sep 17 00:00:00 2001 From: Joe Michelini Date: Thu, 4 Mar 2021 17:46:02 -0500 Subject: [PATCH 22/60] add default proceed to uninstall --- src/pip/_internal/req/req_uninstall.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 83decc61d2e..946b8dd90b4 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -438,7 +438,10 @@ def _display(msg, paths): if verbose: _display('Will actually move:', compress_for_rename(self.paths)) - return ask('Proceed (y/n)? ', ('y', 'n')) == 'y' + if ask('Proceed (Y/n)? ', ('y', 'n', '')) in ('y', ''): + return True + else: + return False def rollback(self): # type: () -> None From 0a08cf98b6123a4f497ed626748a50b33a6e6d9d Mon Sep 17 00:00:00 2001 From: Joe Michelini Date: Thu, 4 Mar 2021 17:55:44 -0500 Subject: [PATCH 23/60] add default proceed to uninstall --- news/9686.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/9686.feature.rst diff --git a/news/9686.feature.rst b/news/9686.feature.rst new file mode 100644 index 00000000000..3f12beabc46 --- /dev/null +++ b/news/9686.feature.rst @@ -0,0 +1 @@ +Allow 'Enter' to automatically proceed with uninstall. 
\ No newline at end of file From bf7ae72ff73cf78943673fa1cd1a067f01aa6ec2 Mon Sep 17 00:00:00 2001 From: Joe Michelini Date: Thu, 4 Mar 2021 18:06:57 -0500 Subject: [PATCH 24/60] add newline to news rst --- news/9686.feature.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/9686.feature.rst b/news/9686.feature.rst index 3f12beabc46..784cd5666fc 100644 --- a/news/9686.feature.rst +++ b/news/9686.feature.rst @@ -1 +1 @@ -Allow 'Enter' to automatically proceed with uninstall. \ No newline at end of file +Allow 'Enter' to automatically proceed with uninstall. From c7c4ade6013e71ea9f2e08869b8a193807c65fa3 Mon Sep 17 00:00:00 2001 From: Joe Michelini <66066937+afolksetapart@users.noreply.github.com> Date: Sat, 6 Mar 2021 12:36:28 -0500 Subject: [PATCH 25/60] Update news/9686.feature.rst Co-authored-by: Pradyun Gedam <3275593+pradyunsg@users.noreply.github.com> --- news/9686.feature.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/9686.feature.rst b/news/9686.feature.rst index 784cd5666fc..fefd7768f8b 100644 --- a/news/9686.feature.rst +++ b/news/9686.feature.rst @@ -1 +1 @@ -Allow 'Enter' to automatically proceed with uninstall. +Make "yes" the default choice in ``pip uninstall``'s prompt. 
From 197392ca3d62c02be1a4bedd3ab89837b4eeb1d3 Mon Sep 17 00:00:00 2001 From: Joe Michelini <66066937+afolksetapart@users.noreply.github.com> Date: Sat, 6 Mar 2021 12:36:46 -0500 Subject: [PATCH 26/60] Update src/pip/_internal/req/req_uninstall.py Co-authored-by: Pradyun Gedam <3275593+pradyunsg@users.noreply.github.com> --- src/pip/_internal/req/req_uninstall.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 946b8dd90b4..e83f03d6f3c 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -438,10 +438,7 @@ def _display(msg, paths): if verbose: _display('Will actually move:', compress_for_rename(self.paths)) - if ask('Proceed (Y/n)? ', ('y', 'n', '')) in ('y', ''): - return True - else: - return False + return ask('Proceed (Y/n)? ', ('y', 'n', '')) != 'n' def rollback(self): # type: () -> None From a5ea1d8d508dd56e2bd9c31ce306592e648c98c6 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Tue, 29 Jun 2021 19:21:12 +0000 Subject: [PATCH 27/60] Add `.venv` to `.gitignore` This directory is a fairly common virtual environment name, and, more importantly, is used by GitHub Codespaces as their default virtual environment path without adding it to a global gitignore. --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index da9a31ab521..79b8ab84b06 100644 --- a/.gitignore +++ b/.gitignore @@ -32,6 +32,7 @@ tests/data/common_wheels/ *~ .*.sw? 
.env/ +.venv/ # For IntelliJ IDEs (basically PyCharm) .idea/ From e68ac94595aa3af9c8981479ce305fdb4143c8b6 Mon Sep 17 00:00:00 2001 From: Taneli Hukkinen <3275109+hukkin@users.noreply.github.com> Date: Fri, 2 Jul 2021 00:11:35 +0300 Subject: [PATCH 28/60] Vendor `tomli` v1.0.3 --- src/pip/_vendor/__init__.py | 1 + src/pip/_vendor/tomli.pyi | 1 + src/pip/_vendor/tomli/LICENSE | 21 + src/pip/_vendor/tomli/__init__.py | 6 + src/pip/_vendor/tomli/_parser.py | 703 ++++++++++++++++++++++++++++++ src/pip/_vendor/tomli/_re.py | 83 ++++ src/pip/_vendor/tomli/py.typed | 1 + src/pip/_vendor/vendor.txt | 1 + 8 files changed, 817 insertions(+) create mode 100644 src/pip/_vendor/tomli.pyi create mode 100644 src/pip/_vendor/tomli/LICENSE create mode 100644 src/pip/_vendor/tomli/__init__.py create mode 100644 src/pip/_vendor/tomli/_parser.py create mode 100644 src/pip/_vendor/tomli/_re.py create mode 100644 src/pip/_vendor/tomli/py.typed diff --git a/src/pip/_vendor/__init__.py b/src/pip/_vendor/__init__.py index a10ecd6074a..744a43b039e 100644 --- a/src/pip/_vendor/__init__.py +++ b/src/pip/_vendor/__init__.py @@ -110,4 +110,5 @@ def vendored(modulename): vendored("toml") vendored("toml.encoder") vendored("toml.decoder") + vendored("tomli") vendored("urllib3") diff --git a/src/pip/_vendor/tomli.pyi b/src/pip/_vendor/tomli.pyi new file mode 100644 index 00000000000..b894db6919b --- /dev/null +++ b/src/pip/_vendor/tomli.pyi @@ -0,0 +1 @@ +from tomli import * \ No newline at end of file diff --git a/src/pip/_vendor/tomli/LICENSE b/src/pip/_vendor/tomli/LICENSE new file mode 100644 index 00000000000..e859590f886 --- /dev/null +++ b/src/pip/_vendor/tomli/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Taneli Hukkinen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, 
merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/src/pip/_vendor/tomli/__init__.py b/src/pip/_vendor/tomli/__init__.py new file mode 100644 index 00000000000..1cd8e07279a --- /dev/null +++ b/src/pip/_vendor/tomli/__init__.py @@ -0,0 +1,6 @@ +"""A lil' TOML parser.""" + +__all__ = ("loads", "load", "TOMLDecodeError") +__version__ = "1.0.3" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT + +from pip._vendor.tomli._parser import TOMLDecodeError, load, loads diff --git a/src/pip/_vendor/tomli/_parser.py b/src/pip/_vendor/tomli/_parser.py new file mode 100644 index 00000000000..730a746843b --- /dev/null +++ b/src/pip/_vendor/tomli/_parser.py @@ -0,0 +1,703 @@ +import string +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + FrozenSet, + Iterable, + Optional, + TextIO, + Tuple, +) + +from pip._vendor.tomli._re import ( + RE_BIN, + RE_DATETIME, + RE_HEX, + RE_LOCALTIME, + RE_NUMBER, + RE_OCT, + match_to_datetime, + match_to_localtime, + match_to_number, +) + +if TYPE_CHECKING: + from re import Pattern + + +ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) + +# Neither of these sets include quotation mark or backslash. 
They are +# currently handled as separate cases in the parser functions. +ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t") +ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n\r") + +ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS +ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ASCII_CTRL - frozenset("\t\n") + +ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS + +TOML_WS = frozenset(" \t") +TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") +BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") +KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") + +BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( + { + "\\b": "\u0008", # backspace + "\\t": "\u0009", # tab + "\\n": "\u000A", # linefeed + "\\f": "\u000C", # form feed + "\\r": "\u000D", # carriage return + '\\"': "\u0022", # quote + "\\\\": "\u005C", # backslash + } +) + +# Type annotations +ParseFloat = Callable[[str], Any] +Key = Tuple[str, ...] +Pos = int + + +class TOMLDecodeError(ValueError): + """An error raised if a document is not valid TOML.""" + + +def load(fp: TextIO, *, parse_float: ParseFloat = float) -> Dict[str, Any]: + """Parse TOML from a file object.""" + s = fp.read() + return loads(s, parse_float=parse_float) + + +def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]: # noqa: C901 + """Parse TOML from a string.""" + + # The spec allows converting "\r\n" to "\n", even in string + # literals. Let's do so to simplify parsing. + src = s.replace("\r\n", "\n") + pos = 0 + state = State() + + # Parse one statement at a time + # (typically means one line in TOML source) + while True: + # 1. Skip line leading whitespace + pos = skip_chars(src, pos, TOML_WS) + + # 2. Parse rules. Expect one of the following: + # - end of file + # - end of line + # - comment + # - key/value pair + # - append dict to list (and move to its namespace) + # - create dict (and move to its namespace) + # Skip trailing whitespace when applicable. 
+ try: + char = src[pos] + except IndexError: + break + if char == "\n": + pos += 1 + continue + if char in KEY_INITIAL_CHARS: + pos = key_value_rule(src, pos, state, parse_float) + pos = skip_chars(src, pos, TOML_WS) + elif char == "[": + try: + second_char: Optional[str] = src[pos + 1] + except IndexError: + second_char = None + if second_char == "[": + pos = create_list_rule(src, pos, state) + else: + pos = create_dict_rule(src, pos, state) + pos = skip_chars(src, pos, TOML_WS) + elif char != "#": + raise suffixed_err(src, pos, "Invalid statement") + + # 3. Skip comment + pos = skip_comment(src, pos) + + # 4. Expect end of line or end of file + try: + char = src[pos] + except IndexError: + break + if char != "\n": + raise suffixed_err( + src, pos, "Expected newline or end of document after a statement" + ) + pos += 1 + + return state.out.dict + + +class State: + def __init__(self) -> None: + # Mutable, read-only + self.out = NestedDict() + self.flags = Flags() + + # Immutable, read and write + self.header_namespace: Key = () + + +class Flags: + """Flags that map to parsed keys/namespaces.""" + + # Marks an immutable namespace (inline array or inline table). + FROZEN = 0 + # Marks a nest that has been explicitly created and can no longer + # be opened using the "[table]" syntax. 
+ EXPLICIT_NEST = 1 + + def __init__(self) -> None: + self._flags: Dict[str, dict] = {} + + def unset_all(self, key: Key) -> None: + cont = self._flags + for k in key[:-1]: + if k not in cont: + return + cont = cont[k]["nested"] + cont.pop(key[-1], None) + + def set_for_relative_key(self, head_key: Key, rel_key: Key, flag: int) -> None: + cont = self._flags + for k in head_key: + if k not in cont: + cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + for k in rel_key: + if k in cont: + cont[k]["flags"].add(flag) + else: + cont[k] = {"flags": {flag}, "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + + def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003 + cont = self._flags + key_parent, key_stem = key[:-1], key[-1] + for k in key_parent: + if k not in cont: + cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont = cont[k]["nested"] + if key_stem not in cont: + cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}} + cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag) + + def is_(self, key: Key, flag: int) -> bool: + if not key: + return False # document root has no flags + cont = self._flags + for k in key[:-1]: + if k not in cont: + return False + inner_cont = cont[k] + if flag in inner_cont["recursive_flags"]: + return True + cont = inner_cont["nested"] + key_stem = key[-1] + if key_stem in cont: + cont = cont[key_stem] + return flag in cont["flags"] or flag in cont["recursive_flags"] + return False + + +class NestedDict: + def __init__(self) -> None: + # The parsed content of the TOML document + self.dict: Dict[str, Any] = {} + + def get_or_create_nest( + self, + key: Key, + *, + access_lists: bool = True, + ) -> dict: + cont: Any = self.dict + for k in key: + if k not in cont: + cont[k] = {} + cont = cont[k] + if access_lists and isinstance(cont, list): + cont = cont[-1] + if not isinstance(cont, dict): + raise 
KeyError("There is no nest behind this key") + return cont + + def append_nest_to_list(self, key: Key) -> None: + cont = self.get_or_create_nest(key[:-1]) + last_key = key[-1] + if last_key in cont: + list_ = cont[last_key] + if not isinstance(list_, list): + raise KeyError("An object other than list found behind this key") + list_.append({}) + else: + cont[last_key] = [{}] + + +def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: + try: + while src[pos] in chars: + pos += 1 + except IndexError: + pass + return pos + + +def skip_until( + src: str, + pos: Pos, + expect: str, + *, + error_on: FrozenSet[str], + error_on_eof: bool, +) -> Pos: + try: + new_pos = src.index(expect, pos) + except ValueError: + new_pos = len(src) + if error_on_eof: + raise suffixed_err(src, new_pos, f'Expected "{expect!r}"') + + bad_chars = error_on.intersection(src[pos:new_pos]) + if bad_chars: + bad_char = next(iter(bad_chars)) + bad_pos = src.index(bad_char, pos) + raise suffixed_err(src, bad_pos, f'Found invalid character "{bad_char!r}"') + return new_pos + + +def skip_comment(src: str, pos: Pos) -> Pos: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char == "#": + return skip_until( + src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False + ) + return pos + + +def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos: + while True: + pos_before_skip = pos + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + pos = skip_comment(src, pos) + if pos == pos_before_skip: + return pos + + +def create_dict_rule(src: str, pos: Pos, state: State) -> Pos: + pos += 1 # Skip "[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if state.flags.is_(key, Flags.EXPLICIT_NEST) or state.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Can not declare {key} twice") + state.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + state.out.get_or_create_nest(key) + except KeyError: + raise 
suffixed_err(src, pos, "Can not overwrite a value") + state.header_namespace = key + + if src[pos : pos + 1] != "]": + raise suffixed_err(src, pos, 'Expected "]" at the end of a table declaration') + return pos + 1 + + +def create_list_rule(src: str, pos: Pos, state: State) -> Pos: + pos += 2 # Skip "[[" + pos = skip_chars(src, pos, TOML_WS) + pos, key = parse_key(src, pos) + + if state.flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}") + # Free the namespace now that it points to another empty list item... + state.flags.unset_all(key) + # ...but this key precisely is still prohibited from table declaration + state.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) + try: + state.out.append_nest_to_list(key) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") + state.header_namespace = key + + end_marker = src[pos : pos + 2] + if end_marker != "]]": + raise suffixed_err( + src, + pos, + f'Found "{end_marker!r}" at the end of an array declaration.' 
+ ' Expected "]]"', + ) + return pos + 2 + + +def key_value_rule(src: str, pos: Pos, state: State, parse_float: ParseFloat) -> Pos: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + abs_key_parent = state.header_namespace + key_parent + + if state.flags.is_(abs_key_parent, Flags.FROZEN): + raise suffixed_err( + src, pos, f"Can not mutate immutable namespace {abs_key_parent}" + ) + # Containers in the relative path can't be opened with the table syntax after this + state.flags.set_for_relative_key(state.header_namespace, key, Flags.EXPLICIT_NEST) + try: + nest = state.out.get_or_create_nest(abs_key_parent) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") + if key_stem in nest: + raise suffixed_err(src, pos, "Can not overwrite a value") + # Mark inline table and array namespaces recursively immutable + if isinstance(value, (dict, list)): + abs_key = state.header_namespace + key + state.flags.set(abs_key, Flags.FROZEN, recursive=True) + nest[key_stem] = value + return pos + + +def parse_key_value_pair( + src: str, pos: Pos, parse_float: ParseFloat +) -> Tuple[Pos, Key, Any]: + pos, key = parse_key(src, pos) + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char != "=": + raise suffixed_err(src, pos, 'Expected "=" after a key in a key/value pair') + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, value = parse_value(src, pos, parse_float) + return pos, key, value + + +def parse_key(src: str, pos: Pos) -> Tuple[Pos, Key]: + pos, key_part = parse_key_part(src, pos) + key = [key_part] + pos = skip_chars(src, pos, TOML_WS) + while True: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char != ".": + return pos, tuple(key) + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + pos, key_part = parse_key_part(src, pos) + key.append(key_part) + pos = skip_chars(src, pos, TOML_WS) + + +def parse_key_part(src: str, pos: Pos) -> 
Tuple[Pos, str]: + try: + char: Optional[str] = src[pos] + except IndexError: + char = None + if char in BARE_KEY_CHARS: + start_pos = pos + pos = skip_chars(src, pos, BARE_KEY_CHARS) + return pos, src[start_pos:pos] + if char == "'": + return parse_literal_str(src, pos) + if char == '"': + return parse_one_line_basic_str(src, pos) + raise suffixed_err(src, pos, "Invalid initial character for a key part") + + +def parse_one_line_basic_str(src: str, pos: Pos) -> Tuple[Pos, str]: + pos += 1 + return parse_basic_str(src, pos, multiline=False) + + +def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, list]: + pos += 1 + array: list = [] + + pos = skip_comments_and_array_ws(src, pos) + if src[pos : pos + 1] == "]": + return pos + 1, array + while True: + pos, val = parse_value(src, pos, parse_float) + array.append(val) + pos = skip_comments_and_array_ws(src, pos) + + c = src[pos : pos + 1] + if c == "]": + return pos + 1, array + if c != ",": + raise suffixed_err(src, pos, "Unclosed array") + pos += 1 + + pos = skip_comments_and_array_ws(src, pos) + if src[pos : pos + 1] == "]": + return pos + 1, array + + +def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, dict]: + pos += 1 + nested_dict = NestedDict() + flags = Flags() + + pos = skip_chars(src, pos, TOML_WS) + if src[pos : pos + 1] == "}": + return pos + 1, nested_dict.dict + while True: + pos, key, value = parse_key_value_pair(src, pos, parse_float) + key_parent, key_stem = key[:-1], key[-1] + if flags.is_(key, Flags.FROZEN): + raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}") + try: + nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) + except KeyError: + raise suffixed_err(src, pos, "Can not overwrite a value") + if key_stem in nest: + raise suffixed_err(src, pos, f'Duplicate inline table key "{key_stem}"') + nest[key_stem] = value + pos = skip_chars(src, pos, TOML_WS) + c = src[pos : pos + 1] + if c == "}": + return pos 
+ 1, nested_dict.dict + if c != ",": + raise suffixed_err(src, pos, "Unclosed inline table") + if isinstance(value, (dict, list)): + flags.set(key, Flags.FROZEN, recursive=True) + pos += 1 + pos = skip_chars(src, pos, TOML_WS) + + +def parse_basic_str_escape( + src: str, pos: Pos, *, multiline: bool = False +) -> Tuple[Pos, str]: + escape_id = src[pos : pos + 2] + pos += 2 + if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}: + # Skip whitespace until next non-whitespace character or end of + # the doc. Error if non-whitespace is found before newline. + if escape_id != "\\\n": + pos = skip_chars(src, pos, TOML_WS) + char = src[pos : pos + 1] + if not char: + return pos, "" + if char != "\n": + raise suffixed_err(src, pos, 'Unescaped "\\" in a string') + pos += 1 + pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) + return pos, "" + if escape_id == "\\u": + return parse_hex_char(src, pos, 4) + if escape_id == "\\U": + return parse_hex_char(src, pos, 8) + try: + return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] + except KeyError: + if len(escape_id) != 2: + raise suffixed_err(src, pos, "Unterminated string") + raise suffixed_err(src, pos, 'Unescaped "\\" in a string') + + +def parse_basic_str_escape_multiline(src: str, pos: Pos) -> Tuple[Pos, str]: + return parse_basic_str_escape(src, pos, multiline=True) + + +def parse_hex_char(src: str, pos: Pos, hex_len: int) -> Tuple[Pos, str]: + hex_str = src[pos : pos + hex_len] + if len(hex_str) != hex_len or any(c not in string.hexdigits for c in hex_str): + raise suffixed_err(src, pos, "Invalid hex value") + pos += hex_len + hex_int = int(hex_str, 16) + if not is_unicode_scalar_value(hex_int): + raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value") + return pos, chr(hex_int) + + +def parse_literal_str(src: str, pos: Pos) -> Tuple[Pos, str]: + pos += 1 # Skip starting apostrophe + start_pos = pos + pos = skip_until( + src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True + ) + 
return pos + 1, src[start_pos:pos] # Skip ending apostrophe + + +def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> Tuple[Pos, str]: + pos += 3 + if src[pos : pos + 1] == "\n": + pos += 1 + + if literal: + delim = "'" + end_pos = skip_until( + src, + pos, + "'''", + error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS, + error_on_eof=True, + ) + result = src[pos:end_pos] + pos = end_pos + 3 + else: + delim = '"' + pos, result = parse_basic_str(src, pos, multiline=True) + + # Add at maximum two extra apostrophes/quotes if the end sequence + # is 4 or 5 chars long instead of just 3. + if src[pos : pos + 1] != delim: + return pos, result + pos += 1 + if src[pos : pos + 1] != delim: + return pos, result + delim + pos += 1 + return pos, result + (delim * 2) + + +def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> Tuple[Pos, str]: + if multiline: + error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape_multiline + else: + error_on = ILLEGAL_BASIC_STR_CHARS + parse_escapes = parse_basic_str_escape + result = "" + start_pos = pos + while True: + try: + char = src[pos] + except IndexError: + raise suffixed_err(src, pos, "Unterminated string") + if char == '"': + if not multiline: + return pos + 1, result + src[start_pos:pos] + if src[pos + 1 : pos + 3] == '""': + return pos + 3, result + src[start_pos:pos] + pos += 1 + continue + if char == "\\": + result += src[start_pos:pos] + pos, parsed_escape = parse_escapes(src, pos) + result += parsed_escape + start_pos = pos + continue + if char in error_on: + raise suffixed_err(src, pos, f'Illegal character "{char!r}"') + pos += 1 + + +def parse_regex(src: str, pos: Pos, regex: "Pattern") -> Tuple[Pos, str]: + match = regex.match(src, pos) + if not match: + raise suffixed_err(src, pos, "Unexpected sequence") + return match.end(), match.group() + + +def parse_value( # noqa: C901 + src: str, pos: Pos, parse_float: ParseFloat +) -> Tuple[Pos, Any]: + try: + char: Optional[str] = src[pos] 
+ except IndexError: + char = None + + # Basic strings + if char == '"': + if src[pos + 1 : pos + 3] == '""': + return parse_multiline_str(src, pos, literal=False) + return parse_one_line_basic_str(src, pos) + + # Literal strings + if char == "'": + if src[pos + 1 : pos + 3] == "''": + return parse_multiline_str(src, pos, literal=True) + return parse_literal_str(src, pos) + + # Booleans + if char == "t": + if src[pos + 1 : pos + 4] == "rue": + return pos + 4, True + if char == "f": + if src[pos + 1 : pos + 5] == "alse": + return pos + 5, False + + # Dates and times + datetime_match = RE_DATETIME.match(src, pos) + if datetime_match: + try: + datetime_obj = match_to_datetime(datetime_match) + except ValueError: + raise suffixed_err(src, pos, "Invalid date or datetime") + return datetime_match.end(), datetime_obj + localtime_match = RE_LOCALTIME.match(src, pos) + if localtime_match: + return localtime_match.end(), match_to_localtime(localtime_match) + + # Non-decimal integers + if char == "0": + second_char = src[pos + 1 : pos + 2] + if second_char == "x": + pos, hex_str = parse_regex(src, pos + 2, RE_HEX) + return pos, int(hex_str, 16) + if second_char == "o": + pos, oct_str = parse_regex(src, pos + 2, RE_OCT) + return pos, int(oct_str, 8) + if second_char == "b": + pos, bin_str = parse_regex(src, pos + 2, RE_BIN) + return pos, int(bin_str, 2) + + # Decimal integers and "normal" floats. + # The regex will greedily match any type starting with a decimal + # char, so needs to be located after handling of non-decimal ints, + # and dates and times. 
+ number_match = RE_NUMBER.match(src, pos) + if number_match: + return number_match.end(), match_to_number(number_match, parse_float) + + # Arrays + if char == "[": + return parse_array(src, pos, parse_float) + + # Inline tables + if char == "{": + return parse_inline_table(src, pos, parse_float) + + # Special floats + first_three = src[pos : pos + 3] + if first_three in {"inf", "nan"}: + return pos + 3, parse_float(first_three) + first_four = src[pos : pos + 4] + if first_four in {"-inf", "+inf", "-nan", "+nan"}: + return pos + 4, parse_float(first_four) + + raise suffixed_err(src, pos, "Invalid value") + + +def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError: + """Return a `TOMLDecodeError` where error message is suffixed with + coordinates in source.""" + + def coord_repr(src: str, pos: Pos) -> str: + if pos >= len(src): + return "end of document" + line = src.count("\n", 0, pos) + 1 + if line == 1: + column = pos + 1 + else: + column = pos - src.rindex("\n", 0, pos) + return f"line {line}, column {column}" + + return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") + + +def is_unicode_scalar_value(codepoint: int) -> bool: + return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111) diff --git a/src/pip/_vendor/tomli/_re.py b/src/pip/_vendor/tomli/_re.py new file mode 100644 index 00000000000..3883fdd9c90 --- /dev/null +++ b/src/pip/_vendor/tomli/_re.py @@ -0,0 +1,83 @@ +from datetime import date, datetime, time, timedelta, timezone, tzinfo +import re +from typing import TYPE_CHECKING, Any, Optional, Union + +if TYPE_CHECKING: + from re import Match + + from pip._vendor.tomli._parser import ParseFloat + +# E.g. +# - 00:32:00.999999 +# - 00:32:00 +_TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(\.[0-9]+)?" 
+ +RE_HEX = re.compile(r"[0-9A-Fa-f](?:_?[0-9A-Fa-f])*") +RE_BIN = re.compile(r"[01](?:_?[01])*") +RE_OCT = re.compile(r"[0-7](?:_?[0-7])*") +RE_NUMBER = re.compile( + r"[+-]?(?:0|[1-9](?:_?[0-9])*)" # integer + + r"(?:\.[0-9](?:_?[0-9])*)?" # optional fractional part + + r"(?:[eE][+-]?[0-9](?:_?[0-9])*)?" # optional exponent part +) +RE_LOCALTIME = re.compile(_TIME_RE_STR) +RE_DATETIME = re.compile( + r"([0-9]{4})-(0[1-9]|1[0-2])-(0[1-9]|1[0-9]|2[0-9]|3[01])" # date, e.g. 1988-10-27 + + r"(?:" + + r"[T ]" + + _TIME_RE_STR + + r"(?:(Z)|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))?" # time offset + + r")?" +) + + +def match_to_datetime(match: "Match") -> Union[datetime, date]: + """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`. + + Raises ValueError if the match does not correspond to a valid date + or datetime. + """ + ( + year_str, + month_str, + day_str, + hour_str, + minute_str, + sec_str, + micros_str, + zulu_time, + offset_dir_str, + offset_hour_str, + offset_minute_str, + ) = match.groups() + year, month, day = int(year_str), int(month_str), int(day_str) + if hour_str is None: + return date(year, month, day) + hour, minute, sec = int(hour_str), int(minute_str), int(sec_str) + micros = int(micros_str[1:].ljust(6, "0")[:6]) if micros_str else 0 + if offset_dir_str: + offset_dir = 1 if offset_dir_str == "+" else -1 + tz: Optional[tzinfo] = timezone( + timedelta( + hours=offset_dir * int(offset_hour_str), + minutes=offset_dir * int(offset_minute_str), + ) + ) + elif zulu_time: + tz = timezone.utc + else: # local date-time + tz = None + return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz) + + +def match_to_localtime(match: "Match") -> time: + hour_str, minute_str, sec_str, micros_str = match.groups() + micros = int(micros_str[1:].ljust(6, "0")[:6]) if micros_str else 0 + return time(int(hour_str), int(minute_str), int(sec_str), micros) + + +def match_to_number(match: "Match", parse_float: "ParseFloat") -> Any: + match_str = 
match.group() + if "." in match_str or "e" in match_str or "E" in match_str: + return parse_float(match_str) + return int(match_str) diff --git a/src/pip/_vendor/tomli/py.typed b/src/pip/_vendor/tomli/py.typed new file mode 100644 index 00000000000..7632ecf7754 --- /dev/null +++ b/src/pip/_vendor/tomli/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561 diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 13c21dfd484..34420cee3be 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -19,4 +19,5 @@ setuptools==44.0.0 six==1.16.0 tenacity==7.0.0 toml==0.10.2 +tomli==1.0.3 webencodings==0.5.1 From b778db5372785000acc3f72401c82a09f1d75ab6 Mon Sep 17 00:00:00 2001 From: Taneli Hukkinen <3275109+hukkin@users.noreply.github.com> Date: Fri, 2 Jul 2021 00:17:47 +0300 Subject: [PATCH 29/60] Replace `toml` usage with `tomli` --- src/pip/_internal/pyproject.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/pyproject.py b/src/pip/_internal/pyproject.py index 9016d355f87..5aa6160b469 100644 --- a/src/pip/_internal/pyproject.py +++ b/src/pip/_internal/pyproject.py @@ -2,7 +2,7 @@ from collections import namedtuple from typing import Any, List, Optional -from pip._vendor import toml +from pip._vendor import tomli from pip._vendor.packaging.requirements import InvalidRequirement, Requirement from pip._internal.exceptions import InstallationError @@ -59,7 +59,7 @@ def load_pyproject_toml( if has_pyproject: with open(pyproject_toml, encoding="utf-8") as f: - pp_toml = toml.load(f) + pp_toml = tomli.load(f) build_system = pp_toml.get("build-system") else: build_system = None From af79d0d39370c6b3612dc671105d42f007f35f6f Mon Sep 17 00:00:00 2001 From: Taneli Hukkinen <3275109+hukkin@users.noreply.github.com> Date: Fri, 2 Jul 2021 00:21:00 +0300 Subject: [PATCH 30/60] Replace use of vendored `toml` in tests with PyPI `toml` --- tests/functional/test_pep517.py | 5 ++++- tools/requirements/tests.txt | 1 + 2 
files changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_pep517.py b/tests/functional/test_pep517.py index 4458a7ad56e..b4556584416 100644 --- a/tests/functional/test_pep517.py +++ b/tests/functional/test_pep517.py @@ -1,5 +1,8 @@ import pytest -from pip._vendor import toml + +# The vendored `tomli` package is not used here because it doesn't +# have write capability +import toml from pip._internal.build_env import BuildEnvironment from pip._internal.req import InstallRequirement diff --git a/tools/requirements/tests.txt b/tools/requirements/tests.txt index 7badf2a27ff..ee453e07302 100644 --- a/tools/requirements/tests.txt +++ b/tools/requirements/tests.txt @@ -10,3 +10,4 @@ setuptools virtualenv < 20.0 werkzeug wheel +toml From 3ba1951705d048ba7c0544dfe7a568877f4174a2 Mon Sep 17 00:00:00 2001 From: Taneli Hukkinen <3275109+hukkin@users.noreply.github.com> Date: Fri, 2 Jul 2021 00:29:55 +0300 Subject: [PATCH 31/60] Unvendor no longer used `toml` --- src/pip/_vendor/__init__.py | 3 - src/pip/_vendor/pep517/build.py | 2 +- src/pip/_vendor/pep517/check.py | 2 +- src/pip/_vendor/pep517/envbuild.py | 2 +- src/pip/_vendor/toml.pyi | 1 - src/pip/_vendor/toml/LICENSE | 27 - src/pip/_vendor/toml/__init__.py | 25 - src/pip/_vendor/toml/decoder.py | 1057 ---------------------------- src/pip/_vendor/toml/encoder.py | 304 -------- src/pip/_vendor/toml/ordered.py | 15 - src/pip/_vendor/toml/tz.py | 24 - src/pip/_vendor/vendor.txt | 1 - 12 files changed, 3 insertions(+), 1460 deletions(-) delete mode 100644 src/pip/_vendor/toml.pyi delete mode 100644 src/pip/_vendor/toml/LICENSE delete mode 100644 src/pip/_vendor/toml/__init__.py delete mode 100644 src/pip/_vendor/toml/decoder.py delete mode 100644 src/pip/_vendor/toml/encoder.py delete mode 100644 src/pip/_vendor/toml/ordered.py delete mode 100644 src/pip/_vendor/toml/tz.py diff --git a/src/pip/_vendor/__init__.py b/src/pip/_vendor/__init__.py index 744a43b039e..57e32dab105 100644 --- 
a/src/pip/_vendor/__init__.py +++ b/src/pip/_vendor/__init__.py @@ -107,8 +107,5 @@ def vendored(modulename): vendored("requests.packages.urllib3.util.url") vendored("resolvelib") vendored("tenacity") - vendored("toml") - vendored("toml.encoder") - vendored("toml.decoder") vendored("tomli") vendored("urllib3") diff --git a/src/pip/_vendor/pep517/build.py b/src/pip/_vendor/pep517/build.py index f884bcf1097..573005b63a1 100644 --- a/src/pip/_vendor/pep517/build.py +++ b/src/pip/_vendor/pep517/build.py @@ -3,7 +3,7 @@ import argparse import logging import os -from pip._vendor import toml +import toml import shutil from .envbuild import BuildEnvironment diff --git a/src/pip/_vendor/pep517/check.py b/src/pip/_vendor/pep517/check.py index decab8a3423..8985007063a 100644 --- a/src/pip/_vendor/pep517/check.py +++ b/src/pip/_vendor/pep517/check.py @@ -4,7 +4,7 @@ import logging import os from os.path import isfile, join as pjoin -from pip._vendor.toml import TomlDecodeError, load as toml_load +from toml import TomlDecodeError, load as toml_load import shutil from subprocess import CalledProcessError import sys diff --git a/src/pip/_vendor/pep517/envbuild.py b/src/pip/_vendor/pep517/envbuild.py index 4088dcdb40a..cacd2b12c01 100644 --- a/src/pip/_vendor/pep517/envbuild.py +++ b/src/pip/_vendor/pep517/envbuild.py @@ -3,7 +3,7 @@ import os import logging -from pip._vendor import toml +import toml import shutil from subprocess import check_call import sys diff --git a/src/pip/_vendor/toml.pyi b/src/pip/_vendor/toml.pyi deleted file mode 100644 index 018a1ad1061..00000000000 --- a/src/pip/_vendor/toml.pyi +++ /dev/null @@ -1 +0,0 @@ -from toml import * \ No newline at end of file diff --git a/src/pip/_vendor/toml/LICENSE b/src/pip/_vendor/toml/LICENSE deleted file mode 100644 index 5010e3075e6..00000000000 --- a/src/pip/_vendor/toml/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -The MIT License - -Copyright 2013-2019 William Pearson -Copyright 2015-2016 Julien Enselme -Copyright 2016 
Google Inc. -Copyright 2017 Samuel Vasko -Copyright 2017 Nate Prewitt -Copyright 2017 Jack Evans -Copyright 2019 Filippo Broggini - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. \ No newline at end of file diff --git a/src/pip/_vendor/toml/__init__.py b/src/pip/_vendor/toml/__init__.py deleted file mode 100644 index 34a5eabb6ea..00000000000 --- a/src/pip/_vendor/toml/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Python module which parses and emits TOML. - -Released under the MIT license. 
-""" - -from pip._vendor.toml import encoder -from pip._vendor.toml import decoder - -__version__ = "0.10.2" -_spec_ = "0.5.0" - -load = decoder.load -loads = decoder.loads -TomlDecoder = decoder.TomlDecoder -TomlDecodeError = decoder.TomlDecodeError -TomlPreserveCommentDecoder = decoder.TomlPreserveCommentDecoder - -dump = encoder.dump -dumps = encoder.dumps -TomlEncoder = encoder.TomlEncoder -TomlArraySeparatorEncoder = encoder.TomlArraySeparatorEncoder -TomlPreserveInlineDictEncoder = encoder.TomlPreserveInlineDictEncoder -TomlNumpyEncoder = encoder.TomlNumpyEncoder -TomlPreserveCommentEncoder = encoder.TomlPreserveCommentEncoder -TomlPathlibEncoder = encoder.TomlPathlibEncoder diff --git a/src/pip/_vendor/toml/decoder.py b/src/pip/_vendor/toml/decoder.py deleted file mode 100644 index e071100de0f..00000000000 --- a/src/pip/_vendor/toml/decoder.py +++ /dev/null @@ -1,1057 +0,0 @@ -import datetime -import io -from os import linesep -import re -import sys - -from pip._vendor.toml.tz import TomlTz - -if sys.version_info < (3,): - _range = xrange # noqa: F821 -else: - unicode = str - _range = range - basestring = str - unichr = chr - - -def _detect_pathlib_path(p): - if (3, 4) <= sys.version_info: - import pathlib - if isinstance(p, pathlib.PurePath): - return True - return False - - -def _ispath(p): - if isinstance(p, (bytes, basestring)): - return True - return _detect_pathlib_path(p) - - -def _getpath(p): - if (3, 6) <= sys.version_info: - import os - return os.fspath(p) - if _detect_pathlib_path(p): - return str(p) - return p - - -try: - FNFError = FileNotFoundError -except NameError: - FNFError = IOError - - -TIME_RE = re.compile(r"([0-9]{2}):([0-9]{2}):([0-9]{2})(\.([0-9]{3,6}))?") - - -class TomlDecodeError(ValueError): - """Base toml Exception / Error.""" - - def __init__(self, msg, doc, pos): - lineno = doc.count('\n', 0, pos) + 1 - colno = pos - doc.rfind('\n', 0, pos) - emsg = '{} (line {} column {} char {})'.format(msg, lineno, colno, pos) - 
ValueError.__init__(self, emsg) - self.msg = msg - self.doc = doc - self.pos = pos - self.lineno = lineno - self.colno = colno - - -# Matches a TOML number, which allows underscores for readability -_number_with_underscores = re.compile('([0-9])(_([0-9]))*') - - -class CommentValue(object): - def __init__(self, val, comment, beginline, _dict): - self.val = val - separator = "\n" if beginline else " " - self.comment = separator + comment - self._dict = _dict - - def __getitem__(self, key): - return self.val[key] - - def __setitem__(self, key, value): - self.val[key] = value - - def dump(self, dump_value_func): - retstr = dump_value_func(self.val) - if isinstance(self.val, self._dict): - return self.comment + "\n" + unicode(retstr) - else: - return unicode(retstr) + self.comment - - -def _strictly_valid_num(n): - n = n.strip() - if not n: - return False - if n[0] == '_': - return False - if n[-1] == '_': - return False - if "_." in n or "._" in n: - return False - if len(n) == 1: - return True - if n[0] == '0' and n[1] not in ['.', 'o', 'b', 'x']: - return False - if n[0] == '+' or n[0] == '-': - n = n[1:] - if len(n) > 1 and n[0] == '0' and n[1] != '.': - return False - if '__' in n: - return False - return True - - -def load(f, _dict=dict, decoder=None): - """Parses named file or files as toml and returns a dictionary - - Args: - f: Path to the file to open, array of files to read into single dict - or a file descriptor - _dict: (optional) Specifies the class of the returned toml dictionary - decoder: The decoder to use - - Returns: - Parsed toml file represented as a dictionary - - Raises: - TypeError -- When f is invalid type - TomlDecodeError: Error while decoding toml - IOError / FileNotFoundError -- When an array with no valid (existing) - (Python 2 / Python 3) file paths is passed - """ - - if _ispath(f): - with io.open(_getpath(f), encoding='utf-8') as ffile: - return loads(ffile.read(), _dict, decoder) - elif isinstance(f, list): - from os import path as op 
- from warnings import warn - if not [path for path in f if op.exists(path)]: - error_msg = "Load expects a list to contain filenames only." - error_msg += linesep - error_msg += ("The list needs to contain the path of at least one " - "existing file.") - raise FNFError(error_msg) - if decoder is None: - decoder = TomlDecoder(_dict) - d = decoder.get_empty_table() - for l in f: # noqa: E741 - if op.exists(l): - d.update(load(l, _dict, decoder)) - else: - warn("Non-existent filename in list with at least one valid " - "filename") - return d - else: - try: - return loads(f.read(), _dict, decoder) - except AttributeError: - raise TypeError("You can only load a file descriptor, filename or " - "list") - - -_groupname_re = re.compile(r'^[A-Za-z0-9_-]+$') - - -def loads(s, _dict=dict, decoder=None): - """Parses string as toml - - Args: - s: String to be parsed - _dict: (optional) Specifies the class of the returned toml dictionary - - Returns: - Parsed toml file represented as a dictionary - - Raises: - TypeError: When a non-string is passed - TomlDecodeError: Error while decoding toml - """ - - implicitgroups = [] - if decoder is None: - decoder = TomlDecoder(_dict) - retval = decoder.get_empty_table() - currentlevel = retval - if not isinstance(s, basestring): - raise TypeError("Expecting something like a string") - - if not isinstance(s, unicode): - s = s.decode('utf8') - - original = s - sl = list(s) - openarr = 0 - openstring = False - openstrchar = "" - multilinestr = False - arrayoftables = False - beginline = True - keygroup = False - dottedkey = False - keyname = 0 - key = '' - prev_key = '' - line_no = 1 - - for i, item in enumerate(sl): - if item == '\r' and sl[i + 1] == '\n': - sl[i] = ' ' - continue - if keyname: - key += item - if item == '\n': - raise TomlDecodeError("Key name found without value." 
- " Reached end of line.", original, i) - if openstring: - if item == openstrchar: - oddbackslash = False - k = 1 - while i >= k and sl[i - k] == '\\': - oddbackslash = not oddbackslash - k += 1 - if not oddbackslash: - keyname = 2 - openstring = False - openstrchar = "" - continue - elif keyname == 1: - if item.isspace(): - keyname = 2 - continue - elif item == '.': - dottedkey = True - continue - elif item.isalnum() or item == '_' or item == '-': - continue - elif (dottedkey and sl[i - 1] == '.' and - (item == '"' or item == "'")): - openstring = True - openstrchar = item - continue - elif keyname == 2: - if item.isspace(): - if dottedkey: - nextitem = sl[i + 1] - if not nextitem.isspace() and nextitem != '.': - keyname = 1 - continue - if item == '.': - dottedkey = True - nextitem = sl[i + 1] - if not nextitem.isspace() and nextitem != '.': - keyname = 1 - continue - if item == '=': - keyname = 0 - prev_key = key[:-1].rstrip() - key = '' - dottedkey = False - else: - raise TomlDecodeError("Found invalid character in key name: '" + - item + "'. 
Try quoting the key name.", - original, i) - if item == "'" and openstrchar != '"': - k = 1 - try: - while sl[i - k] == "'": - k += 1 - if k == 3: - break - except IndexError: - pass - if k == 3: - multilinestr = not multilinestr - openstring = multilinestr - else: - openstring = not openstring - if openstring: - openstrchar = "'" - else: - openstrchar = "" - if item == '"' and openstrchar != "'": - oddbackslash = False - k = 1 - tripquote = False - try: - while sl[i - k] == '"': - k += 1 - if k == 3: - tripquote = True - break - if k == 1 or (k == 3 and tripquote): - while sl[i - k] == '\\': - oddbackslash = not oddbackslash - k += 1 - except IndexError: - pass - if not oddbackslash: - if tripquote: - multilinestr = not multilinestr - openstring = multilinestr - else: - openstring = not openstring - if openstring: - openstrchar = '"' - else: - openstrchar = "" - if item == '#' and (not openstring and not keygroup and - not arrayoftables): - j = i - comment = "" - try: - while sl[j] != '\n': - comment += s[j] - sl[j] = ' ' - j += 1 - except IndexError: - break - if not openarr: - decoder.preserve_comment(line_no, prev_key, comment, beginline) - if item == '[' and (not openstring and not keygroup and - not arrayoftables): - if beginline: - if len(sl) > i + 1 and sl[i + 1] == '[': - arrayoftables = True - else: - keygroup = True - else: - openarr += 1 - if item == ']' and not openstring: - if keygroup: - keygroup = False - elif arrayoftables: - if sl[i - 1] == ']': - arrayoftables = False - else: - openarr -= 1 - if item == '\n': - if openstring or multilinestr: - if not multilinestr: - raise TomlDecodeError("Unbalanced quotes", original, i) - if ((sl[i - 1] == "'" or sl[i - 1] == '"') and ( - sl[i - 2] == sl[i - 1])): - sl[i] = sl[i - 1] - if sl[i - 3] == sl[i - 1]: - sl[i - 3] = ' ' - elif openarr: - sl[i] = ' ' - else: - beginline = True - line_no += 1 - elif beginline and sl[i] != ' ' and sl[i] != '\t': - beginline = False - if not keygroup and not arrayoftables: 
- if sl[i] == '=': - raise TomlDecodeError("Found empty keyname. ", original, i) - keyname = 1 - key += item - if keyname: - raise TomlDecodeError("Key name found without value." - " Reached end of file.", original, len(s)) - if openstring: # reached EOF and have an unterminated string - raise TomlDecodeError("Unterminated string found." - " Reached end of file.", original, len(s)) - s = ''.join(sl) - s = s.split('\n') - multikey = None - multilinestr = "" - multibackslash = False - pos = 0 - for idx, line in enumerate(s): - if idx > 0: - pos += len(s[idx - 1]) + 1 - - decoder.embed_comments(idx, currentlevel) - - if not multilinestr or multibackslash or '\n' not in multilinestr: - line = line.strip() - if line == "" and (not multikey or multibackslash): - continue - if multikey: - if multibackslash: - multilinestr += line - else: - multilinestr += line - multibackslash = False - closed = False - if multilinestr[0] == '[': - closed = line[-1] == ']' - elif len(line) > 2: - closed = (line[-1] == multilinestr[0] and - line[-2] == multilinestr[0] and - line[-3] == multilinestr[0]) - if closed: - try: - value, vtype = decoder.load_value(multilinestr) - except ValueError as err: - raise TomlDecodeError(str(err), original, pos) - currentlevel[multikey] = value - multikey = None - multilinestr = "" - else: - k = len(multilinestr) - 1 - while k > -1 and multilinestr[k] == '\\': - multibackslash = not multibackslash - k -= 1 - if multibackslash: - multilinestr = multilinestr[:-1] - else: - multilinestr += "\n" - continue - if line[0] == '[': - arrayoftables = False - if len(line) == 1: - raise TomlDecodeError("Opening key group bracket on line by " - "itself.", original, pos) - if line[1] == '[': - arrayoftables = True - line = line[2:] - splitstr = ']]' - else: - line = line[1:] - splitstr = ']' - i = 1 - quotesplits = decoder._get_split_on_quotes(line) - quoted = False - for quotesplit in quotesplits: - if not quoted and splitstr in quotesplit: - break - i += 
quotesplit.count(splitstr) - quoted = not quoted - line = line.split(splitstr, i) - if len(line) < i + 1 or line[-1].strip() != "": - raise TomlDecodeError("Key group not on a line by itself.", - original, pos) - groups = splitstr.join(line[:-1]).split('.') - i = 0 - while i < len(groups): - groups[i] = groups[i].strip() - if len(groups[i]) > 0 and (groups[i][0] == '"' or - groups[i][0] == "'"): - groupstr = groups[i] - j = i + 1 - while ((not groupstr[0] == groupstr[-1]) or - len(groupstr) == 1): - j += 1 - if j > len(groups) + 2: - raise TomlDecodeError("Invalid group name '" + - groupstr + "' Something " + - "went wrong.", original, pos) - groupstr = '.'.join(groups[i:j]).strip() - groups[i] = groupstr[1:-1] - groups[i + 1:j] = [] - else: - if not _groupname_re.match(groups[i]): - raise TomlDecodeError("Invalid group name '" + - groups[i] + "'. Try quoting it.", - original, pos) - i += 1 - currentlevel = retval - for i in _range(len(groups)): - group = groups[i] - if group == "": - raise TomlDecodeError("Can't have a keygroup with an empty " - "name", original, pos) - try: - currentlevel[group] - if i == len(groups) - 1: - if group in implicitgroups: - implicitgroups.remove(group) - if arrayoftables: - raise TomlDecodeError("An implicitly defined " - "table can't be an array", - original, pos) - elif arrayoftables: - currentlevel[group].append(decoder.get_empty_table() - ) - else: - raise TomlDecodeError("What? " + group + - " already exists?" 
+ - str(currentlevel), - original, pos) - except TypeError: - currentlevel = currentlevel[-1] - if group not in currentlevel: - currentlevel[group] = decoder.get_empty_table() - if i == len(groups) - 1 and arrayoftables: - currentlevel[group] = [decoder.get_empty_table()] - except KeyError: - if i != len(groups) - 1: - implicitgroups.append(group) - currentlevel[group] = decoder.get_empty_table() - if i == len(groups) - 1 and arrayoftables: - currentlevel[group] = [decoder.get_empty_table()] - currentlevel = currentlevel[group] - if arrayoftables: - try: - currentlevel = currentlevel[-1] - except KeyError: - pass - elif line[0] == "{": - if line[-1] != "}": - raise TomlDecodeError("Line breaks are not allowed in inline" - "objects", original, pos) - try: - decoder.load_inline_object(line, currentlevel, multikey, - multibackslash) - except ValueError as err: - raise TomlDecodeError(str(err), original, pos) - elif "=" in line: - try: - ret = decoder.load_line(line, currentlevel, multikey, - multibackslash) - except ValueError as err: - raise TomlDecodeError(str(err), original, pos) - if ret is not None: - multikey, multilinestr, multibackslash = ret - return retval - - -def _load_date(val): - microsecond = 0 - tz = None - try: - if len(val) > 19: - if val[19] == '.': - if val[-1].upper() == 'Z': - subsecondval = val[20:-1] - tzval = "Z" - else: - subsecondvalandtz = val[20:] - if '+' in subsecondvalandtz: - splitpoint = subsecondvalandtz.index('+') - subsecondval = subsecondvalandtz[:splitpoint] - tzval = subsecondvalandtz[splitpoint:] - elif '-' in subsecondvalandtz: - splitpoint = subsecondvalandtz.index('-') - subsecondval = subsecondvalandtz[:splitpoint] - tzval = subsecondvalandtz[splitpoint:] - else: - tzval = None - subsecondval = subsecondvalandtz - if tzval is not None: - tz = TomlTz(tzval) - microsecond = int(int(subsecondval) * - (10 ** (6 - len(subsecondval)))) - else: - tz = TomlTz(val[19:]) - except ValueError: - tz = None - if "-" not in val[1:]: - 
return None - try: - if len(val) == 10: - d = datetime.date( - int(val[:4]), int(val[5:7]), - int(val[8:10])) - else: - d = datetime.datetime( - int(val[:4]), int(val[5:7]), - int(val[8:10]), int(val[11:13]), - int(val[14:16]), int(val[17:19]), microsecond, tz) - except ValueError: - return None - return d - - -def _load_unicode_escapes(v, hexbytes, prefix): - skip = False - i = len(v) - 1 - while i > -1 and v[i] == '\\': - skip = not skip - i -= 1 - for hx in hexbytes: - if skip: - skip = False - i = len(hx) - 1 - while i > -1 and hx[i] == '\\': - skip = not skip - i -= 1 - v += prefix - v += hx - continue - hxb = "" - i = 0 - hxblen = 4 - if prefix == "\\U": - hxblen = 8 - hxb = ''.join(hx[i:i + hxblen]).lower() - if hxb.strip('0123456789abcdef'): - raise ValueError("Invalid escape sequence: " + hxb) - if hxb[0] == "d" and hxb[1].strip('01234567'): - raise ValueError("Invalid escape sequence: " + hxb + - ". Only scalar unicode points are allowed.") - v += unichr(int(hxb, 16)) - v += unicode(hx[len(hxb):]) - return v - - -# Unescape TOML string values. 
- -# content after the \ -_escapes = ['0', 'b', 'f', 'n', 'r', 't', '"'] -# What it should be replaced by -_escapedchars = ['\0', '\b', '\f', '\n', '\r', '\t', '\"'] -# Used for substitution -_escape_to_escapedchars = dict(zip(_escapes, _escapedchars)) - - -def _unescape(v): - """Unescape characters in a TOML string.""" - i = 0 - backslash = False - while i < len(v): - if backslash: - backslash = False - if v[i] in _escapes: - v = v[:i - 1] + _escape_to_escapedchars[v[i]] + v[i + 1:] - elif v[i] == '\\': - v = v[:i - 1] + v[i:] - elif v[i] == 'u' or v[i] == 'U': - i += 1 - else: - raise ValueError("Reserved escape sequence used") - continue - elif v[i] == '\\': - backslash = True - i += 1 - return v - - -class InlineTableDict(object): - """Sentinel subclass of dict for inline tables.""" - - -class TomlDecoder(object): - - def __init__(self, _dict=dict): - self._dict = _dict - - def get_empty_table(self): - return self._dict() - - def get_empty_inline_table(self): - class DynamicInlineTableDict(self._dict, InlineTableDict): - """Concrete sentinel subclass for inline tables. 
- It is a subclass of _dict which is passed in dynamically at load - time - - It is also a subclass of InlineTableDict - """ - - return DynamicInlineTableDict() - - def load_inline_object(self, line, currentlevel, multikey=False, - multibackslash=False): - candidate_groups = line[1:-1].split(",") - groups = [] - if len(candidate_groups) == 1 and not candidate_groups[0].strip(): - candidate_groups.pop() - while len(candidate_groups) > 0: - candidate_group = candidate_groups.pop(0) - try: - _, value = candidate_group.split('=', 1) - except ValueError: - raise ValueError("Invalid inline table encountered") - value = value.strip() - if ((value[0] == value[-1] and value[0] in ('"', "'")) or ( - value[0] in '-0123456789' or - value in ('true', 'false') or - (value[0] == "[" and value[-1] == "]") or - (value[0] == '{' and value[-1] == '}'))): - groups.append(candidate_group) - elif len(candidate_groups) > 0: - candidate_groups[0] = (candidate_group + "," + - candidate_groups[0]) - else: - raise ValueError("Invalid inline table value encountered") - for group in groups: - status = self.load_line(group, currentlevel, multikey, - multibackslash) - if status is not None: - break - - def _get_split_on_quotes(self, line): - doublequotesplits = line.split('"') - quoted = False - quotesplits = [] - if len(doublequotesplits) > 1 and "'" in doublequotesplits[0]: - singlequotesplits = doublequotesplits[0].split("'") - doublequotesplits = doublequotesplits[1:] - while len(singlequotesplits) % 2 == 0 and len(doublequotesplits): - singlequotesplits[-1] += '"' + doublequotesplits[0] - doublequotesplits = doublequotesplits[1:] - if "'" in singlequotesplits[-1]: - singlequotesplits = (singlequotesplits[:-1] + - singlequotesplits[-1].split("'")) - quotesplits += singlequotesplits - for doublequotesplit in doublequotesplits: - if quoted: - quotesplits.append(doublequotesplit) - else: - quotesplits += doublequotesplit.split("'") - quoted = not quoted - return quotesplits - - def 
load_line(self, line, currentlevel, multikey, multibackslash): - i = 1 - quotesplits = self._get_split_on_quotes(line) - quoted = False - for quotesplit in quotesplits: - if not quoted and '=' in quotesplit: - break - i += quotesplit.count('=') - quoted = not quoted - pair = line.split('=', i) - strictly_valid = _strictly_valid_num(pair[-1]) - if _number_with_underscores.match(pair[-1]): - pair[-1] = pair[-1].replace('_', '') - while len(pair[-1]) and (pair[-1][0] != ' ' and pair[-1][0] != '\t' and - pair[-1][0] != "'" and pair[-1][0] != '"' and - pair[-1][0] != '[' and pair[-1][0] != '{' and - pair[-1].strip() != 'true' and - pair[-1].strip() != 'false'): - try: - float(pair[-1]) - break - except ValueError: - pass - if _load_date(pair[-1]) is not None: - break - if TIME_RE.match(pair[-1]): - break - i += 1 - prev_val = pair[-1] - pair = line.split('=', i) - if prev_val == pair[-1]: - raise ValueError("Invalid date or number") - if strictly_valid: - strictly_valid = _strictly_valid_num(pair[-1]) - pair = ['='.join(pair[:-1]).strip(), pair[-1].strip()] - if '.' 
in pair[0]: - if '"' in pair[0] or "'" in pair[0]: - quotesplits = self._get_split_on_quotes(pair[0]) - quoted = False - levels = [] - for quotesplit in quotesplits: - if quoted: - levels.append(quotesplit) - else: - levels += [level.strip() for level in - quotesplit.split('.')] - quoted = not quoted - else: - levels = pair[0].split('.') - while levels[-1] == "": - levels = levels[:-1] - for level in levels[:-1]: - if level == "": - continue - if level not in currentlevel: - currentlevel[level] = self.get_empty_table() - currentlevel = currentlevel[level] - pair[0] = levels[-1].strip() - elif (pair[0][0] == '"' or pair[0][0] == "'") and \ - (pair[0][-1] == pair[0][0]): - pair[0] = _unescape(pair[0][1:-1]) - k, koffset = self._load_line_multiline_str(pair[1]) - if k > -1: - while k > -1 and pair[1][k + koffset] == '\\': - multibackslash = not multibackslash - k -= 1 - if multibackslash: - multilinestr = pair[1][:-1] - else: - multilinestr = pair[1] + "\n" - multikey = pair[0] - else: - value, vtype = self.load_value(pair[1], strictly_valid) - try: - currentlevel[pair[0]] - raise ValueError("Duplicate keys!") - except TypeError: - raise ValueError("Duplicate keys!") - except KeyError: - if multikey: - return multikey, multilinestr, multibackslash - else: - currentlevel[pair[0]] = value - - def _load_line_multiline_str(self, p): - poffset = 0 - if len(p) < 3: - return -1, poffset - if p[0] == '[' and (p.strip()[-1] != ']' and - self._load_array_isstrarray(p)): - newp = p[1:].strip().split(',') - while len(newp) > 1 and newp[-1][0] != '"' and newp[-1][0] != "'": - newp = newp[:-2] + [newp[-2] + ',' + newp[-1]] - newp = newp[-1] - poffset = len(p) - len(newp) - p = newp - if p[0] != '"' and p[0] != "'": - return -1, poffset - if p[1] != p[0] or p[2] != p[0]: - return -1, poffset - if len(p) > 5 and p[-1] == p[0] and p[-2] == p[0] and p[-3] == p[0]: - return -1, poffset - return len(p) - 1, poffset - - def load_value(self, v, strictly_valid=True): - if not v: - raise 
ValueError("Empty value is invalid") - if v == 'true': - return (True, "bool") - elif v.lower() == 'true': - raise ValueError("Only all lowercase booleans allowed") - elif v == 'false': - return (False, "bool") - elif v.lower() == 'false': - raise ValueError("Only all lowercase booleans allowed") - elif v[0] == '"' or v[0] == "'": - quotechar = v[0] - testv = v[1:].split(quotechar) - triplequote = False - triplequotecount = 0 - if len(testv) > 1 and testv[0] == '' and testv[1] == '': - testv = testv[2:] - triplequote = True - closed = False - for tv in testv: - if tv == '': - if triplequote: - triplequotecount += 1 - else: - closed = True - else: - oddbackslash = False - try: - i = -1 - j = tv[i] - while j == '\\': - oddbackslash = not oddbackslash - i -= 1 - j = tv[i] - except IndexError: - pass - if not oddbackslash: - if closed: - raise ValueError("Found tokens after a closed " + - "string. Invalid TOML.") - else: - if not triplequote or triplequotecount > 1: - closed = True - else: - triplequotecount = 0 - if quotechar == '"': - escapeseqs = v.split('\\')[1:] - backslash = False - for i in escapeseqs: - if i == '': - backslash = not backslash - else: - if i[0] not in _escapes and (i[0] != 'u' and - i[0] != 'U' and - not backslash): - raise ValueError("Reserved escape sequence used") - if backslash: - backslash = False - for prefix in ["\\u", "\\U"]: - if prefix in v: - hexbytes = v.split(prefix) - v = _load_unicode_escapes(hexbytes[0], hexbytes[1:], - prefix) - v = _unescape(v) - if len(v) > 1 and v[1] == quotechar and (len(v) < 3 or - v[1] == v[2]): - v = v[2:-2] - return (v[1:-1], "str") - elif v[0] == '[': - return (self.load_array(v), "array") - elif v[0] == '{': - inline_object = self.get_empty_inline_table() - self.load_inline_object(v, inline_object) - return (inline_object, "inline_object") - elif TIME_RE.match(v): - h, m, s, _, ms = TIME_RE.match(v).groups() - time = datetime.time(int(h), int(m), int(s), int(ms) if ms else 0) - return (time, "time") - 
else: - parsed_date = _load_date(v) - if parsed_date is not None: - return (parsed_date, "date") - if not strictly_valid: - raise ValueError("Weirdness with leading zeroes or " - "underscores in your number.") - itype = "int" - neg = False - if v[0] == '-': - neg = True - v = v[1:] - elif v[0] == '+': - v = v[1:] - v = v.replace('_', '') - lowerv = v.lower() - if '.' in v or ('x' not in v and ('e' in v or 'E' in v)): - if '.' in v and v.split('.', 1)[1] == '': - raise ValueError("This float is missing digits after " - "the point") - if v[0] not in '0123456789': - raise ValueError("This float doesn't have a leading " - "digit") - v = float(v) - itype = "float" - elif len(lowerv) == 3 and (lowerv == 'inf' or lowerv == 'nan'): - v = float(v) - itype = "float" - if itype == "int": - v = int(v, 0) - if neg: - return (0 - v, itype) - return (v, itype) - - def bounded_string(self, s): - if len(s) == 0: - return True - if s[-1] != s[0]: - return False - i = -2 - backslash = False - while len(s) + i > 0: - if s[i] == "\\": - backslash = not backslash - i -= 1 - else: - break - return not backslash - - def _load_array_isstrarray(self, a): - a = a[1:-1].strip() - if a != '' and (a[0] == '"' or a[0] == "'"): - return True - return False - - def load_array(self, a): - atype = None - retval = [] - a = a.strip() - if '[' not in a[1:-1] or "" != a[1:-1].split('[')[0].strip(): - strarray = self._load_array_isstrarray(a) - if not a[1:-1].strip().startswith('{'): - a = a[1:-1].split(',') - else: - # a is an inline object, we must find the matching parenthesis - # to define groups - new_a = [] - start_group_index = 1 - end_group_index = 2 - open_bracket_count = 1 if a[start_group_index] == '{' else 0 - in_str = False - while end_group_index < len(a[1:]): - if a[end_group_index] == '"' or a[end_group_index] == "'": - if in_str: - backslash_index = end_group_index - 1 - while (backslash_index > -1 and - a[backslash_index] == '\\'): - in_str = not in_str - backslash_index -= 1 - in_str = 
not in_str - if not in_str and a[end_group_index] == '{': - open_bracket_count += 1 - if in_str or a[end_group_index] != '}': - end_group_index += 1 - continue - elif a[end_group_index] == '}' and open_bracket_count > 1: - open_bracket_count -= 1 - end_group_index += 1 - continue - - # Increase end_group_index by 1 to get the closing bracket - end_group_index += 1 - - new_a.append(a[start_group_index:end_group_index]) - - # The next start index is at least after the closing - # bracket, a closing bracket can be followed by a comma - # since we are in an array. - start_group_index = end_group_index + 1 - while (start_group_index < len(a[1:]) and - a[start_group_index] != '{'): - start_group_index += 1 - end_group_index = start_group_index + 1 - a = new_a - b = 0 - if strarray: - while b < len(a) - 1: - ab = a[b].strip() - while (not self.bounded_string(ab) or - (len(ab) > 2 and - ab[0] == ab[1] == ab[2] and - ab[-2] != ab[0] and - ab[-3] != ab[0])): - a[b] = a[b] + ',' + a[b + 1] - ab = a[b].strip() - if b < len(a) - 2: - a = a[:b + 1] + a[b + 2:] - else: - a = a[:b + 1] - b += 1 - else: - al = list(a[1:-1]) - a = [] - openarr = 0 - j = 0 - for i in _range(len(al)): - if al[i] == '[': - openarr += 1 - elif al[i] == ']': - openarr -= 1 - elif al[i] == ',' and not openarr: - a.append(''.join(al[j:i])) - j = i + 1 - a.append(''.join(al[j:])) - for i in _range(len(a)): - a[i] = a[i].strip() - if a[i] != '': - nval, ntype = self.load_value(a[i]) - if atype: - if ntype != atype: - raise ValueError("Not a homogeneous array") - else: - atype = ntype - retval.append(nval) - return retval - - def preserve_comment(self, line_no, key, comment, beginline): - pass - - def embed_comments(self, idx, currentlevel): - pass - - -class TomlPreserveCommentDecoder(TomlDecoder): - - def __init__(self, _dict=dict): - self.saved_comments = {} - super(TomlPreserveCommentDecoder, self).__init__(_dict) - - def preserve_comment(self, line_no, key, comment, beginline): - 
self.saved_comments[line_no] = (key, comment, beginline) - - def embed_comments(self, idx, currentlevel): - if idx not in self.saved_comments: - return - - key, comment, beginline = self.saved_comments[idx] - currentlevel[key] = CommentValue(currentlevel[key], comment, beginline, - self._dict) diff --git a/src/pip/_vendor/toml/encoder.py b/src/pip/_vendor/toml/encoder.py deleted file mode 100644 index 7fb94da98ac..00000000000 --- a/src/pip/_vendor/toml/encoder.py +++ /dev/null @@ -1,304 +0,0 @@ -import datetime -import re -import sys -from decimal import Decimal - -from pip._vendor.toml.decoder import InlineTableDict - -if sys.version_info >= (3,): - unicode = str - - -def dump(o, f, encoder=None): - """Writes out dict as toml to a file - - Args: - o: Object to dump into toml - f: File descriptor where the toml should be stored - encoder: The ``TomlEncoder`` to use for constructing the output string - - Returns: - String containing the toml corresponding to dictionary - - Raises: - TypeError: When anything other than file descriptor is passed - """ - - if not f.write: - raise TypeError("You can only dump an object to a file descriptor") - d = dumps(o, encoder=encoder) - f.write(d) - return d - - -def dumps(o, encoder=None): - """Stringifies input dict as toml - - Args: - o: Object to dump into toml - encoder: The ``TomlEncoder`` to use for constructing the output string - - Returns: - String containing the toml corresponding to dict - - Examples: - ```python - >>> import toml - >>> output = { - ... 'a': "I'm a string", - ... 'b': ["I'm", "a", "list"], - ... 'c': 2400 - ... 
} - >>> toml.dumps(output) - 'a = "I\'m a string"\nb = [ "I\'m", "a", "list",]\nc = 2400\n' - ``` - """ - - retval = "" - if encoder is None: - encoder = TomlEncoder(o.__class__) - addtoretval, sections = encoder.dump_sections(o, "") - retval += addtoretval - outer_objs = [id(o)] - while sections: - section_ids = [id(section) for section in sections.values()] - for outer_obj in outer_objs: - if outer_obj in section_ids: - raise ValueError("Circular reference detected") - outer_objs += section_ids - newsections = encoder.get_empty_table() - for section in sections: - addtoretval, addtosections = encoder.dump_sections( - sections[section], section) - - if addtoretval or (not addtoretval and not addtosections): - if retval and retval[-2:] != "\n\n": - retval += "\n" - retval += "[" + section + "]\n" - if addtoretval: - retval += addtoretval - for s in addtosections: - newsections[section + "." + s] = addtosections[s] - sections = newsections - return retval - - -def _dump_str(v): - if sys.version_info < (3,) and hasattr(v, 'decode') and isinstance(v, str): - v = v.decode('utf-8') - v = "%r" % v - if v[0] == 'u': - v = v[1:] - singlequote = v.startswith("'") - if singlequote or v.startswith('"'): - v = v[1:-1] - if singlequote: - v = v.replace("\\'", "'") - v = v.replace('"', '\\"') - v = v.split("\\x") - while len(v) > 1: - i = -1 - if not v[0]: - v = v[1:] - v[0] = v[0].replace("\\\\", "\\") - # No, I don't know why != works and == breaks - joinx = v[0][i] != "\\" - while v[0][:i] and v[0][i] == "\\": - joinx = not joinx - i -= 1 - if joinx: - joiner = "x" - else: - joiner = "u00" - v = [v[0] + joiner + v[1]] + v[2:] - return unicode('"' + v[0] + '"') - - -def _dump_float(v): - return "{}".format(v).replace("e+0", "e+").replace("e-0", "e-") - - -def _dump_time(v): - utcoffset = v.utcoffset() - if utcoffset is None: - return v.isoformat() - # The TOML norm specifies that it's local time thus we drop the offset - return v.isoformat()[:-6] - - -class 
TomlEncoder(object): - - def __init__(self, _dict=dict, preserve=False): - self._dict = _dict - self.preserve = preserve - self.dump_funcs = { - str: _dump_str, - unicode: _dump_str, - list: self.dump_list, - bool: lambda v: unicode(v).lower(), - int: lambda v: v, - float: _dump_float, - Decimal: _dump_float, - datetime.datetime: lambda v: v.isoformat().replace('+00:00', 'Z'), - datetime.time: _dump_time, - datetime.date: lambda v: v.isoformat() - } - - def get_empty_table(self): - return self._dict() - - def dump_list(self, v): - retval = "[" - for u in v: - retval += " " + unicode(self.dump_value(u)) + "," - retval += "]" - return retval - - def dump_inline_table(self, section): - """Preserve inline table in its compact syntax instead of expanding - into subsection. - - https://github.com/toml-lang/toml#user-content-inline-table - """ - retval = "" - if isinstance(section, dict): - val_list = [] - for k, v in section.items(): - val = self.dump_inline_table(v) - val_list.append(k + " = " + val) - retval += "{ " + ", ".join(val_list) + " }\n" - return retval - else: - return unicode(self.dump_value(section)) - - def dump_value(self, v): - # Lookup function corresponding to v's type - dump_fn = self.dump_funcs.get(type(v)) - if dump_fn is None and hasattr(v, '__iter__'): - dump_fn = self.dump_funcs[list] - # Evaluate function (if it exists) else return v - return dump_fn(v) if dump_fn is not None else self.dump_funcs[str](v) - - def dump_sections(self, o, sup): - retstr = "" - if sup != "" and sup[-1] != ".": - sup += '.' 
- retdict = self._dict() - arraystr = "" - for section in o: - section = unicode(section) - qsection = section - if not re.match(r'^[A-Za-z0-9_-]+$', section): - qsection = _dump_str(section) - if not isinstance(o[section], dict): - arrayoftables = False - if isinstance(o[section], list): - for a in o[section]: - if isinstance(a, dict): - arrayoftables = True - if arrayoftables: - for a in o[section]: - arraytabstr = "\n" - arraystr += "[[" + sup + qsection + "]]\n" - s, d = self.dump_sections(a, sup + qsection) - if s: - if s[0] == "[": - arraytabstr += s - else: - arraystr += s - while d: - newd = self._dict() - for dsec in d: - s1, d1 = self.dump_sections(d[dsec], sup + - qsection + "." + - dsec) - if s1: - arraytabstr += ("[" + sup + qsection + - "." + dsec + "]\n") - arraytabstr += s1 - for s1 in d1: - newd[dsec + "." + s1] = d1[s1] - d = newd - arraystr += arraytabstr - else: - if o[section] is not None: - retstr += (qsection + " = " + - unicode(self.dump_value(o[section])) + '\n') - elif self.preserve and isinstance(o[section], InlineTableDict): - retstr += (qsection + " = " + - self.dump_inline_table(o[section])) - else: - retdict[qsection] = o[section] - retstr += arraystr - return (retstr, retdict) - - -class TomlPreserveInlineDictEncoder(TomlEncoder): - - def __init__(self, _dict=dict): - super(TomlPreserveInlineDictEncoder, self).__init__(_dict, True) - - -class TomlArraySeparatorEncoder(TomlEncoder): - - def __init__(self, _dict=dict, preserve=False, separator=","): - super(TomlArraySeparatorEncoder, self).__init__(_dict, preserve) - if separator.strip() == "": - separator = "," + separator - elif separator.strip(' \t\n\r,'): - raise ValueError("Invalid separator for arrays") - self.separator = separator - - def dump_list(self, v): - t = [] - retval = "[" - for u in v: - t.append(self.dump_value(u)) - while t != []: - s = [] - for u in t: - if isinstance(u, list): - for r in u: - s.append(r) - else: - retval += " " + unicode(u) + self.separator - t = s 
- retval += "]" - return retval - - -class TomlNumpyEncoder(TomlEncoder): - - def __init__(self, _dict=dict, preserve=False): - import numpy as np - super(TomlNumpyEncoder, self).__init__(_dict, preserve) - self.dump_funcs[np.float16] = _dump_float - self.dump_funcs[np.float32] = _dump_float - self.dump_funcs[np.float64] = _dump_float - self.dump_funcs[np.int16] = self._dump_int - self.dump_funcs[np.int32] = self._dump_int - self.dump_funcs[np.int64] = self._dump_int - - def _dump_int(self, v): - return "{}".format(int(v)) - - -class TomlPreserveCommentEncoder(TomlEncoder): - - def __init__(self, _dict=dict, preserve=False): - from pip._vendor.toml.decoder import CommentValue - super(TomlPreserveCommentEncoder, self).__init__(_dict, preserve) - self.dump_funcs[CommentValue] = lambda v: v.dump(self.dump_value) - - -class TomlPathlibEncoder(TomlEncoder): - - def _dump_pathlib_path(self, v): - return _dump_str(str(v)) - - def dump_value(self, v): - if (3, 4) <= sys.version_info: - import pathlib - if isinstance(v, pathlib.PurePath): - v = str(v) - return super(TomlPathlibEncoder, self).dump_value(v) diff --git a/src/pip/_vendor/toml/ordered.py b/src/pip/_vendor/toml/ordered.py deleted file mode 100644 index 6052016e8e6..00000000000 --- a/src/pip/_vendor/toml/ordered.py +++ /dev/null @@ -1,15 +0,0 @@ -from collections import OrderedDict -from pip._vendor.toml import TomlEncoder -from pip._vendor.toml import TomlDecoder - - -class TomlOrderedDecoder(TomlDecoder): - - def __init__(self): - super(self.__class__, self).__init__(_dict=OrderedDict) - - -class TomlOrderedEncoder(TomlEncoder): - - def __init__(self): - super(self.__class__, self).__init__(_dict=OrderedDict) diff --git a/src/pip/_vendor/toml/tz.py b/src/pip/_vendor/toml/tz.py deleted file mode 100644 index bf20593a264..00000000000 --- a/src/pip/_vendor/toml/tz.py +++ /dev/null @@ -1,24 +0,0 @@ -from datetime import tzinfo, timedelta - - -class TomlTz(tzinfo): - def __init__(self, toml_offset): - if toml_offset 
== "Z": - self._raw_offset = "+00:00" - else: - self._raw_offset = toml_offset - self._sign = -1 if self._raw_offset[0] == '-' else 1 - self._hours = int(self._raw_offset[1:3]) - self._minutes = int(self._raw_offset[4:6]) - - def __deepcopy__(self, memo): - return self.__class__(self._raw_offset) - - def tzname(self, dt): - return "UTC" + self._raw_offset - - def utcoffset(self, dt): - return self._sign * timedelta(hours=self._hours, minutes=self._minutes) - - def dst(self, dt): - return timedelta(0) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 34420cee3be..3e0e3343b40 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -18,6 +18,5 @@ resolvelib==0.7.1 setuptools==44.0.0 six==1.16.0 tenacity==7.0.0 -toml==0.10.2 tomli==1.0.3 webencodings==0.5.1 From d2e0c4c429facce189ed19ab30f8dd17ae2a9e03 Mon Sep 17 00:00:00 2001 From: Taneli Hukkinen <3275109+hukkin@users.noreply.github.com> Date: Fri, 2 Jul 2021 00:32:16 +0300 Subject: [PATCH 32/60] Fix outdated paths in vendoring README --- src/pip/_vendor/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_vendor/README.rst b/src/pip/_vendor/README.rst index 9363558129e..12b421a0417 100644 --- a/src/pip/_vendor/README.rst +++ b/src/pip/_vendor/README.rst @@ -16,7 +16,7 @@ Vendoring Policy pure Python. * Any modifications made to libraries **MUST** be noted in ``pip/_vendor/README.rst`` and their corresponding patches **MUST** be - included ``tools/automation/vendoring/patches``. + included ``tools/vendoring/patches``. * Vendored libraries should have corresponding ``vendored()`` entries in ``pip/_vendor/__init__.py``. @@ -118,7 +118,7 @@ Automatic Vendoring Vendoring is automated via the `vendoring `_ tool from the content of ``pip/_vendor/vendor.txt`` and the different patches in -``tools/automation/vendoring/patches``. +``tools/vendoring/patches``. Launch it via ``vendoring sync . -v`` (requires ``vendoring>=0.2.2``). 
From 1915c4da3e035b2c7d3af098037889e5c3ac0b6d Mon Sep 17 00:00:00 2001 From: Taneli Hukkinen <3275109+hukkin@users.noreply.github.com> Date: Fri, 2 Jul 2021 00:35:02 +0300 Subject: [PATCH 33/60] Add news --- news/10034.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/10034.feature.rst diff --git a/news/10034.feature.rst b/news/10034.feature.rst new file mode 100644 index 00000000000..198d9ade3c2 --- /dev/null +++ b/news/10034.feature.rst @@ -0,0 +1 @@ +Support TOML v1.0.0 syntax in ``pyproject.toml``. From 72e38ca3df4d1690a972eef9a27e20f154aa0b63 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 1 Jul 2021 13:33:38 +0800 Subject: [PATCH 34/60] Re-implement Git version parsing with regex After packaging drops LegacyVersion, version.parse() will no longer guarantee to be able to parse Git versions, so we need to invent our own parser. Since we really only care about the major and minor segments, the logic is pretty simple. --- news/10117.removal.rst | 2 ++ src/pip/_internal/vcs/git.py | 30 ++++++++++++++---------------- tests/unit/test_vcs.py | 3 +-- 3 files changed, 17 insertions(+), 18 deletions(-) create mode 100644 news/10117.removal.rst diff --git a/news/10117.removal.rst b/news/10117.removal.rst new file mode 100644 index 00000000000..c0191a35358 --- /dev/null +++ b/news/10117.removal.rst @@ -0,0 +1,2 @@ +Git version parsing is now done with regular expression to prepare for the +pending upstream removal of non-PEP-440 version parsing logic. 
diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index b860e350a2d..269bf6a65b3 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -6,9 +6,6 @@ import urllib.request from typing import List, Optional, Tuple -from pip._vendor.packaging.version import _BaseVersion -from pip._vendor.packaging.version import parse as parse_version - from pip._internal.exceptions import BadCommand, InstallationError from pip._internal.utils.misc import HiddenText, display_path, hide_url from pip._internal.utils.subprocess import make_command @@ -29,6 +26,14 @@ logger = logging.getLogger(__name__) +GIT_VERSION_REGEX = re.compile( + r"^git version " # Prefix. + r"(\d+)" # Major. + r"\.(\d+)" # Dot, minor. + r"(?:\.(\d+))?" # Optional dot, patch. + r".*$" # Suffix, including any pre- and post-release segments we don't care about. +) + HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$') # SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git' @@ -83,21 +88,14 @@ def is_immutable_rev_checkout(self, url, dest): ) return not is_tag_or_branch - def get_git_version(self): - # type: () -> _BaseVersion - VERSION_PFX = 'git version ' + def get_git_version(self) -> Tuple[int, ...]: version = self.run_command( ['version'], show_stdout=False, stdout_only=True ) - if version.startswith(VERSION_PFX): - version = version[len(VERSION_PFX):].split()[0] - else: - version = '' - # get first 3 positions of the git version because - # on windows it is x.y.z.windows.t, and this parses as - # LegacyVersion which always smaller than a Version. 
- version = '.'.join(version.split('.')[:3]) - return parse_version(version) + match = GIT_VERSION_REGEX.match(version) + if not match: + return () + return tuple(int(c) for c in match.groups()) @classmethod def get_current_branch(cls, location): @@ -301,7 +299,7 @@ def switch(self, dest, url, rev_options): def update(self, dest, url, rev_options): # type: (str, HiddenText, RevOptions) -> None # First fetch changes from the default remote - if self.get_git_version() >= parse_version('1.9.0'): + if self.get_git_version() >= (1, 9): # fetch tags in addition to everything else self.run_command(['fetch', '-q', '--tags'], cwd=dest) else: diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py index 305c45fd857..403f3946673 100644 --- a/tests/unit/test_vcs.py +++ b/tests/unit/test_vcs.py @@ -4,7 +4,6 @@ from unittest.mock import patch import pytest -from pip._vendor.packaging.version import parse as parse_version from pip._internal.exceptions import BadCommand, InstallationError from pip._internal.utils.misc import hide_url, hide_value @@ -483,7 +482,7 @@ def test_subversion__get_url_rev_options(): def test_get_git_version(): git_version = Git().get_git_version() - assert git_version >= parse_version('1.0.0') + assert git_version >= (1, 0, 0) @pytest.mark.parametrize('use_interactive,is_atty,expected', [ From 9582341e08d0d774e8271ba133fe10afbda510a9 Mon Sep 17 00:00:00 2001 From: Harutaka Kawamura Date: Sun, 4 Jul 2021 20:29:15 +0900 Subject: [PATCH 35/60] Complete type annotations in `pip/_internal/metadata` (#10124) --- news/10124.trivial.rst | 1 + src/pip/_internal/metadata/__init__.py | 9 ++-- src/pip/_internal/metadata/base.py | 52 ++++++++------------- src/pip/_internal/metadata/pkg_resources.py | 48 +++++++------------ 4 files changed, 39 insertions(+), 71 deletions(-) create mode 100644 news/10124.trivial.rst diff --git a/news/10124.trivial.rst b/news/10124.trivial.rst new file mode 100644 index 00000000000..e866529ac62 --- /dev/null +++ 
b/news/10124.trivial.rst @@ -0,0 +1 @@ +Converted type commentaries into annotations in ``pip/_internal/metadata``. diff --git a/src/pip/_internal/metadata/__init__.py b/src/pip/_internal/metadata/__init__.py index 63335a19193..708a8fbdf1b 100644 --- a/src/pip/_internal/metadata/__init__.py +++ b/src/pip/_internal/metadata/__init__.py @@ -3,8 +3,7 @@ from .base import BaseDistribution, BaseEnvironment -def get_default_environment(): - # type: () -> BaseEnvironment +def get_default_environment() -> BaseEnvironment: """Get the default representation for the current environment. This returns an Environment instance from the chosen backend. The default @@ -16,8 +15,7 @@ def get_default_environment(): return Environment.default() -def get_environment(paths): - # type: (Optional[List[str]]) -> BaseEnvironment +def get_environment(paths: Optional[List[str]]) -> BaseEnvironment: """Get a representation of the environment specified by ``paths``. This returns an Environment instance from the chosen backend based on the @@ -29,8 +27,7 @@ def get_environment(paths): return Environment.from_paths(paths) -def get_wheel_distribution(wheel_path, canonical_name): - # type: (str, str) -> BaseDistribution +def get_wheel_distribution(wheel_path: str, canonical_name: str) -> BaseDistribution: """Get the representation of the specified wheel's distribution metadata. This returns a Distribution instance from the chosen backend based on diff --git a/src/pip/_internal/metadata/base.py b/src/pip/_internal/metadata/base.py index 37f9a8232f6..659adc12daf 100644 --- a/src/pip/_internal/metadata/base.py +++ b/src/pip/_internal/metadata/base.py @@ -13,8 +13,7 @@ class BaseDistribution: @property - def location(self): - # type: () -> Optional[str] + def location(self) -> Optional[str]: """Where the distribution is loaded from. 
A string value is not necessarily a filesystem path, since distributions @@ -24,39 +23,32 @@ def location(self): raise NotImplementedError() @property - def metadata_version(self): - # type: () -> Optional[str] + def metadata_version(self) -> Optional[str]: """Value of "Metadata-Version:" in the distribution, if available.""" raise NotImplementedError() @property - def canonical_name(self): - # type: () -> str + def canonical_name(self) -> str: raise NotImplementedError() @property - def version(self): - # type: () -> DistributionVersion + def version(self) -> DistributionVersion: raise NotImplementedError() @property - def installer(self): - # type: () -> str + def installer(self) -> str: raise NotImplementedError() @property - def editable(self): - # type: () -> bool + def editable(self) -> bool: raise NotImplementedError() @property - def local(self): - # type: () -> bool + def local(self) -> bool: raise NotImplementedError() @property - def in_usersite(self): - # type: () -> bool + def in_usersite(self) -> bool: raise NotImplementedError() @@ -64,22 +56,18 @@ class BaseEnvironment: """An environment containing distributions to introspect.""" @classmethod - def default(cls): - # type: () -> BaseEnvironment + def default(cls) -> "BaseEnvironment": raise NotImplementedError() @classmethod - def from_paths(cls, paths): - # type: (Optional[List[str]]) -> BaseEnvironment + def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment": raise NotImplementedError() - def get_distribution(self, name): - # type: (str) -> Optional[BaseDistribution] + def get_distribution(self, name: str) -> Optional["BaseDistribution"]: """Given a requirement name, return the installed distributions.""" raise NotImplementedError() - def _iter_distributions(self): - # type: () -> Iterator[BaseDistribution] + def _iter_distributions(self) -> Iterator["BaseDistribution"]: """Iterate through installed distributions. 
This function should be implemented by subclass, but never called @@ -88,8 +76,7 @@ def _iter_distributions(self): """ raise NotImplementedError() - def iter_distributions(self): - # type: () -> Iterator[BaseDistribution] + def iter_distributions(self) -> Iterator["BaseDistribution"]: """Iterate through installed distributions.""" for dist in self._iter_distributions(): # Make sure the distribution actually comes from a valid Python @@ -112,13 +99,12 @@ def iter_distributions(self): def iter_installed_distributions( self, - local_only=True, # type: bool - skip=stdlib_pkgs, # type: Container[str] - include_editables=True, # type: bool - editables_only=False, # type: bool - user_only=False, # type: bool - ): - # type: (...) -> Iterator[BaseDistribution] + local_only: bool = True, + skip: Container[str] = stdlib_pkgs, + include_editables: bool = True, + editables_only: bool = False, + user_only: bool = False, + ) -> Iterator[BaseDistribution]: """Return a list of installed distributions. :param local_only: If True (default), only return installations diff --git a/src/pip/_internal/metadata/pkg_resources.py b/src/pip/_internal/metadata/pkg_resources.py index f39a39ebebf..b7248c3119d 100644 --- a/src/pip/_internal/metadata/pkg_resources.py +++ b/src/pip/_internal/metadata/pkg_resources.py @@ -13,78 +13,64 @@ class Distribution(BaseDistribution): - def __init__(self, dist): - # type: (pkg_resources.Distribution) -> None + def __init__(self, dist: pkg_resources.Distribution) -> None: self._dist = dist @classmethod - def from_wheel(cls, path, name): - # type: (str, str) -> Distribution + def from_wheel(cls, path: str, name: str) -> "Distribution": with zipfile.ZipFile(path, allowZip64=True) as zf: dist = pkg_resources_distribution_for_wheel(zf, name, path) return cls(dist) @property - def location(self): - # type: () -> Optional[str] + def location(self) -> Optional[str]: return self._dist.location @property - def metadata_version(self): - # type: () -> Optional[str] + def 
metadata_version(self) -> Optional[str]: for line in self._dist.get_metadata_lines(self._dist.PKG_INFO): if line.lower().startswith("metadata-version:"): return line.split(":", 1)[-1].strip() return None @property - def canonical_name(self): - # type: () -> str + def canonical_name(self) -> str: return canonicalize_name(self._dist.project_name) @property - def version(self): - # type: () -> DistributionVersion + def version(self) -> DistributionVersion: return parse_version(self._dist.version) @property - def installer(self): - # type: () -> str + def installer(self) -> str: return get_installer(self._dist) @property - def editable(self): - # type: () -> bool + def editable(self) -> bool: return misc.dist_is_editable(self._dist) @property - def local(self): - # type: () -> bool + def local(self) -> bool: return misc.dist_is_local(self._dist) @property - def in_usersite(self): - # type: () -> bool + def in_usersite(self) -> bool: return misc.dist_in_usersite(self._dist) class Environment(BaseEnvironment): - def __init__(self, ws): - # type: (pkg_resources.WorkingSet) -> None + def __init__(self, ws: pkg_resources.WorkingSet) -> None: self._ws = ws @classmethod - def default(cls): - # type: () -> BaseEnvironment + def default(cls) -> BaseEnvironment: return cls(pkg_resources.working_set) @classmethod - def from_paths(cls, paths): - # type: (Optional[List[str]]) -> BaseEnvironment + def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment: return cls(pkg_resources.WorkingSet(paths)) - def _search_distribution(self, name): - # type: (str) -> Optional[BaseDistribution] + def _search_distribution(self, name: str) -> Optional[BaseDistribution]: """Find a distribution matching the ``name`` in the environment. 
This searches from *all* distributions available in the environment, to @@ -96,8 +82,7 @@ def _search_distribution(self, name): return dist return None - def get_distribution(self, name): - # type: (str) -> Optional[BaseDistribution] + def get_distribution(self, name: str) -> Optional[BaseDistribution]: # Search the distribution by looking through the working set. dist = self._search_distribution(name) @@ -120,7 +105,6 @@ def get_distribution(self, name): return None return self._search_distribution(name) - def _iter_distributions(self): - # type: () -> Iterator[BaseDistribution] + def _iter_distributions(self) -> Iterator[BaseDistribution]: for dist in self._ws: yield Distribution(dist) From 2395bc5059cf7ba15ec49aa59c0e20a4391d8f3b Mon Sep 17 00:00:00 2001 From: harupy Date: Sun, 4 Jul 2021 22:14:22 +0900 Subject: [PATCH 36/60] Use --color=yes --- news/10126.trivial.rst | 1 + setup.cfg | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 news/10126.trivial.rst diff --git a/news/10126.trivial.rst b/news/10126.trivial.rst new file mode 100644 index 00000000000..ba563cd4d87 --- /dev/null +++ b/news/10126.trivial.rst @@ -0,0 +1 @@ +Use ``--color=yes`` to color pytest outputs. 
diff --git a/setup.cfg b/setup.cfg index cb1ecab65a9..d5dfb587e7b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,7 +54,7 @@ follow_imports = skip follow_imports = skip [tool:pytest] -addopts = --ignore src/pip/_vendor --ignore tests/tests_cache -r aR +addopts = --ignore src/pip/_vendor --ignore tests/tests_cache -r aR --color=yes markers = network: tests that need network incompatible_with_test_venv From 7cae5f2e199973e02d48e0acbb29eff05da3fa50 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sun, 25 Apr 2021 17:10:53 +0800 Subject: [PATCH 37/60] Relax PyPy compatibility location warning --- news/9845.bugfix.rst | 2 ++ src/pip/_internal/locations/__init__.py | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 news/9845.bugfix.rst diff --git a/news/9845.bugfix.rst b/news/9845.bugfix.rst new file mode 100644 index 00000000000..be37cb03953 --- /dev/null +++ b/news/9845.bugfix.rst @@ -0,0 +1,2 @@ +Relax interpreter detection to quelch a location mismatch warning where PyPy +is deliberately breaking backwards compatibility. 
diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 3acb51bc454..0cd48dd56a1 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -110,8 +110,8 @@ def get_scheme( and home is not None and k in ("platlib", "purelib") and old_v.parent == new_v.parent - and old_v.name == "python" - and new_v.name == "pypy" + and old_v.name.startswith("python") + and new_v.name.startswith("pypy") ) if skip_pypy_special_case: continue From c1295afdd52b4172625ea4ea7f2bfbe44a273795 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 9 Jul 2021 11:05:07 +0100 Subject: [PATCH 38/60] Address review comments --- docs/html/topics/vcs-support.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/html/topics/vcs-support.md b/docs/html/topics/vcs-support.md index 4ccde54cbd9..30bb579300a 100644 --- a/docs/html/topics/vcs-support.md +++ b/docs/html/topics/vcs-support.md @@ -85,7 +85,7 @@ You can also give specific revisions to an SVN URL, like so: ``` Note that you need to use [Editable VCS installs](#editable-vcs-installs) for -using specific revisions. +using specific revisions from Subversion. ### Bazaar @@ -126,9 +126,8 @@ the {ref}`--editable ` option) or not. Note that if a satisfactory version of the package is already installed, the VCS source will not overwrite it without an `--upgrade` flag. Further, pip -looks at the package version (specified in the `setup.py` file) of the target -commit to determine what action to take on the VCS requirement (not the commit -itself). +looks at the package version, at the target revision to determine what action to +take on the VCS requirement (not the commit itself). 
The {ref}`pip freeze` subcommand will record the VCS requirement specifier
(referencing a specific commit) only if the install is done with the editable

From f2ce7741abad0cdae77ca516889a13ddc6d0d076 Mon Sep 17 00:00:00 2001
From: Nikita Chepanov
Date: Fri, 30 Apr 2021 14:59:34 -0400
Subject: [PATCH 39/60] Suppress "not on PATH" warning when `--prefix` is given

Similar to how it works with `--target`, avoid printing the warning
since it's clear from the context that the final destination of the
executables is unlikely to be in the PATH.

---
 news/9931.feature.rst                 | 1 +
 src/pip/_internal/commands/install.py | 4 ++--
 tests/functional/test_install.py      | 8 ++++++--
 3 files changed, 9 insertions(+), 4 deletions(-)
 create mode 100644 news/9931.feature.rst

diff --git a/news/9931.feature.rst b/news/9931.feature.rst
new file mode 100644
index 00000000000..0ea1cbf7837
--- /dev/null
+++ b/news/9931.feature.rst
@@ -0,0 +1 @@
+Suppress "not on PATH" warning when ``--prefix`` is given.
diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py
index f2969630eb7..c1b68d3ddea 100644
--- a/src/pip/_internal/commands/install.py
+++ b/src/pip/_internal/commands/install.py
@@ -386,9 +386,9 @@ def run(self, options, args):
             conflicts = self._determine_conflicts(to_install)
 
         # Don't warn about script install locations if
-        # --target has been specified
+        # --target or --prefix has been specified
         warn_script_location = options.warn_script_location
-        if options.target_dir:
+        if options.target_dir or options.prefix_path:
             warn_script_location = False
 
         installed = install_given_reqs(
diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py
index 2742e873e33..8ae8ab80c2c 100644
--- a/tests/functional/test_install.py
+++ b/tests/functional/test_install.py
@@ -960,7 +960,8 @@ def test_install_nonlocal_compatible_wheel_path(
     assert result.returncode == ERROR
 
 
-def test_install_with_target_and_scripts_no_warning(script, with_wheel):
+@pytest.mark.parametrize('opt', ('--target', '--prefix')) +def test_install_with_target_or_prefix_and_scripts_no_warning(opt, script, with_wheel): """ Test that installing with --target does not trigger the "script not in PATH" warning (issue #5201) @@ -981,7 +982,7 @@ def test_install_with_target_and_scripts_no_warning(script, with_wheel): pkga_path.joinpath("pkga.py").write_text(textwrap.dedent(""" def main(): pass """)) - result = script.pip('install', '--target', target_dir, pkga_path) + result = script.pip('install', opt, target_dir, pkga_path) # This assertion isn't actually needed, if we get the script warning # the script.pip() call will fail with "stderr not expected". But we # leave the assertion to make the intention of the code clearer. @@ -1666,6 +1667,9 @@ def test_install_from_test_pypi_with_ext_url_dep_is_blocked(script, index): assert error_cause in res.stderr, str(res) +@pytest.mark.xfail( + reason="No longer possible to trigger the warning with either --prefix or --target" +) def test_installing_scripts_outside_path_prints_warning(script): result = script.pip_install_local( "--prefix", script.scratch_path, "script_wheel1" From 37a2b12a21450ed03c886e16bb454464783ef687 Mon Sep 17 00:00:00 2001 From: briantracy Date: Sun, 11 Jul 2021 20:40:43 -0700 Subject: [PATCH 40/60] Special case warning for requirements.txt install (#9915) Co-authored-by: Tzu-ping Chung --- news/9915.feature.rst | 1 + src/pip/_internal/resolution/resolvelib/factory.py | 7 +++++++ tests/functional/test_install_extras.py | 6 ++++++ 3 files changed, 14 insertions(+) create mode 100644 news/9915.feature.rst diff --git a/news/9915.feature.rst b/news/9915.feature.rst new file mode 100644 index 00000000000..6d7d2bc057f --- /dev/null +++ b/news/9915.feature.rst @@ -0,0 +1 @@ +Add a special error message when users forget the ``-r`` flag when installing. 
diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 5816a0ede38..bec25b59888 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -551,6 +551,13 @@ def _report_single_requirement_conflict(self, req, parent): req_disp, ", ".join(versions) or "none", ) + if str(req) == "requirements.txt": + logger.info( + "HINT: You are attempting to install a package literally " + 'named "requirements.txt" (which cannot exist). Consider ' + "using the '-r' flag to install the packages listed in " + "requirements.txt" + ) return DistributionNotFound(f"No matching distribution found for {req}") diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py index de1ee3795ea..83bcc548295 100644 --- a/tests/functional/test_install_extras.py +++ b/tests/functional/test_install_extras.py @@ -143,6 +143,12 @@ def test_install_special_extra(script): ) in result.stderr, str(result) +def test_install_requirements_no_r_flag(script): + '''Beginners sometimes forget the -r and this leads to confusion''' + result = script.pip('install', 'requirements.txt', expect_error=True) + assert 'literally named "requirements.txt"' in result.stdout + + @pytest.mark.parametrize( "extra_to_install, simple_version", [ ['', '3.0'], From 72e1ff35c7ad04169b1dcc4258c477d4341e72f8 Mon Sep 17 00:00:00 2001 From: Harutaka Kawamura Date: Mon, 12 Jul 2021 12:41:48 +0900 Subject: [PATCH 41/60] Complete type annotations in `pip/_internal/resolution` (#10125) --- news/10125.trivial.rst | 1 + src/pip/_internal/resolution/base.py | 10 +- .../_internal/resolution/legacy/resolver.py | 87 ++++--- .../_internal/resolution/resolvelib/base.py | 71 ++---- .../resolution/resolvelib/candidates.py | 219 +++++++----------- .../resolution/resolvelib/factory.py | 133 +++++------ .../resolution/resolvelib/found_candidates.py | 31 ++- 
.../resolution/resolvelib/provider.py | 25 +- .../resolution/resolvelib/reporter.py | 29 +-- .../resolution/resolvelib/requirements.py | 96 +++----- .../resolution/resolvelib/resolver.py | 63 ++--- 11 files changed, 317 insertions(+), 448 deletions(-) create mode 100644 news/10125.trivial.rst diff --git a/news/10125.trivial.rst b/news/10125.trivial.rst new file mode 100644 index 00000000000..166470f0b75 --- /dev/null +++ b/news/10125.trivial.rst @@ -0,0 +1 @@ +Converted type commentaries into annotations in ``pip/_internal/resolution``. diff --git a/src/pip/_internal/resolution/base.py b/src/pip/_internal/resolution/base.py index 1be0cb279a0..3f83ef0f533 100644 --- a/src/pip/_internal/resolution/base.py +++ b/src/pip/_internal/resolution/base.py @@ -7,10 +7,12 @@ class BaseResolver: - def resolve(self, root_reqs, check_supported_wheels): - # type: (List[InstallRequirement], bool) -> RequirementSet + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: raise NotImplementedError() - def get_installation_order(self, req_set): - # type: (RequirementSet) -> List[InstallRequirement] + def get_installation_order( + self, req_set: RequirementSet + ) -> List[InstallRequirement]: raise NotImplementedError() diff --git a/src/pip/_internal/resolution/legacy/resolver.py b/src/pip/_internal/resolution/legacy/resolver.py index 17de7f09a37..1c3ed72bc6c 100644 --- a/src/pip/_internal/resolution/legacy/resolver.py +++ b/src/pip/_internal/resolution/legacy/resolver.py @@ -50,11 +50,10 @@ def _check_dist_requires_python( - dist, # type: Distribution - version_info, # type: Tuple[int, int, int] - ignore_requires_python=False, # type: bool -): - # type: (...) -> None + dist: Distribution, + version_info: Tuple[int, int, int], + ignore_requires_python: bool = False, +) -> None: """ Check whether the given Python version is compatible with a distribution's "Requires-Python" value. 
@@ -107,19 +106,18 @@ class Resolver(BaseResolver): def __init__( self, - preparer, # type: RequirementPreparer - finder, # type: PackageFinder - wheel_cache, # type: Optional[WheelCache] - make_install_req, # type: InstallRequirementProvider - use_user_site, # type: bool - ignore_dependencies, # type: bool - ignore_installed, # type: bool - ignore_requires_python, # type: bool - force_reinstall, # type: bool - upgrade_strategy, # type: str - py_version_info=None, # type: Optional[Tuple[int, ...]] - ): - # type: (...) -> None + preparer: RequirementPreparer, + finder: PackageFinder, + wheel_cache: Optional[WheelCache], + make_install_req: InstallRequirementProvider, + use_user_site: bool, + ignore_dependencies: bool, + ignore_installed: bool, + ignore_requires_python: bool, + force_reinstall: bool, + upgrade_strategy: str, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> None: super().__init__() assert upgrade_strategy in self._allowed_strategies @@ -142,12 +140,11 @@ def __init__( self.use_user_site = use_user_site self._make_install_req = make_install_req - self._discovered_dependencies = defaultdict( - list - ) # type: DiscoveredDependencies + self._discovered_dependencies: DiscoveredDependencies = defaultdict(list) - def resolve(self, root_reqs, check_supported_wheels): - # type: (List[InstallRequirement], bool) -> RequirementSet + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: """Resolve what operations need to be done As a side-effect of this method, the packages (and their dependencies) @@ -168,7 +165,7 @@ def resolve(self, root_reqs, check_supported_wheels): # exceptions cannot be checked ahead of time, because # _populate_link() needs to be called before we can make decisions # based on link type. 
- discovered_reqs = [] # type: List[InstallRequirement] + discovered_reqs: List[InstallRequirement] = [] hash_errors = HashErrors() for req in chain(requirement_set.all_requirements, discovered_reqs): try: @@ -182,8 +179,7 @@ def resolve(self, root_reqs, check_supported_wheels): return requirement_set - def _is_upgrade_allowed(self, req): - # type: (InstallRequirement) -> bool + def _is_upgrade_allowed(self, req: InstallRequirement) -> bool: if self.upgrade_strategy == "to-satisfy-only": return False elif self.upgrade_strategy == "eager": @@ -192,8 +188,7 @@ def _is_upgrade_allowed(self, req): assert self.upgrade_strategy == "only-if-needed" return req.user_supplied or req.constraint - def _set_req_to_reinstall(self, req): - # type: (InstallRequirement) -> None + def _set_req_to_reinstall(self, req: InstallRequirement) -> None: """ Set a requirement to be installed. """ @@ -203,8 +198,9 @@ def _set_req_to_reinstall(self, req): req.should_reinstall = True req.satisfied_by = None - def _check_skip_installed(self, req_to_install): - # type: (InstallRequirement) -> Optional[str] + def _check_skip_installed( + self, req_to_install: InstallRequirement + ) -> Optional[str]: """Check if req_to_install should be skipped. This will check if the req is installed, and whether we should upgrade @@ -256,8 +252,7 @@ def _check_skip_installed(self, req_to_install): self._set_req_to_reinstall(req_to_install) return None - def _find_requirement_link(self, req): - # type: (InstallRequirement) -> Optional[Link] + def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]: upgrade = self._is_upgrade_allowed(req) best_candidate = self.finder.find_requirement(req, upgrade) if not best_candidate: @@ -279,8 +274,7 @@ def _find_requirement_link(self, req): return link - def _populate_link(self, req): - # type: (InstallRequirement) -> None + def _populate_link(self, req: InstallRequirement) -> None: """Ensure that if a link can be found for this, that it is found. 
Note that req.link may still be None - if the requirement is already @@ -309,8 +303,7 @@ def _populate_link(self, req): req.original_link_is_in_wheel_cache = True req.link = cache_entry.link - def _get_dist_for(self, req): - # type: (InstallRequirement) -> Distribution + def _get_dist_for(self, req: InstallRequirement) -> Distribution: """Takes a InstallRequirement and returns a single AbstractDist \ representing a prepared variant of the same. """ @@ -358,10 +351,9 @@ def _get_dist_for(self, req): def _resolve_one( self, - requirement_set, # type: RequirementSet - req_to_install, # type: InstallRequirement - ): - # type: (...) -> List[InstallRequirement] + requirement_set: RequirementSet, + req_to_install: InstallRequirement, + ) -> List[InstallRequirement]: """Prepare a single requirements file. :return: A list of additional InstallRequirements to also install. @@ -384,10 +376,9 @@ def _resolve_one( ignore_requires_python=self.ignore_requires_python, ) - more_reqs = [] # type: List[InstallRequirement] + more_reqs: List[InstallRequirement] = [] - def add_req(subreq, extras_requested): - # type: (Distribution, Iterable[str]) -> None + def add_req(subreq: Distribution, extras_requested: Iterable[str]) -> None: sub_install_req = self._make_install_req( str(subreq), req_to_install, @@ -432,8 +423,9 @@ def add_req(subreq, extras_requested): return more_reqs - def get_installation_order(self, req_set): - # type: (RequirementSet) -> List[InstallRequirement] + def get_installation_order( + self, req_set: RequirementSet + ) -> List[InstallRequirement]: """Create the installation order. The installation order is topological - requirements are installed @@ -444,10 +436,9 @@ def get_installation_order(self, req_set): # installs the user specified things in the order given, except when # dependencies must come earlier to achieve topological order. 
order = [] - ordered_reqs = set() # type: Set[InstallRequirement] + ordered_reqs: Set[InstallRequirement] = set() - def schedule(req): - # type: (InstallRequirement) -> None + def schedule(req: InstallRequirement) -> None: if req.satisfied_by or req in ordered_reqs: return if req.constraint: diff --git a/src/pip/_internal/resolution/resolvelib/base.py b/src/pip/_internal/resolution/resolvelib/base.py index 26821a1facd..7f258c57481 100644 --- a/src/pip/_internal/resolution/resolvelib/base.py +++ b/src/pip/_internal/resolution/resolvelib/base.py @@ -12,8 +12,7 @@ CandidateVersion = Union[LegacyVersion, Version] -def format_name(project, extras): - # type: (str, FrozenSet[str]) -> str +def format_name(project: str, extras: FrozenSet[str]) -> str: if not extras: return project canonical_extras = sorted(canonicalize_name(e) for e in extras) @@ -21,33 +20,29 @@ def format_name(project, extras): class Constraint: - def __init__(self, specifier, hashes, links): - # type: (SpecifierSet, Hashes, FrozenSet[Link]) -> None + def __init__( + self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link] + ) -> None: self.specifier = specifier self.hashes = hashes self.links = links @classmethod - def empty(cls): - # type: () -> Constraint + def empty(cls) -> "Constraint": return Constraint(SpecifierSet(), Hashes(), frozenset()) @classmethod - def from_ireq(cls, ireq): - # type: (InstallRequirement) -> Constraint + def from_ireq(cls, ireq: InstallRequirement) -> "Constraint": links = frozenset([ireq.link]) if ireq.link else frozenset() return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links) - def __nonzero__(self): - # type: () -> bool + def __nonzero__(self) -> bool: return bool(self.specifier) or bool(self.hashes) or bool(self.links) - def __bool__(self): - # type: () -> bool + def __bool__(self) -> bool: return self.__nonzero__() - def __and__(self, other): - # type: (InstallRequirement) -> Constraint + def __and__(self, other: InstallRequirement) -> 
"Constraint": if not isinstance(other, InstallRequirement): return NotImplemented specifier = self.specifier & other.specifier @@ -57,8 +52,7 @@ def __and__(self, other): links = links.union([other.link]) return Constraint(specifier, hashes, links) - def is_satisfied_by(self, candidate): - # type: (Candidate) -> bool + def is_satisfied_by(self, candidate: "Candidate") -> bool: # Reject if there are any mismatched URL constraints on this package. if self.links and not all(_match_link(link, candidate) for link in self.links): return False @@ -70,8 +64,7 @@ def is_satisfied_by(self, candidate): class Requirement: @property - def project_name(self): - # type: () -> NormalizedName + def project_name(self) -> NormalizedName: """The "project name" of a requirement. This is different from ``name`` if this requirement contains extras, @@ -81,8 +74,7 @@ def project_name(self): raise NotImplementedError("Subclass should override") @property - def name(self): - # type: () -> str + def name(self) -> str: """The name identifying this requirement in the resolver. 
This is different from ``project_name`` if this requirement contains @@ -90,21 +82,17 @@ def name(self): """ raise NotImplementedError("Subclass should override") - def is_satisfied_by(self, candidate): - # type: (Candidate) -> bool + def is_satisfied_by(self, candidate: "Candidate") -> bool: return False - def get_candidate_lookup(self): - # type: () -> CandidateLookup + def get_candidate_lookup(self) -> CandidateLookup: raise NotImplementedError("Subclass should override") - def format_for_error(self): - # type: () -> str + def format_for_error(self) -> str: raise NotImplementedError("Subclass should override") -def _match_link(link, candidate): - # type: (Link, Candidate) -> bool +def _match_link(link: Link, candidate: "Candidate") -> bool: if candidate.source_link: return links_equivalent(link, candidate.source_link) return False @@ -112,8 +100,7 @@ def _match_link(link, candidate): class Candidate: @property - def project_name(self): - # type: () -> NormalizedName + def project_name(self) -> NormalizedName: """The "project name" of the candidate. This is different from ``name`` if this candidate contains extras, @@ -123,8 +110,7 @@ def project_name(self): raise NotImplementedError("Override in subclass") @property - def name(self): - # type: () -> str + def name(self) -> str: """The name identifying this candidate in the resolver. 
This is different from ``project_name`` if this candidate contains @@ -133,33 +119,26 @@ def name(self): raise NotImplementedError("Override in subclass") @property - def version(self): - # type: () -> CandidateVersion + def version(self) -> CandidateVersion: raise NotImplementedError("Override in subclass") @property - def is_installed(self): - # type: () -> bool + def is_installed(self) -> bool: raise NotImplementedError("Override in subclass") @property - def is_editable(self): - # type: () -> bool + def is_editable(self) -> bool: raise NotImplementedError("Override in subclass") @property - def source_link(self): - # type: () -> Optional[Link] + def source_link(self) -> Optional[Link]: raise NotImplementedError("Override in subclass") - def iter_dependencies(self, with_requires): - # type: (bool) -> Iterable[Optional[Requirement]] + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: raise NotImplementedError("Override in subclass") - def get_install_requirement(self): - # type: () -> Optional[InstallRequirement] + def get_install_requirement(self) -> Optional[InstallRequirement]: raise NotImplementedError("Override in subclass") - def format_for_error(self): - # type: () -> str + def format_for_error(self) -> str: raise NotImplementedError("Subclass should override") diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index e496e10dde8..5d510db868f 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -48,8 +48,9 @@ def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]: return None -def make_install_req_from_link(link, template): - # type: (Link, InstallRequirement) -> InstallRequirement +def make_install_req_from_link( + link: Link, template: InstallRequirement +) -> InstallRequirement: assert not template.editable, "template is editable" if template.req: line = 
str(template.req) @@ -73,8 +74,9 @@ def make_install_req_from_link(link, template): return ireq -def make_install_req_from_editable(link, template): - # type: (Link, InstallRequirement) -> InstallRequirement +def make_install_req_from_editable( + link: Link, template: InstallRequirement +) -> InstallRequirement: assert template.editable, "template not editable" return install_req_from_editable( link.url, @@ -91,8 +93,9 @@ def make_install_req_from_editable(link, template): ) -def make_install_req_from_dist(dist, template): - # type: (Distribution, InstallRequirement) -> InstallRequirement +def make_install_req_from_dist( + dist: Distribution, template: InstallRequirement +) -> InstallRequirement: project_name = canonicalize_name(dist.project_name) if template.req: line = str(template.req) @@ -137,14 +140,13 @@ class exposes appropriate information to the resolver. def __init__( self, - link, # type: Link - source_link, # type: Link - ireq, # type: InstallRequirement - factory, # type: Factory - name=None, # type: Optional[NormalizedName] - version=None, # type: Optional[CandidateVersion] - ): - # type: (...) 
-> None + link: Link, + source_link: Link, + ireq: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: self._link = link self._source_link = source_link self._factory = factory @@ -153,66 +155,55 @@ def __init__( self._version = version self.dist = self._prepare() - def __str__(self): - # type: () -> str + def __str__(self) -> str: return f"{self.name} {self.version}" - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "{class_name}({link!r})".format( class_name=self.__class__.__name__, link=str(self._link), ) - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: return hash((self.__class__, self._link)) - def __eq__(self, other): - # type: (Any) -> bool + def __eq__(self, other: Any) -> bool: if isinstance(other, self.__class__): return links_equivalent(self._link, other._link) return False @property - def source_link(self): - # type: () -> Optional[Link] + def source_link(self) -> Optional[Link]: return self._source_link @property - def project_name(self): - # type: () -> NormalizedName + def project_name(self) -> NormalizedName: """The normalised name of the project the candidate refers to""" if self._name is None: self._name = canonicalize_name(self.dist.project_name) return self._name @property - def name(self): - # type: () -> str + def name(self) -> str: return self.project_name @property - def version(self): - # type: () -> CandidateVersion + def version(self) -> CandidateVersion: if self._version is None: self._version = parse_version(self.dist.version) return self._version - def format_for_error(self): - # type: () -> str + def format_for_error(self) -> str: return "{} {} (from {})".format( self.name, self.version, self._link.file_path if self._link.is_file else self._link, ) - def _prepare_distribution(self): - # type: () -> Distribution + def _prepare_distribution(self) -> Distribution: raise NotImplementedError("Override 
in subclass") - def _check_metadata_consistency(self, dist): - # type: (Distribution) -> None + def _check_metadata_consistency(self, dist: Distribution) -> None: """Check for consistency of project name and version of dist.""" canonical_name = canonicalize_name(dist.project_name) if self._name is not None and self._name != canonical_name: @@ -231,8 +222,7 @@ def _check_metadata_consistency(self, dist): dist.version, ) - def _prepare(self): - # type: () -> Distribution + def _prepare(self) -> Distribution: try: dist = self._prepare_distribution() except HashError as e: @@ -244,8 +234,7 @@ def _prepare(self): self._check_metadata_consistency(dist) return dist - def _get_requires_python_dependency(self): - # type: () -> Optional[Requirement] + def _get_requires_python_dependency(self) -> Optional[Requirement]: requires_python = get_requires_python(self.dist) if requires_python is None: return None @@ -257,15 +246,13 @@ def _get_requires_python_dependency(self): return None return self._factory.make_requires_python_requirement(spec) - def iter_dependencies(self, with_requires): - # type: (bool) -> Iterable[Optional[Requirement]] + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: requires = self.dist.requires() if with_requires else () for r in requires: yield self._factory.make_requirement_from_spec(str(r), self._ireq) yield self._get_requires_python_dependency() - def get_install_requirement(self): - # type: () -> Optional[InstallRequirement] + def get_install_requirement(self) -> Optional[InstallRequirement]: return self._ireq @@ -274,13 +261,12 @@ class LinkCandidate(_InstallRequirementBackedCandidate): def __init__( self, - link, # type: Link - template, # type: InstallRequirement - factory, # type: Factory - name=None, # type: Optional[NormalizedName] - version=None, # type: Optional[CandidateVersion] - ): - # type: (...) 
-> None + link: Link, + template: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: source_link = link cache_entry = factory.get_wheel_cache_entry(link, name) if cache_entry is not None: @@ -315,8 +301,7 @@ def __init__( version=version, ) - def _prepare_distribution(self): - # type: () -> Distribution + def _prepare_distribution(self) -> Distribution: return self._factory.preparer.prepare_linked_requirement( self._ireq, parallel_builds=True ) @@ -327,13 +312,12 @@ class EditableCandidate(_InstallRequirementBackedCandidate): def __init__( self, - link, # type: Link - template, # type: InstallRequirement - factory, # type: Factory - name=None, # type: Optional[NormalizedName] - version=None, # type: Optional[CandidateVersion] - ): - # type: (...) -> None + link: Link, + template: InstallRequirement, + factory: "Factory", + name: Optional[NormalizedName] = None, + version: Optional[CandidateVersion] = None, + ) -> None: super().__init__( link=link, source_link=link, @@ -343,8 +327,7 @@ def __init__( version=version, ) - def _prepare_distribution(self): - # type: () -> Distribution + def _prepare_distribution(self) -> Distribution: return self._factory.preparer.prepare_editable_requirement(self._ireq) @@ -354,11 +337,10 @@ class AlreadyInstalledCandidate(Candidate): def __init__( self, - dist, # type: Distribution - template, # type: InstallRequirement - factory, # type: Factory - ): - # type: (...) 
-> None + dist: Distribution, + template: InstallRequirement, + factory: "Factory", + ) -> None: self.dist = dist self._ireq = make_install_req_from_dist(dist, template) self._factory = factory @@ -370,60 +352,49 @@ def __init__( skip_reason = "already satisfied" factory.preparer.prepare_installed_requirement(self._ireq, skip_reason) - def __str__(self): - # type: () -> str + def __str__(self) -> str: return str(self.dist) - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "{class_name}({distribution!r})".format( class_name=self.__class__.__name__, distribution=self.dist, ) - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: return hash((self.__class__, self.name, self.version)) - def __eq__(self, other): - # type: (Any) -> bool + def __eq__(self, other: Any) -> bool: if isinstance(other, self.__class__): return self.name == other.name and self.version == other.version return False @property - def project_name(self): - # type: () -> NormalizedName + def project_name(self) -> NormalizedName: return canonicalize_name(self.dist.project_name) @property - def name(self): - # type: () -> str + def name(self) -> str: return self.project_name @property - def version(self): - # type: () -> CandidateVersion + def version(self) -> CandidateVersion: return parse_version(self.dist.version) @property - def is_editable(self): - # type: () -> bool + def is_editable(self) -> bool: return dist_is_editable(self.dist) - def format_for_error(self): - # type: () -> str + def format_for_error(self) -> str: return f"{self.name} {self.version} (Installed)" - def iter_dependencies(self, with_requires): - # type: (bool) -> Iterable[Optional[Requirement]] + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: if not with_requires: return for r in self.dist.requires(): yield self._factory.make_requirement_from_spec(str(r), self._ireq) - def get_install_requirement(self): - # type: () -> Optional[InstallRequirement] + 
def get_install_requirement(self) -> Optional[InstallRequirement]: return None @@ -454,75 +425,62 @@ class ExtrasCandidate(Candidate): def __init__( self, - base, # type: BaseCandidate - extras, # type: FrozenSet[str] - ): - # type: (...) -> None + base: BaseCandidate, + extras: FrozenSet[str], + ) -> None: self.base = base self.extras = extras - def __str__(self): - # type: () -> str + def __str__(self) -> str: name, rest = str(self.base).split(" ", 1) return "{}[{}] {}".format(name, ",".join(self.extras), rest) - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "{class_name}(base={base!r}, extras={extras!r})".format( class_name=self.__class__.__name__, base=self.base, extras=self.extras, ) - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: return hash((self.base, self.extras)) - def __eq__(self, other): - # type: (Any) -> bool + def __eq__(self, other: Any) -> bool: if isinstance(other, self.__class__): return self.base == other.base and self.extras == other.extras return False @property - def project_name(self): - # type: () -> NormalizedName + def project_name(self) -> NormalizedName: return self.base.project_name @property - def name(self): - # type: () -> str + def name(self) -> str: """The normalised name of the project the candidate refers to""" return format_name(self.base.project_name, self.extras) @property - def version(self): - # type: () -> CandidateVersion + def version(self) -> CandidateVersion: return self.base.version - def format_for_error(self): - # type: () -> str + def format_for_error(self) -> str: return "{} [{}]".format( self.base.format_for_error(), ", ".join(sorted(self.extras)) ) @property - def is_installed(self): - # type: () -> bool + def is_installed(self) -> bool: return self.base.is_installed @property - def is_editable(self): - # type: () -> bool + def is_editable(self) -> bool: return self.base.is_editable @property - def source_link(self): - # type: () -> Optional[Link] + def 
source_link(self) -> Optional[Link]: return self.base.source_link - def iter_dependencies(self, with_requires): - # type: (bool) -> Iterable[Optional[Requirement]] + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: factory = self.base._factory # Add a dependency on the exact base @@ -550,8 +508,7 @@ def iter_dependencies(self, with_requires): if requirement: yield requirement - def get_install_requirement(self): - # type: () -> Optional[InstallRequirement] + def get_install_requirement(self) -> Optional[InstallRequirement]: # We don't return anything here, because we always # depend on the base candidate, and we'll get the # install requirement from that. @@ -562,8 +519,7 @@ class RequiresPythonCandidate(Candidate): is_installed = False source_link = None - def __init__(self, py_version_info): - # type: (Optional[Tuple[int, ...]]) -> None + def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None: if py_version_info is not None: version_info = normalize_version_info(py_version_info) else: @@ -574,33 +530,26 @@ def __init__(self, py_version_info): # only one RequiresPythonCandidate in a resolution, i.e. the host Python. # The built-in object.__eq__() and object.__ne__() do exactly what we want. 
- def __str__(self): - # type: () -> str + def __str__(self) -> str: return f"Python {self._version}" @property - def project_name(self): - # type: () -> NormalizedName + def project_name(self) -> NormalizedName: return REQUIRES_PYTHON_IDENTIFIER @property - def name(self): - # type: () -> str + def name(self) -> str: return REQUIRES_PYTHON_IDENTIFIER @property - def version(self): - # type: () -> CandidateVersion + def version(self) -> CandidateVersion: return self._version - def format_for_error(self): - # type: () -> str + def format_for_error(self) -> str: return f"Python {self.version}" - def iter_dependencies(self, with_requires): - # type: (bool) -> Iterable[Optional[Requirement]] + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: return () - def get_install_requirement(self): - # type: () -> Optional[InstallRequirement] + def get_install_requirement(self) -> Optional[InstallRequirement]: return None diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index bec25b59888..792a930468b 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -84,17 +84,16 @@ class ConflictCause(Protocol): class Factory: def __init__( self, - finder, # type: PackageFinder - preparer, # type: RequirementPreparer - make_install_req, # type: InstallRequirementProvider - wheel_cache, # type: Optional[WheelCache] - use_user_site, # type: bool - force_reinstall, # type: bool - ignore_installed, # type: bool - ignore_requires_python, # type: bool - py_version_info=None, # type: Optional[Tuple[int, ...]] - ): - # type: (...) 
-> None + finder: PackageFinder, + preparer: RequirementPreparer, + make_install_req: InstallRequirementProvider, + wheel_cache: Optional[WheelCache], + use_user_site: bool, + force_reinstall: bool, + ignore_installed: bool, + ignore_requires_python: bool, + py_version_info: Optional[Tuple[int, ...]] = None, + ) -> None: self._finder = finder self.preparer = preparer self._wheel_cache = wheel_cache @@ -104,15 +103,13 @@ def __init__( self._force_reinstall = force_reinstall self._ignore_requires_python = ignore_requires_python - self._build_failures = {} # type: Cache[InstallationError] - self._link_candidate_cache = {} # type: Cache[LinkCandidate] - self._editable_candidate_cache = {} # type: Cache[EditableCandidate] - self._installed_candidate_cache = ( - {} - ) # type: Dict[str, AlreadyInstalledCandidate] - self._extras_candidate_cache = ( - {} - ) # type: Dict[Tuple[int, FrozenSet[str]], ExtrasCandidate] + self._build_failures: Cache[InstallationError] = {} + self._link_candidate_cache: Cache[LinkCandidate] = {} + self._editable_candidate_cache: Cache[EditableCandidate] = {} + self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {} + self._extras_candidate_cache: Dict[ + Tuple[int, FrozenSet[str]], ExtrasCandidate + ] = {} if not ignore_installed: self._installed_dists = { @@ -123,8 +120,7 @@ def __init__( self._installed_dists = {} @property - def force_reinstall(self): - # type: () -> bool + def force_reinstall(self) -> bool: return self._force_reinstall def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: @@ -136,8 +132,9 @@ def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: msg = f"{link.filename} is not a supported wheel on this platform." 
raise UnsupportedWheel(msg) - def _make_extras_candidate(self, base, extras): - # type: (BaseCandidate, FrozenSet[str]) -> ExtrasCandidate + def _make_extras_candidate( + self, base: BaseCandidate, extras: FrozenSet[str] + ) -> ExtrasCandidate: cache_key = (id(base), extras) try: candidate = self._extras_candidate_cache[cache_key] @@ -148,11 +145,10 @@ def _make_extras_candidate(self, base, extras): def _make_candidate_from_dist( self, - dist, # type: Distribution - extras, # type: FrozenSet[str] - template, # type: InstallRequirement - ): - # type: (...) -> Candidate + dist: Distribution, + extras: FrozenSet[str], + template: InstallRequirement, + ) -> Candidate: try: base = self._installed_candidate_cache[dist.key] except KeyError: @@ -164,13 +160,12 @@ def _make_candidate_from_dist( def _make_candidate_from_link( self, - link, # type: Link - extras, # type: FrozenSet[str] - template, # type: InstallRequirement - name, # type: Optional[NormalizedName] - version, # type: Optional[CandidateVersion] - ): - # type: (...) -> Optional[Candidate] + link: Link, + extras: FrozenSet[str], + template: InstallRequirement, + name: Optional[NormalizedName], + version: Optional[CandidateVersion], + ) -> Optional[Candidate]: # TODO: Check already installed candidate, and use it if the link and # editable flag match. @@ -193,7 +188,7 @@ def _make_candidate_from_link( logger.warning("Discarding %s. 
%s", link, e) self._build_failures[link] = e return None - base = self._editable_candidate_cache[link] # type: BaseCandidate + base: BaseCandidate = self._editable_candidate_cache[link] else: if link not in self._link_candidate_cache: try: @@ -233,7 +228,7 @@ def _iter_found_candidates( assert template.req, "Candidates found on index must be PEP 508" name = canonicalize_name(template.req.name) - extras = frozenset() # type: FrozenSet[str] + extras: FrozenSet[str] = frozenset() for ireq in ireqs: assert ireq.req, "Candidates found on index must be PEP 508" specifier &= ireq.req.specifier @@ -264,8 +259,7 @@ def _get_installed_candidate() -> Optional[Candidate]: return None return candidate - def iter_index_candidate_infos(): - # type: () -> Iterator[IndexCandidateInfo] + def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]: result = self._finder.find_best_candidate( project_name=name, specifier=specifier, @@ -353,8 +347,8 @@ def find_candidates( prefers_installed: bool, ) -> Iterable[Candidate]: # Collect basic lookup information from the requirements. 
- explicit_candidates = set() # type: Set[Candidate] - ireqs = [] # type: List[InstallRequirement] + explicit_candidates: Set[Candidate] = set() + ireqs: List[InstallRequirement] = [] for req in requirements[identifier]: cand, ireq = req.get_candidate_lookup() if cand is not None: @@ -414,8 +408,9 @@ def find_candidates( and all(req.is_satisfied_by(c) for req in requirements[identifier]) ) - def make_requirement_from_install_req(self, ireq, requested_extras): - # type: (InstallRequirement, Iterable[str]) -> Optional[Requirement] + def make_requirement_from_install_req( + self, ireq: InstallRequirement, requested_extras: Iterable[str] + ) -> Optional[Requirement]: if not ireq.match_markers(requested_extras): logger.info( "Ignoring %s: markers '%s' don't match your environment", @@ -445,28 +440,30 @@ def make_requirement_from_install_req(self, ireq, requested_extras): return UnsatisfiableRequirement(canonicalize_name(ireq.name)) return self.make_requirement_from_candidate(cand) - def make_requirement_from_candidate(self, candidate): - # type: (Candidate) -> ExplicitRequirement + def make_requirement_from_candidate( + self, candidate: Candidate + ) -> ExplicitRequirement: return ExplicitRequirement(candidate) def make_requirement_from_spec( self, - specifier, # type: str - comes_from, # type: InstallRequirement - requested_extras=(), # type: Iterable[str] - ): - # type: (...) 
-> Optional[Requirement] + specifier: str, + comes_from: InstallRequirement, + requested_extras: Iterable[str] = (), + ) -> Optional[Requirement]: ireq = self._make_install_req_from_spec(specifier, comes_from) return self.make_requirement_from_install_req(ireq, requested_extras) - def make_requires_python_requirement(self, specifier): - # type: (Optional[SpecifierSet]) -> Optional[Requirement] + def make_requires_python_requirement( + self, specifier: Optional[SpecifierSet] + ) -> Optional[Requirement]: if self._ignore_requires_python or specifier is None: return None return RequiresPythonRequirement(specifier, self._python_candidate) - def get_wheel_cache_entry(self, link, name): - # type: (Link, Optional[str]) -> Optional[CacheEntry] + def get_wheel_cache_entry( + self, link: Link, name: Optional[str] + ) -> Optional[CacheEntry]: """Look up the link in the wheel cache. If ``preparer.require_hashes`` is True, don't use the wheel cache, @@ -483,8 +480,7 @@ def get_wheel_cache_entry(self, link, name): supported_tags=get_supported(), ) - def get_dist_to_uninstall(self, candidate): - # type: (Candidate) -> Optional[Distribution] + def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[Distribution]: # TODO: Are there more cases this needs to return True? Editable? dist = self._installed_dists.get(candidate.project_name) if dist is None: # Not installed, no uninstallation required. 
@@ -514,8 +510,9 @@ def get_dist_to_uninstall(self, candidate): ) return None - def _report_requires_python_error(self, causes): - # type: (Sequence[ConflictCause]) -> UnsupportedPythonVersion + def _report_requires_python_error( + self, causes: Sequence["ConflictCause"] + ) -> UnsupportedPythonVersion: assert causes, "Requires-Python error reported with no cause" version = self._python_candidate.version @@ -535,8 +532,9 @@ def _report_requires_python_error(self, causes): message += f"\n{specifier!r} (required by {package})" return UnsupportedPythonVersion(message) - def _report_single_requirement_conflict(self, req, parent): - # type: (Requirement, Optional[Candidate]) -> DistributionNotFound + def _report_single_requirement_conflict( + self, req: Requirement, parent: Optional[Candidate] + ) -> DistributionNotFound: if parent is None: req_disp = str(req) else: @@ -563,10 +561,9 @@ def _report_single_requirement_conflict(self, req, parent): def get_installation_error( self, - e, # type: ResolutionImpossible[Requirement, Candidate] - constraints, # type: Dict[str, Constraint] - ): - # type: (...) -> InstallationError + e: "ResolutionImpossible[Requirement, Candidate]", + constraints: Dict[str, Constraint], + ) -> InstallationError: assert e.causes, "Installation error reported with no cause" @@ -599,15 +596,13 @@ def get_installation_error( # satisfied at once. 
# A couple of formatting helpers - def text_join(parts): - # type: (List[str]) -> str + def text_join(parts: List[str]) -> str: if len(parts) == 1: return parts[0] return ", ".join(parts[:-1]) + " and " + parts[-1] - def describe_trigger(parent): - # type: (Candidate) -> str + def describe_trigger(parent: Candidate) -> str: ireq = parent.get_install_requirement() if not ireq or not ireq.comes_from: return f"{parent.name}=={parent.version}" diff --git a/src/pip/_internal/resolution/resolvelib/found_candidates.py b/src/pip/_internal/resolution/resolvelib/found_candidates.py index 21fa08ec938..d2fa5ef5591 100644 --- a/src/pip/_internal/resolution/resolvelib/found_candidates.py +++ b/src/pip/_internal/resolution/resolvelib/found_candidates.py @@ -19,14 +19,13 @@ IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]] -def _iter_built(infos): - # type: (Iterator[IndexCandidateInfo]) -> Iterator[Candidate] +def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]: """Iterator for ``FoundCandidates``. This iterator is used when the package is not already installed. Candidates from index come later in their normal ordering. """ - versions_found = set() # type: Set[_BaseVersion] + versions_found: Set[_BaseVersion] = set() for version, func in infos: if version in versions_found: continue @@ -37,8 +36,9 @@ def _iter_built(infos): versions_found.add(version) -def _iter_built_with_prepended(installed, infos): - # type: (Candidate, Iterator[IndexCandidateInfo]) -> Iterator[Candidate] +def _iter_built_with_prepended( + installed: Candidate, infos: Iterator[IndexCandidateInfo] +) -> Iterator[Candidate]: """Iterator for ``FoundCandidates``. This iterator is used when the resolver prefers the already-installed @@ -47,7 +47,7 @@ def _iter_built_with_prepended(installed, infos): normal ordering, except skipped when the version is already installed. 
""" yield installed - versions_found = {installed.version} # type: Set[_BaseVersion] + versions_found: Set[_BaseVersion] = {installed.version} for version, func in infos: if version in versions_found: continue @@ -58,8 +58,9 @@ def _iter_built_with_prepended(installed, infos): versions_found.add(version) -def _iter_built_with_inserted(installed, infos): - # type: (Candidate, Iterator[IndexCandidateInfo]) -> Iterator[Candidate] +def _iter_built_with_inserted( + installed: Candidate, infos: Iterator[IndexCandidateInfo] +) -> Iterator[Candidate]: """Iterator for ``FoundCandidates``. This iterator is used when the resolver prefers to upgrade an @@ -70,7 +71,7 @@ def _iter_built_with_inserted(installed, infos): the installed candidate exactly once before we start yielding older or equivalent candidates, or after all other candidates if they are all newer. """ - versions_found = set() # type: Set[_BaseVersion] + versions_found: Set[_BaseVersion] = set() for version, func in infos: if version in versions_found: continue @@ -110,15 +111,13 @@ def __init__( self._prefers_installed = prefers_installed self._incompatible_ids = incompatible_ids - def __getitem__(self, index): - # type: (int) -> Candidate + def __getitem__(self, index: int) -> Candidate: # Implemented to satisfy the ABC check. This is not needed by the # resolver, and should not be used by the provider either (for # performance reasons). raise NotImplementedError("don't do this") - def __iter__(self): - # type: () -> Iterator[Candidate] + def __iter__(self) -> Iterator[Candidate]: infos = self._get_infos() if not self._installed: iterator = _iter_built(infos) @@ -128,16 +127,14 @@ def __iter__(self): iterator = _iter_built_with_inserted(self._installed, infos) return (c for c in iterator if id(c) not in self._incompatible_ids) - def __len__(self): - # type: () -> int + def __len__(self) -> int: # Implemented to satisfy the ABC check. 
This is not needed by the # resolver, and should not be used by the provider either (for # performance reasons). raise NotImplementedError("don't do this") @functools.lru_cache(maxsize=1) - def __bool__(self): - # type: () -> bool + def __bool__(self) -> bool: if self._prefers_installed and self._installed: return True return any(self) diff --git a/src/pip/_internal/resolution/resolvelib/provider.py b/src/pip/_internal/resolution/resolvelib/provider.py index e6b5bd544f6..c86fdc31fa3 100644 --- a/src/pip/_internal/resolution/resolvelib/provider.py +++ b/src/pip/_internal/resolution/resolvelib/provider.py @@ -50,13 +50,12 @@ class PipProvider(_ProviderBase): def __init__( self, - factory, # type: Factory - constraints, # type: Dict[str, Constraint] - ignore_dependencies, # type: bool - upgrade_strategy, # type: str - user_requested, # type: Dict[str, int] - ): - # type: (...) -> None + factory: Factory, + constraints: Dict[str, Constraint], + ignore_dependencies: bool, + upgrade_strategy: str, + user_requested: Dict[str, int], + ) -> None: self._factory = factory self._constraints = constraints self._ignore_dependencies = ignore_dependencies @@ -64,8 +63,7 @@ def __init__( self._user_requested = user_requested self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf) - def identify(self, requirement_or_candidate): - # type: (Union[Requirement, Candidate]) -> str + def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str: return requirement_or_candidate.name def get_preference( @@ -151,8 +149,7 @@ def find_matches( requirements: Mapping[str, Iterator[Requirement]], incompatibilities: Mapping[str, Iterator[Candidate]], ) -> Iterable[Candidate]: - def _eligible_for_upgrade(name): - # type: (str) -> bool + def _eligible_for_upgrade(name: str) -> bool: """Are upgrades allowed for this project? 
This checks the upgrade strategy, and whether the project was one @@ -177,11 +174,9 @@ def _eligible_for_upgrade(name): incompatibilities=incompatibilities, ) - def is_satisfied_by(self, requirement, candidate): - # type: (Requirement, Candidate) -> bool + def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool: return requirement.is_satisfied_by(candidate) - def get_dependencies(self, candidate): - # type: (Candidate) -> Sequence[Requirement] + def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]: with_requires = not self._ignore_dependencies return [r for r in candidate.iter_dependencies(with_requires) if r is not None] diff --git a/src/pip/_internal/resolution/resolvelib/reporter.py b/src/pip/_internal/resolution/resolvelib/reporter.py index 074583de0d9..7cf88ba113c 100644 --- a/src/pip/_internal/resolution/resolvelib/reporter.py +++ b/src/pip/_internal/resolution/resolvelib/reporter.py @@ -10,9 +10,8 @@ class PipReporter(BaseReporter): - def __init__(self): - # type: () -> None - self.backtracks_by_package = defaultdict(int) # type: DefaultDict[str, int] + def __init__(self) -> None: + self.backtracks_by_package: DefaultDict[str, int] = defaultdict(int) self._messages_at_backtrack = { 1: ( @@ -34,8 +33,7 @@ def __init__(self): ), } - def backtracking(self, candidate): - # type: (Candidate) -> None + def backtracking(self, candidate: Candidate) -> None: self.backtracks_by_package[candidate.name] += 1 count = self.backtracks_by_package[candidate.name] @@ -49,30 +47,23 @@ def backtracking(self, candidate): class PipDebuggingReporter(BaseReporter): """A reporter that does an info log for every event it sees.""" - def starting(self): - # type: () -> None + def starting(self) -> None: logger.info("Reporter.starting()") - def starting_round(self, index): - # type: (int) -> None + def starting_round(self, index: int) -> None: logger.info("Reporter.starting_round(%r)", index) - def ending_round(self, index, state): - # 
type: (int, Any) -> None + def ending_round(self, index: int, state: Any) -> None: logger.info("Reporter.ending_round(%r, state)", index) - def ending(self, state): - # type: (Any) -> None + def ending(self, state: Any) -> None: logger.info("Reporter.ending(%r)", state) - def adding_requirement(self, requirement, parent): - # type: (Requirement, Candidate) -> None + def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None: logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent) - def backtracking(self, candidate): - # type: (Candidate) -> None + def backtracking(self, candidate: Candidate) -> None: logger.info("Reporter.backtracking(%r)", candidate) - def pinning(self, candidate): - # type: (Candidate) -> None + def pinning(self, candidate: Candidate) -> None: logger.info("Reporter.pinning(%r)", candidate) diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index a7fcdd1e345..c19f83c172c 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -7,77 +7,63 @@ class ExplicitRequirement(Requirement): - def __init__(self, candidate): - # type: (Candidate) -> None + def __init__(self, candidate: Candidate) -> None: self.candidate = candidate - def __str__(self): - # type: () -> str + def __str__(self) -> str: return str(self.candidate) - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "{class_name}({candidate!r})".format( class_name=self.__class__.__name__, candidate=self.candidate, ) @property - def project_name(self): - # type: () -> NormalizedName + def project_name(self) -> NormalizedName: # No need to canonicalise - the candidate did this return self.candidate.project_name @property - def name(self): - # type: () -> str + def name(self) -> str: # No need to canonicalise - the candidate did this return self.candidate.name - def 
format_for_error(self): - # type: () -> str + def format_for_error(self) -> str: return self.candidate.format_for_error() - def get_candidate_lookup(self): - # type: () -> CandidateLookup + def get_candidate_lookup(self) -> CandidateLookup: return self.candidate, None - def is_satisfied_by(self, candidate): - # type: (Candidate) -> bool + def is_satisfied_by(self, candidate: Candidate) -> bool: return candidate == self.candidate class SpecifierRequirement(Requirement): - def __init__(self, ireq): - # type: (InstallRequirement) -> None + def __init__(self, ireq: InstallRequirement) -> None: assert ireq.link is None, "This is a link, not a specifier" self._ireq = ireq self._extras = frozenset(ireq.extras) - def __str__(self): - # type: () -> str + def __str__(self) -> str: return str(self._ireq.req) - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "{class_name}({requirement!r})".format( class_name=self.__class__.__name__, requirement=str(self._ireq.req), ) @property - def project_name(self): - # type: () -> NormalizedName + def project_name(self) -> NormalizedName: assert self._ireq.req, "Specifier-backed ireq is always PEP 508" return canonicalize_name(self._ireq.req.name) @property - def name(self): - # type: () -> str + def name(self) -> str: return format_name(self.project_name, self._extras) - def format_for_error(self): - # type: () -> str + def format_for_error(self) -> str: # Convert comma-separated specifiers into "A, B, ..., F and G" # This makes the specifier a bit more "human readable", without @@ -91,12 +77,10 @@ def format_for_error(self): return ", ".join(parts[:-1]) + " and " + parts[-1] - def get_candidate_lookup(self): - # type: () -> CandidateLookup + def get_candidate_lookup(self) -> CandidateLookup: return None, self._ireq - def is_satisfied_by(self, candidate): - # type: (Candidate) -> bool + def is_satisfied_by(self, candidate: Candidate) -> bool: assert candidate.name == self.name, ( f"Internal issue: Candidate is 
not for this requirement " f"{candidate.name} vs {self.name}" @@ -112,44 +96,36 @@ def is_satisfied_by(self, candidate): class RequiresPythonRequirement(Requirement): """A requirement representing Requires-Python metadata.""" - def __init__(self, specifier, match): - # type: (SpecifierSet, Candidate) -> None + def __init__(self, specifier: SpecifierSet, match: Candidate) -> None: self.specifier = specifier self._candidate = match - def __str__(self): - # type: () -> str + def __str__(self) -> str: return f"Python {self.specifier}" - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "{class_name}({specifier!r})".format( class_name=self.__class__.__name__, specifier=str(self.specifier), ) @property - def project_name(self): - # type: () -> NormalizedName + def project_name(self) -> NormalizedName: return self._candidate.project_name @property - def name(self): - # type: () -> str + def name(self) -> str: return self._candidate.name - def format_for_error(self): - # type: () -> str + def format_for_error(self) -> str: return str(self) - def get_candidate_lookup(self): - # type: () -> CandidateLookup + def get_candidate_lookup(self) -> CandidateLookup: if self.specifier.contains(self._candidate.version, prereleases=True): return self._candidate, None return None, None - def is_satisfied_by(self, candidate): - # type: (Candidate) -> bool + def is_satisfied_by(self, candidate: Candidate) -> bool: assert candidate.name == self._candidate.name, "Not Python candidate" # We can safely always allow prereleases here since PackageFinder # already implements the prerelease logic, and would have filtered out @@ -160,39 +136,31 @@ def is_satisfied_by(self, candidate): class UnsatisfiableRequirement(Requirement): """A requirement that cannot be satisfied.""" - def __init__(self, name): - # type: (NormalizedName) -> None + def __init__(self, name: NormalizedName) -> None: self._name = name - def __str__(self): - # type: () -> str + def __str__(self) -> str: 
return f"{self._name} (unavailable)" - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "{class_name}({name!r})".format( class_name=self.__class__.__name__, name=str(self._name), ) @property - def project_name(self): - # type: () -> NormalizedName + def project_name(self) -> NormalizedName: return self._name @property - def name(self): - # type: () -> str + def name(self) -> str: return self._name - def format_for_error(self): - # type: () -> str + def format_for_error(self) -> str: return str(self) - def get_candidate_lookup(self): - # type: () -> CandidateLookup + def get_candidate_lookup(self) -> CandidateLookup: return None, None - def is_satisfied_by(self, candidate): - # type: (Candidate) -> bool + def is_satisfied_by(self, candidate: Candidate) -> bool: return False diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py index b90f82cfa26..2f05a0ea9fb 100644 --- a/src/pip/_internal/resolution/resolvelib/resolver.py +++ b/src/pip/_internal/resolution/resolvelib/resolver.py @@ -45,17 +45,17 @@ class Resolver(BaseResolver): def __init__( self, - preparer, # type: RequirementPreparer - finder, # type: PackageFinder - wheel_cache, # type: Optional[WheelCache] - make_install_req, # type: InstallRequirementProvider - use_user_site, # type: bool - ignore_dependencies, # type: bool - ignore_installed, # type: bool - ignore_requires_python, # type: bool - force_reinstall, # type: bool - upgrade_strategy, # type: str - py_version_info=None, # type: Optional[Tuple[int, ...]] + preparer: RequirementPreparer, + finder: PackageFinder, + wheel_cache: Optional[WheelCache], + make_install_req: InstallRequirementProvider, + use_user_site: bool, + ignore_dependencies: bool, + ignore_installed: bool, + ignore_requires_python: bool, + force_reinstall: bool, + upgrade_strategy: str, + py_version_info: Optional[Tuple[int, ...]] = None, ): super().__init__() assert upgrade_strategy in 
self._allowed_strategies @@ -73,13 +73,14 @@ def __init__( ) self.ignore_dependencies = ignore_dependencies self.upgrade_strategy = upgrade_strategy - self._result = None # type: Optional[Result] + self._result: Optional[Result] = None - def resolve(self, root_reqs, check_supported_wheels): - # type: (List[InstallRequirement], bool) -> RequirementSet + def resolve( + self, root_reqs: List[InstallRequirement], check_supported_wheels: bool + ) -> RequirementSet: - constraints = {} # type: Dict[str, Constraint] - user_requested = {} # type: Dict[str, int] + constraints: Dict[str, Constraint] = {} + user_requested: Dict[str, int] = {} requirements = [] for i, req in enumerate(root_reqs): if req.constraint: @@ -114,13 +115,13 @@ def resolve(self, root_reqs, check_supported_wheels): user_requested=user_requested, ) if "PIP_RESOLVER_DEBUG" in os.environ: - reporter = PipDebuggingReporter() # type: BaseReporter + reporter: BaseReporter = PipDebuggingReporter() else: reporter = PipReporter() - resolver = RLResolver( + resolver: RLResolver[Requirement, Candidate, str] = RLResolver( provider, reporter, - ) # type: RLResolver[Requirement, Candidate, str] + ) try: try_to_avoid_resolution_too_deep = 2000000 @@ -215,8 +216,9 @@ def resolve(self, root_reqs, check_supported_wheels): self.factory.preparer.prepare_linked_requirements_more(reqs) return req_set - def get_installation_order(self, req_set): - # type: (RequirementSet) -> List[InstallRequirement] + def get_installation_order( + self, req_set: RequirementSet + ) -> List[InstallRequirement]: """Get order for installation of requirements in RequirementSet. 
The returned list contains a requirement before another that depends on @@ -244,8 +246,9 @@ def get_installation_order(self, req_set): return [ireq for _, ireq in sorted_items] -def get_topological_weights(graph, expected_node_count): - # type: (DirectedGraph[Optional[str]], int) -> Dict[Optional[str], int] +def get_topological_weights( + graph: "DirectedGraph[Optional[str]]", expected_node_count: int +) -> Dict[Optional[str], int]: """Assign weights to each node based on how "deep" they are. This implementation may change at any point in the future without prior @@ -262,11 +265,10 @@ def get_topological_weights(graph, expected_node_count): When assigning weight, the longer path (i.e. larger length) is preferred. """ - path = set() # type: Set[Optional[str]] - weights = {} # type: Dict[Optional[str], int] + path: Set[Optional[str]] = set() + weights: Dict[Optional[str], int] = {} - def visit(node): - # type: (Optional[str]) -> None + def visit(node: Optional[str]) -> None: if node in path: # We hit a cycle, so we'll break it here. return @@ -291,10 +293,9 @@ def visit(node): def _req_set_item_sorter( - item, # type: Tuple[str, InstallRequirement] - weights, # type: Dict[Optional[str], int] -): - # type: (...) -> Tuple[int, str] + item: Tuple[str, InstallRequirement], + weights: Dict[Optional[str], int], +) -> Tuple[int, str]: """Key function used to sort install requirements for installation. Based on the "weight" mapping calculated in ``get_installation_order()``. 
From 23eb69fe219b6bc82fe5bbbdd7521b9dc42cf4c2 Mon Sep 17 00:00:00 2001 From: Harutaka Kawamura Date: Mon, 12 Jul 2021 12:42:16 +0900 Subject: [PATCH 42/60] Complete type annotations in `pip/_internal/locations` (#10127) --- news/10127.trivial.rst | 1 + src/pip/_internal/locations/__init__.py | 30 +++++++---------- src/pip/_internal/locations/_distutils.py | 41 +++++++++++------------ src/pip/_internal/locations/_sysconfig.py | 36 ++++++++------------ src/pip/_internal/locations/base.py | 10 +++--- 5 files changed, 51 insertions(+), 67 deletions(-) create mode 100644 news/10127.trivial.rst diff --git a/news/10127.trivial.rst b/news/10127.trivial.rst new file mode 100644 index 00000000000..e58a99f34d9 --- /dev/null +++ b/news/10127.trivial.rst @@ -0,0 +1 @@ +Converted type commentaries into annotations in ``pip/_internal/locations``. diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 0cd48dd56a1..8eedd992661 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -69,14 +69,13 @@ def _log_context( def get_scheme( - dist_name, # type: str - user=False, # type: bool - home=None, # type: Optional[str] - root=None, # type: Optional[str] - isolated=False, # type: bool - prefix=None, # type: Optional[str] -): - # type: (...) 
-> Scheme + dist_name: str, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, +) -> Scheme: old = _distutils.get_scheme( dist_name, user=user, @@ -124,8 +123,7 @@ def get_scheme( return old -def get_bin_prefix(): - # type: () -> str +def get_bin_prefix() -> str: old = _distutils.get_bin_prefix() new = _sysconfig.get_bin_prefix() if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"): @@ -133,13 +131,11 @@ def get_bin_prefix(): return old -def get_bin_user(): - # type: () -> str +def get_bin_user() -> str: return _sysconfig.get_scheme("", user=True).scripts -def get_purelib(): - # type: () -> str +def get_purelib() -> str: """Return the default pure-Python lib location.""" old = _distutils.get_purelib() new = _sysconfig.get_purelib() @@ -148,8 +144,7 @@ def get_purelib(): return old -def get_platlib(): - # type: () -> str +def get_platlib() -> str: """Return the default platform-shared lib location.""" old = _distutils.get_platlib() new = _sysconfig.get_platlib() @@ -158,8 +153,7 @@ def get_platlib(): return old -def get_prefixed_libs(prefix): - # type: (str) -> List[str] +def get_prefixed_libs(prefix: str) -> List[str]: """Return the lib locations under ``prefix``.""" old_pure, old_plat = _distutils.get_prefixed_libs(prefix) new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix) diff --git a/src/pip/_internal/locations/_distutils.py b/src/pip/_internal/locations/_distutils.py index 2d7ab73213c..b34b8e3877a 100644 --- a/src/pip/_internal/locations/_distutils.py +++ b/src/pip/_internal/locations/_distutils.py @@ -19,21 +19,25 @@ def _distutils_scheme( - dist_name, user=False, home=None, root=None, isolated=False, prefix=None -): - # type:(str, bool, str, str, bool, str) -> Dict[str, str] + dist_name: str, + user: bool = False, + home: str = None, + root: str = None, + isolated: bool = False, + prefix: str = None, +) -> Dict[str, str]: """ Return a 
distutils install scheme """ from distutils.dist import Distribution - dist_args = {"name": dist_name} # type: Dict[str, Union[str, List[str]]] + dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name} if isolated: dist_args["script_args"] = ["--no-user-cfg"] d = Distribution(dist_args) d.parse_config_files() - obj = None # type: Optional[DistutilsCommand] + obj: Optional[DistutilsCommand] = None obj = d.get_command_obj("install", create=True) assert obj is not None i = cast(distutils_install_command, obj) @@ -82,14 +86,13 @@ def _distutils_scheme( def get_scheme( - dist_name, # type: str - user=False, # type: bool - home=None, # type: Optional[str] - root=None, # type: Optional[str] - isolated=False, # type: bool - prefix=None, # type: Optional[str] -): - # type: (...) -> Scheme + dist_name: str, + user: bool = False, + home: Optional[str] = None, + root: Optional[str] = None, + isolated: bool = False, + prefix: Optional[str] = None, +) -> Scheme: """ Get the "scheme" corresponding to the input parameters. The distutils documentation provides the context for the available schemes: @@ -117,8 +120,7 @@ def get_scheme( ) -def get_bin_prefix(): - # type: () -> str +def get_bin_prefix() -> str: if WINDOWS: bin_py = os.path.join(sys.prefix, "Scripts") # buildout uses 'bin' on Windows too? 
@@ -132,18 +134,15 @@ def get_bin_prefix(): return os.path.join(sys.prefix, "bin") -def get_purelib(): - # type: () -> str +def get_purelib() -> str: return get_python_lib(plat_specific=False) -def get_platlib(): - # type: () -> str +def get_platlib() -> str: return get_python_lib(plat_specific=True) -def get_prefixed_libs(prefix): - # type: (str) -> Tuple[str, str] +def get_prefixed_libs(prefix: str) -> Tuple[str, str]: return ( get_python_lib(plat_specific=False, prefix=prefix), get_python_lib(plat_specific=True, prefix=prefix), diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index 03366ce6e71..bccf9853f3a 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -25,8 +25,7 @@ _AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names()) -def _infer_prefix(): - # type: () -> str +def _infer_prefix() -> str: """Try to find a prefix scheme for the current platform. This tries: @@ -51,8 +50,7 @@ def _infer_prefix(): return "posix_prefix" -def _infer_user(): - # type: () -> str +def _infer_user() -> str: """Try to find a user scheme for the current platform.""" suffixed = f"{os.name}_user" if suffixed in _AVAILABLE_SCHEMES: @@ -62,8 +60,7 @@ def _infer_user(): return "posix_user" -def _infer_home(): - # type: () -> str +def _infer_home() -> str: """Try to find a home for the current platform.""" suffixed = f"{os.name}_home" if suffixed in _AVAILABLE_SCHEMES: @@ -85,14 +82,13 @@ def _infer_home(): def get_scheme( - dist_name, # type: str - user=False, # type: bool - home=None, # type: typing.Optional[str] - root=None, # type: typing.Optional[str] - isolated=False, # type: bool - prefix=None, # type: typing.Optional[str] -): - # type: (...) 
-> Scheme + dist_name: str, + user: bool = False, + home: typing.Optional[str] = None, + root: typing.Optional[str] = None, + isolated: bool = False, + prefix: typing.Optional[str] = None, +) -> Scheme: """ Get the "scheme" corresponding to the input parameters. @@ -156,25 +152,21 @@ def get_scheme( return scheme -def get_bin_prefix(): - # type: () -> str +def get_bin_prefix() -> str: # Forcing to use /usr/local/bin for standard macOS framework installs. if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/": return "/usr/local/bin" return sysconfig.get_paths()["scripts"] -def get_purelib(): - # type: () -> str +def get_purelib() -> str: return sysconfig.get_paths()["purelib"] -def get_platlib(): - # type: () -> str +def get_platlib() -> str: return sysconfig.get_paths()["platlib"] -def get_prefixed_libs(prefix): - # type: (str) -> typing.Tuple[str, str] +def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]: paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix}) return (paths["purelib"], paths["platlib"]) diff --git a/src/pip/_internal/locations/base.py b/src/pip/_internal/locations/base.py index 98557abbe63..e9d59701ba4 100644 --- a/src/pip/_internal/locations/base.py +++ b/src/pip/_internal/locations/base.py @@ -11,11 +11,10 @@ USER_CACHE_DIR = appdirs.user_cache_dir("pip") # FIXME doesn't account for venv linked to global site-packages -site_packages = sysconfig.get_path("purelib") # type: typing.Optional[str] +site_packages: typing.Optional[str] = sysconfig.get_path("purelib") -def get_major_minor_version(): - # type: () -> str +def get_major_minor_version() -> str: """ Return the major-minor version of the current Python as a string, e.g. "3.7" or "3.10". 
@@ -23,8 +22,7 @@ def get_major_minor_version(): return "{}.{}".format(*sys.version_info) -def get_src_prefix(): - # type: () -> str +def get_src_prefix() -> str: if running_under_virtualenv(): src_prefix = os.path.join(sys.prefix, "src") else: @@ -43,6 +41,6 @@ def get_src_prefix(): try: # Use getusersitepackages if this is present, as it ensures that the # value is initialised properly. - user_site = site.getusersitepackages() # type: typing.Optional[str] + user_site: typing.Optional[str] = site.getusersitepackages() except AttributeError: user_site = site.USER_SITE From 6958b87d3dc42d175493395b748ee6271e29be36 Mon Sep 17 00:00:00 2001 From: Diego Ramirez Date: Sun, 11 Jul 2021 22:43:52 -0500 Subject: [PATCH 43/60] Complete type annotations: `pip/_internal/models` (#10138) --- news/10138.trivial.rst | 1 + src/pip/_internal/models/candidate.py | 9 +-- src/pip/_internal/models/direct_url.py | 83 +++++++++----------- src/pip/_internal/models/format_control.py | 22 +++--- src/pip/_internal/models/index.py | 6 +- src/pip/_internal/models/link.py | 85 ++++++++------------- src/pip/_internal/models/scheme.py | 12 +-- src/pip/_internal/models/search_scope.py | 23 +++--- src/pip/_internal/models/selection_prefs.py | 10 +-- src/pip/_internal/models/target_python.py | 17 ++--- src/pip/_internal/models/wheel.py | 17 ++--- 11 files changed, 115 insertions(+), 170 deletions(-) create mode 100644 news/10138.trivial.rst diff --git a/news/10138.trivial.rst b/news/10138.trivial.rst new file mode 100644 index 00000000000..524867fd07d --- /dev/null +++ b/news/10138.trivial.rst @@ -0,0 +1 @@ +Convert type commentaries to annotations on ``pip/_internal/models``. 
diff --git a/src/pip/_internal/models/candidate.py b/src/pip/_internal/models/candidate.py index 3b91704a21c..c673d8d05bd 100644 --- a/src/pip/_internal/models/candidate.py +++ b/src/pip/_internal/models/candidate.py @@ -10,8 +10,7 @@ class InstallationCandidate(KeyBasedCompareMixin): __slots__ = ["name", "version", "link"] - def __init__(self, name, version, link): - # type: (str, str, Link) -> None + def __init__(self, name: str, version: str, link: Link) -> None: self.name = name self.version = parse_version(version) self.link = link @@ -21,14 +20,12 @@ def __init__(self, name, version, link): defining_class=InstallationCandidate ) - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "".format( self.name, self.version, self.link, ) - def __str__(self): - # type: () -> str + def __str__(self) -> str: return '{!r} candidate (version {} at {})'.format( self.name, self.version, self.link, ) diff --git a/src/pip/_internal/models/direct_url.py b/src/pip/_internal/models/direct_url.py index 345dbaf109a..3f9b6993e3b 100644 --- a/src/pip/_internal/models/direct_url.py +++ b/src/pip/_internal/models/direct_url.py @@ -22,8 +22,9 @@ class DirectUrlValidationError(Exception): pass -def _get(d, expected_type, key, default=None): - # type: (Dict[str, Any], Type[T], str, Optional[T]) -> Optional[T] +def _get( + d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None +) -> Optional[T]: """Get value from dictionary and verify expected type.""" if key not in d: return default @@ -37,16 +38,16 @@ def _get(d, expected_type, key, default=None): return value -def _get_required(d, expected_type, key, default=None): - # type: (Dict[str, Any], Type[T], str, Optional[T]) -> T +def _get_required( + d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None +) -> T: value = _get(d, expected_type, key, default) if value is None: raise DirectUrlValidationError(f"{key} must have a value") return value -def 
_exactly_one_of(infos): - # type: (Iterable[Optional[InfoType]]) -> InfoType +def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType": infos = [info for info in infos if info is not None] if not infos: raise DirectUrlValidationError( @@ -60,8 +61,7 @@ def _exactly_one_of(infos): return infos[0] -def _filter_none(**kwargs): - # type: (Any) -> Dict[str, Any] +def _filter_none(**kwargs: Any) -> Dict[str, Any]: """Make dict excluding None values.""" return {k: v for k, v in kwargs.items() if v is not None} @@ -71,12 +71,12 @@ class VcsInfo: def __init__( self, - vcs, # type: str - commit_id, # type: str - requested_revision=None, # type: Optional[str] - resolved_revision=None, # type: Optional[str] - resolved_revision_type=None, # type: Optional[str] - ): + vcs: str, + commit_id: str, + requested_revision: Optional[str] = None, + resolved_revision: Optional[str] = None, + resolved_revision_type: Optional[str] = None, + ) -> None: self.vcs = vcs self.requested_revision = requested_revision self.commit_id = commit_id @@ -84,8 +84,7 @@ def __init__( self.resolved_revision_type = resolved_revision_type @classmethod - def _from_dict(cls, d): - # type: (Optional[Dict[str, Any]]) -> Optional[VcsInfo] + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]: if d is None: return None return cls( @@ -96,8 +95,7 @@ def _from_dict(cls, d): resolved_revision_type=_get(d, str, "resolved_revision_type"), ) - def _to_dict(self): - # type: () -> Dict[str, Any] + def _to_dict(self) -> Dict[str, Any]: return _filter_none( vcs=self.vcs, requested_revision=self.requested_revision, @@ -112,19 +110,17 @@ class ArchiveInfo: def __init__( self, - hash=None, # type: Optional[str] - ): + hash: Optional[str] = None, + ) -> None: self.hash = hash @classmethod - def _from_dict(cls, d): - # type: (Optional[Dict[str, Any]]) -> Optional[ArchiveInfo] + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]: if d is None: return None return 
cls(hash=_get(d, str, "hash")) - def _to_dict(self): - # type: () -> Dict[str, Any] + def _to_dict(self) -> Dict[str, Any]: return _filter_none(hash=self.hash) @@ -133,21 +129,19 @@ class DirInfo: def __init__( self, - editable=False, # type: bool - ): + editable: bool = False, + ) -> None: self.editable = editable @classmethod - def _from_dict(cls, d): - # type: (Optional[Dict[str, Any]]) -> Optional[DirInfo] + def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]: if d is None: return None return cls( editable=_get_required(d, bool, "editable", default=False) ) - def _to_dict(self): - # type: () -> Dict[str, Any] + def _to_dict(self) -> Dict[str, Any]: return _filter_none(editable=self.editable or None) @@ -158,16 +152,15 @@ class DirectUrl: def __init__( self, - url, # type: str - info, # type: InfoType - subdirectory=None, # type: Optional[str] - ): + url: str, + info: InfoType, + subdirectory: Optional[str] = None, + ) -> None: self.url = url self.info = info self.subdirectory = subdirectory - def _remove_auth_from_netloc(self, netloc): - # type: (str) -> str + def _remove_auth_from_netloc(self, netloc: str) -> str: if "@" not in netloc: return netloc user_pass, netloc_no_user_pass = netloc.split("@", 1) @@ -182,8 +175,7 @@ def _remove_auth_from_netloc(self, netloc): return netloc_no_user_pass @property - def redacted_url(self): - # type: () -> str + def redacted_url(self) -> str: """url with user:password part removed unless it is formed with environment variables as specified in PEP 610, or it is ``git`` in the case of a git URL. 
@@ -195,13 +187,11 @@ def redacted_url(self): ) return surl - def validate(self): - # type: () -> None + def validate(self) -> None: self.from_dict(self.to_dict()) @classmethod - def from_dict(cls, d): - # type: (Dict[str, Any]) -> DirectUrl + def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl": return DirectUrl( url=_get_required(d, str, "url"), subdirectory=_get(d, str, "subdirectory"), @@ -214,8 +204,7 @@ def from_dict(cls, d): ), ) - def to_dict(self): - # type: () -> Dict[str, Any] + def to_dict(self) -> Dict[str, Any]: res = _filter_none( url=self.redacted_url, subdirectory=self.subdirectory, @@ -224,10 +213,8 @@ def to_dict(self): return res @classmethod - def from_json(cls, s): - # type: (str) -> DirectUrl + def from_json(cls, s: str) -> "DirectUrl": return cls.from_dict(json.loads(s)) - def to_json(self): - # type: () -> str + def to_json(self) -> str: return json.dumps(self.to_dict(), sort_keys=True) diff --git a/src/pip/_internal/models/format_control.py b/src/pip/_internal/models/format_control.py index cf262af2918..010c3620d4a 100644 --- a/src/pip/_internal/models/format_control.py +++ b/src/pip/_internal/models/format_control.py @@ -11,8 +11,11 @@ class FormatControl: __slots__ = ["no_binary", "only_binary"] - def __init__(self, no_binary=None, only_binary=None): - # type: (Optional[Set[str]], Optional[Set[str]]) -> None + def __init__( + self, + no_binary: Optional[Set[str]] = None, + only_binary: Optional[Set[str]] = None + ) -> None: if no_binary is None: no_binary = set() if only_binary is None: @@ -21,8 +24,7 @@ def __init__(self, no_binary=None, only_binary=None): self.no_binary = no_binary self.only_binary = only_binary - def __eq__(self, other): - # type: (object) -> bool + def __eq__(self, other: object) -> bool: if not isinstance(other, self.__class__): return NotImplemented @@ -34,8 +36,7 @@ def __eq__(self, other): for k in self.__slots__ ) - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "{}({}, {})".format( 
self.__class__.__name__, self.no_binary, @@ -43,8 +44,7 @@ def __repr__(self): ) @staticmethod - def handle_mutual_excludes(value, target, other): - # type: (str, Set[str], Set[str]) -> None + def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None: if value.startswith('-'): raise CommandError( "--no-binary / --only-binary option requires 1 argument." @@ -66,8 +66,7 @@ def handle_mutual_excludes(value, target, other): other.discard(name) target.add(name) - def get_allowed_formats(self, canonical_name): - # type: (str) -> FrozenSet[str] + def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]: result = {"binary", "source"} if canonical_name in self.only_binary: result.discard('source') @@ -79,8 +78,7 @@ def get_allowed_formats(self, canonical_name): result.discard('binary') return frozenset(result) - def disallow_binaries(self): - # type: () -> None + def disallow_binaries(self) -> None: self.handle_mutual_excludes( ':all:', self.no_binary, self.only_binary, ) diff --git a/src/pip/_internal/models/index.py b/src/pip/_internal/models/index.py index b148abb4250..1874a5b60de 100644 --- a/src/pip/_internal/models/index.py +++ b/src/pip/_internal/models/index.py @@ -8,8 +8,7 @@ class PackageIndex: __slots__ = ['url', 'netloc', 'simple_url', 'pypi_url', 'file_storage_domain'] - def __init__(self, url, file_storage_domain): - # type: (str, str) -> None + def __init__(self, url: str, file_storage_domain: str) -> None: super().__init__() self.url = url self.netloc = urllib.parse.urlsplit(url).netloc @@ -21,8 +20,7 @@ def __init__(self, url, file_storage_domain): # block such packages themselves self.file_storage_domain = file_storage_domain - def _url_for_path(self, path): - # type: (str) -> str + def _url_for_path(self, path: str) -> str: return urllib.parse.urljoin(self.url, path) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index ebee3839598..411cfb62489 100644 --- 
a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -33,13 +33,12 @@ class Link(KeyBasedCompareMixin): def __init__( self, - url, # type: str - comes_from=None, # type: Optional[Union[str, HTMLPage]] - requires_python=None, # type: Optional[str] - yanked_reason=None, # type: Optional[str] - cache_link_parsing=True, # type: bool - ): - # type: (...) -> None + url: str, + comes_from: Optional[Union[str, "HTMLPage"]] = None, + requires_python: Optional[str] = None, + yanked_reason: Optional[str] = None, + cache_link_parsing: bool = True, + ) -> None: """ :param url: url of the resource pointed to (href of the link) :param comes_from: instance of HTMLPage where the link was found, @@ -78,8 +77,7 @@ def __init__( self.cache_link_parsing = cache_link_parsing - def __str__(self): - # type: () -> str + def __str__(self) -> str: if self.requires_python: rp = f' (requires-python:{self.requires_python})' else: @@ -90,18 +88,15 @@ def __str__(self): else: return redact_auth_from_url(str(self._url)) - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return f'' @property - def url(self): - # type: () -> str + def url(self) -> str: return self._url @property - def filename(self): - # type: () -> str + def filename(self) -> str: path = self.path.rstrip('/') name = posixpath.basename(path) if not name: @@ -115,48 +110,40 @@ def filename(self): return name @property - def file_path(self): - # type: () -> str + def file_path(self) -> str: return url_to_path(self.url) @property - def scheme(self): - # type: () -> str + def scheme(self) -> str: return self._parsed_url.scheme @property - def netloc(self): - # type: () -> str + def netloc(self) -> str: """ This can contain auth information. 
""" return self._parsed_url.netloc @property - def path(self): - # type: () -> str + def path(self) -> str: return urllib.parse.unquote(self._parsed_url.path) - def splitext(self): - # type: () -> Tuple[str, str] + def splitext(self) -> Tuple[str, str]: return splitext(posixpath.basename(self.path.rstrip('/'))) @property - def ext(self): - # type: () -> str + def ext(self) -> str: return self.splitext()[1] @property - def url_without_fragment(self): - # type: () -> str + def url_without_fragment(self) -> str: scheme, netloc, path, query, fragment = self._parsed_url return urllib.parse.urlunsplit((scheme, netloc, path, query, '')) _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)') @property - def egg_fragment(self): - # type: () -> Optional[str] + def egg_fragment(self) -> Optional[str]: match = self._egg_fragment_re.search(self._url) if not match: return None @@ -165,8 +152,7 @@ def egg_fragment(self): _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)') @property - def subdirectory_fragment(self): - # type: () -> Optional[str] + def subdirectory_fragment(self) -> Optional[str]: match = self._subdirectory_fragment_re.search(self._url) if not match: return None @@ -177,59 +163,49 @@ def subdirectory_fragment(self): ) @property - def hash(self): - # type: () -> Optional[str] + def hash(self) -> Optional[str]: match = self._hash_re.search(self._url) if match: return match.group(2) return None @property - def hash_name(self): - # type: () -> Optional[str] + def hash_name(self) -> Optional[str]: match = self._hash_re.search(self._url) if match: return match.group(1) return None @property - def show_url(self): - # type: () -> str + def show_url(self) -> str: return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0]) @property - def is_file(self): - # type: () -> bool + def is_file(self) -> bool: return self.scheme == 'file' - def is_existing_dir(self): - # type: () -> bool + def is_existing_dir(self) -> bool: return self.is_file and 
os.path.isdir(self.file_path) @property - def is_wheel(self): - # type: () -> bool + def is_wheel(self) -> bool: return self.ext == WHEEL_EXTENSION @property - def is_vcs(self): - # type: () -> bool + def is_vcs(self) -> bool: from pip._internal.vcs import vcs return self.scheme in vcs.all_schemes @property - def is_yanked(self): - # type: () -> bool + def is_yanked(self) -> bool: return self.yanked_reason is not None @property - def has_hash(self): - # type: () -> bool + def has_hash(self) -> bool: return self.hash_name is not None - def is_hash_allowed(self, hashes): - # type: (Optional[Hashes]) -> bool + def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: """ Return True if the link has a hash and it is allowed. """ @@ -243,6 +219,5 @@ def is_hash_allowed(self, hashes): # TODO: Relax this comparison logic to ignore, for example, fragments. -def links_equivalent(link1, link2): - # type: (Link, Link) -> bool +def links_equivalent(link1: Link, link2: Link) -> bool: return link1 == link2 diff --git a/src/pip/_internal/models/scheme.py b/src/pip/_internal/models/scheme.py index 697cd19b478..9a8dafba307 100644 --- a/src/pip/_internal/models/scheme.py +++ b/src/pip/_internal/models/scheme.py @@ -18,12 +18,12 @@ class Scheme: def __init__( self, - platlib, # type: str - purelib, # type: str - headers, # type: str - scripts, # type: str - data, # type: str - ): + platlib: str, + purelib: str, + headers: str, + scripts: str, + data: str, + ) -> None: self.platlib = platlib self.purelib = purelib self.headers = headers diff --git a/src/pip/_internal/models/search_scope.py b/src/pip/_internal/models/search_scope.py index a3f0a5c0f87..2e4a08c1c52 100644 --- a/src/pip/_internal/models/search_scope.py +++ b/src/pip/_internal/models/search_scope.py @@ -25,10 +25,9 @@ class SearchScope: @classmethod def create( cls, - find_links, # type: List[str] - index_urls, # type: List[str] - ): - # type: (...) 
-> SearchScope + find_links: List[str], + index_urls: List[str], + ) -> "SearchScope": """ Create a SearchScope object after normalizing the `find_links`. """ @@ -65,15 +64,13 @@ def create( def __init__( self, - find_links, # type: List[str] - index_urls, # type: List[str] - ): - # type: (...) -> None + find_links: List[str], + index_urls: List[str], + ) -> None: self.find_links = find_links self.index_urls = index_urls - def get_formatted_locations(self): - # type: () -> str + def get_formatted_locations(self) -> str: lines = [] redacted_index_urls = [] if self.index_urls and self.index_urls != [PyPI.simple_url]: @@ -106,16 +103,14 @@ def get_formatted_locations(self): ) return '\n'.join(lines) - def get_index_urls_locations(self, project_name): - # type: (str) -> List[str] + def get_index_urls_locations(self, project_name: str) -> List[str]: """Returns the locations found via self.index_urls Checks the url_name on the main (first in the list) index and use this url_name to produce all locations """ - def mkurl_pypi_url(url): - # type: (str) -> str + def mkurl_pypi_url(url: str) -> str: loc = posixpath.join( url, urllib.parse.quote(canonicalize_name(project_name))) diff --git a/src/pip/_internal/models/selection_prefs.py b/src/pip/_internal/models/selection_prefs.py index edc1cf79955..1fd79cba91a 100644 --- a/src/pip/_internal/models/selection_prefs.py +++ b/src/pip/_internal/models/selection_prefs.py @@ -18,11 +18,11 @@ class SelectionPreferences: # people when reading the code. def __init__( self, - allow_yanked, # type: bool - allow_all_prereleases=False, # type: bool - format_control=None, # type: Optional[FormatControl] - prefer_binary=False, # type: bool - ignore_requires_python=None, # type: Optional[bool] + allow_yanked: bool, + allow_all_prereleases: bool = False, + format_control: Optional[FormatControl] = None, + prefer_binary: bool = False, + ignore_requires_python: Optional[bool] = None, ): # type: (...) 
-> None """Create a SelectionPreferences object. diff --git a/src/pip/_internal/models/target_python.py b/src/pip/_internal/models/target_python.py index b91e349f566..ac69e152bd7 100644 --- a/src/pip/_internal/models/target_python.py +++ b/src/pip/_internal/models/target_python.py @@ -26,12 +26,11 @@ class TargetPython: def __init__( self, - platforms=None, # type: Optional[List[str]] - py_version_info=None, # type: Optional[Tuple[int, ...]] - abis=None, # type: Optional[List[str]] - implementation=None, # type: Optional[str] - ): - # type: (...) -> None + platforms: Optional[List[str]] = None, + py_version_info: Optional[Tuple[int, ...]] = None, + abis: Optional[List[str]] = None, + implementation: Optional[str] = None, + ) -> None: """ :param platforms: A list of strings or None. If None, searches for packages that are supported by the current system. Otherwise, will @@ -65,8 +64,7 @@ def __init__( # This is used to cache the return value of get_tags(). self._valid_tags = None # type: Optional[List[Tag]] - def format_given(self): - # type: () -> str + def format_given(self) -> str: """ Format the given, non-None attributes for display. """ @@ -87,8 +85,7 @@ def format_given(self): if value is not None ) - def get_tags(self): - # type: () -> List[Tag] + def get_tags(self) -> List[Tag]: """ Return the supported PEP 425 tags to check wheel candidates against. 
diff --git a/src/pip/_internal/models/wheel.py b/src/pip/_internal/models/wheel.py index 827ebca91c6..a79a86106ed 100644 --- a/src/pip/_internal/models/wheel.py +++ b/src/pip/_internal/models/wheel.py @@ -19,8 +19,7 @@ class Wheel: re.VERBOSE ) - def __init__(self, filename): - # type: (str) -> None + def __init__(self, filename: str) -> None: """ :raises InvalidWheelFilename: when the filename is invalid for a wheel """ @@ -45,13 +44,11 @@ def __init__(self, filename): for y in self.abis for z in self.plats } - def get_formatted_file_tags(self): - # type: () -> List[str] + def get_formatted_file_tags(self) -> List[str]: """Return the wheel's tags as a sorted list of strings.""" return sorted(str(tag) for tag in self.file_tags) - def support_index_min(self, tags): - # type: (List[Tag]) -> int + def support_index_min(self, tags: List[Tag]) -> int: """Return the lowest index that one of the wheel's file_tag combinations achieves in the given list of supported tags. @@ -66,8 +63,9 @@ def support_index_min(self, tags): """ return min(tags.index(tag) for tag in self.file_tags if tag in tags) - def find_most_preferred_tag(self, tags, tag_to_priority): - # type: (List[Tag], Dict[Tag, int]) -> int + def find_most_preferred_tag( + self, tags: List[Tag], tag_to_priority: Dict[Tag, int] + ) -> int: """Return the priority of the most preferred tag that one of the wheel's file tag combinations achieves in the given list of supported tags using the given tag_to_priority mapping, where lower priorities are more-preferred. @@ -86,8 +84,7 @@ def find_most_preferred_tag(self, tags, tag_to_priority): tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority ) - def supported(self, tags): - # type: (Iterable[Tag]) -> bool + def supported(self, tags: Iterable[Tag]) -> bool: """Return whether the wheel is compatible with one of the given tags. :param tags: the PEP 425 tags to check the wheel against. 
From c1da53daccd297c21b38c315f73c25f12c5c21a4 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sat, 24 Apr 2021 15:51:17 +0800 Subject: [PATCH 44/60] Migrate 'pip list' to use metadata abstraction --- src/pip/_internal/commands/list.py | 106 +++++++++++--------- src/pip/_internal/metadata/base.py | 25 ++++- src/pip/_internal/metadata/pkg_resources.py | 19 +++- 3 files changed, 99 insertions(+), 51 deletions(-) diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index dcf9432638a..43f58624aaf 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -1,9 +1,9 @@ import json import logging from optparse import Values -from typing import Iterator, List, Set, Tuple +from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Tuple, cast -from pip._vendor.pkg_resources import Distribution +from pip._vendor.packaging.utils import canonicalize_name from pip._internal.cli import cmdoptions from pip._internal.cli.req_command import IndexGroupCommand @@ -11,18 +11,27 @@ from pip._internal.exceptions import CommandError from pip._internal.index.collector import LinkCollector from pip._internal.index.package_finder import PackageFinder +from pip._internal.metadata import BaseDistribution, get_environment from pip._internal.models.selection_prefs import SelectionPreferences from pip._internal.network.session import PipSession -from pip._internal.utils.compat import stdlib_pkgs -from pip._internal.utils.misc import ( - dist_is_editable, - get_installed_distributions, - tabulate, - write_output, -) -from pip._internal.utils.packaging import get_installer +from pip._internal.utils.misc import stdlib_pkgs, tabulate, write_output from pip._internal.utils.parallel import map_multithread +if TYPE_CHECKING: + from pip._internal.metadata.base import DistributionVersion + + class _DistWithLatestInfo(BaseDistribution): + """Give the distribution object a couple of extra fields. 
+ + These will be populated during ``get_outdated()``. This is dirty but + makes the rest of the code much cleaner. + """ + latest_version: DistributionVersion + latest_filetype: str + + _ProcessedDists = Sequence[_DistWithLatestInfo] + + logger = logging.getLogger(__name__) @@ -145,14 +154,16 @@ def run(self, options, args): if options.excludes: skip.update(options.excludes) - packages = get_installed_distributions( - local_only=options.local, - user_only=options.user, - editables_only=options.editable, - include_editables=options.include_editable, - paths=options.path, - skip=skip, - ) + packages: "_ProcessedDists" = [ + cast("_DistWithLatestInfo", d) + for d in get_environment(options.path).iter_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + include_editables=options.include_editable, + skip=skip, + ) + ] # get_not_required must be called firstly in order to find and # filter out all dependencies correctly. Otherwise a package @@ -170,45 +181,47 @@ def run(self, options, args): return SUCCESS def get_outdated(self, packages, options): - # type: (List[Distribution], Values) -> List[Distribution] + # type: (_ProcessedDists, Values) -> _ProcessedDists return [ dist for dist in self.iter_packages_latest_infos(packages, options) - if dist.latest_version > dist.parsed_version + if dist.latest_version > dist.version ] def get_uptodate(self, packages, options): - # type: (List[Distribution], Values) -> List[Distribution] + # type: (_ProcessedDists, Values) -> _ProcessedDists return [ dist for dist in self.iter_packages_latest_infos(packages, options) - if dist.latest_version == dist.parsed_version + if dist.latest_version == dist.version ] def get_not_required(self, packages, options): - # type: (List[Distribution], Values) -> List[Distribution] - dep_keys = set() # type: Set[Distribution] - for dist in packages: - dep_keys.update(requirement.key for requirement in dist.requires()) + # type: 
(_ProcessedDists, Values) -> _ProcessedDists + dep_keys = { + canonicalize_name(dep.name) + for dist in packages + for dep in dist.iter_dependencies() + } # Create a set to remove duplicate packages, and cast it to a list # to keep the return type consistent with get_outdated and # get_uptodate - return list({pkg for pkg in packages if pkg.key not in dep_keys}) + return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys}) def iter_packages_latest_infos(self, packages, options): - # type: (List[Distribution], Values) -> Iterator[Distribution] + # type: (_ProcessedDists, Values) -> Iterator[_DistWithLatestInfo] with self._build_session(options) as session: finder = self._build_package_finder(options, session) def latest_info(dist): - # type: (Distribution) -> Distribution - all_candidates = finder.find_all_candidates(dist.key) + # type: (_DistWithLatestInfo) -> Optional[_DistWithLatestInfo] + all_candidates = finder.find_all_candidates(dist.canonical_name) if not options.pre: # Remove prereleases all_candidates = [candidate for candidate in all_candidates if not candidate.version.is_prerelease] evaluator = finder.make_candidate_evaluator( - project_name=dist.project_name, + project_name=dist.canonical_name, ) best_candidate = evaluator.sort_best_candidate(all_candidates) if best_candidate is None: @@ -219,7 +232,6 @@ def latest_info(dist): typ = 'wheel' else: typ = 'sdist' - # This is dirty but makes the rest of the code much cleaner dist.latest_version = remote_version dist.latest_filetype = typ return dist @@ -229,10 +241,10 @@ def latest_info(dist): yield dist def output_package_listing(self, packages, options): - # type: (List[Distribution], Values) -> None + # type: (_ProcessedDists, Values) -> None packages = sorted( packages, - key=lambda dist: dist.project_name.lower(), + key=lambda dist: dist.canonical_name, ) if options.list_format == 'columns' and packages: data, header = format_for_columns(packages, options) @@ -240,10 +252,10 @@ def 
output_package_listing(self, packages, options): elif options.list_format == 'freeze': for dist in packages: if options.verbose >= 1: - write_output("%s==%s (%s)", dist.project_name, + write_output("%s==%s (%s)", dist.canonical_name, dist.version, dist.location) else: - write_output("%s==%s", dist.project_name, dist.version) + write_output("%s==%s", dist.canonical_name, dist.version) elif options.list_format == 'json': write_output(format_for_json(packages, options)) @@ -264,7 +276,7 @@ def output_package_listing_columns(self, data, header): def format_for_columns(pkgs, options): - # type: (List[Distribution], Values) -> Tuple[List[List[str]], List[str]] + # type: (_ProcessedDists, Values) -> Tuple[List[List[str]], List[str]] """ Convert the package data into something usable by output_package_listing_columns. @@ -277,7 +289,7 @@ def format_for_columns(pkgs, options): header = ["Package", "Version"] data = [] - if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs): + if options.verbose >= 1 or any(x.editable for x in pkgs): header.append("Location") if options.verbose >= 1: header.append("Installer") @@ -285,16 +297,16 @@ def format_for_columns(pkgs, options): for proj in pkgs: # if we're working on the 'outdated' list, separate out the # latest_version and type - row = [proj.project_name, proj.version] + row = [proj.canonical_name, str(proj.version)] if running_outdated: - row.append(proj.latest_version) + row.append(str(proj.latest_version)) row.append(proj.latest_filetype) - if options.verbose >= 1 or dist_is_editable(proj): - row.append(proj.location) + if options.verbose >= 1 or proj.editable: + row.append(proj.location or "") if options.verbose >= 1: - row.append(get_installer(proj)) + row.append(proj.installer) data.append(row) @@ -302,16 +314,16 @@ def format_for_columns(pkgs, options): def format_for_json(packages, options): - # type: (List[Distribution], Values) -> str + # type: (_ProcessedDists, Values) -> str data = [] for dist in packages: 
info = { - 'name': dist.project_name, + 'name': dist.canonical_name, 'version': str(dist.version), } if options.verbose >= 1: - info['location'] = dist.location - info['installer'] = get_installer(dist) + info['location'] = dist.location or "" + info['installer'] = dist.installer if options.outdated: info['latest_version'] = str(dist.latest_version) info['latest_filetype'] = dist.latest_filetype diff --git a/src/pip/_internal/metadata/base.py b/src/pip/_internal/metadata/base.py index 659adc12daf..ae400ae3646 100644 --- a/src/pip/_internal/metadata/base.py +++ b/src/pip/_internal/metadata/base.py @@ -1,17 +1,32 @@ import logging import re -from typing import Container, Iterator, List, Optional, Union - +from typing import ( + TYPE_CHECKING, + Collection, + Container, + Iterable, + Iterator, + List, + Optional, + Union, +) + +from pip._vendor.packaging.requirements import Requirement from pip._vendor.packaging.version import LegacyVersion, Version from pip._internal.utils.misc import stdlib_pkgs # TODO: Move definition here. +if TYPE_CHECKING: + from typing import Protocol +else: + Protocol = object + DistributionVersion = Union[LegacyVersion, Version] logger = logging.getLogger(__name__) -class BaseDistribution: +class BaseDistribution(Protocol): @property def location(self) -> Optional[str]: """Where the distribution is loaded from. 
@@ -51,6 +66,10 @@ def local(self) -> bool: def in_usersite(self) -> bool: raise NotImplementedError() + def iter_dependencies(self, extras=()): + # type: (Collection[str]) -> Iterable[Requirement] + raise NotImplementedError() + class BaseEnvironment: """An environment containing distributions to introspect.""" diff --git a/src/pip/_internal/metadata/pkg_resources.py b/src/pip/_internal/metadata/pkg_resources.py index b7248c3119d..d7ac9d819fe 100644 --- a/src/pip/_internal/metadata/pkg_resources.py +++ b/src/pip/_internal/metadata/pkg_resources.py @@ -1,7 +1,9 @@ +import logging import zipfile -from typing import Iterator, List, Optional +from typing import Collection, Iterable, Iterator, List, Optional from pip._vendor import pkg_resources +from pip._vendor.packaging.requirements import Requirement from pip._vendor.packaging.utils import canonicalize_name from pip._vendor.packaging.version import parse as parse_version @@ -11,6 +13,8 @@ from .base import BaseDistribution, BaseEnvironment, DistributionVersion +logger = logging.getLogger(__name__) + class Distribution(BaseDistribution): def __init__(self, dist: pkg_resources.Distribution) -> None: @@ -57,6 +61,19 @@ def local(self) -> bool: def in_usersite(self) -> bool: return misc.dist_in_usersite(self._dist) + def iter_dependencies(self, extras=()): + # type: (Collection[str]) -> Iterable[Requirement] + # pkg_resources raises on invalid extras, so we sanitize. 
+ requested_extras = set(extras) + valid_extras = requested_extras & set(self._dist.extras) + for invalid_extra in requested_extras ^ valid_extras: + logger.warning( + "Invalid extra %r for package %r discarded", + invalid_extra, + self.canonical_name, + ) + return self._dist.requires(valid_extras) + class Environment(BaseEnvironment): def __init__(self, ws: pkg_resources.WorkingSet) -> None: From ef8299d559e3f014534043eacc797584efdcf69b Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sat, 10 Jul 2021 08:38:27 +0800 Subject: [PATCH 45/60] pip list now normalizes the project name --- tests/functional/test_list.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index 40dfbdea30d..c35116cc5c4 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -100,10 +100,10 @@ def test_multiple_exclude_and_normalization(script, tmpdir): script.pip("install", "--no-index", req_path) result = script.pip("list") print(result.stdout) - assert "Normalizable-Name" in result.stdout + assert "normalizable-name" in result.stdout assert "pip" in result.stdout result = script.pip("list", "--exclude", "normalizablE-namE", "--exclude", "pIp") - assert "Normalizable-Name" not in result.stdout + assert "normalizable-name" not in result.stdout assert "pip" not in result.stdout @@ -477,10 +477,10 @@ def test_not_required_flag(script, data): 'install', '-f', data.find_links, '--no-index', 'TopoRequires4' ) result = script.pip('list', '--not-required', expect_stderr=True) - assert 'TopoRequires4 ' in result.stdout, str(result) - assert 'TopoRequires ' not in result.stdout - assert 'TopoRequires2 ' not in result.stdout - assert 'TopoRequires3 ' not in result.stdout + assert 'toporequires4 ' in result.stdout, str(result) + assert 'toporequires ' not in result.stdout + assert 'toporequires2 ' not in result.stdout + assert 'toporequires3 ' not in result.stdout def 
test_list_freeze(simple_script): From e6c317769a7aa3765e7cc2b3d1968dd9dcb1d336 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sat, 10 Jul 2021 08:38:41 +0800 Subject: [PATCH 46/60] Move assert_[not_]installed to Script and use it This help function is much better than the previous ad-hoc logic used in test_uninstall.py, which has trouble identifying normalized names. --- tests/functional/test_new_resolver.py | 22 +++--------- tests/functional/test_new_resolver_hashes.py | 30 ++-------------- tests/functional/test_uninstall.py | 37 ++++++-------------- tests/lib/__init__.py | 23 ++++++++++++ 4 files changed, 40 insertions(+), 72 deletions(-) diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py index 68eb3b1c010..9fb9cb9bd2e 100644 --- a/tests/functional/test_new_resolver.py +++ b/tests/functional/test_new_resolver.py @@ -1,10 +1,8 @@ -import json import os import sys import textwrap import pytest -from pip._vendor.packaging.utils import canonicalize_name from tests.lib import ( create_basic_sdist_for_package, @@ -17,26 +15,14 @@ from tests.lib.wheel import make_wheel +# TODO: Remove me. def assert_installed(script, **kwargs): - ret = script.pip('list', '--format=json') - installed = { - (canonicalize_name(val['name']), val['version']) - for val in json.loads(ret.stdout) - } - expected = {(canonicalize_name(k), v) for k, v in kwargs.items()} - assert expected <= installed, f"{expected!r} not all in {installed!r}" + script.assert_installed(**kwargs) +# TODO: Remove me. def assert_not_installed(script, *args): - ret = script.pip("list", "--format=json") - installed = { - canonicalize_name(val["name"]) - for val in json.loads(ret.stdout) - } - # None of the given names should be listed as installed, i.e. their - # intersection should be empty. 
- expected = {canonicalize_name(k) for k in args} - assert not (expected & installed), f"{expected!r} contained in {installed!r}" + script.assert_not_installed(*args) def assert_editable(script, *args): diff --git a/tests/functional/test_new_resolver_hashes.py b/tests/functional/test_new_resolver_hashes.py index fed1be5e85e..5540e9b5490 100644 --- a/tests/functional/test_new_resolver_hashes.py +++ b/tests/functional/test_new_resolver_hashes.py @@ -1,9 +1,7 @@ import collections import hashlib -import json import pytest -from pip._vendor.packaging.utils import canonicalize_name from pip._internal.utils.urls import path_to_url from tests.lib import create_basic_sdist_for_package, create_basic_wheel_for_package @@ -13,30 +11,6 @@ ) -def assert_installed(script, **kwargs): - ret = script.pip('list', '--format=json') - installed = set( - (canonicalize_name(val['name']), val['version']) - for val in json.loads(ret.stdout) - ) - expected = set((canonicalize_name(k), v) for k, v in kwargs.items()) - assert expected <= installed, \ - "{!r} not all in {!r}".format(expected, installed) - - -def assert_not_installed(script, *args): - ret = script.pip("list", "--format=json") - installed = set( - canonicalize_name(val["name"]) - for val in json.loads(ret.stdout) - ) - # None of the given names should be listed as installed, i.e. their - # intersection should be empty. 
- expected = set(canonicalize_name(k) for k in args) - assert not (expected & installed), \ - "{!r} contained in {!r}".format(expected, installed) - - def _create_find_links(script): sdist_path = create_basic_sdist_for_package(script, "base", "0.1.0") wheel_path = create_basic_wheel_for_package(script, "base", "0.1.0") @@ -265,7 +239,7 @@ def test_new_resolver_hash_requirement_and_url_constraint_can_succeed( "--requirement", requirements_txt, ) - assert_installed(script, base="0.1.0") + script.assert_installed(base="0.1.0") @pytest.mark.parametrize("constrain_by_hash", [False, True]) @@ -307,4 +281,4 @@ def test_new_resolver_hash_requirement_and_url_constraint_can_fail( "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS FILE." ) in result.stderr, str(result) - assert_not_installed(script, "base", "other") + script.assert_not_installed("base", "other") diff --git a/tests/functional/test_uninstall.py b/tests/functional/test_uninstall.py index cbce8746a23..e6e32dad9a7 100644 --- a/tests/functional/test_uninstall.py +++ b/tests/functional/test_uninstall.py @@ -1,4 +1,3 @@ -import json import logging import os import sys @@ -46,8 +45,7 @@ def test_basic_uninstall_distutils(script): """)) result = script.run('python', pkg_path / 'setup.py', 'install') result = script.pip('list', '--format=json') - assert {"name": "distutils-install", "version": "0.1"} \ - in json.loads(result.stdout) + script.assert_installed(distutils_install="0.1") result = script.pip('uninstall', 'distutils_install', '-y', expect_stderr=True, expect_error=True) assert ( @@ -217,16 +215,13 @@ def test_uninstall_entry_point_colon_in_name(script, console_scripts): ) if sys.platform == 'win32': script_name += '.exe' - result = script.pip('install', pkg_path) + script.pip('install', pkg_path) assert script_name.exists() - result = script.pip('list', '--format=json') - assert {"name": "ep-install", "version": "0.1"} \ - in json.loads(result.stdout) + script.assert_installed(ep_install="0.1") + 
script.pip('uninstall', 'ep_install', '-y') assert not script_name.exists() - result2 = script.pip('list', '--format=json') - assert {"name": "ep-install", "version": "0.1"} \ - not in json.loads(result2.stdout) + script.assert_not_installed("ep-install") def test_uninstall_gui_scripts(script): @@ -550,9 +545,7 @@ def test_uninstall_setuptools_develop_install(script, data): expect_stderr=True, cwd=pkg_path) script.run('python', 'setup.py', 'install', expect_stderr=True, cwd=pkg_path) - list_result = script.pip('list', '--format=json') - assert {"name": os.path.normcase("FSPkg"), "version": "0.1.dev0"} \ - in json.loads(list_result.stdout), str(list_result) + script.assert_installed(FSPkg="0.1.dev0") # Uninstall both develop and install uninstall = script.pip('uninstall', 'FSPkg', '-y') assert any(filename.endswith('.egg') @@ -561,8 +554,7 @@ def test_uninstall_setuptools_develop_install(script, data): assert join( script.site_packages, 'FSPkg.egg-link' ) in uninstall2.files_deleted, list(uninstall2.files_deleted.keys()) - list_result2 = script.pip('list', '--format=json') - assert "FSPkg" not in {p["name"] for p in json.loads(list_result2.stdout)} + script.assert_not_installed("FSPkg") def test_uninstall_editable_and_pip_install(script, data): @@ -578,9 +570,7 @@ def test_uninstall_editable_and_pip_install(script, data): # ensure both are installed with --ignore-installed: script.pip('install', '--ignore-installed', '.', expect_stderr=True, cwd=pkg_path) - list_result = script.pip('list', '--format=json') - assert {"name": "FSPkg", "version": "0.1.dev0"} \ - in json.loads(list_result.stdout) + script.assert_installed(FSPkg="0.1.dev0") # Uninstall both develop and install uninstall = script.pip('uninstall', 'FSPkg', '-y') assert not any(filename.endswith('.egg-link') @@ -589,8 +579,7 @@ def test_uninstall_editable_and_pip_install(script, data): assert join( script.site_packages, 'FSPkg.egg-link' ) in uninstall2.files_deleted, list(uninstall2.files_deleted.keys()) - 
list_result2 = script.pip('list', '--format=json') - assert "FSPkg" not in {p["name"] for p in json.loads(list_result2.stdout)} + script.assert_not_installed("FSPkg") def test_uninstall_editable_and_pip_install_easy_install_remove(script, data): @@ -616,9 +605,7 @@ def test_uninstall_editable_and_pip_install_easy_install_remove(script, data): os.rename(easy_install_pth, pip_test_fspkg_pth) # Confirm that FSPkg is installed - list_result = script.pip('list', '--format=json') - assert {"name": "FSPkg", "version": "0.1.dev0"} \ - in json.loads(list_result.stdout) + script.assert_installed(FSPkg="0.1.dev0") # Remove pip-test-fspkg.pth os.remove(pip_test_fspkg_pth) @@ -632,9 +619,7 @@ def test_uninstall_editable_and_pip_install_easy_install_remove(script, data): ) in uninstall.files_deleted, list(uninstall.files_deleted.keys()) # Confirm that FSPkg is uninstalled - list_result = script.pip('list', '--format=json') - assert {"name": "FSPkg", "version": "0.1.dev0"} \ - not in json.loads(list_result.stdout) + script.assert_not_installed("FSPkg") # Rename pip-test.pth back to easy-install.pth os.rename(pip_test_pth, easy_install_pth) diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index cbb7b7b6d86..67996955316 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -1,3 +1,4 @@ +import json import os import re import shutil @@ -14,6 +15,7 @@ from zipfile import ZipFile import pytest +from pip._vendor.packaging.utils import canonicalize_name from scripttest import FoundDir, TestFileEnvironment from pip._internal.index.collector import LinkCollector @@ -668,6 +670,27 @@ def easy_install(self, *args, **kwargs): args = ("-m", "easy_install") + args return self.run("python", *args, **kwargs) + def assert_installed(self, **kwargs): + ret = self.pip("list", "--format=json") + installed = set( + (canonicalize_name(val["name"]), val["version"]) + for val in json.loads(ret.stdout) + ) + expected = set((canonicalize_name(k), v) for k, v in kwargs.items()) + 
assert expected <= installed, "{!r} not all in {!r}".format(expected, installed)
+
+    def assert_not_installed(self, *args):
+        ret = self.pip("list", "--format=json")
+        installed = set(
+            canonicalize_name(val["name"]) for val in json.loads(ret.stdout)
+        )
+        # None of the given names should be listed as installed, i.e. their
+        # intersection should be empty.
+        expected = set(canonicalize_name(k) for k in args)
+        assert not (expected & installed), "{!r} contained in {!r}".format(
+            expected, installed
+        )
+
 
 # FIXME ScriptTest does something similar, but only within a single
 # ProcResult; this generalizes it so states can be compared across

From 3209ad07aba84d041d8220c9e049fed9d2459a4e Mon Sep 17 00:00:00 2001
From: Tzu-ping Chung
Date: Mon, 12 Jul 2021 11:47:22 +0800
Subject: [PATCH 47/60] News for the pip list name normalization logic

---
 news/9825.process.rst | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 news/9825.process.rst

diff --git a/news/9825.process.rst b/news/9825.process.rst
new file mode 100644
index 00000000000..584bac11b69
--- /dev/null
+++ b/news/9825.process.rst
@@ -0,0 +1,2 @@
+``pip list`` now normalizes the distribution names. This is done as part of
+the refactoring to prepare for the migration to ``importlib.metadata``.
From f9c49eff8d2b3537d84647f9ddc46c88b6ecbf78 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sun, 20 Jun 2021 18:13:28 +0800 Subject: [PATCH 48/60] Failing test for local unnamed dependency --- tests/functional/test_new_resolver.py | 29 +++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py index 9fb9cb9bd2e..74191dbf361 100644 --- a/tests/functional/test_new_resolver.py +++ b/tests/functional/test_new_resolver.py @@ -1843,3 +1843,32 @@ def test_new_resolver_modifies_installed_incompatible(script): "d==1", ) assert_installed(script, d="1", c="2", b="2", a="2") + + +def test_new_resolver_transitively_depends_on_unnamed_local(script): + create_basic_wheel_for_package(script, name="certbot-docs", version="1") + certbot = create_test_package_with_setup( + script, + name="certbot", + version="99.99.0.dev0", + extras_require={"docs": ["certbot-docs"]} + ) + certbot_apache = create_test_package_with_setup( + script, + name="certbot-apache", + version="99.99.0.dev0", + install_requires=["certbot>=99.99.0.dev0"], + ) + + script.pip( + "install", + "--no-cache-dir", "--no-index", + "--find-links", script.scratch_path, + f"{certbot}[docs]", certbot_apache, + ) + assert_installed( + script, + certbot="99.99.0.dev0", + certbot_apache="99.99.0.dev0", + certbot_docs="1", + ) From 5d81040ee831a008fdaf9d2170f5a2a9a811aaac Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sun, 20 Jun 2021 18:29:15 +0800 Subject: [PATCH 49/60] Refactor root requirement collection into factory --- .../resolution/resolvelib/factory.py | 47 +++++++++++++++++-- .../resolution/resolvelib/resolver.py | 46 +++--------------- 2 files changed, 51 insertions(+), 42 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 792a930468b..25482ab7f0d 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ 
b/src/pip/_internal/resolution/resolvelib/factory.py @@ -9,6 +9,7 @@ Iterator, List, Mapping, + NamedTuple, Optional, Sequence, Set, @@ -38,7 +39,10 @@ from pip._internal.models.wheel import Wheel from pip._internal.operations.prepare import RequirementPreparer from pip._internal.req.constructors import install_req_from_link_and_ireq -from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_install import ( + InstallRequirement, + check_invalid_constraint_type, +) from pip._internal.resolution.base import InstallRequirementProvider from pip._internal.utils.compatibility_tags import get_supported from pip._internal.utils.hashes import Hashes @@ -81,6 +85,12 @@ class ConflictCause(Protocol): Cache = Dict[Link, C] +class CollectedRootRequirements(NamedTuple): + requirements: List[Requirement] + constraints: Dict[str, Constraint] + user_requested: Dict[str, int] + + class Factory: def __init__( self, @@ -408,7 +418,7 @@ def find_candidates( and all(req.is_satisfied_by(c) for req in requirements[identifier]) ) - def make_requirement_from_install_req( + def _make_requirement_from_install_req( self, ireq: InstallRequirement, requested_extras: Iterable[str] ) -> Optional[Requirement]: if not ireq.match_markers(requested_extras): @@ -440,6 +450,37 @@ def make_requirement_from_install_req( return UnsatisfiableRequirement(canonicalize_name(ireq.name)) return self.make_requirement_from_candidate(cand) + def collect_root_requirements( + self, root_ireqs: List[InstallRequirement] + ) -> CollectedRootRequirements: + collected = CollectedRootRequirements([], {}, {}) + for i, ireq in enumerate(root_ireqs): + if ireq.constraint: + # Ensure we only accept valid constraints + problem = check_invalid_constraint_type(ireq) + if problem: + raise InstallationError(problem) + if not ireq.match_markers(): + continue + assert ireq.name, "Constraint must be named" + name = canonicalize_name(ireq.name) + if name in collected.constraints: + 
collected.constraints[name] &= ireq + else: + collected.constraints[name] = Constraint.from_ireq(ireq) + else: + if ireq.user_supplied and ireq.name: + canonical_name = canonicalize_name(ireq.name) + if canonical_name not in collected.user_requested: + collected.user_requested[canonical_name] = i + req = self._make_requirement_from_install_req( + ireq, + requested_extras=(), + ) + if req is not None: + collected.requirements.append(req) + return collected + def make_requirement_from_candidate( self, candidate: Candidate ) -> ExplicitRequirement: @@ -452,7 +493,7 @@ def make_requirement_from_spec( requested_extras: Iterable[str] = (), ) -> Optional[Requirement]: ireq = self._make_install_req_from_spec(specifier, comes_from) - return self.make_requirement_from_install_req(ireq, requested_extras) + return self._make_requirement_from_install_req(ireq, requested_extras) def make_requires_python_requirement( self, specifier: Optional[SpecifierSet] diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py index 2f05a0ea9fb..4648052d454 100644 --- a/src/pip/_internal/resolution/resolvelib/resolver.py +++ b/src/pip/_internal/resolution/resolvelib/resolver.py @@ -10,13 +10,9 @@ from pip._vendor.resolvelib.structs import DirectedGraph from pip._internal.cache import WheelCache -from pip._internal.exceptions import InstallationError from pip._internal.index.package_finder import PackageFinder from pip._internal.operations.prepare import RequirementPreparer -from pip._internal.req.req_install import ( - InstallRequirement, - check_invalid_constraint_type, -) +from pip._internal.req.req_install import InstallRequirement from pip._internal.req.req_set import RequirementSet from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider from pip._internal.resolution.resolvelib.provider import PipProvider @@ -28,7 +24,7 @@ from pip._internal.utils.filetypes import is_archive_file from 
pip._internal.utils.misc import dist_is_editable -from .base import Candidate, Constraint, Requirement +from .base import Candidate, Requirement from .factory import Factory if TYPE_CHECKING: @@ -78,41 +74,13 @@ def __init__( def resolve( self, root_reqs: List[InstallRequirement], check_supported_wheels: bool ) -> RequirementSet: - - constraints: Dict[str, Constraint] = {} - user_requested: Dict[str, int] = {} - requirements = [] - for i, req in enumerate(root_reqs): - if req.constraint: - # Ensure we only accept valid constraints - problem = check_invalid_constraint_type(req) - if problem: - raise InstallationError(problem) - if not req.match_markers(): - continue - assert req.name, "Constraint must be named" - name = canonicalize_name(req.name) - if name in constraints: - constraints[name] &= req - else: - constraints[name] = Constraint.from_ireq(req) - else: - if req.user_supplied and req.name: - canonical_name = canonicalize_name(req.name) - if canonical_name not in user_requested: - user_requested[canonical_name] = i - r = self.factory.make_requirement_from_install_req( - req, requested_extras=() - ) - if r is not None: - requirements.append(r) - + collected = self.factory.collect_root_requirements(root_reqs) provider = PipProvider( factory=self.factory, - constraints=constraints, + constraints=collected.constraints, ignore_dependencies=self.ignore_dependencies, upgrade_strategy=self.upgrade_strategy, - user_requested=user_requested, + user_requested=collected.user_requested, ) if "PIP_RESOLVER_DEBUG" in os.environ: reporter: BaseReporter = PipDebuggingReporter() @@ -126,13 +94,13 @@ def resolve( try: try_to_avoid_resolution_too_deep = 2000000 result = self._result = resolver.resolve( - requirements, max_rounds=try_to_avoid_resolution_too_deep + collected.requirements, max_rounds=try_to_avoid_resolution_too_deep ) except ResolutionImpossible as e: error = self.factory.get_installation_error( cast("ResolutionImpossible[Requirement, Candidate]", e), - 
constraints,
+                collected.constraints,
             )
             raise error from e

From 73edd74c569bc3e1738aaf4ac99ac972a8ee1fb5 Mon Sep 17 00:00:00 2001
From: Tzu-ping Chung
Date: Sun, 20 Jun 2021 18:37:59 +0800
Subject: [PATCH 50/60] Detect user-requested package names by preparing

This always prepares each explicit requirement when the resolver is run,
instead of when they are being resolved. But we always want to prepare
them anyway (an explicit requirement has only one candidate, so it's
either do or die), so this is not really a problem.

---
 news/9204.bugfix.rst                               |  2 ++
 src/pip/_internal/resolution/resolvelib/factory.py | 11 +++++------
 2 files changed, 7 insertions(+), 6 deletions(-)
 create mode 100644 news/9204.bugfix.rst

diff --git a/news/9204.bugfix.rst b/news/9204.bugfix.rst
new file mode 100644
index 00000000000..70b518a3438
--- /dev/null
+++ b/news/9204.bugfix.rst
@@ -0,0 +1,2 @@
+New resolver: Detect when an unnamed requirement is user-specified (by building
+its metadata for the project name) so it can be correctly ordered in the resolver.
diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 25482ab7f0d..f1b750e252a 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -469,16 +469,15 @@ def collect_root_requirements( else: collected.constraints[name] = Constraint.from_ireq(ireq) else: - if ireq.user_supplied and ireq.name: - canonical_name = canonicalize_name(ireq.name) - if canonical_name not in collected.user_requested: - collected.user_requested[canonical_name] = i req = self._make_requirement_from_install_req( ireq, requested_extras=(), ) - if req is not None: - collected.requirements.append(req) + if req is None: + continue + if ireq.user_supplied and req.name not in collected.user_requested: + collected.user_requested[req.name] = i + collected.requirements.append(req) return collected def make_requirement_from_candidate( From 15012723079664f61f3b13674a664af1f6cdb1e3 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sun, 25 Apr 2021 16:24:42 +0800 Subject: [PATCH 51/60] Use osx_framework_user for Mac framework build --- news/9844.bugfix.rst | 1 + src/pip/_internal/locations/_sysconfig.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 news/9844.bugfix.rst diff --git a/news/9844.bugfix.rst b/news/9844.bugfix.rst new file mode 100644 index 00000000000..2ef39282851 --- /dev/null +++ b/news/9844.bugfix.rst @@ -0,0 +1 @@ +Fix ``--user`` install scheme selection for macOS framework build. 
diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index bccf9853f3a..a49e15b3e2f 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -52,7 +52,10 @@ def _infer_prefix() -> str: def _infer_user() -> str: """Try to find a user scheme for the current platform.""" - suffixed = f"{os.name}_user" + if sysconfig.get_config_var("PYTHONFRAMEWORK"): # Mac framework build. + suffixed = "osx_framework_user" + else: + suffixed = f"{os.name}_user" if suffixed in _AVAILABLE_SCHEMES: return suffixed if "posix_user" not in _AVAILABLE_SCHEMES: # User scheme unavailable. From 77810bc9b4ab914375e641c86f5d97f3e88cfd03 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 26 Apr 2021 13:16:07 +0800 Subject: [PATCH 52/60] Special osx_framework_library prefix scheme --- news/9844.bugfix.rst | 3 ++- src/pip/_internal/locations/_sysconfig.py | 11 ++++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/news/9844.bugfix.rst b/news/9844.bugfix.rst index 2ef39282851..c40b286fe6a 100644 --- a/news/9844.bugfix.rst +++ b/news/9844.bugfix.rst @@ -1 +1,2 @@ -Fix ``--user`` install scheme selection for macOS framework build. +Fix warnings about install scheme selection for Python framework builds +distributed by Apple's Command Line Tools. diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index a49e15b3e2f..b7842740ff8 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -25,11 +25,17 @@ _AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names()) +def _is_osx_framework() -> bool: + return sysconfig.get_config_var("PYTHONFRAMEWORK") + + def _infer_prefix() -> str: """Try to find a prefix scheme for the current platform. This tries: + * A special ``osx_framework_library`` for Python distributed by Apple's + Command Line Tools, when not running in a virtual environment. 
* Implementation + OS, used by PyPy on Windows (``pypy_nt``). * Implementation without OS, used by PyPy on POSIX (``pypy``). * OS + "prefix", used by CPython on POSIX (``posix_prefix``). @@ -37,6 +43,9 @@ def _infer_prefix() -> str: If none of the above works, fall back to ``posix_prefix``. """ + os_framework_global = _is_osx_framework() and not running_under_virtualenv() + if os_framework_global and "osx_framework_library" in _AVAILABLE_SCHEMES: + return "osx_framework_library" implementation_suffixed = f"{sys.implementation.name}_{os.name}" if implementation_suffixed in _AVAILABLE_SCHEMES: return implementation_suffixed @@ -52,7 +61,7 @@ def _infer_prefix() -> str: def _infer_user() -> str: """Try to find a user scheme for the current platform.""" - if sysconfig.get_config_var("PYTHONFRAMEWORK"): # Mac framework build. + if _is_osx_framework() and not running_under_virtualenv(): suffixed = "osx_framework_user" else: suffixed = f"{os.name}_user" From 57d9af2c4f5453c62caf63bf2a606afa80e13d1b Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 27 Apr 2021 15:56:12 +0800 Subject: [PATCH 53/60] Ignore header difference for osx_framework_user --- src/pip/_internal/locations/__init__.py | 14 ++++++++++++++ src/pip/_internal/locations/_sysconfig.py | 10 +++------- src/pip/_internal/locations/base.py | 4 ++++ 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 8eedd992661..452c232b6d4 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -11,6 +11,7 @@ USER_CACHE_DIR, get_major_minor_version, get_src_prefix, + is_osx_framework, site_packages, user_site, ) @@ -115,6 +116,19 @@ def get_scheme( if skip_pypy_special_case: continue + # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in + # the ``include`` value, but distutils's ``headers`` does. We'll let + # CPython decide whether this is a bug or feature. 
See bpo-43948. + skip_osx_framework_user_special_case = ( + user + and is_osx_framework() + and k == "headers" + and old_v.parent == new_v + and old_v.name.startswith("python") + ) + if skip_osx_framework_user_special_case: + continue + warned.append(_warn_if_mismatch(old_v, new_v, key=f"scheme.{k}")) if any(warned): diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index b7842740ff8..70a5893b480 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -9,7 +9,7 @@ from pip._internal.models.scheme import SCHEME_KEYS, Scheme from pip._internal.utils.virtualenv import running_under_virtualenv -from .base import get_major_minor_version +from .base import get_major_minor_version, is_osx_framework logger = logging.getLogger(__name__) @@ -25,10 +25,6 @@ _AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names()) -def _is_osx_framework() -> bool: - return sysconfig.get_config_var("PYTHONFRAMEWORK") - - def _infer_prefix() -> str: """Try to find a prefix scheme for the current platform. @@ -43,7 +39,7 @@ def _infer_prefix() -> str: If none of the above works, fall back to ``posix_prefix``. 
""" - os_framework_global = _is_osx_framework() and not running_under_virtualenv() + os_framework_global = is_osx_framework() and not running_under_virtualenv() if os_framework_global and "osx_framework_library" in _AVAILABLE_SCHEMES: return "osx_framework_library" implementation_suffixed = f"{sys.implementation.name}_{os.name}" @@ -61,7 +57,7 @@ def _infer_prefix() -> str: def _infer_user() -> str: """Try to find a user scheme for the current platform.""" - if _is_osx_framework() and not running_under_virtualenv(): + if is_osx_framework() and not running_under_virtualenv(): suffixed = "osx_framework_user" else: suffixed = f"{os.name}_user" diff --git a/src/pip/_internal/locations/base.py b/src/pip/_internal/locations/base.py index e9d59701ba4..315527f07b1 100644 --- a/src/pip/_internal/locations/base.py +++ b/src/pip/_internal/locations/base.py @@ -44,3 +44,7 @@ def get_src_prefix() -> str: user_site: typing.Optional[str] = site.getusersitepackages() except AttributeError: user_site = site.USER_SITE + + +def is_osx_framework() -> bool: + return bool(sysconfig.get_config_var("PYTHONFRAMEWORK")) From c5f9bc5f3bbe8e8f7a6ef3df284def313a64d5b1 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 4 Mar 2021 21:05:32 +0800 Subject: [PATCH 54/60] try-except distutils config parsing --- news/8931.bugfix.rst | 1 + src/pip/_internal/locations/_distutils.py | 13 ++++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 news/8931.bugfix.rst diff --git a/news/8931.bugfix.rst b/news/8931.bugfix.rst new file mode 100644 index 00000000000..4d854e363ce --- /dev/null +++ b/news/8931.bugfix.rst @@ -0,0 +1 @@ +Skip distutils configuration parsing on encoding errors. 
diff --git a/src/pip/_internal/locations/_distutils.py b/src/pip/_internal/locations/_distutils.py index b34b8e3877a..38742d1ddb6 100644 --- a/src/pip/_internal/locations/_distutils.py +++ b/src/pip/_internal/locations/_distutils.py @@ -3,6 +3,7 @@ # The following comment should be removed at some point in the future. # mypy: strict-optional=False +import logging import os import sys from distutils.cmd import Command as DistutilsCommand @@ -17,6 +18,8 @@ from .base import get_major_minor_version +logger = logging.getLogger(__name__) + def _distutils_scheme( dist_name: str, @@ -36,7 +39,15 @@ def _distutils_scheme( dist_args["script_args"] = ["--no-user-cfg"] d = Distribution(dist_args) - d.parse_config_files() + try: + d.parse_config_files() + except UnicodeDecodeError: + # Typeshed does not include find_config_files() for some reason. + paths = d.find_config_files() # type: ignore + logger.warning( + "Ignore distutils configs in %s due to encoding errors.", + ", ".join(os.path.basename(p) for p in paths), + ) obj: Optional[DistutilsCommand] = None obj = d.get_command_obj("install", create=True) assert obj is not None From 889571a5e1cd1bb20c30d0a815c6cb3a47ee2f61 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 9 Jul 2021 13:31:49 +0800 Subject: [PATCH 55/60] Re-enable location warning --- news/10151.removal.rst | 2 ++ src/pip/_internal/locations/__init__.py | 12 +++++++++--- 2 files changed, 11 insertions(+), 3 deletions(-) create mode 100644 news/10151.removal.rst diff --git a/news/10151.removal.rst b/news/10151.removal.rst new file mode 100644 index 00000000000..8e3ab145d6f --- /dev/null +++ b/news/10151.removal.rst @@ -0,0 +1,2 @@ +Re-enable the "Value for ... does not match" location warnings to field a new +round of feedback for the ``distutils``-``sysconfig`` transition. 
diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index f0b0143353a..79ca2c5e989 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -1,5 +1,6 @@ import functools import logging +import os import pathlib import sys import sysconfig @@ -34,6 +35,11 @@ logger = logging.getLogger(__name__) +if os.environ.get("_PIP_LOCATIONS_NO_WARN_ON_MISMATCH"): + _MISMATCH_LEVEL = logging.DEBUG +else: + _MISMATCH_LEVEL = logging.WARNING + def _default_base(*, user: bool) -> str: if user: @@ -48,13 +54,13 @@ def _default_base(*, user: bool) -> str: def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool: if old == new: return False - issue_url = "https://github.com/pypa/pip/issues/9617" + issue_url = "https://github.com/pypa/pip/issues/10151" message = ( "Value for %s does not match. Please report this to <%s>" "\ndistutils: %s" "\nsysconfig: %s" ) - logger.debug(message, key, issue_url, old, new) + logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new) return True @@ -69,7 +75,7 @@ def _log_context( message = ( "Additional context:" "\nuser = %r" "\nhome = %r" "\nroot = %r" "\nprefix = %r" ) - logger.debug(message, user, home, root, prefix) + logger.log(_MISMATCH_LEVEL, message, user, home, root, prefix) def get_scheme( From ca4aa121a9f7d876b4513cadf8d9e54c8515416a Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 8 Jul 2021 13:34:01 +0800 Subject: [PATCH 56/60] Use Python 3.10 sysconfig.get_preferred_scheme() This function is introduced to 3.10 to do precisely what we want, so let's use it if possible. 
--- src/pip/_internal/locations/_sysconfig.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index 70a5893b480..c49d547396f 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -24,6 +24,8 @@ _AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names()) +_HAS_PREFERRED_SCHEME_API = sys.version_info >= (3, 10) + def _infer_prefix() -> str: """Try to find a prefix scheme for the current platform. @@ -39,6 +41,8 @@ def _infer_prefix() -> str: If none of the above works, fall back to ``posix_prefix``. """ + if _HAS_PREFERRED_SCHEME_API: + return sysconfig.get_preferred_scheme("prefix") os_framework_global = is_osx_framework() and not running_under_virtualenv() if os_framework_global and "osx_framework_library" in _AVAILABLE_SCHEMES: return "osx_framework_library" @@ -57,6 +61,8 @@ def _infer_prefix() -> str: def _infer_user() -> str: """Try to find a user scheme for the current platform.""" + if _HAS_PREFERRED_SCHEME_API: + return sysconfig.get_preferred_scheme("user") if is_osx_framework() and not running_under_virtualenv(): suffixed = "osx_framework_user" else: @@ -70,6 +76,8 @@ def _infer_user() -> str: def _infer_home() -> str: """Try to find a home for the current platform.""" + if _HAS_PREFERRED_SCHEME_API: + return sysconfig.get_preferred_scheme("home") suffixed = f"{os.name}_home" if suffixed in _AVAILABLE_SCHEMES: return suffixed From 8349b9a9ae947cb012370309c42832617f078dd6 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 12 Jul 2021 15:47:32 +0800 Subject: [PATCH 57/60] Ask mypy to shut up about get_preferred_scheme --- src/pip/_internal/locations/_sysconfig.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index c49d547396f..0fc67843f7a 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ 
b/src/pip/_internal/locations/_sysconfig.py @@ -42,7 +42,7 @@ def _infer_prefix() -> str: If none of the above works, fall back to ``posix_prefix``. """ if _HAS_PREFERRED_SCHEME_API: - return sysconfig.get_preferred_scheme("prefix") + return sysconfig.get_preferred_scheme("prefix") # type: ignore os_framework_global = is_osx_framework() and not running_under_virtualenv() if os_framework_global and "osx_framework_library" in _AVAILABLE_SCHEMES: return "osx_framework_library" @@ -62,7 +62,7 @@ def _infer_prefix() -> str: def _infer_user() -> str: """Try to find a user scheme for the current platform.""" if _HAS_PREFERRED_SCHEME_API: - return sysconfig.get_preferred_scheme("user") + return sysconfig.get_preferred_scheme("user") # type: ignore if is_osx_framework() and not running_under_virtualenv(): suffixed = "osx_framework_user" else: @@ -77,7 +77,7 @@ def _infer_user() -> str: def _infer_home() -> str: """Try to find a home for the current platform.""" if _HAS_PREFERRED_SCHEME_API: - return sysconfig.get_preferred_scheme("home") + return sysconfig.get_preferred_scheme("home") # type: ignore suffixed = f"{os.name}_home" if suffixed in _AVAILABLE_SCHEMES: return suffixed From ce86dc86d6f901b0d5aaa6ac3ec10bc115b35e93 Mon Sep 17 00:00:00 2001 From: Diego Ramirez Date: Mon, 12 Jul 2021 14:28:36 -0500 Subject: [PATCH 58/60] Complete type annotations in `pip/_internal/index` (#10111) --- news/10111.trivial.rst | 1 + src/pip/_internal/index/collector.py | 116 +++++------- src/pip/_internal/index/package_finder.py | 219 ++++++++++------------ 3 files changed, 142 insertions(+), 194 deletions(-) create mode 100644 news/10111.trivial.rst diff --git a/news/10111.trivial.rst b/news/10111.trivial.rst new file mode 100644 index 00000000000..e646d70f418 --- /dev/null +++ b/news/10111.trivial.rst @@ -0,0 +1 @@ +Converted type commentaries into annotations in ``pip/_internal/index``. 
diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py index 0721e3683f9..14d745eefbb 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -46,8 +46,7 @@ ResponseHeaders = MutableMapping[str, str] -def _match_vcs_scheme(url): - # type: (str) -> Optional[str] +def _match_vcs_scheme(url: str) -> Optional[str]: """Look for VCS schemes in the URL. Returns the matched VCS scheme, or None if there's no match. @@ -59,15 +58,13 @@ def _match_vcs_scheme(url): class _NotHTML(Exception): - def __init__(self, content_type, request_desc): - # type: (str, str) -> None + def __init__(self, content_type: str, request_desc: str) -> None: super().__init__(content_type, request_desc) self.content_type = content_type self.request_desc = request_desc -def _ensure_html_header(response): - # type: (Response) -> None +def _ensure_html_header(response: Response) -> None: """Check the Content-Type header to ensure the response contains HTML. Raises `_NotHTML` if the content type is not text/html. @@ -81,8 +78,7 @@ class _NotHTTP(Exception): pass -def _ensure_html_response(url, session): - # type: (str, PipSession) -> None +def _ensure_html_response(url: str, session: PipSession) -> None: """Send a HEAD request to the URL, and ensure the response contains HTML. Raises `_NotHTTP` if the URL is not available for a HEAD request, or @@ -98,8 +94,7 @@ def _ensure_html_response(url, session): _ensure_html_header(resp) -def _get_html_response(url, session): - # type: (str, PipSession) -> Response +def _get_html_response(url: str, session: PipSession) -> Response: """Access an HTML page with GET, and return the response. 
This consists of three parts: @@ -149,8 +144,7 @@ def _get_html_response(url, session): return resp -def _get_encoding_from_headers(headers): - # type: (ResponseHeaders) -> Optional[str] +def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]: """Determine if we have any encoding information in our headers. """ if headers and "Content-Type" in headers: @@ -160,8 +154,7 @@ def _get_encoding_from_headers(headers): return None -def _determine_base_url(document, page_url): - # type: (HTMLElement, str) -> str +def _determine_base_url(document: HTMLElement, page_url: str) -> str: """Determine the HTML document's base URL. This looks for a ```` tag in the HTML document. If present, its href @@ -180,8 +173,7 @@ def _determine_base_url(document, page_url): return page_url -def _clean_url_path_part(part): - # type: (str) -> str +def _clean_url_path_part(part: str) -> str: """ Clean a "part" of a URL path (i.e. after splitting on "@" characters). """ @@ -189,8 +181,7 @@ def _clean_url_path_part(part): return urllib.parse.quote(urllib.parse.unquote(part)) -def _clean_file_url_path(part): - # type: (str) -> str +def _clean_file_url_path(part: str) -> str: """ Clean the first part of a URL path that corresponds to a local filesystem path (i.e. the first part after splitting on "@" characters). @@ -207,8 +198,7 @@ def _clean_file_url_path(part): _reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE) -def _clean_url_path(path, is_local_path): - # type: (str, bool) -> str +def _clean_url_path(path: str, is_local_path: bool) -> str: """ Clean the path portion of a URL. """ @@ -230,8 +220,7 @@ def _clean_url_path(path, is_local_path): return ''.join(cleaned_parts) -def _clean_link(url): - # type: (str) -> str +def _clean_link(url: str) -> str: """ Make sure a link is fully quoted. 
For example, if ' ' occurs in the URL, it will be replaced with "%20", @@ -247,11 +236,10 @@ def _clean_link(url): def _create_link_from_element( - anchor, # type: HTMLElement - page_url, # type: str - base_url, # type: str -): - # type: (...) -> Optional[Link] + anchor: HTMLElement, + page_url: str, + base_url: str, +) -> Optional[Link]: """ Convert an anchor element in a simple repository page to a Link. """ @@ -278,25 +266,21 @@ def _create_link_from_element( class CacheablePageContent: - def __init__(self, page): - # type: (HTMLPage) -> None + def __init__(self, page: "HTMLPage") -> None: assert page.cache_link_parsing self.page = page - def __eq__(self, other): - # type: (object) -> bool + def __eq__(self, other: object) -> bool: return (isinstance(other, type(self)) and self.page.url == other.page.url) - def __hash__(self): - # type: () -> int + def __hash__(self) -> int: return hash(self.page.url) def with_cached_html_pages( - fn, # type: Callable[[HTMLPage], Iterable[Link]] -): - # type: (...) 
-> Callable[[HTMLPage], List[Link]] + fn: Callable[["HTMLPage"], Iterable[Link]], +) -> Callable[["HTMLPage"], List[Link]]: """ Given a function that parses an Iterable[Link] from an HTMLPage, cache the function's result (keyed by CacheablePageContent), unless the HTMLPage @@ -304,13 +288,11 @@ def with_cached_html_pages( """ @functools.lru_cache(maxsize=None) - def wrapper(cacheable_page): - # type: (CacheablePageContent) -> List[Link] + def wrapper(cacheable_page: CacheablePageContent) -> List[Link]: return list(fn(cacheable_page.page)) @functools.wraps(fn) - def wrapper_wrapper(page): - # type: (HTMLPage) -> List[Link] + def wrapper_wrapper(page: "HTMLPage") -> List[Link]: if page.cache_link_parsing: return wrapper(CacheablePageContent(page)) return list(fn(page)) @@ -319,8 +301,7 @@ def wrapper_wrapper(page): @with_cached_html_pages -def parse_links(page): - # type: (HTMLPage) -> Iterable[Link] +def parse_links(page: "HTMLPage") -> Iterable[Link]: """ Parse an HTML document, and yield its anchor elements as Link objects. """ @@ -348,12 +329,11 @@ class HTMLPage: def __init__( self, - content, # type: bytes - encoding, # type: Optional[str] - url, # type: str - cache_link_parsing=True, # type: bool - ): - # type: (...) -> None + content: bytes, + encoding: Optional[str], + url: str, + cache_link_parsing: bool = True, + ) -> None: """ :param encoding: the encoding to decode the given content. :param url: the URL from which the HTML was downloaded. @@ -366,24 +346,21 @@ def __init__( self.url = url self.cache_link_parsing = cache_link_parsing - def __str__(self): - # type: () -> str + def __str__(self) -> str: return redact_auth_from_url(self.url) def _handle_get_page_fail( - link, # type: Link - reason, # type: Union[str, Exception] - meth=None # type: Optional[Callable[..., None]] -): - # type: (...) 
-> None + link: Link, + reason: Union[str, Exception], + meth: Optional[Callable[..., None]] = None +) -> None: if meth is None: meth = logger.debug meth("Could not fetch URL %s: %s - skipping", link, reason) -def _make_html_page(response, cache_link_parsing=True): - # type: (Response, bool) -> HTMLPage +def _make_html_page(response: Response, cache_link_parsing: bool = True) -> HTMLPage: encoding = _get_encoding_from_headers(response.headers) return HTMLPage( response.content, @@ -392,8 +369,9 @@ def _make_html_page(response, cache_link_parsing=True): cache_link_parsing=cache_link_parsing) -def _get_html_page(link, session=None): - # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] +def _get_html_page( + link: Link, session: Optional[PipSession] = None +) -> Optional["HTMLPage"]: if session is None: raise TypeError( "_get_html_page() missing 1 required keyword argument: 'session'" @@ -465,16 +443,18 @@ class LinkCollector: def __init__( self, - session, # type: PipSession - search_scope, # type: SearchScope - ): - # type: (...) -> None + session: PipSession, + search_scope: SearchScope, + ) -> None: self.search_scope = search_scope self.session = session @classmethod - def create(cls, session, options, suppress_no_index=False): - # type: (PipSession, Values, bool) -> LinkCollector + def create( + cls, session: PipSession, + options: Values, + suppress_no_index: bool = False + ) -> "LinkCollector": """ :param session: The Session to use to make requests. :param suppress_no_index: Whether to ignore the --no-index option @@ -500,12 +480,10 @@ def create(cls, session, options, suppress_no_index=False): return link_collector @property - def find_links(self): - # type: () -> List[str] + def find_links(self) -> List[str]: return self.search_scope.find_links - def fetch_page(self, location): - # type: (Link) -> Optional[HTMLPage] + def fetch_page(self, location: Link) -> Optional[HTMLPage]: """ Fetch an HTML page containing package links. 
""" diff --git a/src/pip/_internal/index/package_finder.py b/src/pip/_internal/index/package_finder.py index 8fa310ee30b..a6451b62e4b 100644 --- a/src/pip/_internal/index/package_finder.py +++ b/src/pip/_internal/index/package_finder.py @@ -51,11 +51,10 @@ def _check_link_requires_python( - link, # type: Link - version_info, # type: Tuple[int, int, int] - ignore_requires_python=False, # type: bool -): - # type: (...) -> bool + link: Link, + version_info: Tuple[int, int, int], + ignore_requires_python: bool = False, +) -> bool: """ Return whether the given Python version is compatible with a link's "Requires-Python" value. @@ -107,14 +106,13 @@ class LinkEvaluator: # people when reading the code. def __init__( self, - project_name, # type: str - canonical_name, # type: str - formats, # type: FrozenSet[str] - target_python, # type: TargetPython - allow_yanked, # type: bool - ignore_requires_python=None, # type: Optional[bool] - ): - # type: (...) -> None + project_name: str, + canonical_name: str, + formats: FrozenSet[str], + target_python: TargetPython, + allow_yanked: bool, + ignore_requires_python: Optional[bool] = None, + ) -> None: """ :param project_name: The user supplied package name. :param canonical_name: The canonical package name. @@ -143,8 +141,7 @@ def __init__( self.project_name = project_name - def evaluate_link(self, link): - # type: (Link) -> Tuple[bool, Optional[str]] + def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]: """ Determine whether a link is a candidate for installation. @@ -233,11 +230,10 @@ def evaluate_link(self, link): def filter_unallowed_hashes( - candidates, # type: List[InstallationCandidate] - hashes, # type: Hashes - project_name, # type: str -): - # type: (...) -> List[InstallationCandidate] + candidates: List[InstallationCandidate], + hashes: Hashes, + project_name: str, +) -> List[InstallationCandidate]: """ Filter out candidates whose hashes aren't allowed, and return a new list of candidates. 
@@ -316,10 +312,9 @@ class CandidatePreferences: def __init__( self, - prefer_binary=False, # type: bool - allow_all_prereleases=False, # type: bool - ): - # type: (...) -> None + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + ) -> None: """ :param allow_all_prereleases: Whether to allow all pre-releases. """ @@ -336,11 +331,10 @@ class BestCandidateResult: def __init__( self, - candidates, # type: List[InstallationCandidate] - applicable_candidates, # type: List[InstallationCandidate] - best_candidate, # type: Optional[InstallationCandidate] - ): - # type: (...) -> None + candidates: List[InstallationCandidate], + applicable_candidates: List[InstallationCandidate], + best_candidate: Optional[InstallationCandidate], + ) -> None: """ :param candidates: A sequence of all available candidates found. :param applicable_candidates: The applicable candidates. @@ -359,14 +353,12 @@ def __init__( self.best_candidate = best_candidate - def iter_all(self): - # type: () -> Iterable[InstallationCandidate] + def iter_all(self) -> Iterable[InstallationCandidate]: """Iterate through all candidates. """ return iter(self._candidates) - def iter_applicable(self): - # type: () -> Iterable[InstallationCandidate] + def iter_applicable(self) -> Iterable[InstallationCandidate]: """Iterate through the applicable candidates. """ return iter(self._applicable_candidates) @@ -382,14 +374,13 @@ class CandidateEvaluator: @classmethod def create( cls, - project_name, # type: str - target_python=None, # type: Optional[TargetPython] - prefer_binary=False, # type: bool - allow_all_prereleases=False, # type: bool - specifier=None, # type: Optional[specifiers.BaseSpecifier] - hashes=None, # type: Optional[Hashes] - ): - # type: (...) 
-> CandidateEvaluator + project_name: str, + target_python: Optional[TargetPython] = None, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> "CandidateEvaluator": """Create a CandidateEvaluator object. :param target_python: The target Python interpreter to use when @@ -418,14 +409,13 @@ def create( def __init__( self, - project_name, # type: str - supported_tags, # type: List[Tag] - specifier, # type: specifiers.BaseSpecifier - prefer_binary=False, # type: bool - allow_all_prereleases=False, # type: bool - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> None + project_name: str, + supported_tags: List[Tag], + specifier: specifiers.BaseSpecifier, + prefer_binary: bool = False, + allow_all_prereleases: bool = False, + hashes: Optional[Hashes] = None, + ) -> None: """ :param supported_tags: The PEP 425 tags supported by the target Python in order of preference (most preferred first). @@ -445,9 +435,8 @@ def __init__( def get_applicable_candidates( self, - candidates, # type: List[InstallationCandidate] - ): - # type: (...) -> List[InstallationCandidate] + candidates: List[InstallationCandidate], + ) -> List[InstallationCandidate]: """ Return the applicable candidates from a list of candidates. """ @@ -481,8 +470,7 @@ def get_applicable_candidates( return sorted(filtered_applicable_candidates, key=self._sort_key) - def _sort_key(self, candidate): - # type: (InstallationCandidate) -> CandidateSortingKey + def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey: """ Function to pass as the `key` argument to a call to sorted() to sort InstallationCandidates by preference. @@ -546,9 +534,8 @@ def _sort_key(self, candidate): def sort_best_candidate( self, - candidates, # type: List[InstallationCandidate] - ): - # type: (...) 
-> Optional[InstallationCandidate] + candidates: List[InstallationCandidate], + ) -> Optional[InstallationCandidate]: """ Return the best candidate per the instance's sort order, or None if no candidate is acceptable. @@ -560,9 +547,8 @@ def sort_best_candidate( def compute_best_candidate( self, - candidates, # type: List[InstallationCandidate] - ): - # type: (...) -> BestCandidateResult + candidates: List[InstallationCandidate], + ) -> BestCandidateResult: """ Compute and return a `BestCandidateResult` instance. """ @@ -586,14 +572,13 @@ class PackageFinder: def __init__( self, - link_collector, # type: LinkCollector - target_python, # type: TargetPython - allow_yanked, # type: bool - format_control=None, # type: Optional[FormatControl] - candidate_prefs=None, # type: CandidatePreferences - ignore_requires_python=None, # type: Optional[bool] - ): - # type: (...) -> None + link_collector: LinkCollector, + target_python: TargetPython, + allow_yanked: bool, + format_control: Optional[FormatControl] = None, + candidate_prefs: Optional[CandidatePreferences] = None, + ignore_requires_python: Optional[bool] = None, + ) -> None: """ This constructor is primarily meant to be used by the create() class method and from tests. @@ -627,11 +612,10 @@ def __init__( @classmethod def create( cls, - link_collector, # type: LinkCollector - selection_prefs, # type: SelectionPreferences - target_python=None, # type: Optional[TargetPython] - ): - # type: (...) -> PackageFinder + link_collector: LinkCollector, + selection_prefs: SelectionPreferences, + target_python: Optional[TargetPython] = None, + ) -> "PackageFinder": """Create a PackageFinder. 
:param selection_prefs: The candidate selection preferences, as a @@ -658,56 +642,45 @@ def create( ) @property - def target_python(self): - # type: () -> TargetPython + def target_python(self) -> TargetPython: return self._target_python @property - def search_scope(self): - # type: () -> SearchScope + def search_scope(self) -> SearchScope: return self._link_collector.search_scope @search_scope.setter - def search_scope(self, search_scope): - # type: (SearchScope) -> None + def search_scope(self, search_scope: SearchScope) -> None: self._link_collector.search_scope = search_scope @property - def find_links(self): - # type: () -> List[str] + def find_links(self) -> List[str]: return self._link_collector.find_links @property - def index_urls(self): - # type: () -> List[str] + def index_urls(self) -> List[str]: return self.search_scope.index_urls @property - def trusted_hosts(self): - # type: () -> Iterable[str] + def trusted_hosts(self) -> Iterable[str]: for host_port in self._link_collector.session.pip_trusted_origins: yield build_netloc(*host_port) @property - def allow_all_prereleases(self): - # type: () -> bool + def allow_all_prereleases(self) -> bool: return self._candidate_prefs.allow_all_prereleases - def set_allow_all_prereleases(self): - # type: () -> None + def set_allow_all_prereleases(self) -> None: self._candidate_prefs.allow_all_prereleases = True @property - def prefer_binary(self): - # type: () -> bool + def prefer_binary(self) -> bool: return self._candidate_prefs.prefer_binary - def set_prefer_binary(self): - # type: () -> None + def set_prefer_binary(self) -> None: self._candidate_prefs.prefer_binary = True - def make_link_evaluator(self, project_name): - # type: (str) -> LinkEvaluator + def make_link_evaluator(self, project_name: str) -> LinkEvaluator: canonical_name = canonicalize_name(project_name) formats = self.format_control.get_allowed_formats(canonical_name) @@ -720,8 +693,7 @@ def make_link_evaluator(self, project_name): 
ignore_requires_python=self._ignore_requires_python, ) - def _sort_links(self, links): - # type: (Iterable[Link]) -> List[Link] + def _sort_links(self, links: Iterable[Link]) -> List[Link]: """ Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates @@ -737,16 +709,16 @@ def _sort_links(self, links): no_eggs.append(link) return no_eggs + eggs - def _log_skipped_link(self, link, reason): - # type: (Link, str) -> None + def _log_skipped_link(self, link: Link, reason: str) -> None: if link not in self._logged_links: # Put the link at the end so the reason is more visible and because # the link string is usually very long. logger.debug('Skipping link: %s: %s', reason, link) self._logged_links.add(link) - def get_install_candidate(self, link_evaluator, link): - # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate] + def get_install_candidate( + self, link_evaluator: LinkEvaluator, link: Link + ) -> Optional[InstallationCandidate]: """ If the link is a candidate for install, convert it to an InstallationCandidate and return it. Otherwise, return None. @@ -763,8 +735,9 @@ def get_install_candidate(self, link_evaluator, link): version=result, ) - def evaluate_links(self, link_evaluator, links): - # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate] + def evaluate_links( + self, link_evaluator: LinkEvaluator, links: Iterable[Link] + ) -> List[InstallationCandidate]: """ Convert links that are candidates to InstallationCandidate objects. 
""" @@ -776,8 +749,9 @@ def evaluate_links(self, link_evaluator, links): return candidates - def process_project_url(self, project_url, link_evaluator): - # type: (Link, LinkEvaluator) -> List[InstallationCandidate] + def process_project_url( + self, project_url: Link, link_evaluator: LinkEvaluator + ) -> List[InstallationCandidate]: logger.debug( 'Fetching project page and analyzing links: %s', project_url, ) @@ -796,8 +770,7 @@ def process_project_url(self, project_url, link_evaluator): return package_links @functools.lru_cache(maxsize=None) - def find_all_candidates(self, project_name): - # type: (str) -> List[InstallationCandidate] + def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]: """Find all available InstallationCandidate for project_name This checks index_urls and find_links. @@ -844,11 +817,10 @@ def find_all_candidates(self, project_name): def make_candidate_evaluator( self, - project_name, # type: str - specifier=None, # type: Optional[specifiers.BaseSpecifier] - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> CandidateEvaluator + project_name: str, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> CandidateEvaluator: """Create a CandidateEvaluator object to use. """ candidate_prefs = self._candidate_prefs @@ -864,11 +836,10 @@ def make_candidate_evaluator( @functools.lru_cache(maxsize=None) def find_best_candidate( self, - project_name, # type: str - specifier=None, # type: Optional[specifiers.BaseSpecifier] - hashes=None, # type: Optional[Hashes] - ): - # type: (...) -> BestCandidateResult + project_name: str, + specifier: Optional[specifiers.BaseSpecifier] = None, + hashes: Optional[Hashes] = None, + ) -> BestCandidateResult: """Find matches for the given project and specifier. 
:param specifier: An optional object implementing `filter` @@ -885,8 +856,9 @@ def find_best_candidate( ) return candidate_evaluator.compute_best_candidate(candidates) - def find_requirement(self, req, upgrade): - # type: (InstallRequirement, bool) -> Optional[InstallationCandidate] + def find_requirement( + self, req: InstallRequirement, upgrade: bool + ) -> Optional[InstallationCandidate]: """Try to find a Link matching req Expects req, an InstallRequirement and upgrade, a boolean @@ -903,8 +875,7 @@ def find_requirement(self, req, upgrade): if req.satisfied_by is not None: installed_version = parse_version(req.satisfied_by.version) - def _format_versions(cand_iter): - # type: (Iterable[InstallationCandidate]) -> str + def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str: # This repeated parse_version and str() conversion is needed to # handle different vendoring sources from pip and pkg_resources. # If we stop using the pkg_resources provided specifier and start @@ -967,8 +938,7 @@ def _format_versions(cand_iter): return best_candidate -def _find_name_version_sep(fragment, canonical_name): - # type: (str, str) -> int +def _find_name_version_sep(fragment: str, canonical_name: str) -> int: """Find the separator's index based on the package's canonical name. :param fragment: A + filename "fragment" (stem) or @@ -994,8 +964,7 @@ def _find_name_version_sep(fragment, canonical_name): raise ValueError(f"{fragment} does not match {canonical_name}") -def _extract_version_from_fragment(fragment, canonical_name): - # type: (str, str) -> Optional[str] +def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]: """Parse the version string from a + filename "fragment" (stem) or egg fragment. 
From 3b3fde2447447ee676ea09ea2d27cd5d38e9cf1c Mon Sep 17 00:00:00 2001 From: Diego Ramirez Date: Mon, 12 Jul 2021 17:29:21 -0500 Subject: [PATCH 59/60] Fix the `pip/_internal/distributions` annotations (#10074) --- news/10074.trivial.rst | 1 + src/pip/_internal/distributions/__init__.py | 5 +++-- src/pip/_internal/distributions/base.py | 11 +++++------ src/pip/_internal/distributions/installed.py | 8 ++++---- src/pip/_internal/distributions/sdist.py | 16 ++++++++-------- src/pip/_internal/distributions/wheel.py | 8 ++++---- 6 files changed, 25 insertions(+), 24 deletions(-) create mode 100644 news/10074.trivial.rst diff --git a/news/10074.trivial.rst b/news/10074.trivial.rst new file mode 100644 index 00000000000..2f844d92171 --- /dev/null +++ b/news/10074.trivial.rst @@ -0,0 +1 @@ +Fixed all the annotations from ``pip/_internal/distributions``. diff --git a/src/pip/_internal/distributions/__init__.py b/src/pip/_internal/distributions/__init__.py index a222f248f34..9a89a838b9a 100644 --- a/src/pip/_internal/distributions/__init__.py +++ b/src/pip/_internal/distributions/__init__.py @@ -4,8 +4,9 @@ from pip._internal.req.req_install import InstallRequirement -def make_distribution_for_install_requirement(install_req): - # type: (InstallRequirement) -> AbstractDistribution +def make_distribution_for_install_requirement( + install_req: InstallRequirement, +) -> AbstractDistribution: """Returns a Distribution for the given InstallRequirement""" # Editable requirements will always be source distributions. They use the # legacy logic until we create a modern standard for them. diff --git a/src/pip/_internal/distributions/base.py b/src/pip/_internal/distributions/base.py index 78ee91e76f1..fbdd5e41154 100644 --- a/src/pip/_internal/distributions/base.py +++ b/src/pip/_internal/distributions/base.py @@ -23,17 +23,16 @@ class AbstractDistribution(metaclass=abc.ABCMeta): above metadata. 
""" - def __init__(self, req): - # type: (InstallRequirement) -> None + def __init__(self, req: InstallRequirement) -> None: super().__init__() self.req = req @abc.abstractmethod - def get_pkg_resources_distribution(self): - # type: () -> Optional[Distribution] + def get_pkg_resources_distribution(self) -> Optional[Distribution]: raise NotImplementedError() @abc.abstractmethod - def prepare_distribution_metadata(self, finder, build_isolation): - # type: (PackageFinder, bool) -> None + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: raise NotImplementedError() diff --git a/src/pip/_internal/distributions/installed.py b/src/pip/_internal/distributions/installed.py index b19dfacb4db..0d452e27f35 100644 --- a/src/pip/_internal/distributions/installed.py +++ b/src/pip/_internal/distributions/installed.py @@ -13,10 +13,10 @@ class InstalledDistribution(AbstractDistribution): been computed. """ - def get_pkg_resources_distribution(self): - # type: () -> Optional[Distribution] + def get_pkg_resources_distribution(self) -> Optional[Distribution]: return self.req.satisfied_by - def prepare_distribution_metadata(self, finder, build_isolation): - # type: (PackageFinder, bool) -> None + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: pass diff --git a/src/pip/_internal/distributions/sdist.py b/src/pip/_internal/distributions/sdist.py index c873a9f10e1..596b516a5b6 100644 --- a/src/pip/_internal/distributions/sdist.py +++ b/src/pip/_internal/distributions/sdist.py @@ -19,12 +19,12 @@ class SourceDistribution(AbstractDistribution): generated, either using PEP 517 or using the legacy `setup.py egg_info`. 
""" - def get_pkg_resources_distribution(self): - # type: () -> Distribution + def get_pkg_resources_distribution(self) -> Distribution: return self.req.get_dist() - def prepare_distribution_metadata(self, finder, build_isolation): - # type: (PackageFinder, bool) -> None + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: # Load pyproject.toml, to determine whether PEP 517 is to be used self.req.load_pyproject_toml() @@ -35,10 +35,10 @@ def prepare_distribution_metadata(self, finder, build_isolation): self.req.prepare_metadata() - def _setup_isolation(self, finder): - # type: (PackageFinder) -> None - def _raise_conflicts(conflicting_with, conflicting_reqs): - # type: (str, Set[Tuple[str, str]]) -> None + def _setup_isolation(self, finder: PackageFinder) -> None: + def _raise_conflicts( + conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]] + ) -> None: format_string = ( "Some build dependencies for {requirement} " "conflict with {conflicting_with}: {description}." diff --git a/src/pip/_internal/distributions/wheel.py b/src/pip/_internal/distributions/wheel.py index d0384797b46..00a70b02dc3 100644 --- a/src/pip/_internal/distributions/wheel.py +++ b/src/pip/_internal/distributions/wheel.py @@ -13,8 +13,7 @@ class WheelDistribution(AbstractDistribution): This does not need any preparation as wheels can be directly unpacked. """ - def get_pkg_resources_distribution(self): - # type: () -> Distribution + def get_pkg_resources_distribution(self) -> Distribution: """Loads the metadata from the wheel file into memory and returns a Distribution that uses it, not relying on the wheel file or requirement. 
@@ -29,6 +28,7 @@ def get_pkg_resources_distribution(self): z, self.req.name, self.req.local_file_path ) - def prepare_distribution_metadata(self, finder, build_isolation): - # type: (PackageFinder, bool) -> None + def prepare_distribution_metadata( + self, finder: PackageFinder, build_isolation: bool + ) -> None: pass From 1f1e1d621123bab014fb0321c60ab6867ed4bf1a Mon Sep 17 00:00:00 2001 From: Xavier Fernandez Date: Tue, 13 Jul 2021 15:23:23 +0200 Subject: [PATCH 60/60] Remove deprecated --find-links option from pip freeze --- news/9069.removal.rst | 1 + src/pip/_internal/commands/freeze.py | 18 -------- src/pip/_internal/operations/freeze.py | 5 -- tests/functional/test_freeze.py | 63 -------------------------- 4 files changed, 1 insertion(+), 86 deletions(-) create mode 100644 news/9069.removal.rst diff --git a/news/9069.removal.rst b/news/9069.removal.rst new file mode 100644 index 00000000000..2f76dcfaa1e --- /dev/null +++ b/news/9069.removal.rst @@ -0,0 +1 @@ +Remove deprecated ``--find-links`` option in ``pip freeze`` diff --git a/src/pip/_internal/commands/freeze.py b/src/pip/_internal/commands/freeze.py index 430d1018f04..a53db418bb8 100644 --- a/src/pip/_internal/commands/freeze.py +++ b/src/pip/_internal/commands/freeze.py @@ -7,7 +7,6 @@ from pip._internal.cli.status_codes import SUCCESS from pip._internal.operations.freeze import freeze from pip._internal.utils.compat import stdlib_pkgs -from pip._internal.utils.deprecation import deprecated DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'} @@ -34,14 +33,6 @@ def add_options(self): help="Use the order in the given requirements file and its " "comments when generating output. 
This option can be " "used multiple times.") - self.cmd_opts.add_option( - '-f', '--find-links', - dest='find_links', - action='append', - default=[], - metavar='URL', - help='URL for finding packages, which will be added to the ' - 'output.') self.cmd_opts.add_option( '-l', '--local', dest='local', @@ -82,17 +73,8 @@ def run(self, options, args): cmdoptions.check_list_path_option(options) - if options.find_links: - deprecated( - "--find-links option in pip freeze is deprecated.", - replacement=None, - gone_in="21.2", - issue=9069, - ) - for line in freeze( requirement=options.requirements, - find_links=options.find_links, local_only=options.local, user_only=options.user, paths=options.path, diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py index 3cda5c8c90e..ad2255435bc 100644 --- a/src/pip/_internal/operations/freeze.py +++ b/src/pip/_internal/operations/freeze.py @@ -35,7 +35,6 @@ def freeze( requirement=None, # type: Optional[List[str]] - find_links=None, # type: Optional[List[str]] local_only=False, # type: bool user_only=False, # type: bool paths=None, # type: Optional[List[str]] @@ -44,10 +43,6 @@ def freeze( skip=() # type: Container[str] ): # type: (...) -> Iterator[str] - find_links = find_links or [] - - for link in find_links: - yield f'-f {link}' installations = {} # type: Dict[str, FrozenRequirement] for dist in get_installed_distributions( diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index 0af29dd0cb2..1a2589df97e 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -270,19 +270,6 @@ def test_freeze_git_clone(script, tmpdir): ).strip() _check_output(result.stdout, expected) - result = script.pip( - 'freeze', '-f', f'{repo_dir}#egg=pip_test_package', - expect_stderr=True, - ) - expected = textwrap.dedent( - """ - -f {repo}#egg=pip_test_package... - -e git+...#egg=version_pkg - ... 
- """.format(repo=repo_dir), - ).strip() - _check_output(result.stdout, expected) - # Check that slashes in branch or tag names are translated. # See also issue #1083: https://github.com/pypa/pip/issues/1083 script.run( @@ -335,19 +322,6 @@ def test_freeze_git_clone_srcdir(script, tmpdir): ).strip() _check_output(result.stdout, expected) - result = script.pip( - 'freeze', '-f', f'{repo_dir}#egg=pip_test_package', - expect_stderr=True, - ) - expected = textwrap.dedent( - """ - -f {repo}#egg=pip_test_package... - -e git+...#egg=version_pkg&subdirectory=subdir - ... - """.format(repo=repo_dir), - ).strip() - _check_output(result.stdout, expected) - @need_mercurial def test_freeze_mercurial_clone_srcdir(script, tmpdir): @@ -376,19 +350,6 @@ def test_freeze_mercurial_clone_srcdir(script, tmpdir): ).strip() _check_output(result.stdout, expected) - result = script.pip( - 'freeze', '-f', f'{repo_dir}#egg=pip_test_package', - expect_stderr=True, - ) - expected = textwrap.dedent( - """ - -f {repo}#egg=pip_test_package... - -e hg+...#egg=version_pkg&subdirectory=subdir - ... - """.format(repo=repo_dir), - ).strip() - _check_output(result.stdout, expected) - @pytest.mark.git def test_freeze_git_remote(script, tmpdir): @@ -482,19 +443,6 @@ def test_freeze_mercurial_clone(script, tmpdir): ).strip() _check_output(result.stdout, expected) - result = script.pip( - 'freeze', '-f', f'{repo_dir}#egg=pip_test_package', - expect_stderr=True, - ) - expected = textwrap.dedent( - """ - -f {repo}#egg=pip_test_package... - ...-e hg+...#egg=version_pkg - ... 
- """.format(repo=repo_dir), - ).strip() - _check_output(result.stdout, expected) - @need_bzr def test_freeze_bazaar_clone(script, tmpdir): @@ -521,17 +469,6 @@ def test_freeze_bazaar_clone(script, tmpdir): ...""") _check_output(result.stdout, expected) - result = script.pip( - 'freeze', '-f', - f'{checkout_path}/#egg=django-wikiapp', - expect_stderr=True, - ) - expected = textwrap.dedent("""\ - -f {repo}/#egg=django-wikiapp - ...-e bzr+file://...@...#egg=version_pkg - ...""".format(repo=checkout_path)) - _check_output(result.stdout, expected) - @need_mercurial @pytest.mark.git