diff --git a/docs/source/plugins.rst b/docs/source/plugins.rst
index da68cf226..61a4e327b 100644
--- a/docs/source/plugins.rst
+++ b/docs/source/plugins.rst
@@ -76,10 +76,6 @@ Rez plugins require a specific folder structure as follows:
/plugin_file2.py (your plugin file)
etc.
-To make your plugin available to rez, you can install them directly under
-``src/rezplugins`` (that's called a namespace package) or you can add
-the path to :envvar:`REZ_PLUGIN_PATH`.
-
Registering subcommands
-----------------------
@@ -137,4 +133,58 @@ so that the plugin manager will find them.
from rez.plugin_managers import extend_path
__path__ = extend_path(__path__, __name__)
+
+Installing plugins
+------------------
+
+1. Copy directly to the rez install folder
+
+To make your plugin available to rez, you can copy it directly under
+``src/rezplugins`` (a namespace package) in the rez installation.
+
+2. Add the source path to :envvar:`REZ_PLUGIN_PATH`
+
+Alternatively, add the directory containing your plugin to the
+:envvar:`REZ_PLUGIN_PATH` environment variable.
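+
+The same search path can also be set through the ``plugin_path`` setting in a
+rezconfig file. A minimal sketch, assuming the hypothetical location
+``/path/to/plugins``:
+
+.. code-block:: python
+   :caption: rezconfig.py
+
+   # Equivalent to setting REZ_PLUGIN_PATH in the environment.
+   plugin_path = ["/path/to/plugins"]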
+
+3. Add entry points to ``pyproject.toml``
+
+You can also declare a ``rez.plugins`` entry point in your ``pyproject.toml``
+file, which lets you install your plugin with the ``pip install`` command.
+
+.. code-block:: toml
+ :caption: pyproject.toml
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+ [project]
+ name = "foo"
+ version = "0.1.0"
+
+ [project.entry-points."rez.plugins"]
+ foo_cmd = "foo"
+
+
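+Rez loads such plugins via ``importlib.metadata`` (or a vendored backport on
+older Pythons). A minimal sketch of the discovery this enables, reusing the
+``foo`` entry point above:
+
+.. code-block:: python
+
+   from importlib.metadata import entry_points
+
+   # Python 3.10+ signature; 3.8/3.9 return a dict keyed by group instead.
+   for ep in entry_points(group="rez.plugins"):
+       plugin_module = ep.load()  # imports the "foo" module
+       print(ep.name, plugin_module.__name__)
+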
+4. Create a ``setup.py``
+
+Equivalently, you can declare the same entry point in a ``setup.py`` file and
+install your plugin with the ``pip install`` command.
+
+.. code-block:: python
+ :caption: setup.py
+
+ from setuptools import setup, find_packages
+
+ setup(
+ name="foo",
+ version="0.1.0",
+ package_dir={
+ "foo": "foo"
+ },
+ packages=find_packages(where="."),
+ entry_points={
+ 'rez.plugins': [
+ 'foo_cmd = foo',
+ ]
+ }
+ )
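+
+With options 3 and 4, running ``pip install .`` from your plugin's source root
+installs the entry point, and rez discovers the plugin the next time plugins
+are loaded.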
diff --git a/src/rez/data/tests/extensions/baz/baz-0.1.0.dist-info/METADATA b/src/rez/data/tests/extensions/baz/baz-0.1.0.dist-info/METADATA
new file mode 100644
index 000000000..0805e85ea
--- /dev/null
+++ b/src/rez/data/tests/extensions/baz/baz-0.1.0.dist-info/METADATA
@@ -0,0 +1,3 @@
+Metadata-Version: 2.4
+Name: baz
+Version: 0.1.0
diff --git a/src/rez/data/tests/extensions/baz/baz-0.1.0.dist-info/entry_points.txt b/src/rez/data/tests/extensions/baz/baz-0.1.0.dist-info/entry_points.txt
new file mode 100644
index 000000000..82d72de09
--- /dev/null
+++ b/src/rez/data/tests/extensions/baz/baz-0.1.0.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[rez.plugins]
+baz_cmd = baz
diff --git a/src/rez/data/tests/extensions/baz/baz/__init__.py b/src/rez/data/tests/extensions/baz/baz/__init__.py
new file mode 100644
index 000000000..e33708c1d
--- /dev/null
+++ b/src/rez/data/tests/extensions/baz/baz/__init__.py
@@ -0,0 +1,38 @@
+"""
+baz plugin
+"""
+
+from rez.command import Command
+
+# This attribute is optional; default behavior is applied if it is not present.
+command_behavior = {
+ "hidden": False, # (bool): default False
+ "arg_mode": None, # (str): "passthrough", "grouped", default None
+}
+
+
+def setup_parser(parser, completions=False):
+ parser.add_argument(
+        "-m", "--message", action="store_true", help="Print the configured baz message."
+ )
+
+
+def command(opts, parser=None, extra_arg_groups=None):
+ from baz import core
+
+ if opts.message:
+ msg = core.get_message_from_baz()
+ print(msg)
+ return
+
+    print("Please use the '-h' flag to see what this command can do.")
+
+
+class BazCommand(Command):
+ @classmethod
+ def name(cls):
+ return "baz"
+
+
+def register_plugin():
+ return BazCommand
diff --git a/src/rez/data/tests/extensions/baz/baz/core.py b/src/rez/data/tests/extensions/baz/baz/core.py
new file mode 100644
index 000000000..580795278
--- /dev/null
+++ b/src/rez/data/tests/extensions/baz/baz/core.py
@@ -0,0 +1,4 @@
+def get_message_from_baz():
+ from rez.config import config
+ message = config.plugins.command.baz.message
+ return message
diff --git a/src/rez/data/tests/extensions/baz/baz/rezconfig.py b/src/rez/data/tests/extensions/baz/baz/rezconfig.py
new file mode 100644
index 000000000..d36f5806b
--- /dev/null
+++ b/src/rez/data/tests/extensions/baz/baz/rezconfig.py
@@ -0,0 +1,3 @@
+baz = {
+ "message": "welcome to this world."
+}
diff --git a/src/rez/data/tests/extensions/baz/pyproject.toml b/src/rez/data/tests/extensions/baz/pyproject.toml
new file mode 100644
index 000000000..26745a089
--- /dev/null
+++ b/src/rez/data/tests/extensions/baz/pyproject.toml
@@ -0,0 +1,10 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "baz"
+version = "0.1.0"
+
+[project.entry-points."rez.plugins"]
+baz_cmd = "baz"
diff --git a/src/rez/plugin_managers.py b/src/rez/plugin_managers.py
index 64b1d8f09..1192a78fd 100644
--- a/src/rez/plugin_managers.py
+++ b/src/rez/plugin_managers.py
@@ -16,6 +16,11 @@
import os.path
import sys
+if sys.version_info[:2] >= (3, 8):
+ from importlib.metadata import entry_points
+else:
+ from rez.vendor.importlib_metadata import entry_points
+
# modified from pkgutil standard library:
# this function is called from the __init__.py files of each plugin type inside
@@ -109,6 +114,10 @@ def register_plugin(self, plugin_name, plugin_class, plugin_module):
self.plugin_modules[plugin_name] = plugin_module
def load_plugins(self):
+ self.load_plugins_from_namespace()
+ self.load_plugins_from_entry_points()
+
+ def load_plugins_from_namespace(self):
import pkgutil
from importlib import import_module
type_module_name = 'rezplugins.' + self.type_name
@@ -153,57 +162,82 @@ def load_plugins(self):
if config.debug("plugins"):
print_debug("loading %s plugin at %s: %s..."
% (self.type_name, path, modname))
+
try:
# https://github.com/AcademySoftwareFoundation/rez/pull/218
# load_module will force reload the module if it's
# already loaded, so check for that
plugin_module = sys.modules.get(modname)
if plugin_module is None:
loader = importer.find_module(modname)
plugin_module = loader.load_module(modname)
- elif os.path.dirname(plugin_module.__file__) != path:
- if config.debug("plugins"):
- # this should not happen but if it does, tell why.
- print_warning(
- "plugin module %s is not loaded from current "
- "load path but reused from previous imported "
- "path: %s" % (modname, plugin_module.__file__))
-
- if (hasattr(plugin_module, "register_plugin")
- and callable(plugin_module.register_plugin)):
-
- plugin_class = plugin_module.register_plugin()
- if plugin_class is not None:
- self.register_plugin(plugin_name,
- plugin_class,
- plugin_module)
- else:
- if config.debug("plugins"):
- print_warning(
- "'register_plugin' function at %s: %s did "
- "not return a class." % (path, modname))
- else:
- if config.debug("plugins"):
- print_warning(
- "no 'register_plugin' function at %s: %s"
- % (path, modname))
-
- # delete from sys.modules?
-
+ self.register_plugin_module(plugin_name, plugin_module, path)
+ self.load_config_from_plugin(plugin_module)
except Exception as e:
- nameish = modname.split('.')[-1]
- self.failed_plugins[nameish] = str(e)
- if config.debug("plugins"):
- import traceback
- from io import StringIO
- out = StringIO()
- traceback.print_exc(file=out)
- print_debug(out.getvalue())
-
- # load config
- data, _ = _load_config_from_filepaths([os.path.join(path, "rezconfig")])
- deep_update(self.config_data, data)
+ self.print_log_plugins_error(modname, e)
+
+ def load_plugins_from_entry_points(self):
+        if (3, 8) <= sys.version_info[:2] <= (3, 9):
+            # entry_points() returns a dict-like object on 3.8/3.9
+            discovered_plugins = entry_points().get("rez.plugins", [])
+        else:
+            # 3.10+ and the vendored backport support selection parameters
+            discovered_plugins = entry_points(group="rez.plugins")
+
+        for entry_point in discovered_plugins:
+            try:
+                plugin_module = entry_point.load()
+                plugin_name = plugin_module.__name__
+                plugin_path = os.path.dirname(plugin_module.__file__)
+                self.register_plugin_module(plugin_name, plugin_module, plugin_path)
+                self.load_config_from_plugin(plugin_module)
+            except Exception as e:
+                # report under the entry point name: if load() itself failed,
+                # there is no module object to take a name from
+                self.print_log_plugins_error(entry_point.name, e)
+
+ def print_log_plugins_error(self, module_name, error):
+ nameish = module_name.split('.')[-1]
+ self.failed_plugins[nameish] = str(error)
+
+ if not config.debug("plugins"):
+ return
+
+ import traceback
+ from io import StringIO
+ out = StringIO()
+ traceback.print_exc(file=out)
+ print_debug(out.getvalue())
+
+ def load_config_from_plugin(self, plugin):
+ plugin_path = os.path.dirname(plugin.__file__)
+ data, _ = _load_config_from_filepaths([os.path.join(plugin_path, "rezconfig")])
+ deep_update(self.config_data, data)
+
+ def register_plugin_module(self, plugin_name, plugin_module, plugin_path):
+ module_name = plugin_module.__name__
+ if os.path.dirname(plugin_module.__file__) != plugin_path:
+ if config.debug("plugins"):
+ # this should not happen but if it does, tell why.
+ print_warning(
+ "plugin module %s is not loaded from current "
+ "load path but reused from previous imported "
+ "path: %s" % (module_name, plugin_module.__file__))
+
+ if (hasattr(plugin_module, "register_plugin")
+ and callable(plugin_module.register_plugin)):
+
+ plugin_class = plugin_module.register_plugin()
+ if plugin_class is not None:
+ self.register_plugin(
+ plugin_name,
+ plugin_class,
+ plugin_module
+ )
+ else:
+ if config.debug("plugins"):
+ print_warning(
+ "'register_plugin' function at %s: %s did "
+ "not return a class." % (plugin_path, module_name))
+ else:
+ if config.debug("plugins"):
+ print_warning(
+ "no 'register_plugin' function at %s: %s"
+ % (plugin_path, module_name))
def get_plugin_class(self, plugin_name):
"""Returns the class registered under the given plugin name."""
diff --git a/src/rez/tests/test_plugin_manager.py b/src/rez/tests/test_plugin_manager.py
index 670441fba..c07089dde 100644
--- a/src/rez/tests/test_plugin_manager.py
+++ b/src/rez/tests/test_plugin_manager.py
@@ -49,7 +49,7 @@ def setUp(self):
TestBase.setUp(self)
self._reset_plugin_manager()
- def test_old_loading_style(self):
+ def test_load_plugin_from_plugin_path(self):
"""Test loading rez plugin from plugin_path"""
self.update_settings(dict(
plugin_path=[self.data_path("extensions", "foo")]
@@ -59,7 +59,7 @@ def test_old_loading_style(self):
"package_repository", "cloud")
self.assertEqual(cloud_cls.name(), "cloud")
- def test_new_loading_style(self):
+ def test_load_plugin_from_python_module(self):
"""Test loading rez plugin from python modules"""
with restore_sys_path():
sys.path.append(self.data_path("extensions"))
@@ -68,6 +68,13 @@ def test_new_loading_style(self):
"package_repository", "cloud")
self.assertEqual(cloud_cls.name(), "cloud")
+ def test_load_plugin_from_entry_points(self):
+ """Test loading rez plugin from setuptools entry points"""
+ with restore_sys_path():
+ sys.path.append(self.data_path("extensions", "baz"))
+ baz_cls = plugin_manager.get_plugin_class("command", "baz")
+ self.assertEqual(baz_cls.name(), "baz")
+
def test_plugin_override_1(self):
"""Test plugin from plugin_path can override the default"""
self.update_settings(dict(
diff --git a/src/rez/vendor/README.md b/src/rez/vendor/README.md
index b5918539f..dd668a580 100644
--- a/src/rez/vendor/README.md
+++ b/src/rez/vendor/README.md
@@ -105,6 +105,18 @@ By looking at the code, it's probably enum34. If so, the latest version is
1.1.6 (May 15, 2016)
+
+
+importlib-metadata
+ |
+6.7.0
+ |
+Apache 2.0
+ |
+https://pypi.org/project/importlib-metadata/
+Pinned to 6.7.0 to support Python 3.7. This dependency can be dropped once we drop support for Python 3.7.
+ |
+
lockfile
@@ -248,6 +260,18 @@ https://github.com/yaml/pyyaml
No changes but must maintain separate version between py2 and py3 for time being.
|
+
+
+typing_extensions
+ |
+4.7.1
+ |
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+ |
+https://pypi.org/project/typing-extensions/
+Dependency for importlib-metadata. Can be dropped once we drop support for Python 3.7.
+ |
+
yaml lib3 (PyYAML)
@@ -260,4 +284,16 @@ https://github.com/yaml/pyyaml
No changes but must maintain separate version between py2 and py3 for time being.
|
+
+
+zipp
+ |
+3.15.0
+ |
+MIT
+ |
+https://pypi.org/project/zipp/
+Dependency for importlib-metadata. Can be dropped once we drop support for Python 3.7.
+ |
+
diff --git a/src/rez/vendor/importlib_metadata/LICENSE b/src/rez/vendor/importlib_metadata/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/src/rez/vendor/importlib_metadata/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/src/rez/vendor/importlib_metadata/__init__.py b/src/rez/vendor/importlib_metadata/__init__.py
new file mode 100644
index 000000000..97d8833f3
--- /dev/null
+++ b/src/rez/vendor/importlib_metadata/__init__.py
@@ -0,0 +1,1015 @@
+import os
+import re
+import abc
+import csv
+import sys
+from rez.vendor import zipp
+import email
+import inspect
+import pathlib
+import operator
+import textwrap
+import warnings
+import functools
+import itertools
+import posixpath
+import collections
+
+from . import _adapters, _meta, _py39compat
+from ._collections import FreezableDefaultDict, Pair
+from ._compat import (
+ NullFinder,
+ StrPath,
+ install,
+ pypy_partial,
+)
+from ._functools import method_cache, pass_none
+from ._itertools import always_iterable, unique_everseen
+from ._meta import PackageMetadata, SimplePath
+
+from contextlib import suppress
+from importlib import import_module
+from importlib.abc import MetaPathFinder
+from itertools import starmap
+from typing import Iterable, List, Mapping, Optional, Set, cast
+
+__all__ = [
+ 'Distribution',
+ 'DistributionFinder',
+ 'PackageMetadata',
+ 'PackageNotFoundError',
+ 'distribution',
+ 'distributions',
+ 'entry_points',
+ 'files',
+ 'metadata',
+ 'packages_distributions',
+ 'requires',
+ 'version',
+]
+
+
+class PackageNotFoundError(ModuleNotFoundError):
+ """The package was not found."""
+
+ def __str__(self) -> str:
+ return f"No package metadata was found for {self.name}"
+
+ @property
+ def name(self) -> str: # type: ignore[override]
+ (name,) = self.args
+ return name
+
+
+class Sectioned:
+ """
+ A simple entry point config parser for performance
+
+ >>> for item in Sectioned.read(Sectioned._sample):
+ ... print(item)
+ Pair(name='sec1', value='# comments ignored')
+ Pair(name='sec1', value='a = 1')
+ Pair(name='sec1', value='b = 2')
+ Pair(name='sec2', value='a = 2')
+
+ >>> res = Sectioned.section_pairs(Sectioned._sample)
+ >>> item = next(res)
+ >>> item.name
+ 'sec1'
+ >>> item.value
+ Pair(name='a', value='1')
+ >>> item = next(res)
+ >>> item.value
+ Pair(name='b', value='2')
+ >>> item = next(res)
+ >>> item.name
+ 'sec2'
+ >>> item.value
+ Pair(name='a', value='2')
+ >>> list(res)
+ []
+ """
+
+ _sample = textwrap.dedent(
+ """
+ [sec1]
+ # comments ignored
+ a = 1
+ b = 2
+
+ [sec2]
+ a = 2
+ """
+ ).lstrip()
+
+ @classmethod
+ def section_pairs(cls, text):
+ return (
+ section._replace(value=Pair.parse(section.value))
+ for section in cls.read(text, filter_=cls.valid)
+ if section.name is not None
+ )
+
+ @staticmethod
+ def read(text, filter_=None):
+ lines = filter(filter_, map(str.strip, text.splitlines()))
+ name = None
+ for value in lines:
+ section_match = value.startswith('[') and value.endswith(']')
+ if section_match:
+ name = value.strip('[]')
+ continue
+ yield Pair(name, value)
+
+ @staticmethod
+ def valid(line: str):
+ return line and not line.startswith('#')
+
+
+class DeprecatedTuple:
+ """
+ Provide subscript item access for backward compatibility.
+
+ >>> recwarn = getfixture('recwarn')
+ >>> ep = EntryPoint(name='name', value='value', group='group')
+ >>> ep[:]
+ ('name', 'value', 'group')
+ >>> ep[0]
+ 'name'
+ >>> len(recwarn)
+ 1
+ """
+
+ # Do not remove prior to 2023-05-01 or Python 3.13
+ _warn = functools.partial(
+ warnings.warn,
+ "EntryPoint tuple interface is deprecated. Access members by name.",
+ DeprecationWarning,
+ stacklevel=pypy_partial(2),
+ )
+
+ def __getitem__(self, item):
+ self._warn()
+ return self._key()[item]
+
+
+class EntryPoint(DeprecatedTuple):
+ """An entry point as defined by Python packaging conventions.
+
+ See `the packaging docs on entry points
+    <https://packaging.python.org/en/latest/specifications/entry-points/>`_
+ for more information.
+
+ >>> ep = EntryPoint(
+ ... name=None, group=None, value='package.module:attr [extra1, extra2]')
+ >>> ep.module
+ 'package.module'
+ >>> ep.attr
+ 'attr'
+ >>> ep.extras
+ ['extra1', 'extra2']
+ """
+
+ pattern = re.compile(
+        r'(?P<module>[\w.]+)\s*'
+        r'(:\s*(?P<attr>[\w.]+)\s*)?'
+        r'((?P<extras>\[.*\])\s*)?$'
+ )
+ """
+ A regular expression describing the syntax for an entry point,
+ which might look like:
+
+ - module
+ - package.module
+ - package.module:attribute
+ - package.module:object.attribute
+ - package.module:attr [extra1, extra2]
+
+ Other combinations are possible as well.
+
+ The expression is lenient about whitespace around the ':',
+ following the attr, and following any extras.
+ """
+
+ name: str
+ value: str
+ group: str
+
+ dist: Optional['Distribution'] = None
+
+ def __init__(self, name: str, value: str, group: str) -> None:
+ vars(self).update(name=name, value=value, group=group)
+
+ def load(self):
+ """Load the entry point from its definition. If only a module
+ is indicated by the value, return that module. Otherwise,
+ return the named object.
+ """
+ match = self.pattern.match(self.value)
+ module = import_module(match.group('module'))
+ attrs = filter(None, (match.group('attr') or '').split('.'))
+ return functools.reduce(getattr, attrs, module)
+
+ @property
+ def module(self) -> str:
+ match = self.pattern.match(self.value)
+ assert match is not None
+ return match.group('module')
+
+ @property
+ def attr(self) -> str:
+ match = self.pattern.match(self.value)
+ assert match is not None
+ return match.group('attr')
+
+ @property
+ def extras(self) -> List[str]:
+ match = self.pattern.match(self.value)
+ assert match is not None
+ return re.findall(r'\w+', match.group('extras') or '')
+
+ def _for(self, dist):
+ vars(self).update(dist=dist)
+ return self
+
+ def matches(self, **params):
+ """
+ EntryPoint matches the given parameters.
+
+ >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]')
+ >>> ep.matches(group='foo')
+ True
+ >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]')
+ True
+ >>> ep.matches(group='foo', name='other')
+ False
+ >>> ep.matches()
+ True
+ >>> ep.matches(extras=['extra1', 'extra2'])
+ True
+ >>> ep.matches(module='bing')
+ True
+ >>> ep.matches(attr='bong')
+ True
+ """
+ attrs = (getattr(self, param) for param in params)
+ return all(map(operator.eq, params.values(), attrs))
+
+ def _key(self):
+ return self.name, self.value, self.group
+
+ def __lt__(self, other):
+ return self._key() < other._key()
+
+ def __eq__(self, other):
+ return self._key() == other._key()
+
+ def __setattr__(self, name, value):
+ raise AttributeError("EntryPoint objects are immutable.")
+
+ def __repr__(self):
+ return (
+ f'EntryPoint(name={self.name!r}, value={self.value!r}, '
+ f'group={self.group!r})'
+ )
+
+ def __hash__(self) -> int:
+ return hash(self._key())
+
+
+class EntryPoints(tuple):
+ """
+ An immutable collection of selectable EntryPoint objects.
+ """
+
+ __slots__ = ()
+
+ def __getitem__(self, name: str) -> EntryPoint: # type: ignore[override]
+ """
+ Get the EntryPoint in self matching name.
+ """
+ try:
+ return next(iter(self.select(name=name)))
+ except StopIteration:
+ raise KeyError(name)
+
+ def select(self, **params):
+ """
+ Select entry points from self that match the
+ given parameters (typically group and/or name).
+ """
+ return EntryPoints(ep for ep in self if _py39compat.ep_matches(ep, **params))
+
+ @property
+ def names(self) -> Set[str]:
+ """
+ Return the set of all names of all entry points.
+ """
+ return {ep.name for ep in self}
+
+ @property
+ def groups(self) -> Set[str]:
+ """
+ Return the set of all groups of all entry points.
+ """
+ return {ep.group for ep in self}
+
+ @classmethod
+ def _from_text_for(cls, text, dist):
+ return cls(ep._for(dist) for ep in cls._from_text(text))
+
+ @staticmethod
+ def _from_text(text):
+ return (
+ EntryPoint(name=item.value.name, value=item.value.value, group=item.name)
+ for item in Sectioned.section_pairs(text or '')
+ )
+
+
+class PackagePath(pathlib.PurePosixPath):
+ """A reference to a path in a package"""
+
+ hash: Optional["FileHash"]
+ size: int
+ dist: "Distribution"
+
+ def read_text(self, encoding: str = 'utf-8') -> str: # type: ignore[override]
+ with self.locate().open(encoding=encoding) as stream:
+ return stream.read()
+
+ def read_binary(self) -> bytes:
+ with self.locate().open('rb') as stream:
+ return stream.read()
+
+ def locate(self) -> pathlib.Path:
+ """Return a path-like object for this path"""
+ return self.dist.locate_file(self)
+
+
+class FileHash:
+ def __init__(self, spec: str) -> None:
+ self.mode, _, self.value = spec.partition('=')
+
+ def __repr__(self) -> str:
+        return f'<FileHash mode: {self.mode} value: {self.value}>'
+
+
+class DeprecatedNonAbstract:
+ def __new__(cls, *args, **kwargs):
+ all_names = {
+ name for subclass in inspect.getmro(cls) for name in vars(subclass)
+ }
+ abstract = {
+ name
+ for name in all_names
+ if getattr(getattr(cls, name), '__isabstractmethod__', False)
+ }
+ if abstract:
+ warnings.warn(
+ f"Unimplemented abstract methods {abstract}",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return super().__new__(cls)
+
+
+class Distribution(DeprecatedNonAbstract):
+ """A Python distribution package."""
+
+ @abc.abstractmethod
+ def read_text(self, filename) -> Optional[str]:
+ """Attempt to load metadata file given by the name.
+
+ :param filename: The name of the file in the distribution info.
+ :return: The text if found, otherwise None.
+ """
+
+ @abc.abstractmethod
+ def locate_file(self, path: StrPath) -> pathlib.Path:
+ """
+ Given a path to a file in this distribution, return a path
+ to it.
+ """
+
+ @classmethod
+ def from_name(cls, name: str) -> "Distribution":
+ """Return the Distribution for the given package name.
+
+ :param name: The name of the distribution package to search for.
+ :return: The Distribution instance (or subclass thereof) for the named
+ package, if found.
+ :raises PackageNotFoundError: When the named package's distribution
+ metadata cannot be found.
+ :raises ValueError: When an invalid value is supplied for name.
+ """
+ if not name:
+ raise ValueError("A distribution name is required.")
+ try:
+ return next(iter(cls.discover(name=name)))
+ except StopIteration:
+ raise PackageNotFoundError(name)
+
+ @classmethod
+ def discover(cls, **kwargs) -> Iterable["Distribution"]:
+ """Return an iterable of Distribution objects for all packages.
+
+ Pass a ``context`` or pass keyword arguments for constructing
+ a context.
+
+ :context: A ``DistributionFinder.Context`` object.
+ :return: Iterable of Distribution objects for all packages.
+ """
+ context = kwargs.pop('context', None)
+ if context and kwargs:
+ raise ValueError("cannot accept context and kwargs")
+ context = context or DistributionFinder.Context(**kwargs)
+ return itertools.chain.from_iterable(
+ resolver(context) for resolver in cls._discover_resolvers()
+ )
+
+ @staticmethod
+ def at(path: StrPath) -> "Distribution":
+ """Return a Distribution for the indicated metadata path
+
+ :param path: a string or path-like object
+ :return: a concrete Distribution instance for the path
+ """
+ return PathDistribution(pathlib.Path(path))
+
+ @staticmethod
+ def _discover_resolvers():
+ """Search the meta_path for resolvers."""
+ declared = (
+ getattr(finder, 'find_distributions', None) for finder in sys.meta_path
+ )
+ return filter(None, declared)
+
+ @property
+ def metadata(self) -> _meta.PackageMetadata:
+ """Return the parsed metadata for this Distribution.
+
+ The returned object will have keys that name the various bits of
+ metadata. See PEP 566 for details.
+ """
+ opt_text = (
+ self.read_text('METADATA')
+ or self.read_text('PKG-INFO')
+ # This last clause is here to support old egg-info files. Its
+ # effect is to just end up using the PathDistribution's self._path
+ # (which points to the egg-info file) attribute unchanged.
+ or self.read_text('')
+ )
+ text = cast(str, opt_text)
+ return _adapters.Message(email.message_from_string(text))
+
+ @property
+ def name(self) -> str:
+ """Return the 'Name' metadata for the distribution package."""
+ return self.metadata['Name']
+
+ @property
+ def _normalized_name(self):
+ """Return a normalized version of the name."""
+ return Prepared.normalize(self.name)
+
+ @property
+ def version(self) -> str:
+ """Return the 'Version' metadata for the distribution package."""
+ return self.metadata['Version']
+
+ @property
+ def entry_points(self) -> EntryPoints:
+ return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self)
+
+ @property
+ def files(self) -> Optional[List[PackagePath]]:
+ """Files in this distribution.
+
+ :return: List of PackagePath for this distribution or None
+
+ Result is `None` if the metadata file that enumerates files
+ (i.e. RECORD for dist-info, or installed-files.txt or
+ SOURCES.txt for egg-info) is missing.
+ Result may be empty if the metadata exists but is empty.
+ """
+
+ def make_file(name, hash=None, size_str=None):
+ result = PackagePath(name)
+ result.hash = FileHash(hash) if hash else None
+ result.size = int(size_str) if size_str else None
+ result.dist = self
+ return result
+
+ @pass_none
+ def make_files(lines):
+ return starmap(make_file, csv.reader(lines))
+
+ @pass_none
+ def skip_missing_files(package_paths):
+ return list(filter(lambda path: path.locate().exists(), package_paths))
+
+ return skip_missing_files(
+ make_files(
+ self._read_files_distinfo()
+ or self._read_files_egginfo_installed()
+ or self._read_files_egginfo_sources()
+ )
+ )
+
+ def _read_files_distinfo(self):
+ """
+ Read the lines of RECORD
+ """
+ text = self.read_text('RECORD')
+ return text and text.splitlines()
+
+ def _read_files_egginfo_installed(self):
+ """
+ Read installed-files.txt and return lines in a similar
+ CSV-parsable format as RECORD: each file must be placed
+ relative to the site-packages directory and must also be
+ quoted (since file names can contain literal commas).
+
+ This file is written when the package is installed by pip,
+ but it might not be written for other installation methods.
+ Assume the file is accurate if it exists.
+ """
+ text = self.read_text('installed-files.txt')
+ # Prepend the .egg-info/ subdir to the lines in this file.
+ # But this subdir is only available from PathDistribution's
+ # self._path.
+ subdir = getattr(self, '_path', None)
+ if not text or not subdir:
+ return
+
+ paths = (
+ (subdir / name)
+ .resolve()
+ .relative_to(self.locate_file('').resolve())
+ .as_posix()
+ for name in text.splitlines()
+ )
+ return map('"{}"'.format, paths)
+
+ def _read_files_egginfo_sources(self):
+ """
+ Read SOURCES.txt and return lines in a similar CSV-parsable
+ format as RECORD: each file name must be quoted (since it
+ might contain literal commas).
+
+ Note that SOURCES.txt is not a reliable source for what
+ files are installed by a package. This file is generated
+ for a source archive, and the files that are present
+ there (e.g. setup.py) may not correctly reflect the files
+ that are present after the package has been installed.
+ """
+ text = self.read_text('SOURCES.txt')
+ return text and map('"{}"'.format, text.splitlines())
+
+ @property
+ def requires(self) -> Optional[List[str]]:
+ """Generated requirements specified for this Distribution"""
+ reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
+ return reqs and list(reqs)
+
+ def _read_dist_info_reqs(self):
+ return self.metadata.get_all('Requires-Dist')
+
+ def _read_egg_info_reqs(self):
+ source = self.read_text('requires.txt')
+ return pass_none(self._deps_from_requires_text)(source)
+
+ @classmethod
+ def _deps_from_requires_text(cls, source):
+ return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source))
+
+ @staticmethod
+ def _convert_egg_info_reqs_to_simple_reqs(sections):
+ """
+ Historically, setuptools would solicit and store 'extra'
+ requirements, including those with environment markers,
+ in separate sections. More modern tools expect each
+ dependency to be defined separately, with any relevant
+ extras and environment markers attached directly to that
+ requirement. This method converts the former to the
+ latter. See _test_deps_from_requires_text for an example.
+ """
+
+ def make_condition(name):
+ return name and f'extra == "{name}"'
+
+ def quoted_marker(section):
+ section = section or ''
+ extra, sep, markers = section.partition(':')
+ if extra and markers:
+ markers = f'({markers})'
+ conditions = list(filter(None, [markers, make_condition(extra)]))
+ return '; ' + ' and '.join(conditions) if conditions else ''
+
+ def url_req_space(req):
+ """
+ PEP 508 requires a space between the url_spec and the quoted_marker.
+ Ref python/importlib_metadata#357.
+ """
+ # '@' is uniquely indicative of a url_req.
+ return ' ' * ('@' in req)
+
+ for section in sections:
+ space = url_req_space(section.value)
+ yield section.value + space + quoted_marker(section.name)
+
+
+class DistributionFinder(MetaPathFinder):
+ """
+ A MetaPathFinder capable of discovering installed distributions.
+ """
+
+ class Context:
+ """
+ Keyword arguments presented by the caller to
+ ``distributions()`` or ``Distribution.discover()``
+ to narrow the scope of a search for distributions
+ in all DistributionFinders.
+
+ Each DistributionFinder may expect any parameters
+ and should attempt to honor the canonical
+ parameters defined below when appropriate.
+ """
+
+ name = None
+ """
+ Specific name for which a distribution finder should match.
+ A name of ``None`` matches all distributions.
+ """
+
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ @property
+ def path(self) -> List[str]:
+ """
+ The sequence of directory path that a distribution finder
+ should search.
+
+ Typically refers to Python installed package paths such as
+ "site-packages" directories and defaults to ``sys.path``.
+ """
+ return vars(self).get('path', sys.path)
+
+ @abc.abstractmethod
+ def find_distributions(self, context=Context()) -> Iterable[Distribution]:
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching the ``context``,
+ a DistributionFinder.Context instance.
+ """
+
+
+class FastPath:
+ """
+ Micro-optimized class for searching a path for
+ children.
+
+ >>> FastPath('').children()
+ ['...']
+ """
+
+ @functools.lru_cache() # type: ignore
+ def __new__(cls, root):
+ return super().__new__(cls)
+
+ def __init__(self, root):
+ self.root = root
+
+ def joinpath(self, child):
+ return pathlib.Path(self.root, child)
+
+ def children(self):
+ with suppress(Exception):
+ return os.listdir(self.root or '.')
+ with suppress(Exception):
+ return self.zip_children()
+ return []
+
+ def zip_children(self):
+ zip_path = zipp.Path(self.root)
+ names = zip_path.root.namelist()
+ self.joinpath = zip_path.joinpath
+
+ return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names)
+
+ def search(self, name):
+ return self.lookup(self.mtime).search(name)
+
+ @property
+ def mtime(self):
+ with suppress(OSError):
+ return os.stat(self.root).st_mtime
+ self.lookup.cache_clear()
+
+ @method_cache
+ def lookup(self, mtime):
+ return Lookup(self)
+
+
+class Lookup:
+ def __init__(self, path: FastPath):
+ base = os.path.basename(path.root).lower()
+ base_is_egg = base.endswith(".egg")
+ self.infos = FreezableDefaultDict(list)
+ self.eggs = FreezableDefaultDict(list)
+
+ for child in path.children():
+ low = child.lower()
+ if low.endswith((".dist-info", ".egg-info")):
+ # rpartition is faster than splitext and suitable for this purpose.
+ name = low.rpartition(".")[0].partition("-")[0]
+ normalized = Prepared.normalize(name)
+ self.infos[normalized].append(path.joinpath(child))
+ elif base_is_egg and low == "egg-info":
+ name = base.rpartition(".")[0].partition("-")[0]
+ legacy_normalized = Prepared.legacy_normalize(name)
+ self.eggs[legacy_normalized].append(path.joinpath(child))
+
+ self.infos.freeze()
+ self.eggs.freeze()
+
+ def search(self, prepared):
+ infos = (
+ self.infos[prepared.normalized]
+ if prepared
+ else itertools.chain.from_iterable(self.infos.values())
+ )
+ eggs = (
+ self.eggs[prepared.legacy_normalized]
+ if prepared
+ else itertools.chain.from_iterable(self.eggs.values())
+ )
+ return itertools.chain(infos, eggs)
+
+
+class Prepared:
+ """
+ A prepared search for metadata on a possibly-named package.
+ """
+
+ normalized = None
+ legacy_normalized = None
+
+ def __init__(self, name):
+ self.name = name
+ if name is None:
+ return
+ self.normalized = self.normalize(name)
+ self.legacy_normalized = self.legacy_normalize(name)
+
+ @staticmethod
+ def normalize(name):
+ """
+ PEP 503 normalization plus dashes as underscores.
+ """
+ return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_')
+
+ @staticmethod
+ def legacy_normalize(name):
+ """
+ Normalize the package name as found in the convention in
+ older packaging tools versions and specs.
+ """
+ return name.lower().replace('-', '_')
+
+ def __bool__(self):
+ return bool(self.name)
+
+
+@install
+class MetadataPathFinder(NullFinder, DistributionFinder):
+ """A degenerate finder for distribution packages on the file system.
+
+ This finder supplies only a find_distributions() method for versions
+ of Python that do not have a PathFinder find_distributions().
+ """
+
+ def find_distributions(
+ self, context=DistributionFinder.Context()
+ ) -> Iterable["PathDistribution"]:
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+ (or all names if ``None`` indicated) along the paths in the list
+ of directories ``context.path``.
+ """
+ found = self._search_paths(context.name, context.path)
+ return map(PathDistribution, found)
+
+ @classmethod
+ def _search_paths(cls, name, paths):
+ """Find metadata directories in paths heuristically."""
+ prepared = Prepared(name)
+ return itertools.chain.from_iterable(
+ path.search(prepared) for path in map(FastPath, paths)
+ )
+
+ def invalidate_caches(cls) -> None:
+ FastPath.__new__.cache_clear()
+
+
+class PathDistribution(Distribution):
+ def __init__(self, path: SimplePath) -> None:
+ """Construct a distribution.
+
+ :param path: SimplePath indicating the metadata directory.
+ """
+ self._path = path
+
+ def read_text(self, filename: StrPath) -> Optional[str]:
+ with suppress(
+ FileNotFoundError,
+ IsADirectoryError,
+ KeyError,
+ NotADirectoryError,
+ PermissionError,
+ ):
+ return self._path.joinpath(filename).read_text(encoding='utf-8')
+
+ return None
+
+ read_text.__doc__ = Distribution.read_text.__doc__
+
+ def locate_file(self, path: StrPath) -> pathlib.Path:
+ return self._path.parent / path
+
+ @property
+ def _normalized_name(self):
+ """
+ Performance optimization: where possible, resolve the
+ normalized name from the file system path.
+ """
+ stem = os.path.basename(str(self._path))
+ return (
+ pass_none(Prepared.normalize)(self._name_from_stem(stem))
+ or super()._normalized_name
+ )
+
+ @staticmethod
+ def _name_from_stem(stem):
+ """
+ >>> PathDistribution._name_from_stem('foo-3.0.egg-info')
+ 'foo'
+ >>> PathDistribution._name_from_stem('CherryPy-3.0.dist-info')
+ 'CherryPy'
+ >>> PathDistribution._name_from_stem('face.egg-info')
+ 'face'
+ >>> PathDistribution._name_from_stem('foo.bar')
+ """
+ filename, ext = os.path.splitext(stem)
+ if ext not in ('.dist-info', '.egg-info'):
+ return
+ name, sep, rest = filename.partition('-')
+ return name
+
+
+def distribution(distribution_name) -> Distribution:
+ """Get the ``Distribution`` instance for the named package.
+
+ :param distribution_name: The name of the distribution package as a string.
+ :return: A ``Distribution`` instance (or subclass thereof).
+ """
+ return Distribution.from_name(distribution_name)
+
+
+def distributions(**kwargs) -> Iterable[Distribution]:
+ """Get all ``Distribution`` instances in the current environment.
+
+ :return: An iterable of ``Distribution`` instances.
+ """
+ return Distribution.discover(**kwargs)
+
+
+def metadata(distribution_name) -> _meta.PackageMetadata:
+ """Get the metadata for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: A PackageMetadata containing the parsed metadata.
+ """
+ return Distribution.from_name(distribution_name).metadata
+
+
+def version(distribution_name) -> str:
+ """Get the version string for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: The version string for the package as defined in the package's
+ "Version" metadata key.
+ """
+ return distribution(distribution_name).version
+
+
+_unique = functools.partial(
+ unique_everseen,
+ key=_py39compat.normalized_name,
+)
+"""
+Wrapper for ``distributions`` to return unique distributions by name.
+"""
+
+
+def entry_points(**params) -> EntryPoints:
+ """Return EntryPoint objects for all installed packages.
+
+ Pass selection parameters (group or name) to filter the
+ result to entry points matching those properties (see
+ EntryPoints.select()).
+
+ :return: EntryPoints for all installed packages.
+ """
+ eps = itertools.chain.from_iterable(
+ dist.entry_points for dist in _unique(distributions())
+ )
+ return EntryPoints(eps).select(**params)
+
+
+def files(distribution_name) -> Optional[List[PackagePath]]:
+ """Return a list of files for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: List of files composing the distribution.
+ """
+ return distribution(distribution_name).files
+
+
+def requires(distribution_name) -> Optional[List[str]]:
+ """
+ Return a list of requirements for the named package.
+
+ :return: An iterable of requirements, suitable for
+ packaging.requirement.Requirement.
+ """
+ return distribution(distribution_name).requires
+
+
+def packages_distributions() -> Mapping[str, List[str]]:
+ """
+ Return a mapping of top-level packages to their
+ distributions.
+
+ >>> import collections.abc
+ >>> pkgs = packages_distributions()
+ >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values())
+ True
+ """
+ pkg_to_dist = collections.defaultdict(list)
+ for dist in distributions():
+ for pkg in _top_level_declared(dist) or _top_level_inferred(dist):
+ pkg_to_dist[pkg].append(dist.metadata['Name'])
+ return dict(pkg_to_dist)
+
+
+def _top_level_declared(dist):
+ return (dist.read_text('top_level.txt') or '').split()
+
+
+def _topmost(name: PackagePath) -> Optional[str]:
+ """
+ Return the top-most parent as long as there is a parent.
+ """
+ top, *rest = name.parts
+ return top if rest else None
+
+
+def _get_toplevel_name(name: PackagePath) -> str:
+ """
+ Infer a possibly importable module name from a name presumed on
+ sys.path.
+
+ >>> _get_toplevel_name(PackagePath('foo.py'))
+ 'foo'
+ >>> _get_toplevel_name(PackagePath('foo'))
+ 'foo'
+ >>> _get_toplevel_name(PackagePath('foo.pyc'))
+ 'foo'
+ >>> _get_toplevel_name(PackagePath('foo/__init__.py'))
+ 'foo'
+ >>> _get_toplevel_name(PackagePath('foo.pth'))
+ 'foo.pth'
+ >>> _get_toplevel_name(PackagePath('foo.dist-info'))
+ 'foo.dist-info'
+ """
+ return _topmost(name) or (
+ # python/typeshed#10328
+ inspect.getmodulename(name) # type: ignore
+ or str(name)
+ )
+
+
+def _top_level_inferred(dist):
+ opt_names = set(map(_get_toplevel_name, always_iterable(dist.files)))
+
+ def importable_name(name):
+ return '.' not in name
+
+ return filter(importable_name, opt_names)
diff --git a/src/rez/vendor/importlib_metadata/_adapters.py b/src/rez/vendor/importlib_metadata/_adapters.py
new file mode 100644
index 000000000..e33cba5e4
--- /dev/null
+++ b/src/rez/vendor/importlib_metadata/_adapters.py
@@ -0,0 +1,90 @@
+import functools
+import warnings
+import re
+import textwrap
+import email.message
+
+from ._text import FoldedCase
+from ._compat import pypy_partial
+
+
+# Do not remove prior to 2024-01-01 or Python 3.14
+_warn = functools.partial(
+ warnings.warn,
+ "Implicit None on return values is deprecated and will raise KeyErrors.",
+ DeprecationWarning,
+ stacklevel=pypy_partial(2),
+)
+
+
+class Message(email.message.Message):
+ multiple_use_keys = set(
+ map(
+ FoldedCase,
+ [
+ 'Classifier',
+ 'Obsoletes-Dist',
+ 'Platform',
+ 'Project-URL',
+ 'Provides-Dist',
+ 'Provides-Extra',
+ 'Requires-Dist',
+ 'Requires-External',
+ 'Supported-Platform',
+ 'Dynamic',
+ ],
+ )
+ )
+ """
+ Keys that may be indicated multiple times per PEP 566.
+ """
+
+ def __new__(cls, orig: email.message.Message):
+ res = super().__new__(cls)
+ vars(res).update(vars(orig))
+ return res
+
+ def __init__(self, *args, **kwargs):
+ self._headers = self._repair_headers()
+
+ # suppress spurious error from mypy
+ def __iter__(self):
+ return super().__iter__()
+
+ def __getitem__(self, item):
+ """
+ Warn users that a ``KeyError`` can be expected when a
+        missing key is supplied. Ref python/importlib_metadata#371.
+ """
+ res = super().__getitem__(item)
+ if res is None:
+ _warn()
+ return res
+
+ def _repair_headers(self):
+ def redent(value):
+ "Correct for RFC822 indentation"
+ if not value or '\n' not in value:
+ return value
+ return textwrap.dedent(' ' * 8 + value)
+
+ headers = [(key, redent(value)) for key, value in vars(self)['_headers']]
+ if self._payload:
+ headers.append(('Description', self.get_payload()))
+ return headers
+
+ @property
+ def json(self):
+ """
+ Convert PackageMetadata to a JSON-compatible format
+ per PEP 0566.
+ """
+
+ def transform(key):
+ value = self.get_all(key) if key in self.multiple_use_keys else self[key]
+ if key == 'Keywords':
+ value = re.split(r'\s+', value)
+ tk = key.lower().replace('-', '_')
+ return tk, value
+
+ return dict(map(transform, map(FoldedCase, self)))
diff --git a/src/rez/vendor/importlib_metadata/_collections.py b/src/rez/vendor/importlib_metadata/_collections.py
new file mode 100644
index 000000000..cf0954e1a
--- /dev/null
+++ b/src/rez/vendor/importlib_metadata/_collections.py
@@ -0,0 +1,30 @@
+import collections
+
+
+# from jaraco.collections 3.3
+class FreezableDefaultDict(collections.defaultdict):
+ """
+ Often it is desirable to prevent the mutation of
+ a default dict after its initial construction, such
+ as to prevent mutation during iteration.
+
+ >>> dd = FreezableDefaultDict(list)
+ >>> dd[0].append('1')
+ >>> dd.freeze()
+ >>> dd[1]
+ []
+ >>> len(dd)
+ 1
+ """
+
+ def __missing__(self, key):
+ return getattr(self, '_frozen', super().__missing__)(key)
+
+ def freeze(self):
+ self._frozen = lambda key: self.default_factory()
+
+
+class Pair(collections.namedtuple('Pair', 'name value')):
+ @classmethod
+ def parse(cls, text):
+ return cls(*map(str.strip, text.split("=", 1)))
diff --git a/src/rez/vendor/importlib_metadata/_compat.py b/src/rez/vendor/importlib_metadata/_compat.py
new file mode 100644
index 000000000..00c33f222
--- /dev/null
+++ b/src/rez/vendor/importlib_metadata/_compat.py
@@ -0,0 +1,74 @@
+import os
+import sys
+import platform
+
+from typing import Union
+
+
+__all__ = ['install', 'NullFinder', 'Protocol']
+
+
+try:
+ from typing import Protocol
+except ImportError: # pragma: no cover
+ # Python 3.7 compatibility
+ from rez.vendor.typing_extensions.typing_extensions import Protocol # type: ignore
+
+
+def install(cls):
+ """
+ Class decorator for installation on sys.meta_path.
+
+ Adds the backport DistributionFinder to sys.meta_path and
+ attempts to disable the finder functionality of the stdlib
+ DistributionFinder.
+ """
+ sys.meta_path.append(cls())
+ disable_stdlib_finder()
+ return cls
+
+
+def disable_stdlib_finder():
+ """
+ Give the backport primacy for discovering path-based distributions
+ by monkey-patching the stdlib O_O.
+
+ See #91 for more background for rationale on this sketchy
+ behavior.
+ """
+
+ def matches(finder):
+ return getattr(
+ finder, '__module__', None
+ ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions')
+
+ for finder in filter(matches, sys.meta_path): # pragma: nocover
+ del finder.find_distributions
+
+
+class NullFinder:
+ """
+ A "Finder" (aka "MetaClassFinder") that never finds any modules,
+ but may find distributions.
+ """
+
+ @staticmethod
+ def find_spec(*args, **kwargs):
+ return None
+
+
+def pypy_partial(val):
+ """
+ Adjust for variable stacklevel on partial under PyPy.
+
+ Workaround for #327.
+ """
+ is_pypy = platform.python_implementation() == 'PyPy'
+ return val + is_pypy
+
+
+if sys.version_info >= (3, 9):
+ StrPath = Union[str, os.PathLike[str]]
+else:
+ # PathLike is only subscriptable at runtime in 3.9+
+ StrPath = Union[str, "os.PathLike[str]"] # pragma: no cover
diff --git a/src/rez/vendor/importlib_metadata/_functools.py b/src/rez/vendor/importlib_metadata/_functools.py
new file mode 100644
index 000000000..71f66bd03
--- /dev/null
+++ b/src/rez/vendor/importlib_metadata/_functools.py
@@ -0,0 +1,104 @@
+import types
+import functools
+
+
+# from jaraco.functools 3.3
+def method_cache(method, cache_wrapper=None):
+ """
+ Wrap lru_cache to support storing the cache data in the object instances.
+
+ Abstracts the common paradigm where the method explicitly saves an
+ underscore-prefixed protected property on first call and returns that
+ subsequently.
+
+ >>> class MyClass:
+ ... calls = 0
+ ...
+ ... @method_cache
+ ... def method(self, value):
+ ... self.calls += 1
+ ... return value
+
+ >>> a = MyClass()
+ >>> a.method(3)
+ 3
+ >>> for x in range(75):
+ ... res = a.method(x)
+ >>> a.calls
+ 75
+
+ Note that the apparent behavior will be exactly like that of lru_cache
+ except that the cache is stored on each instance, so values in one
+ instance will not flush values from another, and when an instance is
+ deleted, so are the cached values for that instance.
+
+ >>> b = MyClass()
+ >>> for x in range(35):
+ ... res = b.method(x)
+ >>> b.calls
+ 35
+ >>> a.method(0)
+ 0
+ >>> a.calls
+ 75
+
+ Note that if method had been decorated with ``functools.lru_cache()``,
+ a.calls would have been 76 (due to the cached value of 0 having been
+ flushed by the 'b' instance).
+
+ Clear the cache with ``.cache_clear()``
+
+ >>> a.method.cache_clear()
+
+ Same for a method that hasn't yet been called.
+
+ >>> c = MyClass()
+ >>> c.method.cache_clear()
+
+ Another cache wrapper may be supplied:
+
+ >>> cache = functools.lru_cache(maxsize=2)
+ >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
+ >>> a = MyClass()
+ >>> a.method2()
+ 3
+
+ Caution - do not subsequently wrap the method with another decorator, such
+ as ``@property``, which changes the semantics of the function.
+
+ See also
+ http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
+ for another implementation and additional justification.
+ """
+ cache_wrapper = cache_wrapper or functools.lru_cache()
+
+ def wrapper(self, *args, **kwargs):
+ # it's the first call, replace the method with a cached, bound method
+ bound_method = types.MethodType(method, self)
+ cached_method = cache_wrapper(bound_method)
+ setattr(self, method.__name__, cached_method)
+ return cached_method(*args, **kwargs)
+
+ # Support cache clear even before cache has been created.
+ wrapper.cache_clear = lambda: None
+
+ return wrapper
+
+
+# From jaraco.functools 3.3
+def pass_none(func):
+ """
+ Wrap func so it's not called if its first param is None
+
+ >>> print_text = pass_none(print)
+ >>> print_text('text')
+ text
+ >>> print_text(None)
+ """
+
+ @functools.wraps(func)
+ def wrapper(param, *args, **kwargs):
+ if param is not None:
+ return func(param, *args, **kwargs)
+
+ return wrapper
diff --git a/src/rez/vendor/importlib_metadata/_itertools.py b/src/rez/vendor/importlib_metadata/_itertools.py
new file mode 100644
index 000000000..d4ca9b914
--- /dev/null
+++ b/src/rez/vendor/importlib_metadata/_itertools.py
@@ -0,0 +1,73 @@
+from itertools import filterfalse
+
+
+def unique_everseen(iterable, key=None):
+ "List unique elements, preserving order. Remember all elements ever seen."
+ # unique_everseen('AAAABBBCCDAABBB') --> A B C D
+ # unique_everseen('ABBCcAD', str.lower) --> A B C D
+ seen = set()
+ seen_add = seen.add
+ if key is None:
+ for element in filterfalse(seen.__contains__, iterable):
+ seen_add(element)
+ yield element
+ else:
+ for element in iterable:
+ k = key(element)
+ if k not in seen:
+ seen_add(k)
+ yield element
+
+
+# copied from more_itertools 8.8
+def always_iterable(obj, base_type=(str, bytes)):
+ """If *obj* is iterable, return an iterator over its items::
+
+ >>> obj = (1, 2, 3)
+ >>> list(always_iterable(obj))
+ [1, 2, 3]
+
+ If *obj* is not iterable, return a one-item iterable containing *obj*::
+
+ >>> obj = 1
+ >>> list(always_iterable(obj))
+ [1]
+
+ If *obj* is ``None``, return an empty iterable:
+
+ >>> obj = None
+ >>> list(always_iterable(None))
+ []
+
+ By default, binary and text strings are not considered iterable::
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj))
+ ['foo']
+
+ If *base_type* is set, objects for which ``isinstance(obj, base_type)``
+ returns ``True`` won't be considered iterable.
+
+ >>> obj = {'a': 1}
+ >>> list(always_iterable(obj)) # Iterate over the dict's keys
+ ['a']
+ >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
+ [{'a': 1}]
+
+ Set *base_type* to ``None`` to avoid any special handling and treat objects
+ Python considers iterable as iterable:
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj, base_type=None))
+ ['f', 'o', 'o']
+ """
+ if obj is None:
+ return iter(())
+
+ if (base_type is not None) and isinstance(obj, base_type):
+ return iter((obj,))
+
+ try:
+ return iter(obj)
+ except TypeError:
+ return iter((obj,))
diff --git a/src/rez/vendor/importlib_metadata/_meta.py b/src/rez/vendor/importlib_metadata/_meta.py
new file mode 100644
index 000000000..0c7e8791c
--- /dev/null
+++ b/src/rez/vendor/importlib_metadata/_meta.py
@@ -0,0 +1,63 @@
+from ._compat import Protocol
+from typing import Any, Dict, Iterator, List, Optional, TypeVar, Union, overload
+
+
+_T = TypeVar("_T")
+
+
+class PackageMetadata(Protocol):
+ def __len__(self) -> int:
+ ... # pragma: no cover
+
+ def __contains__(self, item: str) -> bool:
+ ... # pragma: no cover
+
+ def __getitem__(self, key: str) -> str:
+ ... # pragma: no cover
+
+ def __iter__(self) -> Iterator[str]:
+ ... # pragma: no cover
+
+ @overload
+ def get(self, name: str, failobj: None = None) -> Optional[str]:
+ ... # pragma: no cover
+
+ @overload
+ def get(self, name: str, failobj: _T) -> Union[str, _T]:
+ ... # pragma: no cover
+
+ # overload per python/importlib_metadata#435
+ @overload
+ def get_all(self, name: str, failobj: None = None) -> Optional[List[Any]]:
+ ... # pragma: no cover
+
+ @overload
+ def get_all(self, name: str, failobj: _T) -> Union[List[Any], _T]:
+ """
+ Return all values associated with a possibly multi-valued key.
+ """
+
+ @property
+ def json(self) -> Dict[str, Union[str, List[str]]]:
+ """
+ A JSON-compatible form of the metadata.
+ """
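+
+# Illustrative usage (assumed, not part of upstream): the object returned by
+# Distribution.metadata satisfies this protocol structurally, so typed
+# helpers can accept it without naming a concrete class:
+#
+# def describe(md: PackageMetadata) -> str:
+#     return f"{md['Name']} {md['Version']}"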
+
+
+class SimplePath(Protocol[_T]):
+ """
+ A minimal subset of pathlib.Path required by PathDistribution.
+ """
+
+ def joinpath(self, other: Union[str, _T]) -> _T:
+ ... # pragma: no cover
+
+ def __truediv__(self, other: Union[str, _T]) -> _T:
+ ... # pragma: no cover
+
+ @property
+ def parent(self) -> _T:
+ ... # pragma: no cover
+
+ def read_text(self) -> str:
+ ... # pragma: no cover
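+
+
+# Illustrative note (assumed): both pathlib.Path and zipfile.Path provide
+# joinpath, __truediv__, parent and read_text, so either can back a
+# PathDistribution (plain directories as well as wheels and zips).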
diff --git a/src/rez/vendor/importlib_metadata/_py39compat.py b/src/rez/vendor/importlib_metadata/_py39compat.py
new file mode 100644
index 000000000..cde4558fb
--- /dev/null
+++ b/src/rez/vendor/importlib_metadata/_py39compat.py
@@ -0,0 +1,35 @@
+"""
+Compatibility layer with Python 3.8/3.9
+"""
+from typing import TYPE_CHECKING, Any, Optional
+
+if TYPE_CHECKING: # pragma: no cover
+ # Prevent circular imports on runtime.
+ from . import Distribution, EntryPoint
+else:
+ Distribution = EntryPoint = Any
+
+
+def normalized_name(dist: Distribution) -> Optional[str]:
+ """
+ Honor name normalization for distributions that don't provide ``_normalized_name``.
+ """
+ try:
+ return dist._normalized_name
+ except AttributeError:
+ from . import Prepared # -> delay to prevent circular imports.
+
+ return Prepared.normalize(getattr(dist, "name", None) or dist.metadata['Name'])
+
+
+def ep_matches(ep: EntryPoint, **params) -> bool:
+ """
+ Workaround for ``EntryPoint`` objects without the ``matches`` method.
+ """
+ try:
+ return ep.matches(**params)
+ except AttributeError:
+ from . import EntryPoint # -> delay to prevent circular imports.
+
+ # Reconstruct the EntryPoint object to make sure it is compatible.
+ return EntryPoint(ep.name, ep.value, ep.group).matches(**params)
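+
+
+# Illustrative usage (assumed, not part of upstream): these shims accept
+# entry points from either the stdlib or this backport, e.g.
+#
+# plugin_eps = [ep for ep in dist.entry_points
+#               if ep_matches(ep, group='rez.plugins')]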
diff --git a/src/rez/vendor/importlib_metadata/_text.py b/src/rez/vendor/importlib_metadata/_text.py
new file mode 100644
index 000000000..c88cfbb23
--- /dev/null
+++ b/src/rez/vendor/importlib_metadata/_text.py
@@ -0,0 +1,99 @@
+import re
+
+from ._functools import method_cache
+
+
+# from jaraco.text 3.5
+class FoldedCase(str):
+ """
+ A case insensitive string class; behaves just like str
+ except compares equal when the only variation is case.
+
+ >>> s = FoldedCase('hello world')
+
+ >>> s == 'Hello World'
+ True
+
+ >>> 'Hello World' == s
+ True
+
+ >>> s != 'Hello World'
+ False
+
+ >>> s.index('O')
+ 4
+
+ >>> s.split('O')
+ ['hell', ' w', 'rld']
+
+ >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
+ ['alpha', 'Beta', 'GAMMA']
+
+ Sequence membership is straightforward.
+
+ >>> "Hello World" in [s]
+ True
+ >>> s in ["Hello World"]
+ True
+
+ You may test for set inclusion, but candidate and elements
+ must both be folded.
+
+ >>> FoldedCase("Hello World") in {s}
+ True
+ >>> s in {FoldedCase("Hello World")}
+ True
+
+ String inclusion works as long as the FoldedCase object
+ is on the right.
+
+ >>> "hello" in FoldedCase("Hello World")
+ True
+
+ But not if the FoldedCase object is on the left:
+
+ >>> FoldedCase('hello') in 'Hello World'
+ False
+
+ In that case, use in_:
+
+ >>> FoldedCase('hello').in_('Hello World')
+ True
+
+ >>> FoldedCase('hello') > FoldedCase('Hello')
+ False
+ """
+
+ def __lt__(self, other):
+ return self.lower() < other.lower()
+
+ def __gt__(self, other):
+ return self.lower() > other.lower()
+
+ def __eq__(self, other):
+ return self.lower() == other.lower()
+
+ def __ne__(self, other):
+ return self.lower() != other.lower()
+
+ def __hash__(self):
+ return hash(self.lower())
+
+ def __contains__(self, other):
+ return super().lower().__contains__(other.lower())
+
+ def in_(self, other):
+ "Does self appear in other?"
+ return self in FoldedCase(other)
+
+ # cache lower since it's likely to be called frequently.
+ @method_cache
+ def lower(self):
+ return super().lower()
+
+ def index(self, sub):
+ return self.lower().index(sub.lower())
+
+ def split(self, splitter=' ', maxsplit=0):
+ pattern = re.compile(re.escape(splitter), re.I)
+ return pattern.split(self, maxsplit)
diff --git a/src/rez/vendor/importlib_metadata/py.typed b/src/rez/vendor/importlib_metadata/py.typed
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/rez/vendor/typing_extensions/LICENSE b/src/rez/vendor/typing_extensions/LICENSE
new file mode 100644
index 000000000..f26bcf4d2
--- /dev/null
+++ b/src/rez/vendor/typing_extensions/LICENSE
@@ -0,0 +1,279 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations, which became
+Zope Corporation. In 2001, the Python Software Foundation (PSF, see
+https://www.python.org/psf/) was formed, a non-profit organization
+created specifically to own Python-related Intellectual Property.
+Zope Corporation was a sponsoring member of the PSF.
+
+All Python releases are Open Source (see https://opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2 and above 2.1.1 2001-now PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/src/rez/vendor/typing_extensions/__init__.py b/src/rez/vendor/typing_extensions/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/rez/vendor/typing_extensions/typing_extensions.py b/src/rez/vendor/typing_extensions/typing_extensions.py
new file mode 100644
index 000000000..901f3b967
--- /dev/null
+++ b/src/rez/vendor/typing_extensions/typing_extensions.py
@@ -0,0 +1,3072 @@
+import abc
+import collections
+import collections.abc
+import functools
+import inspect
+import operator
+import sys
+import types as _types
+import typing
+import warnings
+
+__all__ = [
+ # Super-special typing primitives.
+ 'Any',
+ 'ClassVar',
+ 'Concatenate',
+ 'Final',
+ 'LiteralString',
+ 'ParamSpec',
+ 'ParamSpecArgs',
+ 'ParamSpecKwargs',
+ 'Self',
+ 'Type',
+ 'TypeVar',
+ 'TypeVarTuple',
+ 'Unpack',
+
+ # ABCs (from collections.abc).
+ 'Awaitable',
+ 'AsyncIterator',
+ 'AsyncIterable',
+ 'Coroutine',
+ 'AsyncGenerator',
+ 'AsyncContextManager',
+ 'Buffer',
+ 'ChainMap',
+
+ # Concrete collection types.
+ 'ContextManager',
+ 'Counter',
+ 'Deque',
+ 'DefaultDict',
+ 'NamedTuple',
+ 'OrderedDict',
+ 'TypedDict',
+
+ # Structural checks, a.k.a. protocols.
+ 'SupportsAbs',
+ 'SupportsBytes',
+ 'SupportsComplex',
+ 'SupportsFloat',
+ 'SupportsIndex',
+ 'SupportsInt',
+ 'SupportsRound',
+
+ # One-off things.
+ 'Annotated',
+ 'assert_never',
+ 'assert_type',
+ 'clear_overloads',
+ 'dataclass_transform',
+ 'deprecated',
+ 'get_overloads',
+ 'final',
+ 'get_args',
+ 'get_origin',
+ 'get_original_bases',
+ 'get_protocol_members',
+ 'get_type_hints',
+ 'IntVar',
+ 'is_protocol',
+ 'is_typeddict',
+ 'Literal',
+ 'NewType',
+ 'overload',
+ 'override',
+ 'Protocol',
+ 'reveal_type',
+ 'runtime',
+ 'runtime_checkable',
+ 'Text',
+ 'TypeAlias',
+ 'TypeAliasType',
+ 'TypeGuard',
+ 'TYPE_CHECKING',
+ 'Never',
+ 'NoReturn',
+ 'Required',
+ 'NotRequired',
+
+ # Pure aliases, have always been in typing
+ 'AbstractSet',
+ 'AnyStr',
+ 'BinaryIO',
+ 'Callable',
+ 'Collection',
+ 'Container',
+ 'Dict',
+ 'ForwardRef',
+ 'FrozenSet',
+ 'Generator',
+ 'Generic',
+ 'Hashable',
+ 'IO',
+ 'ItemsView',
+ 'Iterable',
+ 'Iterator',
+ 'KeysView',
+ 'List',
+ 'Mapping',
+ 'MappingView',
+ 'Match',
+ 'MutableMapping',
+ 'MutableSequence',
+ 'MutableSet',
+ 'Optional',
+ 'Pattern',
+ 'Reversible',
+ 'Sequence',
+ 'Set',
+ 'Sized',
+ 'TextIO',
+ 'Tuple',
+ 'Union',
+ 'ValuesView',
+ 'cast',
+ 'no_type_check',
+ 'no_type_check_decorator',
+]
+
+# for backward compatibility
+PEP_560 = True
+GenericMeta = type
+
+# The functions below are modified copies of typing internal helpers.
+# They are needed by _ProtocolMeta and they provide support for PEP 646.
+
+
+class _Sentinel:
+ def __repr__(self):
+        return "<sentinel>"
+
+
+_marker = _Sentinel()
+
+
+def _check_generic(cls, parameters, elen=_marker):
+ """Check correct count for parameters of a generic cls (internal helper).
+ This gives a nice error message in case of count mismatch.
+ """
+ if not elen:
+ raise TypeError(f"{cls} is not a generic class")
+ if elen is _marker:
+ if not hasattr(cls, "__parameters__") or not cls.__parameters__:
+ raise TypeError(f"{cls} is not a generic class")
+ elen = len(cls.__parameters__)
+ alen = len(parameters)
+ if alen != elen:
+ if hasattr(cls, "__parameters__"):
+ parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+ num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
+ if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
+ return
+ raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};"
+ f" actual {alen}, expected {elen}")
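+
+# Example (illustrative, MyGen is hypothetical): subscripting a
+# single-parameter generic with two arguments fails here, e.g.
+# _check_generic(MyGen, (int, str), 1) raises
+# TypeError("Too many parameters for ...; actual 2, expected 1").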
+
+
+if sys.version_info >= (3, 10):
+ def _should_collect_from_parameters(t):
+ return isinstance(
+ t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
+ )
+elif sys.version_info >= (3, 9):
+ def _should_collect_from_parameters(t):
+ return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
+else:
+ def _should_collect_from_parameters(t):
+ return isinstance(t, typing._GenericAlias) and not t._special
+
+
+def _collect_type_vars(types, typevar_types=None):
+    """Collect all type variables contained in types in order of
+ first appearance (lexicographic order). For example::
+
+ _collect_type_vars((T, List[S, T])) == (T, S)
+ """
+ if typevar_types is None:
+ typevar_types = typing.TypeVar
+ tvars = []
+ for t in types:
+ if (
+ isinstance(t, typevar_types) and
+ t not in tvars and
+ not _is_unpack(t)
+ ):
+ tvars.append(t)
+ if _should_collect_from_parameters(t):
+ tvars.extend([t for t in t.__parameters__ if t not in tvars])
+ return tuple(tvars)
+
+
+NoReturn = typing.NoReturn
+
+# Some unconstrained type variables. These are used by the container types.
+# (These are not for export.)
+T = typing.TypeVar('T') # Any type.
+KT = typing.TypeVar('KT') # Key type.
+VT = typing.TypeVar('VT') # Value type.
+T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers.
+T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant.
+
+
+if sys.version_info >= (3, 11):
+ from typing import Any
+else:
+
+ class _AnyMeta(type):
+ def __instancecheck__(self, obj):
+ if self is Any:
+ raise TypeError("typing_extensions.Any cannot be used with isinstance()")
+ return super().__instancecheck__(obj)
+
+ def __repr__(self):
+ if self is Any:
+ return "typing_extensions.Any"
+ return super().__repr__()
+
+ class Any(metaclass=_AnyMeta):
+ """Special type indicating an unconstrained type.
+ - Any is compatible with every type.
+ - Any assumed to have all methods.
+ - All values assumed to be instances of Any.
+ Note that all the above statements are true from the point of view of
+ static type checkers. At runtime, Any should not be used with instance
+ checks.
+ """
+ def __new__(cls, *args, **kwargs):
+ if cls is Any:
+ raise TypeError("Any cannot be instantiated")
+ return super().__new__(cls, *args, **kwargs)
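+
+# Illustrative behavior (assumed): under this backport isinstance(x, Any)
+# raises TypeError, while subclassing (class MyAny(Any): ...) is allowed,
+# matching the Python 3.11 semantics of typing.Any.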
+
+
+ClassVar = typing.ClassVar
+
+
+class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+
+# On older versions of typing there is an internal class named "Final".
+# 3.8+
+if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
+ Final = typing.Final
+# 3.7
+else:
+ class _FinalForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+ Final = _FinalForm('Final',
+ doc="""A special typing construct to indicate that a name
+ cannot be re-assigned or overridden in a subclass.
+ For example:
+
+ MAX_SIZE: Final = 9000
+ MAX_SIZE += 1 # Error reported by type checker
+
+ class Connection:
+ TIMEOUT: Final[int] = 10
+ class FastConnector(Connection):
+ TIMEOUT = 1 # Error reported by type checker
+
+ There is no runtime checking of these properties.""")
+
+if sys.version_info >= (3, 11):
+ final = typing.final
+else:
+ # @final exists in 3.8+, but we backport it for all versions
+ # before 3.11 to keep support for the __final__ attribute.
+ # See https://bugs.python.org/issue46342
+ def final(f):
+ """This decorator can be used to indicate to type checkers that
+ the decorated method cannot be overridden, and decorated class
+ cannot be subclassed. For example:
+
+ class Base:
+ @final
+ def done(self) -> None:
+ ...
+ class Sub(Base):
+ def done(self) -> None: # Error reported by type checker
+ ...
+ @final
+ class Leaf:
+ ...
+ class Other(Leaf): # Error reported by type checker
+ ...
+
+ There is no runtime checking of these properties. The decorator
+ sets the ``__final__`` attribute to ``True`` on the decorated object
+ to allow runtime introspection.
+ """
+ try:
+ f.__final__ = True
+ except (AttributeError, TypeError):
+ # Skip the attribute silently if it is not writable.
+ # AttributeError happens if the object has __slots__ or a
+ # read-only property, TypeError if it's a builtin class.
+ pass
+ return f
+
+
+def IntVar(name):
+ return typing.TypeVar(name)
+
+
+# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
+if sys.version_info >= (3, 10, 1):
+ Literal = typing.Literal
+else:
+ def _flatten_literal_params(parameters):
+ """An internal helper for Literal creation: flatten Literals among parameters"""
+ params = []
+ for p in parameters:
+ if isinstance(p, _LiteralGenericAlias):
+ params.extend(p.__args__)
+ else:
+ params.append(p)
+ return tuple(params)
+
+ def _value_and_type_iter(params):
+ for p in params:
+ yield p, type(p)
+
+ class _LiteralGenericAlias(typing._GenericAlias, _root=True):
+ def __eq__(self, other):
+ if not isinstance(other, _LiteralGenericAlias):
+ return NotImplemented
+ these_args_deduped = set(_value_and_type_iter(self.__args__))
+ other_args_deduped = set(_value_and_type_iter(other.__args__))
+ return these_args_deduped == other_args_deduped
+
+ def __hash__(self):
+ return hash(frozenset(_value_and_type_iter(self.__args__)))
+
+ class _LiteralForm(_ExtensionsSpecialForm, _root=True):
+ def __init__(self, doc: str):
+ self._name = 'Literal'
+ self._doc = self.__doc__ = doc
+
+ def __getitem__(self, parameters):
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+
+ parameters = _flatten_literal_params(parameters)
+
+ val_type_pairs = list(_value_and_type_iter(parameters))
+ try:
+ deduped_pairs = set(val_type_pairs)
+ except TypeError:
+ # unhashable parameters
+ pass
+ else:
+ # similar logic to typing._deduplicate on Python 3.9+
+ if len(deduped_pairs) < len(val_type_pairs):
+ new_parameters = []
+ for pair in val_type_pairs:
+ if pair in deduped_pairs:
+ new_parameters.append(pair[0])
+ deduped_pairs.remove(pair)
+ assert not deduped_pairs, deduped_pairs
+ parameters = tuple(new_parameters)
+
+ return _LiteralGenericAlias(self, parameters)
+
+ Literal = _LiteralForm(doc="""\
+ A type that can be used to indicate to type checkers
+ that the corresponding value has a value literally equivalent
+ to the provided parameter. For example:
+
+ var: Literal[4] = 4
+
+ The type checker understands that 'var' is literally equal to
+ the value 4 and no other value.
+
+ Literal[...] cannot be subclassed. There is no runtime
+ checking verifying that the parameter is actually a value
+ instead of a type.""")
+
+
+_overload_dummy = typing._overload_dummy
+
+
+if hasattr(typing, "get_overloads"): # 3.11+
+ overload = typing.overload
+ get_overloads = typing.get_overloads
+ clear_overloads = typing.clear_overloads
+else:
+ # {module: {qualname: {firstlineno: func}}}
+ _overload_registry = collections.defaultdict(
+ functools.partial(collections.defaultdict, dict)
+ )
+
+ def overload(func):
+ """Decorator for overloaded functions/methods.
+
+ In a stub file, place two or more stub definitions for the same
+ function in a row, each decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+
+ In a non-stub file (i.e. a regular .py file), do the same but
+ follow it with an implementation. The implementation should *not*
+ be decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+ def utf8(value):
+ # implementation goes here
+
+ The overloads for a function can be retrieved at runtime using the
+ get_overloads() function.
+ """
+ # classmethod and staticmethod
+ f = getattr(func, "__func__", func)
+ try:
+ _overload_registry[f.__module__][f.__qualname__][
+ f.__code__.co_firstlineno
+ ] = func
+ except AttributeError:
+ # Not a normal function; ignore.
+ pass
+ return _overload_dummy
+
+ def get_overloads(func):
+ """Return all defined overloads for *func* as a sequence."""
+ # classmethod and staticmethod
+ f = getattr(func, "__func__", func)
+ if f.__module__ not in _overload_registry:
+ return []
+ mod_dict = _overload_registry[f.__module__]
+ if f.__qualname__ not in mod_dict:
+ return []
+ return list(mod_dict[f.__qualname__].values())
+
+ def clear_overloads():
+ """Clear all overloads in the registry."""
+ _overload_registry.clear()
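+
+    # Illustrative usage (assumed, mirrors the docstring above):
+    #
+    # @overload
+    # def utf8(value: str) -> bytes: ...
+    # @overload
+    # def utf8(value: None) -> None: ...
+    # def utf8(value): ...
+    #
+    # get_overloads(utf8)  # -> the two stub definitions, in definition order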
+
+
+# This is not a real generic class. Don't use outside annotations.
+Type = typing.Type
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+
+
+Awaitable = typing.Awaitable
+Coroutine = typing.Coroutine
+AsyncIterable = typing.AsyncIterable
+AsyncIterator = typing.AsyncIterator
+Deque = typing.Deque
+ContextManager = typing.ContextManager
+AsyncContextManager = typing.AsyncContextManager
+DefaultDict = typing.DefaultDict
+
+# 3.7.2+
+if hasattr(typing, 'OrderedDict'):
+ OrderedDict = typing.OrderedDict
+# 3.7.0-3.7.2
+else:
+ OrderedDict = typing._alias(collections.OrderedDict, (KT, VT))
+
+Counter = typing.Counter
+ChainMap = typing.ChainMap
+AsyncGenerator = typing.AsyncGenerator
+Text = typing.Text
+TYPE_CHECKING = typing.TYPE_CHECKING
+
+
+_PROTO_ALLOWLIST = {
+ 'collections.abc': [
+ 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
+ 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
+ ],
+ 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
+ 'typing_extensions': ['Buffer'],
+}
+
+
+_EXCLUDED_ATTRS = {
+ "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol",
+ "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__",
+ "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__",
+ "__subclasshook__", "__orig_class__", "__init__", "__new__",
+ "__protocol_attrs__", "__callable_proto_members_only__",
+}
+
+if sys.version_info < (3, 8):
+ _EXCLUDED_ATTRS |= {
+ "_gorg", "__next_in_mro__", "__extra__", "__tree_hash__", "__args__",
+ "__origin__"
+ }
+
+if sys.version_info >= (3, 9):
+ _EXCLUDED_ATTRS.add("__class_getitem__")
+
+if sys.version_info >= (3, 12):
+ _EXCLUDED_ATTRS.add("__type_params__")
+
+_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS)
+
+
+def _get_protocol_attrs(cls):
+ attrs = set()
+ for base in cls.__mro__[:-1]: # without object
+ if base.__name__ in {'Protocol', 'Generic'}:
+ continue
+ annotations = getattr(base, '__annotations__', {})
+ for attr in (*base.__dict__, *annotations):
+ if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
+ attrs.add(attr)
+ return attrs
+
+
+def _maybe_adjust_parameters(cls):
+ """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__.
+
+ The contents of this function are very similar
+ to logic found in typing.Generic.__init_subclass__
+ on the CPython main branch.
+ """
+ tvars = []
+ if '__orig_bases__' in cls.__dict__:
+ tvars = _collect_type_vars(cls.__orig_bases__)
+ # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
+ # If found, tvars must be a subset of it.
+ # If not found, tvars is it.
+ # Also check for and reject plain Generic,
+ # and reject multiple Generic[...] and/or Protocol[...].
+ gvars = None
+ for base in cls.__orig_bases__:
+ if (isinstance(base, typing._GenericAlias) and
+ base.__origin__ in (typing.Generic, Protocol)):
+ # for error messages
+ the_base = base.__origin__.__name__
+ if gvars is not None:
+ raise TypeError(
+ "Cannot inherit from Generic[...]"
+                        " and/or Protocol[...] multiple times.")
+ gvars = base.__parameters__
+ if gvars is None:
+ gvars = tvars
+ else:
+ tvarset = set(tvars)
+ gvarset = set(gvars)
+ if not tvarset <= gvarset:
+ s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
+ s_args = ', '.join(str(g) for g in gvars)
+ raise TypeError(f"Some type variables ({s_vars}) are"
+ f" not listed in {the_base}[{s_args}]")
+ tvars = gvars
+ cls.__parameters__ = tuple(tvars)
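+
+# Example (illustrative): for `class MyProto(Protocol[T]): ...` this sets
+# MyProto.__parameters__ = (T,), after validating that every type variable
+# used in the other bases is listed in Protocol[...].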
+
+
+def _caller(depth=2):
+ try:
+ return sys._getframe(depth).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError): # For platforms without _getframe()
+ return None
+
+
+# The performance of runtime-checkable protocols is significantly improved on Python 3.12,
+# so we backport the 3.12 version of Protocol to Python <=3.11
+if sys.version_info >= (3, 12):
+ Protocol = typing.Protocol
+else:
+ def _allow_reckless_class_checks(depth=3):
+ """Allow instance and class checks for special stdlib modules.
+ The abc and functools modules indiscriminately call isinstance() and
+ issubclass() on the whole MRO of a user class, which may contain protocols.
+ """
+ return _caller(depth) in {'abc', 'functools', None}
+
+ def _no_init(self, *args, **kwargs):
+ if type(self)._is_protocol:
+ raise TypeError('Protocols cannot be instantiated')
+
+ if sys.version_info >= (3, 8):
+ # Inheriting from typing._ProtocolMeta isn't actually desirable,
+ # but is necessary to allow typing.Protocol and typing_extensions.Protocol
+ # to mix without getting TypeErrors about "metaclass conflict"
+ _typing_Protocol = typing.Protocol
+ _ProtocolMetaBase = type(_typing_Protocol)
+ else:
+ _typing_Protocol = _marker
+ _ProtocolMetaBase = abc.ABCMeta
+
+ class _ProtocolMeta(_ProtocolMetaBase):
+ # This metaclass is somewhat unfortunate,
+ # but is necessary for several reasons...
+ #
+ # NOTE: DO NOT call super() in any methods in this class
+ # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11
+ # and those are slow
+ def __new__(mcls, name, bases, namespace, **kwargs):
+ if name == "Protocol" and len(bases) < 2:
+ pass
+ elif {Protocol, _typing_Protocol} & set(bases):
+ for base in bases:
+ if not (
+ base in {object, typing.Generic, Protocol, _typing_Protocol}
+ or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
+ or is_protocol(base)
+ ):
+ raise TypeError(
+ f"Protocols can only inherit from other protocols, "
+ f"got {base!r}"
+ )
+ return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
+
+ def __init__(cls, *args, **kwargs):
+ abc.ABCMeta.__init__(cls, *args, **kwargs)
+ if getattr(cls, "_is_protocol", False):
+ cls.__protocol_attrs__ = _get_protocol_attrs(cls)
+ # PEP 544 prohibits using issubclass()
+ # with protocols that have non-method members.
+ cls.__callable_proto_members_only__ = all(
+ callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__
+ )
+
+ def __subclasscheck__(cls, other):
+ if cls is Protocol:
+ return type.__subclasscheck__(cls, other)
+ if (
+ getattr(cls, '_is_protocol', False)
+ and not _allow_reckless_class_checks()
+ ):
+ if not isinstance(other, type):
+ # Same error message as for issubclass(1, int).
+ raise TypeError('issubclass() arg 1 must be a class')
+ if (
+ not cls.__callable_proto_members_only__
+ and cls.__dict__.get("__subclasshook__") is _proto_hook
+ ):
+ raise TypeError(
+ "Protocols with non-method members don't support issubclass()"
+ )
+ if not getattr(cls, '_is_runtime_protocol', False):
+ raise TypeError(
+ "Instance and class checks can only be used with "
+ "@runtime_checkable protocols"
+ )
+ return abc.ABCMeta.__subclasscheck__(cls, other)
+
+ def __instancecheck__(cls, instance):
+ # We need this method for situations where attributes are
+ # assigned in __init__.
+ if cls is Protocol:
+ return type.__instancecheck__(cls, instance)
+ if not getattr(cls, "_is_protocol", False):
+ # i.e., it's a concrete subclass of a protocol
+ return abc.ABCMeta.__instancecheck__(cls, instance)
+
+ if (
+ not getattr(cls, '_is_runtime_protocol', False) and
+ not _allow_reckless_class_checks()
+ ):
+ raise TypeError("Instance and class checks can only be used with"
+ " @runtime_checkable protocols")
+
+ if abc.ABCMeta.__instancecheck__(cls, instance):
+ return True
+
+ for attr in cls.__protocol_attrs__:
+ try:
+ val = inspect.getattr_static(instance, attr)
+ except AttributeError:
+ break
+ if val is None and callable(getattr(cls, attr, None)):
+ break
+ else:
+ return True
+
+ return False
+
+ def __eq__(cls, other):
+ # Hack so that typing.Generic.__class_getitem__
+ # treats typing_extensions.Protocol
+ # as equivalent to typing.Protocol on Python 3.8+
+ if abc.ABCMeta.__eq__(cls, other) is True:
+ return True
+ return (
+ cls is Protocol and other is getattr(typing, "Protocol", object())
+ )
+
+ # This has to be defined, or the abc-module cache
+ # complains about classes with this metaclass being unhashable,
+ # if we define only __eq__!
+ def __hash__(cls) -> int:
+ return type.__hash__(cls)
+
+ @classmethod
+ def _proto_hook(cls, other):
+ if not cls.__dict__.get('_is_protocol', False):
+ return NotImplemented
+
+ for attr in cls.__protocol_attrs__:
+ for base in other.__mro__:
+                # Check if the member appears in the class dictionary...
+ if attr in base.__dict__:
+ if base.__dict__[attr] is None:
+ return NotImplemented
+ break
+
+ # ...or in annotations, if it is a sub-protocol.
+ annotations = getattr(base, '__annotations__', {})
+ if (
+ isinstance(annotations, collections.abc.Mapping)
+ and attr in annotations
+ and is_protocol(other)
+ ):
+ break
+ else:
+ return NotImplemented
+ return True
+
+ if sys.version_info >= (3, 8):
+ class Protocol(typing.Generic, metaclass=_ProtocolMeta):
+ __doc__ = typing.Protocol.__doc__
+ __slots__ = ()
+ _is_protocol = True
+ _is_runtime_protocol = False
+
+ def __init_subclass__(cls, *args, **kwargs):
+ super().__init_subclass__(*args, **kwargs)
+
+ # Determine if this is a protocol or a concrete subclass.
+ if not cls.__dict__.get('_is_protocol', False):
+ cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+ # Set (or override) the protocol subclass hook.
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = _proto_hook
+
+ # Prohibit instantiation for protocol classes
+ if cls._is_protocol and cls.__init__ is Protocol.__init__:
+ cls.__init__ = _no_init
+
+ else:
+ class Protocol(metaclass=_ProtocolMeta):
+ # There is quite a lot of overlapping code with typing.Generic.
+ # Unfortunately it is hard to avoid this on Python <3.8,
+ # as the typing module on Python 3.7 doesn't let us subclass typing.Generic!
+ """Base class for protocol classes. Protocol classes are defined as::
+
+ class Proto(Protocol):
+ def meth(self) -> int:
+ ...
+
+ Such classes are primarily used with static type checkers that recognize
+ structural subtyping (static duck-typing), for example::
+
+ class C:
+ def meth(self) -> int:
+ return 0
+
+ def func(x: Proto) -> int:
+ return x.meth()
+
+ func(C()) # Passes static type check
+
+ See PEP 544 for details. Protocol classes decorated with
+ @typing_extensions.runtime_checkable act
+ as simple-minded runtime-checkable protocols that check
+ only the presence of given attributes, ignoring their type signatures.
+
+        Protocol classes can be generic; they are defined as::
+
+ class GenProto(Protocol[T]):
+ def meth(self) -> T:
+ ...
+ """
+ __slots__ = ()
+ _is_protocol = True
+ _is_runtime_protocol = False
+
+ def __new__(cls, *args, **kwds):
+ if cls is Protocol:
+ raise TypeError("Type Protocol cannot be instantiated; "
+ "it can only be used as a base class")
+ return super().__new__(cls)
+
+ @typing._tp_cache
+ def __class_getitem__(cls, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ if not params and cls is not typing.Tuple:
+ raise TypeError(
+ f"Parameter list to {cls.__qualname__}[...] cannot be empty")
+ msg = "Parameters to generic types must be types."
+ params = tuple(typing._type_check(p, msg) for p in params)
+ if cls is Protocol:
+ # Generic can only be subscripted with unique type variables.
+ if not all(isinstance(p, typing.TypeVar) for p in params):
+ i = 0
+ while isinstance(params[i], typing.TypeVar):
+ i += 1
+ raise TypeError(
+ "Parameters to Protocol[...] must all be type variables."
+ f" Parameter {i + 1} is {params[i]}")
+ if len(set(params)) != len(params):
+ raise TypeError(
+ "Parameters to Protocol[...] must all be unique")
+ else:
+ # Subscripting a regular Generic subclass.
+ _check_generic(cls, params, len(cls.__parameters__))
+ return typing._GenericAlias(cls, params)
+
+ def __init_subclass__(cls, *args, **kwargs):
+ if '__orig_bases__' in cls.__dict__:
+ error = typing.Generic in cls.__orig_bases__
+ else:
+ error = typing.Generic in cls.__bases__
+ if error:
+ raise TypeError("Cannot inherit from plain Generic")
+ _maybe_adjust_parameters(cls)
+
+ # Determine if this is a protocol or a concrete subclass.
+ if not cls.__dict__.get('_is_protocol', None):
+ cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+ # Set (or override) the protocol subclass hook.
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = _proto_hook
+
+ # Prohibit instantiation for protocol classes
+ if cls._is_protocol and cls.__init__ is Protocol.__init__:
+ cls.__init__ = _no_init
+
+
+if sys.version_info >= (3, 8):
+ runtime_checkable = typing.runtime_checkable
+else:
+ def runtime_checkable(cls):
+ """Mark a protocol class as a runtime protocol, so that it
+ can be used with isinstance() and issubclass(). Raise TypeError
+ if applied to a non-protocol class.
+
+ This allows a simple-minded structural check very similar to the
+ one-offs in collections.abc such as Hashable.
+ """
+ if not (
+ (isinstance(cls, _ProtocolMeta) or issubclass(cls, typing.Generic))
+ and getattr(cls, "_is_protocol", False)
+ ):
+ raise TypeError('@runtime_checkable can be only applied to protocol classes,'
+ f' got {cls!r}')
+ cls._is_runtime_protocol = True
+ return cls
+
+
+# Exists for backwards compatibility.
+runtime = runtime_checkable
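+
+# Illustrative usage (assumed, not part of upstream):
+#
+# @runtime_checkable
+# class Closable(Protocol):
+#     def close(self) -> None: ...
+#
+# isinstance(open(__file__), Closable)  # -> True: close() is present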
+
+
+# Our version of runtime-checkable protocols is faster on Python 3.7-3.11
+if sys.version_info >= (3, 12):
+ SupportsInt = typing.SupportsInt
+ SupportsFloat = typing.SupportsFloat
+ SupportsComplex = typing.SupportsComplex
+ SupportsBytes = typing.SupportsBytes
+ SupportsIndex = typing.SupportsIndex
+ SupportsAbs = typing.SupportsAbs
+ SupportsRound = typing.SupportsRound
+else:
+ @runtime_checkable
+ class SupportsInt(Protocol):
+ """An ABC with one abstract method __int__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __int__(self) -> int:
+ pass
+
+ @runtime_checkable
+ class SupportsFloat(Protocol):
+ """An ABC with one abstract method __float__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __float__(self) -> float:
+ pass
+
+ @runtime_checkable
+ class SupportsComplex(Protocol):
+ """An ABC with one abstract method __complex__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __complex__(self) -> complex:
+ pass
+
+ @runtime_checkable
+ class SupportsBytes(Protocol):
+ """An ABC with one abstract method __bytes__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __bytes__(self) -> bytes:
+ pass
+
+ @runtime_checkable
+ class SupportsIndex(Protocol):
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __index__(self) -> int:
+ pass
+
+ @runtime_checkable
+ class SupportsAbs(Protocol[T_co]):
+ """
+ An ABC with one abstract method __abs__ that is covariant in its return type.
+ """
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __abs__(self) -> T_co:
+ pass
+
+ @runtime_checkable
+ class SupportsRound(Protocol[T_co]):
+ """
+ An ABC with one abstract method __round__ that is covariant in its return type.
+ """
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __round__(self, ndigits: int = 0) -> T_co:
+ pass
+
+
+def _ensure_subclassable(mro_entries):
+ def inner(func):
+ if sys.implementation.name == "pypy" and sys.version_info < (3, 9):
+ cls_dict = {
+ "__call__": staticmethod(func),
+ "__mro_entries__": staticmethod(mro_entries)
+ }
+ t = type(func.__name__, (), cls_dict)
+ return functools.update_wrapper(t(), func)
+ else:
+ func.__mro_entries__ = mro_entries
+ return func
+ return inner
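+
+# Example (illustrative): giving the function-based TypedDict below an
+# __mro_entries__ hook (PEP 560) is what lets `class Movie(TypedDict): ...`
+# work even though TypedDict is a function here, not a class.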
+
+
+if sys.version_info >= (3, 13):
+ # The standard library TypedDict in Python 3.8 does not store runtime information
+ # about which (if any) keys are optional. See https://bugs.python.org/issue38834
+ # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
+ # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
+ # The standard library TypedDict below Python 3.11 does not store runtime
+ # information about optional and required keys when using Required or NotRequired.
+ # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
+    # And on 3.12 we add __orig_bases__ to TypedDict
+ # to enable better runtime introspection.
+ # On 3.13 we deprecate some odd ways of creating TypedDicts.
+ TypedDict = typing.TypedDict
+ _TypedDictMeta = typing._TypedDictMeta
+ is_typeddict = typing.is_typeddict
+else:
+ # 3.10.0 and later
+ _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
+
+ if sys.version_info >= (3, 8):
+ _fake_name = "Protocol"
+ else:
+ _fake_name = "_Protocol"
+
+ class _TypedDictMeta(type):
+ def __new__(cls, name, bases, ns, total=True):
+ """Create new typed dict class object.
+
+ This method is called when TypedDict is subclassed,
+ or when TypedDict is instantiated. This way
+ TypedDict supports all three syntax forms described in its docstring.
+ Subclasses and instances of TypedDict return actual dictionaries.
+ """
+ for base in bases:
+ if type(base) is not _TypedDictMeta and base is not typing.Generic:
+ raise TypeError('cannot inherit from both a TypedDict type '
+ 'and a non-TypedDict base class')
+
+ if any(issubclass(b, typing.Generic) for b in bases):
+ generic_base = (typing.Generic,)
+ else:
+ generic_base = ()
+
+ # typing.py generally doesn't let you inherit from plain Generic, unless
+ # the name of the class happens to be "Protocol" (or "_Protocol" on 3.7).
+ tp_dict = type.__new__(_TypedDictMeta, _fake_name, (*generic_base, dict), ns)
+ tp_dict.__name__ = name
+ if tp_dict.__qualname__ == _fake_name:
+ tp_dict.__qualname__ = name
+
+ if not hasattr(tp_dict, '__orig_bases__'):
+ tp_dict.__orig_bases__ = bases
+
+ annotations = {}
+ own_annotations = ns.get('__annotations__', {})
+ msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+ if _TAKES_MODULE:
+ own_annotations = {
+ n: typing._type_check(tp, msg, module=tp_dict.__module__)
+ for n, tp in own_annotations.items()
+ }
+ else:
+ own_annotations = {
+ n: typing._type_check(tp, msg)
+ for n, tp in own_annotations.items()
+ }
+ required_keys = set()
+ optional_keys = set()
+
+ for base in bases:
+ annotations.update(base.__dict__.get('__annotations__', {}))
+ required_keys.update(base.__dict__.get('__required_keys__', ()))
+ optional_keys.update(base.__dict__.get('__optional_keys__', ()))
+
+ annotations.update(own_annotations)
+ for annotation_key, annotation_type in own_annotations.items():
+ annotation_origin = get_origin(annotation_type)
+ if annotation_origin is Annotated:
+ annotation_args = get_args(annotation_type)
+ if annotation_args:
+ annotation_type = annotation_args[0]
+ annotation_origin = get_origin(annotation_type)
+
+ if annotation_origin is Required:
+ required_keys.add(annotation_key)
+ elif annotation_origin is NotRequired:
+ optional_keys.add(annotation_key)
+ elif total:
+ required_keys.add(annotation_key)
+ else:
+ optional_keys.add(annotation_key)
+
+ tp_dict.__annotations__ = annotations
+ tp_dict.__required_keys__ = frozenset(required_keys)
+ tp_dict.__optional_keys__ = frozenset(optional_keys)
+ if not hasattr(tp_dict, '__total__'):
+ tp_dict.__total__ = total
+ return tp_dict
+
+ __call__ = dict # static method
+
+ def __subclasscheck__(cls, other):
+ # Typed dicts are only for static structural subtyping.
+ raise TypeError('TypedDict does not support instance and class checks')
+
+ __instancecheck__ = __subclasscheck__
+
+ _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+
+ @_ensure_subclassable(lambda bases: (_TypedDict,))
+ def TypedDict(__typename, __fields=_marker, *, total=True, **kwargs):
+ """A simple typed namespace. At runtime it is equivalent to a plain dict.
+
+ TypedDict creates a dictionary type such that a type checker will expect all
+ instances to have a certain set of keys, where each key is
+ associated with a value of a consistent type. This expectation
+ is not checked at runtime.
+
+ Usage::
+
+ class Point2D(TypedDict):
+ x: int
+ y: int
+ label: str
+
+ a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
+ b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
+
+ assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+ The type info can be accessed via the Point2D.__annotations__ dict, and
+ the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+ TypedDict supports an additional equivalent form::
+
+ Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+ By default, all keys must be present in a TypedDict. It is possible
+ to override this by specifying totality::
+
+ class Point2D(TypedDict, total=False):
+ x: int
+ y: int
+
+ This means that a Point2D TypedDict can have any of the keys omitted. A type
+ checker is only expected to support a literal False or True as the value of
+ the total argument. True is the default, and makes all items defined in the
+ class body be required.
+
+ The Required and NotRequired special forms can also be used to mark
+ individual keys as being required or not required::
+
+ class Point2D(TypedDict):
+ x: int # the "x" key must always be present (Required is the default)
+ y: NotRequired[int] # the "y" key can be omitted
+
+ See PEP 655 for more details on Required and NotRequired.
+ """
+ if __fields is _marker or __fields is None:
+ if __fields is _marker:
+ deprecated_thing = "Failing to pass a value for the 'fields' parameter"
+ else:
+ deprecated_thing = "Passing `None` as the 'fields' parameter"
+
+ example = f"`{__typename} = TypedDict({__typename!r}, {{}})`"
+ deprecation_msg = (
+ f"{deprecated_thing} is deprecated and will be disallowed in "
+ "Python 3.15. To create a TypedDict class with 0 fields "
+ "using the functional syntax, pass an empty dictionary, e.g. "
+ ) + example + "."
+ warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
+ __fields = kwargs
+ elif kwargs:
+ raise TypeError("TypedDict takes either a dict or keyword arguments,"
+ " but not both")
+ if kwargs:
+ warnings.warn(
+ "The kwargs-based syntax for TypedDict definitions is deprecated "
+ "in Python 3.11, will be removed in Python 3.13, and may not be "
+ "understood by third-party type checkers.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ ns = {'__annotations__': dict(__fields)}
+ module = _caller()
+ if module is not None:
+ # Setting correct module is necessary to make typed dict classes pickleable.
+ ns['__module__'] = module
+
+ td = _TypedDictMeta(__typename, (), ns, total=total)
+ td.__orig_bases__ = (TypedDict,)
+ return td
+
+ if hasattr(typing, "_TypedDictMeta"):
+ _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
+ else:
+ _TYPEDDICT_TYPES = (_TypedDictMeta,)
+
+ def is_typeddict(tp):
+ """Check if an annotation is a TypedDict class
+
+ For example::
+ class Film(TypedDict):
+ title: str
+ year: int
+
+ is_typeddict(Film) # => True
+ is_typeddict(Union[list, str]) # => False
+ """
+ # On 3.8, this would otherwise return True
+ if hasattr(typing, "TypedDict") and tp is typing.TypedDict:
+ return False
+ return isinstance(tp, _TYPEDDICT_TYPES)
+
+
+if hasattr(typing, "assert_type"):
+ assert_type = typing.assert_type
+
+else:
+ def assert_type(__val, __typ):
+ """Assert (to the type checker) that the value is of the given type.
+
+ When the type checker encounters a call to assert_type(), it
+ emits an error if the value is not of the specified type::
+
+ def greet(name: str) -> None:
+ assert_type(name, str) # ok
+ assert_type(name, int) # type checker error
+
+ At runtime this returns the first argument unchanged and otherwise
+ does nothing.
+ """
+ return __val
+
+
+if hasattr(typing, "Required"):
+ get_type_hints = typing.get_type_hints
+else:
+ # replaces _strip_annotations()
+ def _strip_extras(t):
+ """Strips Annotated, Required and NotRequired from a given type."""
+ if isinstance(t, _AnnotatedAlias):
+ return _strip_extras(t.__origin__)
+ if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired):
+ return _strip_extras(t.__args__[0])
+ if isinstance(t, typing._GenericAlias):
+ stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return t.copy_with(stripped_args)
+ if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
+ stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return _types.GenericAlias(t.__origin__, stripped_args)
+ if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
+ stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return functools.reduce(operator.or_, stripped_args)
+
+ return t
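+ # A quick sketch of what this helper does (illustrative only, not executed):
+ #     _strip_extras(Annotated[int, "meta"])   -> int
+ #     _strip_extras(NotRequired[str])         -> str
+ #     _strip_extras(List[Annotated[int, "m"]]) -> List[int]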
+
+ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+ """Return type hints for an object.
+
+ This is often the same as obj.__annotations__, but it handles
+ forward references encoded as string literals, adds Optional[t] if a
+ default value equal to None is set and recursively replaces all
+ 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
+ (unless 'include_extras=True').
+
+ The argument may be a module, class, method, or function. The annotations
+ are returned as a dictionary. For classes, annotations include also
+ inherited members.
+
+ TypeError is raised if the argument is not of a type that can contain
+ annotations, and an empty dictionary is returned if no annotations are
+ present.
+
+ BEWARE -- the behavior of globalns and localns is counterintuitive
+ (unless you are familiar with how eval() and exec() work). The
+ search order is locals first, then globals.
+
+ - If no dict arguments are passed, an attempt is made to use the
+ globals from obj (or the respective module's globals for classes),
+ and these are also used as the locals. If the object does not appear
+ to have globals, an empty dictionary is used.
+
+ - If one dict argument is passed, it is used for both globals and
+ locals.
+
+ - If two dict arguments are passed, they specify globals and
+ locals, respectively.
+ """
+ if hasattr(typing, "Annotated"):
+ hint = typing.get_type_hints(
+ obj, globalns=globalns, localns=localns, include_extras=True
+ )
+ else:
+ hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
+ if include_extras:
+ return hint
+ return {k: _strip_extras(t) for k, t in hint.items()}
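+ # Usage sketch (Movie is a hypothetical TypedDict): given
+ #     class Movie(TypedDict):
+ #         year: NotRequired[int]
+ # get_type_hints(Movie) returns {'year': int}, while
+ # get_type_hints(Movie, include_extras=True) preserves the NotRequired
+ # wrapper around `int`.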
+
+
+# Python 3.9+ has PEP 593 (Annotated)
+if hasattr(typing, 'Annotated'):
+ Annotated = typing.Annotated
+ # Not exported and not a public API, but needed for get_origin() and get_args()
+ # to work.
+ _AnnotatedAlias = typing._AnnotatedAlias
+# 3.7-3.8
+else:
+ class _AnnotatedAlias(typing._GenericAlias, _root=True):
+ """Runtime representation of an annotated type.
+
+ At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+ with extra annotations. The alias behaves like a normal typing alias,
+ instantiating is the same as instantiating the underlying type, binding
+ it to types is also the same.
+ """
+ def __init__(self, origin, metadata):
+ if isinstance(origin, _AnnotatedAlias):
+ metadata = origin.__metadata__ + metadata
+ origin = origin.__origin__
+ super().__init__(origin, origin)
+ self.__metadata__ = metadata
+
+ def copy_with(self, params):
+ assert len(params) == 1
+ new_type = params[0]
+ return _AnnotatedAlias(new_type, self.__metadata__)
+
+ def __repr__(self):
+ return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
+ f"{', '.join(repr(a) for a in self.__metadata__)}]")
+
+ def __reduce__(self):
+ return operator.getitem, (
+ Annotated, (self.__origin__,) + self.__metadata__
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, _AnnotatedAlias):
+ return NotImplemented
+ if self.__origin__ != other.__origin__:
+ return False
+ return self.__metadata__ == other.__metadata__
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__metadata__))
+
+ class Annotated:
+ """Add context specific metadata to a type.
+
+ Example: Annotated[int, runtime_check.Unsigned] indicates to the
+ hypothetical runtime_check module that this type is an unsigned int.
+ Every other consumer of this type can ignore this metadata and treat
+ this type as int.
+
+ The first argument to Annotated must be a valid type (and will be in
+ the __origin__ field), the remaining arguments are kept as a tuple in
+ the __metadata__ field.
+
+ Details:
+
+ - It's an error to call `Annotated` with less than two arguments.
+ - Nested Annotated are flattened::
+
+ Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+ - Instantiating an annotated type is equivalent to instantiating the
+ underlying type::
+
+ Annotated[C, Ann1](5) == C(5)
+
+ - Annotated can be used as a generic type alias::
+
+ Optimized = Annotated[T, runtime.Optimize()]
+ Optimized[int] == Annotated[int, runtime.Optimize()]
+
+ OptimizedList = Annotated[List[T], runtime.Optimize()]
+ OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwargs):
+ raise TypeError("Type Annotated cannot be instantiated.")
+
+ @typing._tp_cache
+ def __class_getitem__(cls, params):
+ if not isinstance(params, tuple) or len(params) < 2:
+ raise TypeError("Annotated[...] should be used "
+ "with at least two arguments (a type and an "
+ "annotation).")
+ allowed_special_forms = (ClassVar, Final)
+ if get_origin(params[0]) in allowed_special_forms:
+ origin = params[0]
+ else:
+ msg = "Annotated[t, ...]: t must be a type."
+ origin = typing._type_check(params[0], msg)
+ metadata = tuple(params[1:])
+ return _AnnotatedAlias(origin, metadata)
+
+ def __init_subclass__(cls, *args, **kwargs):
+ raise TypeError(
+ f"Cannot subclass {cls.__module__}.Annotated"
+ )
+
+# Python 3.8 has get_origin() and get_args() but those implementations aren't
+# Annotated-aware, so we can't use those. Python 3.9's versions don't support
+# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
+if sys.version_info[:2] >= (3, 10):
+ get_origin = typing.get_origin
+ get_args = typing.get_args
+# 3.7-3.9
+else:
+ try:
+ # 3.9+
+ from typing import _BaseGenericAlias
+ except ImportError:
+ _BaseGenericAlias = typing._GenericAlias
+ try:
+ # 3.9+
+ from typing import GenericAlias as _typing_GenericAlias
+ except ImportError:
+ _typing_GenericAlias = typing._GenericAlias
+
+ def get_origin(tp):
+ """Get the unsubscripted version of a type.
+
+ This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+ and Annotated. Return None for unsupported types. Examples::
+
+ get_origin(Literal[42]) is Literal
+ get_origin(int) is None
+ get_origin(ClassVar[int]) is ClassVar
+ get_origin(Generic) is Generic
+ get_origin(Generic[T]) is Generic
+ get_origin(Union[T, int]) is Union
+ get_origin(List[Tuple[T, T]][int]) == list
+ get_origin(P.args) is P
+ """
+ if isinstance(tp, _AnnotatedAlias):
+ return Annotated
+ if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias,
+ ParamSpecArgs, ParamSpecKwargs)):
+ return tp.__origin__
+ if tp is typing.Generic:
+ return typing.Generic
+ return None
+
+ def get_args(tp):
+ """Get type arguments with all substitutions performed.
+
+ For unions, basic simplifications used by Union constructor are performed.
+ Examples::
+ get_args(Dict[str, int]) == (str, int)
+ get_args(int) == ()
+ get_args(Union[int, Union[T, int], str][int]) == (int, str)
+ get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+ get_args(Callable[[], T][int]) == ([], int)
+ """
+ if isinstance(tp, _AnnotatedAlias):
+ return (tp.__origin__,) + tp.__metadata__
+ if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
+ if getattr(tp, "_special", False):
+ return ()
+ res = tp.__args__
+ if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
+ res = (list(res[:-1]), res[-1])
+ return res
+ return ()
+
+
+# 3.10+
+if hasattr(typing, 'TypeAlias'):
+ TypeAlias = typing.TypeAlias
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+ @_ExtensionsSpecialForm
+ def TypeAlias(self, parameters):
+ """Special marker indicating that an assignment should
+ be recognized as a proper type alias definition by type
+ checkers.
+
+ For example::
+
+ Predicate: TypeAlias = Callable[..., bool]
+
+ It's invalid when used anywhere except as in the example above.
+ """
+ raise TypeError(f"{self} is not subscriptable")
+# 3.7-3.8
+else:
+ TypeAlias = _ExtensionsSpecialForm(
+ 'TypeAlias',
+ doc="""Special marker indicating that an assignment should
+ be recognized as a proper type alias definition by type
+ checkers.
+
+ For example::
+
+ Predicate: TypeAlias = Callable[..., bool]
+
+ It's invalid when used anywhere except as in the example
+ above."""
+ )
+
+
+def _set_default(type_param, default):
+ if isinstance(default, (tuple, list)):
+ type_param.__default__ = tuple((typing._type_check(d, "Default must be a type")
+ for d in default))
+ elif default != _marker:
+ type_param.__default__ = typing._type_check(default, "Default must be a type")
+ else:
+ type_param.__default__ = None
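+# A brief sketch of the effect (T is a hypothetical TypeVar): after
+# `_set_default(T, int)`, `T.__default__` is `int`; when the `_marker`
+# sentinel is passed (i.e. no default was given), `T.__default__` is `None`;
+# a list or tuple of types (the TypeVarTuple case) is stored as a tuple of
+# type-checked entries.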
+
+
+def _set_module(typevarlike):
+ # for pickling:
+ def_mod = _caller(depth=3)
+ if def_mod != 'typing_extensions':
+ typevarlike.__module__ = def_mod
+
+
+class _DefaultMixin:
+ """Mixin for TypeVarLike defaults."""
+
+ __slots__ = ()
+ __init__ = _set_default
+
+
+# Classes using this metaclass must provide a _backported_typevarlike ClassVar
+class _TypeVarLikeMeta(type):
+ def __instancecheck__(cls, __instance: Any) -> bool:
+ return isinstance(__instance, cls._backported_typevarlike)
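+# For illustration: delegating __instancecheck__ means that, e.g.,
+# `isinstance(typing.TypeVar("T"), TypeVar)` is True for the backported
+# TypeVar below, whose __new__ returns a plain typing.TypeVar instance.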
+
+
+# Add default and infer_variance parameters from PEP 696 and 695
+class TypeVar(metaclass=_TypeVarLikeMeta):
+ """Type variable."""
+
+ _backported_typevarlike = typing.TypeVar
+
+ def __new__(cls, name, *constraints, bound=None,
+ covariant=False, contravariant=False,
+ default=_marker, infer_variance=False):
+ if hasattr(typing, "TypeAliasType"):
+ # PEP 695 implemented, can pass infer_variance to typing.TypeVar
+ typevar = typing.TypeVar(name, *constraints, bound=bound,
+ covariant=covariant, contravariant=contravariant,
+ infer_variance=infer_variance)
+ else:
+ typevar = typing.TypeVar(name, *constraints, bound=bound,
+ covariant=covariant, contravariant=contravariant)
+ if infer_variance and (covariant or contravariant):
+ raise ValueError("Variance cannot be specified with infer_variance.")
+ typevar.__infer_variance__ = infer_variance
+ _set_default(typevar, default)
+ _set_module(typevar)
+ return typevar
+
+ def __init_subclass__(cls) -> None:
+ raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
+
+
+# Python 3.10+ has PEP 612
+if hasattr(typing, 'ParamSpecArgs'):
+ ParamSpecArgs = typing.ParamSpecArgs
+ ParamSpecKwargs = typing.ParamSpecKwargs
+# 3.7-3.9
+else:
+ class _Immutable:
+ """Mixin to indicate that object should not be copied."""
+ __slots__ = ()
+
+ def __copy__(self):
+ return self
+
+ def __deepcopy__(self, memo):
+ return self
+
+ class ParamSpecArgs(_Immutable):
+ """The args for a ParamSpec object.
+
+ Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
+
+ ParamSpecArgs objects have a reference back to their ParamSpec:
+
+ P.args.__origin__ is P
+
+ This type is meant for runtime introspection and has no special meaning to
+ static type checkers.
+ """
+ def __init__(self, origin):
+ self.__origin__ = origin
+
+ def __repr__(self):
+ return f"{self.__origin__.__name__}.args"
+
+ def __eq__(self, other):
+ if not isinstance(other, ParamSpecArgs):
+ return NotImplemented
+ return self.__origin__ == other.__origin__
+
+ class ParamSpecKwargs(_Immutable):
+ """The kwargs for a ParamSpec object.
+
+ Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
+
+ ParamSpecKwargs objects have a reference back to their ParamSpec:
+
+ P.kwargs.__origin__ is P
+
+ This type is meant for runtime introspection and has no special meaning to
+ static type checkers.
+ """
+ def __init__(self, origin):
+ self.__origin__ = origin
+
+ def __repr__(self):
+ return f"{self.__origin__.__name__}.kwargs"
+
+ def __eq__(self, other):
+ if not isinstance(other, ParamSpecKwargs):
+ return NotImplemented
+ return self.__origin__ == other.__origin__
+
+# 3.10+
+if hasattr(typing, 'ParamSpec'):
+
+ # Add default parameter - PEP 696
+ class ParamSpec(metaclass=_TypeVarLikeMeta):
+ """Parameter specification."""
+
+ _backported_typevarlike = typing.ParamSpec
+
+ def __new__(cls, name, *, bound=None,
+ covariant=False, contravariant=False,
+ infer_variance=False, default=_marker):
+ if hasattr(typing, "TypeAliasType"):
+ # PEP 695 implemented, can pass infer_variance to typing.ParamSpec
+ paramspec = typing.ParamSpec(name, bound=bound,
+ covariant=covariant,
+ contravariant=contravariant,
+ infer_variance=infer_variance)
+ else:
+ paramspec = typing.ParamSpec(name, bound=bound,
+ covariant=covariant,
+ contravariant=contravariant)
+ paramspec.__infer_variance__ = infer_variance
+
+ _set_default(paramspec, default)
+ _set_module(paramspec)
+ return paramspec
+
+ def __init_subclass__(cls) -> None:
+ raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
+
+# 3.7-3.9
+else:
+
+ # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+ class ParamSpec(list, _DefaultMixin):
+ """Parameter specification variable.
+
+ Usage::
+
+ P = ParamSpec('P')
+
+ Parameter specification variables exist primarily for the benefit of static
+ type checkers. They are used to forward the parameter types of one
+ callable to another callable, a pattern commonly found in higher order
+ functions and decorators. They are only valid when used in ``Concatenate``,
+ or as the first argument to ``Callable``. In Python 3.10 and higher,
+ they are also supported in user-defined Generics at runtime.
+ See class Generic for more information on generic types. An
+ example for annotating a decorator::
+
+ T = TypeVar('T')
+ P = ParamSpec('P')
+
+ def add_logging(f: Callable[P, T]) -> Callable[P, T]:
+ '''A type-safe decorator to add logging to a function.'''
+ def inner(*args: P.args, **kwargs: P.kwargs) -> T:
+ logging.info(f'{f.__name__} was called')
+ return f(*args, **kwargs)
+ return inner
+
+ @add_logging
+ def add_two(x: float, y: float) -> float:
+ '''Add two numbers together.'''
+ return x + y
+
+ Parameter specification variables defined with covariant=True or
+ contravariant=True can be used to declare covariant or contravariant
+ generic types. These keyword arguments are valid, but their actual semantics
+ are yet to be decided. See PEP 612 for details.
+
+ Parameter specification variables can be introspected. e.g.:
+
+ P.__name__ == 'P'
+ P.__bound__ == None
+ P.__covariant__ == False
+ P.__contravariant__ == False
+
+ Note that only parameter specification variables defined in global scope can
+ be pickled.
+ """
+
+ # Trick Generic __parameters__.
+ __class__ = typing.TypeVar
+
+ @property
+ def args(self):
+ return ParamSpecArgs(self)
+
+ @property
+ def kwargs(self):
+ return ParamSpecKwargs(self)
+
+ def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
+ infer_variance=False, default=_marker):
+ super().__init__([self])
+ self.__name__ = name
+ self.__covariant__ = bool(covariant)
+ self.__contravariant__ = bool(contravariant)
+ self.__infer_variance__ = bool(infer_variance)
+ if bound:
+ self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
+ else:
+ self.__bound__ = None
+ _DefaultMixin.__init__(self, default)
+
+ # for pickling:
+ def_mod = _caller()
+ if def_mod != 'typing_extensions':
+ self.__module__ = def_mod
+
+ def __repr__(self):
+ if self.__infer_variance__:
+ prefix = ''
+ elif self.__covariant__:
+ prefix = '+'
+ elif self.__contravariant__:
+ prefix = '-'
+ else:
+ prefix = '~'
+ return prefix + self.__name__
+
+ def __hash__(self):
+ return object.__hash__(self)
+
+ def __eq__(self, other):
+ return self is other
+
+ def __reduce__(self):
+ return self.__name__
+
+ # Hack to get typing._type_check to pass.
+ def __call__(self, *args, **kwargs):
+ pass
+
+
+# 3.7-3.9
+if not hasattr(typing, 'Concatenate'):
+ # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+ class _ConcatenateGenericAlias(list):
+
+ # Trick Generic into looking into this for __parameters__.
+ __class__ = typing._GenericAlias
+
+ # Flag in 3.8.
+ _special = False
+
+ def __init__(self, origin, args):
+ super().__init__(args)
+ self.__origin__ = origin
+ self.__args__ = args
+
+ def __repr__(self):
+ _type_repr = typing._type_repr
+ return (f'{_type_repr(self.__origin__)}'
+ f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__args__))
+
+ # Hack to get typing._type_check to pass in Generic.
+ def __call__(self, *args, **kwargs):
+ pass
+
+ @property
+ def __parameters__(self):
+ return tuple(
+ tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
+ )
+
+
+# 3.7-3.9
+@typing._tp_cache
+def _concatenate_getitem(self, parameters):
+ if parameters == ():
+ raise TypeError("Cannot take a Concatenate of no types.")
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ if not isinstance(parameters[-1], ParamSpec):
+ raise TypeError("The last parameter to Concatenate should be a "
+ "ParamSpec variable.")
+ msg = "Concatenate[arg, ...]: each arg must be a type."
+ parameters = tuple(typing._type_check(p, msg) for p in parameters)
+ return _ConcatenateGenericAlias(self, parameters)
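+# For illustration, with `P = ParamSpec("P")`:
+#     Concatenate[int, P]  -> a _ConcatenateGenericAlias
+#     Concatenate[int]     -> TypeError (last argument must be a ParamSpec)
+#     Concatenate[()]      -> TypeError (cannot take a Concatenate of no types)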
+
+
+# 3.10+
+if hasattr(typing, 'Concatenate'):
+ Concatenate = typing.Concatenate
+ _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+ @_ExtensionsSpecialForm
+ def Concatenate(self, parameters):
+ """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+ higher order function which adds, removes or transforms parameters of a
+ callable.
+
+ For example::
+
+ Callable[Concatenate[int, P], int]
+
+ See PEP 612 for detailed information.
+ """
+ return _concatenate_getitem(self, parameters)
+# 3.7-3.8
+else:
+ class _ConcatenateForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ return _concatenate_getitem(self, parameters)
+
+ Concatenate = _ConcatenateForm(
+ 'Concatenate',
+ doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+ higher order function which adds, removes or transforms parameters of a
+ callable.
+
+ For example::
+
+ Callable[Concatenate[int, P], int]
+
+ See PEP 612 for detailed information.
+ """)
+
+# 3.10+
+if hasattr(typing, 'TypeGuard'):
+ TypeGuard = typing.TypeGuard
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+ @_ExtensionsSpecialForm
+ def TypeGuard(self, parameters):
+ """Special typing form used to annotate the return type of a user-defined
+ type guard function. ``TypeGuard`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean.
+
+ ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type guard".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type guard. Such a function should use ``TypeGuard[...]`` as its
+ return type to alert static type checkers to this intention.
+
+ Using ``-> TypeGuard`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+ is the type inside ``TypeGuard``.
+
+ For example::
+
+ def is_str(val: Union[str, float]):
+ # "isinstance" type guard
+ if isinstance(val, str):
+ # Type of ``val`` is narrowed to ``str``
+ ...
+ else:
+ # Else, type of ``val`` is narrowed to ``float``.
+ ...
+
+ Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+ form of ``TypeA`` (it can even be a wider form) and this may lead to
+ type-unsafe results. The main reason is to allow for things like
+ narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+ a subtype of the former, since ``List`` is invariant. The responsibility of
+ writing type-safe type guards is left to the user.
+
+ ``TypeGuard`` also works with type variables. For more information, see
+ PEP 647 (User-Defined Type Guards).
+ """
+ item = typing._type_check(parameters, f'{self} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+# 3.7-3.8
+else:
+ class _TypeGuardForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type')
+ return typing._GenericAlias(self, (item,))
+
+ TypeGuard = _TypeGuardForm(
+ 'TypeGuard',
+ doc="""Special typing form used to annotate the return type of a user-defined
+ type guard function. ``TypeGuard`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean.
+
+ ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type guard".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type guard. Such a function should use ``TypeGuard[...]`` as its
+ return type to alert static type checkers to this intention.
+
+ Using ``-> TypeGuard`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+ is the type inside ``TypeGuard``.
+
+ For example::
+
+ def is_str(val: Union[str, float]):
+ # "isinstance" type guard
+ if isinstance(val, str):
+ # Type of ``val`` is narrowed to ``str``
+ ...
+ else:
+ # Else, type of ``val`` is narrowed to ``float``.
+ ...
+
+ Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+ form of ``TypeA`` (it can even be a wider form) and this may lead to
+ type-unsafe results. The main reason is to allow for things like
+ narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+ a subtype of the former, since ``List`` is invariant. The responsibility of
+ writing type-safe type guards is left to the user.
+
+ ``TypeGuard`` also works with type variables. For more information, see
+ PEP 647 (User-Defined Type Guards).
+ """)
+
+
+# Vendored from cpython typing._SpecialFrom
+class _SpecialForm(typing._Final, _root=True):
+ __slots__ = ('_name', '__doc__', '_getitem')
+
+ def __init__(self, getitem):
+ self._getitem = getitem
+ self._name = getitem.__name__
+ self.__doc__ = getitem.__doc__
+
+ def __getattr__(self, item):
+ if item in {'__name__', '__qualname__'}:
+ return self._name
+
+ raise AttributeError(item)
+
+ def __mro_entries__(self, bases):
+ raise TypeError(f"Cannot subclass {self!r}")
+
+ def __repr__(self):
+ return f'typing_extensions.{self._name}'
+
+ def __reduce__(self):
+ return self._name
+
+ def __call__(self, *args, **kwds):
+ raise TypeError(f"Cannot instantiate {self!r}")
+
+ def __or__(self, other):
+ return typing.Union[self, other]
+
+ def __ror__(self, other):
+ return typing.Union[other, self]
+
+ def __instancecheck__(self, obj):
+ raise TypeError(f"{self} cannot be used with isinstance()")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError(f"{self} cannot be used with issubclass()")
+
+ @typing._tp_cache
+ def __getitem__(self, parameters):
+ return self._getitem(self, parameters)
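+# For illustration (sketch): the __or__/__ror__ hooks above let the special
+# forms defined below take part in PEP 604 unions at runtime, e.g.
+# `Self | None` evaluates to `typing.Union[Self, None]`, i.e. Optional[Self].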
+
+
+if hasattr(typing, "LiteralString"):
+ LiteralString = typing.LiteralString
+else:
+ @_SpecialForm
+ def LiteralString(self, params):
+ """Represents an arbitrary literal string.
+
+ Example::
+
+ from typing_extensions import LiteralString
+
+ def query(sql: LiteralString) -> ...:
+ ...
+
+ query("SELECT * FROM table") # ok
+ query(f"SELECT * FROM {input()}") # not ok
+
+ See PEP 675 for details.
+
+ """
+ raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Self"):
+ Self = typing.Self
+else:
+ @_SpecialForm
+ def Self(self, params):
+ """Used to spell the type of "self" in classes.
+
+ Example::
+
+ from typing_extensions import Self
+
+ class ReturnsSelf:
+ def parse(self, data: bytes) -> Self:
+ ...
+ return self
+
+ """
+
+ raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Never"):
+ Never = typing.Never
+else:
+ @_SpecialForm
+ def Never(self, params):
+ """The bottom type, a type that has no members.
+
+ This can be used to define a function that should never be
+ called, or a function that never returns::
+
+ from typing_extensions import Never
+
+ def never_call_me(arg: Never) -> None:
+ pass
+
+ def int_or_str(arg: int | str) -> None:
+ never_call_me(arg) # type checker error
+ match arg:
+ case int():
+ print("It's an int")
+ case str():
+ print("It's a str")
+ case _:
+ never_call_me(arg) # ok, arg is of type Never
+
+ """
+
+ raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, 'Required'):
+ Required = typing.Required
+ NotRequired = typing.NotRequired
+elif sys.version_info[:2] >= (3, 9):
+ @_ExtensionsSpecialForm
+ def Required(self, parameters):
+ """A special typing construct to mark a key of a total=False TypedDict
+ as required. For example:
+
+ class Movie(TypedDict, total=False):
+ title: Required[str]
+ year: int
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+
+ There is no runtime checking that a required key is actually provided
+ when instantiating a related TypedDict.
+ """
+ item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+ @_ExtensionsSpecialForm
+ def NotRequired(self, parameters):
+ """A special typing construct to mark a key of a TypedDict as
+ potentially missing. For example:
+
+ class Movie(TypedDict):
+ title: str
+ year: NotRequired[int]
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+ """
+ item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+else:
+ class _RequiredForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+ Required = _RequiredForm(
+ 'Required',
+ doc="""A special typing construct to mark a key of a total=False TypedDict
+ as required. For example:
+
+ class Movie(TypedDict, total=False):
+ title: Required[str]
+ year: int
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+
+ There is no runtime checking that a required key is actually provided
+ when instantiating a related TypedDict.
+ """)
+ NotRequired = _RequiredForm(
+ 'NotRequired',
+ doc="""A special typing construct to mark a key of a TypedDict as
+ potentially missing. For example:
+
+ class Movie(TypedDict):
+ title: str
+ year: NotRequired[int]
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+ """)
+
+
+_UNPACK_DOC = """\
+Type unpack operator.
+
+The type unpack operator takes the child types from some container type,
+such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For
+example:
+
+ # For some generic class `Foo`:
+ Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str]
+
+ Ts = TypeVarTuple('Ts')
+ # Specifies that `Bar` is generic in an arbitrary number of types.
+ # (Think of `Ts` as a tuple of an arbitrary number of individual
+ # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
+ # `Generic[]`.)
+ class Bar(Generic[Unpack[Ts]]): ...
+ Bar[int] # Valid
+ Bar[int, str] # Also valid
+
+From Python 3.11, this can also be done using the `*` operator:
+
+ Foo[*tuple[int, str]]
+ class Bar(Generic[*Ts]): ...
+
+The operator can also be used along with a `TypedDict` to annotate
+`**kwargs` in a function signature. For instance:
+
+ class Movie(TypedDict):
+ name: str
+ year: int
+
+ # This function expects two keyword arguments - *name* of type `str` and
+ # *year* of type `int`.
+ def foo(**kwargs: Unpack[Movie]): ...
+
+Note that there is only some runtime checking of this operator. Not
+everything the runtime allows may be accepted by static type checkers.
+
+For more information, see PEP 646 and PEP 692.
+"""
+
+
+if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[]
+ Unpack = typing.Unpack
+
+ def _is_unpack(obj):
+ return get_origin(obj) is Unpack
+
+elif sys.version_info[:2] >= (3, 9):
+ class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
+ def __init__(self, getitem):
+ super().__init__(getitem)
+ self.__doc__ = _UNPACK_DOC
+
+ class _UnpackAlias(typing._GenericAlias, _root=True):
+ __class__ = typing.TypeVar
+
+ @_UnpackSpecialForm
+ def Unpack(self, parameters):
+ item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+ return _UnpackAlias(self, (item,))
+
+ def _is_unpack(obj):
+ return isinstance(obj, _UnpackAlias)
+
+else:
+ class _UnpackAlias(typing._GenericAlias, _root=True):
+ __class__ = typing.TypeVar
+
+ class _UnpackForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type.')
+ return _UnpackAlias(self, (item,))
+
+ Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC)
+
+ def _is_unpack(obj):
+ return isinstance(obj, _UnpackAlias)
+
+
+if hasattr(typing, "TypeVarTuple"): # 3.11+
+
+ # Add default parameter - PEP 696
+ class TypeVarTuple(metaclass=_TypeVarLikeMeta):
+ """Type variable tuple."""
+
+ _backported_typevarlike = typing.TypeVarTuple
+
+ def __new__(cls, name, *, default=_marker):
+ tvt = typing.TypeVarTuple(name)
+ _set_default(tvt, default)
+ _set_module(tvt)
+ return tvt
+
+ def __init_subclass__(self, *args, **kwds):
+ raise TypeError("Cannot subclass special typing classes")
+
+else:
+ class TypeVarTuple(_DefaultMixin):
+ """Type variable tuple.
+
+ Usage::
+
+ Ts = TypeVarTuple('Ts')
+
+ In the same way that a normal type variable is a stand-in for a single
+ type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
+ type such as ``Tuple[int, str]``.
+
+ Type variable tuples can be used in ``Generic`` declarations.
+ Consider the following example::
+
+ class Array(Generic[*Ts]): ...
+
+ The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
+ where ``T1`` and ``T2`` are type variables. To use these type variables
+ as type parameters of ``Array``, we must *unpack* the type variable tuple using
+ the star operator: ``*Ts``. The signature of ``Array`` then behaves
+ as if we had simply written ``class Array(Generic[T1, T2]): ...``.
+ In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
+ us to parameterise the class with an *arbitrary* number of type parameters.
+
+ Type variable tuples can be used anywhere a normal ``TypeVar`` can.
+ This includes class definitions, as shown above, as well as function
+ signatures and variable annotations::
+
+ class Array(Generic[*Ts]):
+
+ def __init__(self, shape: Tuple[*Ts]):
+ self._shape: Tuple[*Ts] = shape
+
+ def get_shape(self) -> Tuple[*Ts]:
+ return self._shape
+
+ shape = (Height(480), Width(640))
+ x: Array[Height, Width] = Array(shape)
+ y = abs(x) # Inferred type is Array[Height, Width]
+ z = x + x # ... is Array[Height, Width]
+ x.get_shape() # ... is tuple[Height, Width]
+
+ """
+
+ # Trick Generic __parameters__.
+ __class__ = typing.TypeVar
+
+ def __iter__(self):
+ yield self.__unpacked__
+
+ def __init__(self, name, *, default=_marker):
+ self.__name__ = name
+ _DefaultMixin.__init__(self, default)
+
+ # for pickling:
+ def_mod = _caller()
+ if def_mod != 'typing_extensions':
+ self.__module__ = def_mod
+
+ self.__unpacked__ = Unpack[self]
+
+ def __repr__(self):
+ return self.__name__
+
+ def __hash__(self):
+ return object.__hash__(self)
+
+ def __eq__(self, other):
+ return self is other
+
+ def __reduce__(self):
+ return self.__name__
+
+ def __init_subclass__(self, *args, **kwds):
+ if '_root' not in kwds:
+ raise TypeError("Cannot subclass special typing classes")
+
+
+if hasattr(typing, "reveal_type"):
+ reveal_type = typing.reveal_type
+else:
+ def reveal_type(__obj: T) -> T:
+ """Reveal the inferred type of a variable.
+
+ When a static type checker encounters a call to ``reveal_type()``,
+ it will emit the inferred type of the argument::
+
+ x: int = 1
+ reveal_type(x)
+
+ Running a static type checker (e.g., ``mypy``) on this example
+ will produce output similar to 'Revealed type is "builtins.int"'.
+
+ At runtime, the function prints the runtime type of the
+ argument and returns it unchanged.
+
+ """
+ print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr)
+ return __obj
+
+
+if hasattr(typing, "assert_never"):
+ assert_never = typing.assert_never
+else:
+ def assert_never(__arg: Never) -> Never:
+ """Assert to the type checker that a line of code is unreachable.
+
+ Example::
+
+ def int_or_str(arg: int | str) -> None:
+ match arg:
+ case int():
+ print("It's an int")
+ case str():
+ print("It's a str")
+ case _:
+ assert_never(arg)
+
+ If a type checker finds that a call to assert_never() is
+ reachable, it will emit an error.
+
+ At runtime, this throws an exception when called.
+
+ """
+ raise AssertionError("Expected code to be unreachable")
+
+
+if sys.version_info >= (3, 12):
+ # dataclass_transform exists in 3.11 but lacks the frozen_default parameter
+ dataclass_transform = typing.dataclass_transform
+else:
+ def dataclass_transform(
+ *,
+ eq_default: bool = True,
+ order_default: bool = False,
+ kw_only_default: bool = False,
+ frozen_default: bool = False,
+ field_specifiers: typing.Tuple[
+ typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
+ ...
+ ] = (),
+ **kwargs: typing.Any,
+ ) -> typing.Callable[[T], T]:
+ """Decorator that marks a function, class, or metaclass as providing
+ dataclass-like behavior.
+
+ Example:
+
+ from typing_extensions import dataclass_transform
+
+ _T = TypeVar("_T")
+
+ # Used on a decorator function
+ @dataclass_transform()
+ def create_model(cls: type[_T]) -> type[_T]:
+ ...
+ return cls
+
+ @create_model
+ class CustomerModel:
+ id: int
+ name: str
+
+ # Used on a base class
+ @dataclass_transform()
+ class ModelBase: ...
+
+ class CustomerModel(ModelBase):
+ id: int
+ name: str
+
+ # Used on a metaclass
+ @dataclass_transform()
+ class ModelMeta(type): ...
+
+ class ModelBase(metaclass=ModelMeta): ...
+
+ class CustomerModel(ModelBase):
+ id: int
+ name: str
+
+ Each of the ``CustomerModel`` classes defined in this example will now
+ behave similarly to a dataclass created with the ``@dataclasses.dataclass``
+ decorator. For example, the type checker will synthesize an ``__init__``
+ method.
+
+ The arguments to this decorator can be used to customize this behavior:
+ - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
+ True or False if it is omitted by the caller.
+ - ``order_default`` indicates whether the ``order`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+ - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+ - ``frozen_default`` indicates whether the ``frozen`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+ - ``field_specifiers`` specifies a static list of supported classes
+ or functions that describe fields, similar to ``dataclasses.field()``.
+
+ At runtime, this decorator records its arguments in the
+ ``__dataclass_transform__`` attribute on the decorated object.
+
+ See PEP 681 for details.
+
+ """
+ def decorator(cls_or_fn):
+ cls_or_fn.__dataclass_transform__ = {
+ "eq_default": eq_default,
+ "order_default": order_default,
+ "kw_only_default": kw_only_default,
+ "frozen_default": frozen_default,
+ "field_specifiers": field_specifiers,
+ "kwargs": kwargs,
+ }
+ return cls_or_fn
+ return decorator
+
+
+if hasattr(typing, "override"):
+ override = typing.override
+else:
+ _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
+
+ def override(__arg: _F) -> _F:
+ """Indicate that a method is intended to override a method in a base class.
+
+ Usage:
+
+ class Base:
+ def method(self) -> None: ...
+
+ class Child(Base):
+ @override
+ def method(self) -> None:
+ super().method()
+
+ When this decorator is applied to a method, the type checker will
+ validate that it overrides a method with the same name on a base class.
+ This helps prevent bugs that may occur when a base class is changed
+ without an equivalent change to a child class.
+
+ There is no runtime checking of these properties. The decorator
+ sets the ``__override__`` attribute to ``True`` on the decorated object
+ to allow runtime introspection.
+
+ See PEP 698 for details.
+
+ """
+ try:
+ __arg.__override__ = True
+ except (AttributeError, TypeError):
+ # Skip the attribute silently if it is not writable.
+ # AttributeError happens if the object has __slots__ or a
+ # read-only property, TypeError if it's a builtin class.
+ pass
+ return __arg
+
+
+if hasattr(typing, "deprecated"):
+ deprecated = typing.deprecated
+else:
+ _T = typing.TypeVar("_T")
+
+ def deprecated(
+ __msg: str,
+ *,
+ category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
+ stacklevel: int = 1,
+ ) -> typing.Callable[[_T], _T]:
+ """Indicate that a class, function or overload is deprecated.
+
+ Usage:
+
+ @deprecated("Use B instead")
+ class A:
+ pass
+
+ @deprecated("Use g instead")
+ def f():
+ pass
+
+ @overload
+ @deprecated("int support is deprecated")
+ def g(x: int) -> int: ...
+ @overload
+ def g(x: str) -> int: ...
+
+ When this decorator is applied to an object, the type checker
+ will generate a diagnostic on usage of the deprecated object.
+
+ The warning specified by ``category`` will be emitted on use
+ of deprecated objects. For functions, that happens on calls;
+ for classes, on instantiation. If the ``category`` is ``None``,
+ no warning is emitted. The ``stacklevel`` determines where the
+ warning is emitted. If it is ``1`` (the default), the warning
+ is emitted at the direct caller of the deprecated object; if it
+ is higher, it is emitted further up the stack.
+
+ The decorator sets the ``__deprecated__``
+ attribute on the decorated object to the deprecation message
+ passed to the decorator. If applied to an overload, the decorator
+ must be after the ``@overload`` decorator for the attribute to
+ exist on the overload as returned by ``get_overloads()``.
+
+ See PEP 702 for details.
+
+ """
+ def decorator(__arg: _T) -> _T:
+ if category is None:
+ __arg.__deprecated__ = __msg
+ return __arg
+ elif isinstance(__arg, type):
+ original_new = __arg.__new__
+ has_init = __arg.__init__ is not object.__init__
+
+ @functools.wraps(original_new)
+ def __new__(cls, *args, **kwargs):
+ warnings.warn(__msg, category=category, stacklevel=stacklevel + 1)
+ if original_new is not object.__new__:
+ return original_new(cls, *args, **kwargs)
+ # Mirrors a similar check in object.__new__.
+ elif not has_init and (args or kwargs):
+ raise TypeError(f"{cls.__name__}() takes no arguments")
+ else:
+ return original_new(cls)
+
+ __arg.__new__ = staticmethod(__new__)
+ __arg.__deprecated__ = __new__.__deprecated__ = __msg
+ return __arg
+ elif callable(__arg):
+ @functools.wraps(__arg)
+ def wrapper(*args, **kwargs):
+ warnings.warn(__msg, category=category, stacklevel=stacklevel + 1)
+ return __arg(*args, **kwargs)
+
+ __arg.__deprecated__ = wrapper.__deprecated__ = __msg
+ return wrapper
+ else:
+ raise TypeError(
+ "@deprecated decorator with non-None category must be applied to "
+ f"a class or callable, not {__arg!r}"
+ )
+
+ return decorator
+
+
+# We have to do some monkey patching to deal with the dual nature of
+# Unpack/TypeVarTuple:
+# - We want Unpack to be a kind of TypeVar so it gets accepted in
+# Generic[Unpack[Ts]]
+# - We want it to *not* be treated as a TypeVar for the purposes of
+# counting generic parameters, so that when we subscript a generic,
+# the runtime doesn't try to substitute the Unpack with the subscripted type.
+if not hasattr(typing, "TypeVarTuple"):
+ typing._collect_type_vars = _collect_type_vars
+ typing._check_generic = _check_generic
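+# For example (sketch): with these patches applied, `Generic[Unpack[Ts]]` is
+# accepted in a class definition, and subscripting such a generic does not
+# fail the parameter-count check by trying to substitute the Unpack itself.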
+
+
+# Backport typing.NamedTuple as it exists in Python 3.12.
+# In 3.11, the ability to define generic `NamedTuple`s was supported.
+# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
+# On 3.12, we added __orig_bases__ to call-based NamedTuples
+# On 3.13, we deprecated kwargs-based NamedTuples
+if sys.version_info >= (3, 13):
+ NamedTuple = typing.NamedTuple
+else:
+ def _make_nmtuple(name, types, module, defaults=()):
+ fields = [n for n, t in types]
+ annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
+ for n, t in types}
+ nm_tpl = collections.namedtuple(name, fields,
+ defaults=defaults, module=module)
+ nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
+ # The `_field_types` attribute was removed in 3.9;
+ # in earlier versions, it is the same as the `__annotations__` attribute
+ if sys.version_info < (3, 9):
+ nm_tpl._field_types = annotations
+ return nm_tpl
+
+ _prohibited_namedtuple_fields = typing._prohibited
+ _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
+
+ class _NamedTupleMeta(type):
+ def __new__(cls, typename, bases, ns):
+ assert _NamedTuple in bases
+ for base in bases:
+ if base is not _NamedTuple and base is not typing.Generic:
+ raise TypeError(
+ 'can only inherit from a NamedTuple type and Generic')
+ bases = tuple(tuple if base is _NamedTuple else base for base in bases)
+ types = ns.get('__annotations__', {})
+ default_names = []
+ for field_name in types:
+ if field_name in ns:
+ default_names.append(field_name)
+ elif default_names:
+ raise TypeError(f"Non-default namedtuple field {field_name} "
+ f"cannot follow default field"
+ f"{'s' if len(default_names) > 1 else ''} "
+ f"{', '.join(default_names)}")
+ nm_tpl = _make_nmtuple(
+ typename, types.items(),
+ defaults=[ns[n] for n in default_names],
+ module=ns['__module__']
+ )
+ nm_tpl.__bases__ = bases
+ if typing.Generic in bases:
+ if hasattr(typing, '_generic_class_getitem'): # 3.12+
+ nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem)
+ else:
+ class_getitem = typing.Generic.__class_getitem__.__func__
+ nm_tpl.__class_getitem__ = classmethod(class_getitem)
+ # update from user namespace without overriding special namedtuple attributes
+ for key in ns:
+ if key in _prohibited_namedtuple_fields:
+ raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
+ elif key not in _special_namedtuple_fields and key not in nm_tpl._fields:
+ setattr(nm_tpl, key, ns[key])
+ if typing.Generic in bases:
+ nm_tpl.__init_subclass__()
+ return nm_tpl
+
+ _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})
+
+ def _namedtuple_mro_entries(bases):
+ assert NamedTuple in bases
+ return (_NamedTuple,)
+
+ @_ensure_subclassable(_namedtuple_mro_entries)
+ def NamedTuple(__typename, __fields=_marker, **kwargs):
+ """Typed version of namedtuple.
+
+ Usage::
+
+ class Employee(NamedTuple):
+ name: str
+ id: int
+
+ This is equivalent to::
+
+ Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+ The resulting class has an extra __annotations__ attribute, giving a
+ dict that maps field names to types. (The field names are also in
+ the _fields attribute, which is part of the namedtuple API.)
+ An alternative equivalent functional syntax is also accepted::
+
+ Employee = NamedTuple('Employee', [('name', str), ('id', int)])
+ """
+ if __fields is _marker:
+ if kwargs:
+ deprecated_thing = "Creating NamedTuple classes using keyword arguments"
+ deprecation_msg = (
+ "{name} is deprecated and will be disallowed in Python {remove}. "
+ "Use the class-based or functional syntax instead."
+ )
+ else:
+ deprecated_thing = "Failing to pass a value for the 'fields' parameter"
+ example = f"`{__typename} = NamedTuple({__typename!r}, [])`"
+ deprecation_msg = (
+ "{name} is deprecated and will be disallowed in Python {remove}. "
+ "To create a NamedTuple class with 0 fields "
+ "using the functional syntax, "
+ "pass an empty list, e.g. "
+ ) + example + "."
+ elif __fields is None:
+ if kwargs:
+ raise TypeError(
+ "Cannot pass `None` as the 'fields' parameter "
+ "and also specify fields using keyword arguments"
+ )
+ else:
+ deprecated_thing = "Passing `None` as the 'fields' parameter"
+ example = f"`{__typename} = NamedTuple({__typename!r}, [])`"
+ deprecation_msg = (
+ "{name} is deprecated and will be disallowed in Python {remove}. "
+ "To create a NamedTuple class with 0 fields "
+ "using the functional syntax, "
+ "pass an empty list, e.g. "
+ ) + example + "."
+ elif kwargs:
+ raise TypeError("Either list of fields or keywords"
+ " can be provided to NamedTuple, not both")
+ if __fields is _marker or __fields is None:
+ warnings.warn(
+ deprecation_msg.format(name=deprecated_thing, remove="3.15"),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ __fields = kwargs.items()
+ nt = _make_nmtuple(__typename, __fields, module=_caller())
+ nt.__orig_bases__ = (NamedTuple,)
+ return nt
+
+ # On 3.8+, alter the signature so that it matches typing.NamedTuple.
+ # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7,
+ # so just leave the signature as it is on 3.7.
+ if sys.version_info >= (3, 8):
+ _new_signature = '(typename, fields=None, /, **kwargs)'
+ if isinstance(NamedTuple, _types.FunctionType):
+ NamedTuple.__text_signature__ = _new_signature
+ else:
+ NamedTuple.__call__.__text_signature__ = _new_signature
+
+
+if hasattr(collections.abc, "Buffer"):
+ Buffer = collections.abc.Buffer
+else:
+ class Buffer(abc.ABC):
+ """Base class for classes that implement the buffer protocol.
+
+ The buffer protocol allows Python objects to expose a low-level
+ memory buffer interface. Before Python 3.12, it is not possible
+ to implement the buffer protocol in pure Python code, or even
+ to check whether a class implements the buffer protocol. In
+ Python 3.12 and higher, the ``__buffer__`` method allows access
+ to the buffer protocol from Python code, and the
+ ``collections.abc.Buffer`` ABC allows checking whether a class
+ implements the buffer protocol.
+
+ To indicate support for the buffer protocol in earlier versions,
+ inherit from this ABC, either in a stub file or at runtime,
+ or use ABC registration. This ABC provides no methods, because
+ there are no Python-accessible methods shared by pre-3.12 buffer
+ classes. It is useful primarily for static checks.
+
+ """
+
+ # As a courtesy, register the most common stdlib buffer classes.
+ Buffer.register(memoryview)
+ Buffer.register(bytearray)
+ Buffer.register(bytes)
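+ # Usage sketch (MyBuffer is hypothetical): a pre-3.12 class can advertise
+ # buffer support by subclassing (`class MyBuffer(Buffer): ...`) or by
+ # registration (`Buffer.register(MyBuffer)`); the registrations above make
+ # `isinstance(b"", Buffer)` evaluate to True.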
+
+
+# Backport of types.get_original_bases, available on 3.12+ in CPython
+if hasattr(_types, "get_original_bases"):
+ get_original_bases = _types.get_original_bases
+else:
+ def get_original_bases(__cls):
+ """Return the class's "original" bases prior to modification by `__mro_entries__`.
+
+ Examples::
+
+ from typing import TypeVar, Generic
+ from typing_extensions import NamedTuple, TypedDict
+
+ T = TypeVar("T")
+ class Foo(Generic[T]): ...
+ class Bar(Foo[int], float): ...
+ class Baz(list[str]): ...
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
+ Spam = TypedDict("Spam", {"a": int, "b": str})
+
+ assert get_original_bases(Bar) == (Foo[int], float)
+ assert get_original_bases(Baz) == (list[str],)
+ assert get_original_bases(Eggs) == (NamedTuple,)
+ assert get_original_bases(Spam) == (TypedDict,)
+ assert get_original_bases(int) == (object,)
+ """
+ try:
+ return __cls.__orig_bases__
+ except AttributeError:
+ try:
+ return __cls.__bases__
+ except AttributeError:
+ raise TypeError(
+ f'Expected an instance of type, not {type(__cls).__name__!r}'
+ ) from None
+
+
+# NewType is a class on Python 3.10+, making it pickleable
+# The error message for subclassing instances of NewType was improved on 3.11+
+if sys.version_info >= (3, 11):
+ NewType = typing.NewType
+else:
+ class NewType:
+ """NewType creates simple unique types with almost zero
+ runtime overhead. NewType(name, tp) is considered a subtype of tp
+ by static type checkers. At runtime, NewType(name, tp) returns
+ a dummy callable that simply returns its argument. Usage::
+ UserId = NewType('UserId', int)
+ def name_by_id(user_id: UserId) -> str:
+ ...
+ UserId('user') # Fails type check
+ name_by_id(42) # Fails type check
+ name_by_id(UserId(42)) # OK
+ num = UserId(5) + 1 # type: int
+ """
+
+ def __call__(self, obj):
+ return obj
+
+ def __init__(self, name, tp):
+ self.__qualname__ = name
+ if '.' in name:
+ name = name.rpartition('.')[-1]
+ self.__name__ = name
+ self.__supertype__ = tp
+ def_mod = _caller()
+ if def_mod != 'typing_extensions':
+ self.__module__ = def_mod
+
+ def __mro_entries__(self, bases):
+ # We defined __mro_entries__ to get a better error message
+ # if a user attempts to subclass a NewType instance. bpo-46170
+ supercls_name = self.__name__
+
+ class Dummy:
+ def __init_subclass__(cls):
+ subcls_name = cls.__name__
+ raise TypeError(
+ f"Cannot subclass an instance of NewType. "
+ f"Perhaps you were looking for: "
+ f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`"
+ )
+
+ return (Dummy,)
+
+ def __repr__(self):
+ return f'{self.__module__}.{self.__qualname__}'
+
+ def __reduce__(self):
+ return self.__qualname__
+
+ if sys.version_info >= (3, 10):
+ # PEP 604 methods
+ # It doesn't make sense to have these methods on Python <3.10
+
+ def __or__(self, other):
+ return typing.Union[self, other]
+
+ def __ror__(self, other):
+ return typing.Union[other, self]
+
+
+if hasattr(typing, "TypeAliasType"):
+ TypeAliasType = typing.TypeAliasType
+else:
+ def _is_unionable(obj):
+ """Corresponds to is_unionable() in unionobject.c in CPython."""
+ return obj is None or isinstance(obj, (
+ type,
+ _types.GenericAlias,
+ _types.UnionType,
+ TypeAliasType,
+ ))
+
+ class TypeAliasType:
+ """Create named, parameterized type aliases.
+
+ This provides a backport of the new `type` statement in Python 3.12:
+
+ type ListOrSet[T] = list[T] | set[T]
+
+ is equivalent to:
+
+ T = TypeVar("T")
+ ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))
+
+ The name ListOrSet can then be used as an alias for the type it refers to.
+
+ The type_params argument should contain all the type parameters used
+ in the value of the type alias. If the alias is not generic, this
+ argument is omitted.
+
+ Static type checkers should only support type aliases declared using
+ TypeAliasType that follow these rules:
+
+ - The first argument (the name) must be a string literal.
+ - The TypeAliasType instance must be immediately assigned to a variable
+ of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid,
+ as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)').
+
+ """
+
+ def __init__(self, name: str, value, *, type_params=()):
+ if not isinstance(name, str):
+ raise TypeError("TypeAliasType name must be a string")
+ self.__value__ = value
+ self.__type_params__ = type_params
+
+ parameters = []
+ for type_param in type_params:
+ if isinstance(type_param, TypeVarTuple):
+ parameters.extend(type_param)
+ else:
+ parameters.append(type_param)
+ self.__parameters__ = tuple(parameters)
+ def_mod = _caller()
+ if def_mod != 'typing_extensions':
+ self.__module__ = def_mod
+ # Setting this attribute closes the TypeAliasType from further modification
+ self.__name__ = name
+
+ def __setattr__(self, __name: str, __value: object) -> None:
+ if hasattr(self, "__name__"):
+ self._raise_attribute_error(__name)
+ super().__setattr__(__name, __value)
+
+ def __delattr__(self, __name: str) -> Never:
+ self._raise_attribute_error(__name)
+
+ def _raise_attribute_error(self, name: str) -> Never:
+ # Match the Python 3.12 error messages exactly
+ if name == "__name__":
+ raise AttributeError("readonly attribute")
+ elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}:
+ raise AttributeError(
+ f"attribute '{name}' of 'typing.TypeAliasType' objects "
+ "is not writable"
+ )
+ else:
+ raise AttributeError(
+ f"'typing.TypeAliasType' object has no attribute '{name}'"
+ )
+
+ def __repr__(self) -> str:
+ return self.__name__
+
+ def __getitem__(self, parameters):
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ parameters = [
+ typing._type_check(
+ item, f'Subscripting {self.__name__} requires a type.'
+ )
+ for item in parameters
+ ]
+ return typing._GenericAlias(self, tuple(parameters))
+
+ def __reduce__(self):
+ return self.__name__
+
+ def __init_subclass__(cls, *args, **kwargs):
+ raise TypeError(
+ "type 'typing_extensions.TypeAliasType' is not an acceptable base type"
+ )
+
+ # The presence of this method convinces typing._type_check
+ # that TypeAliasTypes are types.
+ def __call__(self):
+ raise TypeError("Type alias is not callable")
+
+ if sys.version_info >= (3, 10):
+ def __or__(self, right):
+ # For forward compatibility with 3.12, reject Unions
+ # that are not accepted by the built-in Union.
+ if not _is_unionable(right):
+ return NotImplemented
+ return typing.Union[self, right]
+
+ def __ror__(self, left):
+ if not _is_unionable(left):
+ return NotImplemented
+ return typing.Union[left, self]
+
+
+if hasattr(typing, "is_protocol"):
+ is_protocol = typing.is_protocol
+ get_protocol_members = typing.get_protocol_members
+else:
+ def is_protocol(__tp: type) -> bool:
+ """Return True if the given type is a Protocol.
+
+ Example::
+
+ >>> from typing_extensions import Protocol, is_protocol
+ >>> class P(Protocol):
+ ... def a(self) -> str: ...
+ ... b: int
+ >>> is_protocol(P)
+ True
+ >>> is_protocol(int)
+ False
+ """
+ return (
+ isinstance(__tp, type)
+ and getattr(__tp, '_is_protocol', False)
+ and __tp is not Protocol
+ and __tp is not getattr(typing, "Protocol", object())
+ )
+
+ def get_protocol_members(__tp: type) -> typing.FrozenSet[str]:
+ """Return the set of members defined in a Protocol.
+
+ Example::
+
+ >>> from typing_extensions import Protocol, get_protocol_members
+ >>> class P(Protocol):
+ ... def a(self) -> str: ...
+ ... b: int
+ >>> get_protocol_members(P)
+ frozenset({'a', 'b'})
+
+ Raise a TypeError for arguments that are not Protocols.
+ """
+ if not is_protocol(__tp):
+ raise TypeError(f'{__tp!r} is not a Protocol')
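+ # Protocol classes on Python 3.12+ (and recent typing_extensions)
+ # cache their member names in __protocol_attrs__; otherwise fall
+ # back to computing them.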
+ if hasattr(__tp, '__protocol_attrs__'):
+ return frozenset(__tp.__protocol_attrs__)
+ return frozenset(_get_protocol_attrs(__tp))
+
+
+# Aliases for items that have always been in typing.
+# Explicitly assign these (rather than using `from typing import *` at the top),
+# so that we get a CI error if one of these is deleted from typing.py
+# in a future version of Python
+AbstractSet = typing.AbstractSet
+AnyStr = typing.AnyStr
+BinaryIO = typing.BinaryIO
+Callable = typing.Callable
+Collection = typing.Collection
+Container = typing.Container
+Dict = typing.Dict
+ForwardRef = typing.ForwardRef
+FrozenSet = typing.FrozenSet
+Generator = typing.Generator
+Generic = typing.Generic
+Hashable = typing.Hashable
+IO = typing.IO
+ItemsView = typing.ItemsView
+Iterable = typing.Iterable
+Iterator = typing.Iterator
+KeysView = typing.KeysView
+List = typing.List
+Mapping = typing.Mapping
+MappingView = typing.MappingView
+Match = typing.Match
+MutableMapping = typing.MutableMapping
+MutableSequence = typing.MutableSequence
+MutableSet = typing.MutableSet
+Optional = typing.Optional
+Pattern = typing.Pattern
+Reversible = typing.Reversible
+Sequence = typing.Sequence
+Set = typing.Set
+Sized = typing.Sized
+TextIO = typing.TextIO
+Tuple = typing.Tuple
+Union = typing.Union
+ValuesView = typing.ValuesView
+cast = typing.cast
+no_type_check = typing.no_type_check
+no_type_check_decorator = typing.no_type_check_decorator
diff --git a/src/rez/vendor/zipp/LICENSE b/src/rez/vendor/zipp/LICENSE
new file mode 100644
index 000000000..353924be0
--- /dev/null
+++ b/src/rez/vendor/zipp/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/src/rez/vendor/zipp/__init__.py b/src/rez/vendor/zipp/__init__.py
new file mode 100644
index 000000000..ddfa0a644
--- /dev/null
+++ b/src/rez/vendor/zipp/__init__.py
@@ -0,0 +1,402 @@
+import io
+import posixpath
+import zipfile
+import itertools
+import contextlib
+import pathlib
+import re
+import fnmatch
+
+from .py310compat import text_encoding
+
+
+__all__ = ['Path']
+
+
+def _parents(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all parents of that path.
+
+ >>> list(_parents('b/d'))
+ ['b']
+ >>> list(_parents('/b/d/'))
+ ['/b']
+ >>> list(_parents('b/d/f/'))
+ ['b/d', 'b']
+ >>> list(_parents('b'))
+ []
+ >>> list(_parents(''))
+ []
+ """
+ return itertools.islice(_ancestry(path), 1, None)
+
+
+def _ancestry(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all elements of that path
+
+ >>> list(_ancestry('b/d'))
+ ['b/d', 'b']
+ >>> list(_ancestry('/b/d/'))
+ ['/b/d', '/b']
+ >>> list(_ancestry('b/d/f/'))
+ ['b/d/f', 'b/d', 'b']
+ >>> list(_ancestry('b'))
+ ['b']
+ >>> list(_ancestry(''))
+ []
+ """
+ path = path.rstrip(posixpath.sep)
+ while path and path != posixpath.sep:
+ yield path
+ path, tail = posixpath.split(path)
+
+
+_dedupe = dict.fromkeys
+"""Deduplicate an iterable in original order"""
+
+
+def _difference(minuend, subtrahend):
+ """
+ Return items in minuend not in subtrahend, retaining order
+ with O(1) lookup.
+ """
+ return itertools.filterfalse(set(subtrahend).__contains__, minuend)
+
+
+class InitializedState:
+ """
+ Mix-in to save the initialization state for pickling.
+ """
+
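+ # ZipFile objects hold open file handles and cannot be pickled
+ # directly; replaying the recorded constructor arguments rebuilds an
+ # equivalent instance.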
+ def __init__(self, *args, **kwargs):
+ self.__args = args
+ self.__kwargs = kwargs
+ super().__init__(*args, **kwargs)
+
+ def __getstate__(self):
+ return self.__args, self.__kwargs
+
+ def __setstate__(self, state):
+ args, kwargs = state
+ super().__init__(*args, **kwargs)
+
+
+class CompleteDirs(InitializedState, zipfile.ZipFile):
+ """
+ A ZipFile subclass that ensures that implied directories
+ are always included in the namelist.
+
+ >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt']))
+ ['foo/', 'foo/bar/']
+ >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt', 'foo/bar/']))
+ ['foo/']
+ """
+
+ @staticmethod
+ def _implied_dirs(names):
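+ # Render every parent of every entry as a directory name ('p/'),
+ # drop those already present, and dedupe while preserving order.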
+ parents = itertools.chain.from_iterable(map(_parents, names))
+ as_dirs = (p + posixpath.sep for p in parents)
+ return _dedupe(_difference(as_dirs, names))
+
+ def namelist(self):
+ names = super().namelist()
+ return names + list(self._implied_dirs(names))
+
+ def _name_set(self):
+ return set(self.namelist())
+
+ def resolve_dir(self, name):
+ """
+ If the name represents a directory, return that name
+ as a directory (with the trailing slash).
+ """
+ names = self._name_set()
+ dirname = name + '/'
+ dir_match = name not in names and dirname in names
+ return dirname if dir_match else name
+
+ def getinfo(self, name):
+ """
+ Supplement getinfo for implied dirs.
+ """
+ try:
+ return super().getinfo(name)
+ except KeyError:
+ if not name.endswith('/') or name not in self._name_set():
+ raise
+ return zipfile.ZipInfo(filename=name)
+
+ @classmethod
+ def make(cls, source):
+ """
+ Given a source (filename or zipfile), return an
+ appropriate CompleteDirs subclass.
+ """
+ if isinstance(source, CompleteDirs):
+ return source
+
+ if not isinstance(source, zipfile.ZipFile):
+ return cls(source)
+
+ # Only allow for FastLookup when supplied zipfile is read-only
+ if 'r' not in source.mode:
+ cls = CompleteDirs
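+ # (FastLookup caches name lists, which could go stale if entries
+ # were added to a writable archive.)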
+
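+ # Rebind the instance's class in place: the caller's ZipFile object
+ # itself becomes the specialized type (see the note in Path.__init__).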
+ source.__class__ = cls
+ return source
+
+
+class FastLookup(CompleteDirs):
+ """
+ ZipFile subclass to ensure implicit
+ dirs exist and are resolved rapidly.
+ """
+
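+ # Both overrides memoize on first use: the initial call raises
+ # AttributeError (cache attribute unset), computes, and stores.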
+ def namelist(self):
+ with contextlib.suppress(AttributeError):
+ return self.__names
+ self.__names = super().namelist()
+ return self.__names
+
+ def _name_set(self):
+ with contextlib.suppress(AttributeError):
+ return self.__lookup
+ self.__lookup = super()._name_set()
+ return self.__lookup
+
+
+def _extract_text_encoding(encoding=None, *args, **kwargs):
+ # stacklevel=3 so that the caller of the caller sees any warning.
+ return text_encoding(encoding, 3), args, kwargs
+
+
+class Path:
+ """
+ A pathlib-compatible interface for zip files.
+
+ Consider a zip file with this structure::
+
+ .
+ ├── a.txt
+ └── b
+ ├── c.txt
+ └── d
+ └── e.txt
+
+ >>> data = io.BytesIO()
+ >>> zf = zipfile.ZipFile(data, 'w')
+ >>> zf.writestr('a.txt', 'content of a')
+ >>> zf.writestr('b/c.txt', 'content of c')
+ >>> zf.writestr('b/d/e.txt', 'content of e')
+ >>> zf.filename = 'mem/abcde.zip'
+
+ Path accepts the zipfile object itself or a filename
+
+ >>> root = Path(zf)
+
+ From there, several path operations are available.
+
+ Directory iteration (including the zip file itself):
+
+ >>> a, b = root.iterdir()
+ >>> a
+ Path('mem/abcde.zip', 'a.txt')
+ >>> b
+ Path('mem/abcde.zip', 'b/')
+
+ name property:
+
+ >>> b.name
+ 'b'
+
+ join with divide operator:
+
+ >>> c = b / 'c.txt'
+ >>> c
+ Path('mem/abcde.zip', 'b/c.txt')
+ >>> c.name
+ 'c.txt'
+
+ Read text:
+
+ >>> c.read_text(encoding='utf-8')
+ 'content of c'
+
+ existence:
+
+ >>> c.exists()
+ True
+ >>> (b / 'missing.txt').exists()
+ False
+
+ Coercion to string:
+
+ >>> import os
+ >>> str(c).replace(os.sep, posixpath.sep)
+ 'mem/abcde.zip/b/c.txt'
+
+ At the root, ``name``, ``filename``, and ``parent``
+ resolve to the zipfile. Note that if the zipfile has no
+ filename, these attributes are not valid and accessing
+ them raises a ``ValueError``.
+
+ >>> root.name
+ 'abcde.zip'
+ >>> str(root.filename).replace(os.sep, posixpath.sep)
+ 'mem/abcde.zip'
+ >>> str(root.parent)
+ 'mem'
+ """
+
+ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+ def __init__(self, root, at=""):
+ """
+ Construct a Path from a ZipFile or filename.
+
+ Note: When the source is an existing ZipFile object,
+ its type (__class__) will be mutated to a
+ specialized type. If the caller wishes to retain the
+ original type, the caller should either create a
+ separate ZipFile object or pass a filename.
+ """
+ self.root = FastLookup.make(root)
+ self.at = at
+
+ def __eq__(self, other):
+ """
+ >>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo'
+ False
+ """
+ if self.__class__ is not other.__class__:
+ return NotImplemented
+ return (self.root, self.at) == (other.root, other.at)
+
+ def __hash__(self):
+ return hash((self.root, self.at))
+
+ def open(self, mode='r', *args, pwd=None, **kwargs):
+ """
+ Open this entry as text or binary following the semantics
+ of ``pathlib.Path.open()`` by passing arguments through
+ to io.TextIOWrapper().
+ """
+ if self.is_dir():
+ raise IsADirectoryError(self)
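+ # ZipFile.open accepts only 'r' or 'w'; take the first character and
+ # layer binary/text handling on top below.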
+ zip_mode = mode[0]
+ if not self.exists() and zip_mode == 'r':
+ raise FileNotFoundError(self)
+ stream = self.root.open(self.at, zip_mode, pwd=pwd)
+ if 'b' in mode:
+ if args or kwargs:
+ raise ValueError("encoding args invalid for binary operation")
+ return stream
+ # Text mode:
+ encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
+ return io.TextIOWrapper(stream, encoding, *args, **kwargs)
+
+ @property
+ def name(self):
+ return pathlib.Path(self.at).name or self.filename.name
+
+ @property
+ def suffix(self):
+ return pathlib.Path(self.at).suffix or self.filename.suffix
+
+ @property
+ def suffixes(self):
+ return pathlib.Path(self.at).suffixes or self.filename.suffixes
+
+ @property
+ def stem(self):
+ return pathlib.Path(self.at).stem or self.filename.stem
+
+ @property
+ def filename(self):
+ return pathlib.Path(self.root.filename).joinpath(self.at)
+
+ def read_text(self, *args, **kwargs):
+ encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
+ with self.open('r', encoding, *args, **kwargs) as strm:
+ return strm.read()
+
+ def read_bytes(self):
+ with self.open('rb') as strm:
+ return strm.read()
+
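+ # A direct child is an entry whose parent directory in the archive
+ # equals this path's location (trailing slashes ignored).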
+ def _is_child(self, path):
+ return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+ def _next(self, at):
+ return self.__class__(self.root, at)
+
+ def is_dir(self):
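+ # The empty string is the archive root; explicit directory entries
+ # end with '/'.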
+ return not self.at or self.at.endswith("/")
+
+ def is_file(self):
+ return self.exists() and not self.is_dir()
+
+ def exists(self):
+ return self.at in self.root._name_set()
+
+ def iterdir(self):
+ if not self.is_dir():
+ raise ValueError("Can't listdir a file")
+ subs = map(self._next, self.root.namelist())
+ return filter(self._is_child, subs)
+
+ def match(self, path_pattern):
+ return pathlib.Path(self.at).match(path_pattern)
+
+ def is_symlink(self):
+ """
+ Return whether this path is a symlink. Always false (python/cpython#82102).
+ """
+ return False
+
+ def _descendants(self):
+ for child in self.iterdir():
+ yield child
+ if child.is_dir():
+ yield from child._descendants()
+
+ def glob(self, pattern):
+ if not pattern:
+ raise ValueError(f"Unacceptable pattern: {pattern!r}")
+
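+ # Translate the shell-style pattern to a regex and test it against
+ # each descendant's path relative to this directory.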
+ matches = re.compile(fnmatch.translate(pattern)).fullmatch
+ return (
+ child
+ for child in self._descendants()
+ if matches(str(child.relative_to(self)))
+ )
+
+ def rglob(self, pattern):
+ return self.glob(f'**/{pattern}')
+
+ def relative_to(self, other, *extra):
+ return posixpath.relpath(str(self), str(other.joinpath(*extra)))
+
+ def __str__(self):
+ return posixpath.join(self.root.filename, self.at)
+
+ def __repr__(self):
+ return self.__repr.format(self=self)
+
+ def joinpath(self, *other):
+ next = posixpath.join(self.at, *other)
+ return self._next(self.root.resolve_dir(next))
+
+ __truediv__ = joinpath
+
+ @property
+ def parent(self):
+ if not self.at:
+ return self.filename.parent
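+ # Otherwise step up one level within the archive, keeping the
+ # trailing-slash convention for directories.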
+ parent_at = posixpath.dirname(self.at.rstrip('/'))
+ if parent_at:
+ parent_at += '/'
+ return self._next(parent_at)
diff --git a/src/rez/vendor/zipp/py310compat.py b/src/rez/vendor/zipp/py310compat.py
new file mode 100644
index 000000000..8244124c3
--- /dev/null
+++ b/src/rez/vendor/zipp/py310compat.py
@@ -0,0 +1,12 @@
+import sys
+import io
+
+
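+# Positional-only parameter syntax (PEP 570) is a SyntaxError before
+# Python 3.8, so the shim is built with eval(), choosing whichever
+# variant the running interpreter can parse.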
+te_impl = 'lambda encoding, stacklevel=2, /: encoding'
+te_impl_37 = te_impl.replace(', /', '')
+_text_encoding = eval(te_impl) if sys.version_info > (3, 8) else eval(te_impl_37)
+
+
+text_encoding = (
+ io.text_encoding if sys.version_info > (3, 10) else _text_encoding # type: ignore
+)