aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/python/setuptools/py3
diff options
context:
space:
mode:
authorrobot-piglet <robot-piglet@yandex-team.com>2024-07-05 12:25:14 +0300
committerrobot-piglet <robot-piglet@yandex-team.com>2024-07-05 12:43:33 +0300
commit47a295315fc93a01bd4513e142c619ebbd0df9fb (patch)
treea8c8acf6bfe2788cefb03c40799db650022b4140 /contrib/python/setuptools/py3
parent86fee8a2b5c06a5238ea35fc05ac439c3c52945c (diff)
downloadydb-47a295315fc93a01bd4513e142c619ebbd0df9fb.tar.gz
Intermediate changes
Diffstat (limited to 'contrib/python/setuptools/py3')
-rw-r--r--contrib/python/setuptools/py3/.dist-info/METADATA13
-rw-r--r--contrib/python/setuptools/py3/.dist-info/entry_points.txt1
-rw-r--r--contrib/python/setuptools/py3/pkg_resources/__init__.py637
-rw-r--r--contrib/python/setuptools/py3/pkg_resources/extern/__init__.py27
-rw-r--r--contrib/python/setuptools/py3/setuptools/__init__.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/_core_metadata.py11
-rw-r--r--contrib/python/setuptools/py3/setuptools/_vendor/wheel/__init__.py3
-rw-r--r--contrib/python/setuptools/py3/setuptools/_vendor/wheel/macosx_libfile.py469
-rw-r--r--contrib/python/setuptools/py3/setuptools/_vendor/wheel/metadata.py180
-rw-r--r--contrib/python/setuptools/py3/setuptools/_vendor/wheel/util.py26
-rw-r--r--contrib/python/setuptools/py3/setuptools/_vendor/wheel/wheelfile.py199
-rw-r--r--contrib/python/setuptools/py3/setuptools/build_meta.py38
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/_requirestxt.py8
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/bdist_egg.py17
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/bdist_wheel.py597
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/build.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/build_ext.py18
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/build_py.py24
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/develop.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/easy_install.py23
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/editable_wheel.py62
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/egg_info.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/install_lib.py13
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/install_scripts.py6
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/rotate.py5
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/test.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/compat/py311.py22
-rw-r--r--contrib/python/setuptools/py3/setuptools/config/_apply_pyprojecttoml.py63
-rw-r--r--contrib/python/setuptools/py3/setuptools/config/expand.py134
-rw-r--r--contrib/python/setuptools/py3/setuptools/config/pyprojecttoml.py48
-rw-r--r--contrib/python/setuptools/py3/setuptools/config/setupcfg.py35
-rw-r--r--contrib/python/setuptools/py3/setuptools/depends.py141
-rw-r--r--contrib/python/setuptools/py3/setuptools/discovery.py34
-rw-r--r--contrib/python/setuptools/py3/setuptools/dist.py32
-rw-r--r--contrib/python/setuptools/py3/setuptools/extern/__init__.py1
-rw-r--r--contrib/python/setuptools/py3/setuptools/monkey.py6
-rw-r--r--contrib/python/setuptools/py3/setuptools/msvc.py6
-rw-r--r--contrib/python/setuptools/py3/setuptools/package_index.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/sandbox.py5
-rw-r--r--contrib/python/setuptools/py3/setuptools/warnings.py20
-rw-r--r--contrib/python/setuptools/py3/ya.make8
41 files changed, 2322 insertions, 638 deletions
diff --git a/contrib/python/setuptools/py3/.dist-info/METADATA b/contrib/python/setuptools/py3/.dist-info/METADATA
index 486f5bb21e..975dc27833 100644
--- a/contrib/python/setuptools/py3/.dist-info/METADATA
+++ b/contrib/python/setuptools/py3/.dist-info/METADATA
@@ -1,10 +1,9 @@
Metadata-Version: 2.1
Name: setuptools
-Version: 70.0.0
+Version: 70.1.0
Summary: Easily download, build, install, upgrade, and uninstall Python packages
-Home-page: https://github.com/pypa/setuptools
-Author: Python Packaging Authority
-Author-email: distutils-sig@python.org
+Author-email: Python Packaging Authority <distutils-sig@python.org>
+Project-URL: Homepage, https://github.com/pypa/setuptools
Project-URL: Documentation, https://setuptools.pypa.io/
Project-URL: Changelog, https://setuptools.pypa.io/en/stable/history.html
Keywords: CPAN PyPI distutils eggs package management
@@ -18,6 +17,7 @@ Classifier: Topic :: System :: Archiving :: Packaging
Classifier: Topic :: System :: Systems Administration
Classifier: Topic :: Utilities
Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
License-File: LICENSE
Provides-Extra: certs
Provides-Extra: docs
@@ -53,14 +53,15 @@ Requires-Dist: ini2toml[lite] >=0.14 ; extra == 'testing'
Requires-Dist: tomli-w >=1.0.0 ; extra == 'testing'
Requires-Dist: pytest-timeout ; extra == 'testing'
Requires-Dist: pytest-home >=0.5 ; extra == 'testing'
-Requires-Dist: mypy ==1.9 ; extra == 'testing'
+Requires-Dist: mypy ==1.10.0 ; extra == 'testing'
Requires-Dist: tomli ; extra == 'testing'
Requires-Dist: importlib-metadata ; extra == 'testing'
Requires-Dist: pytest-subprocess ; extra == 'testing'
Requires-Dist: pyproject-hooks !=1.1 ; extra == 'testing'
+Requires-Dist: jaraco.test ; extra == 'testing'
Requires-Dist: pytest-cov ; (platform_python_implementation != "PyPy") and extra == 'testing'
Requires-Dist: jaraco.develop >=7.21 ; (python_version >= "3.9" and sys_platform != "cygwin") and extra == 'testing'
-Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'testing'
+Requires-Dist: pytest-ruff >=0.3.2 ; (sys_platform != "cygwin") and extra == 'testing'
Requires-Dist: pytest-perf ; (sys_platform != "cygwin") and extra == 'testing'
.. |pypi-version| image:: https://img.shields.io/pypi/v/setuptools.svg
diff --git a/contrib/python/setuptools/py3/.dist-info/entry_points.txt b/contrib/python/setuptools/py3/.dist-info/entry_points.txt
index b429cbd846..84d34dadf1 100644
--- a/contrib/python/setuptools/py3/.dist-info/entry_points.txt
+++ b/contrib/python/setuptools/py3/.dist-info/entry_points.txt
@@ -2,6 +2,7 @@
alias = setuptools.command.alias:alias
bdist_egg = setuptools.command.bdist_egg:bdist_egg
bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
+bdist_wheel = setuptools.command.bdist_wheel:bdist_wheel
build = setuptools.command.build:build
build_clib = setuptools.command.build_clib:build_clib
build_ext = setuptools.command.build_ext:build_ext
diff --git a/contrib/python/setuptools/py3/pkg_resources/__init__.py b/contrib/python/setuptools/py3/pkg_resources/__init__.py
index f138821e52..bb8588f7b7 100644
--- a/contrib/python/setuptools/py3/pkg_resources/__init__.py
+++ b/contrib/python/setuptools/py3/pkg_resources/__init__.py
@@ -1,3 +1,6 @@
+# TODO: Add Generic type annotations to initialized collections.
+# For now we'd simply use implicit Any/Unknown which would add redundant annotations
+# mypy: disable-error-code="var-annotated"
"""
Package resource API
--------------------
@@ -17,9 +20,11 @@ This module is deprecated. Users are directed to :mod:`importlib.resources`,
:mod:`importlib.metadata` and :pypi:`packaging` instead.
"""
+from __future__ import annotations
+
import sys
-if sys.version_info < (3, 8):
+if sys.version_info < (3, 8): # noqa: UP036 # Check for unsupported versions
raise RuntimeError("Python 3.8 or later is required")
import os
@@ -28,14 +33,21 @@ import time
import re
import types
from typing import (
+ Any,
+ Dict,
+ Iterator,
+ Mapping,
+ MutableSequence,
+ NamedTuple,
+ NoReturn,
+ Tuple,
+ Union,
TYPE_CHECKING,
- List,
Protocol,
Callable,
- Dict,
Iterable,
- Optional,
TypeVar,
+ overload,
)
import zipfile
import zipimport
@@ -55,6 +67,7 @@ import inspect
import ntpath
import posixpath
import importlib
+import importlib.abc
import importlib.machinery
from pkgutil import get_importer
@@ -62,6 +75,8 @@ import _imp
# capture these to bypass sandboxing
from os import utime
+from os import open as os_open
+from os.path import isdir, split
try:
from os import mkdir, rename, unlink
@@ -71,9 +86,6 @@ except ImportError:
# no write support, probably under GAE
WRITE_SUPPORT = False
-from os import open as os_open
-from os.path import isdir, split
-
from pkg_resources.extern.jaraco.text import (
yield_lines,
drop_comment,
@@ -85,6 +97,9 @@ from pkg_resources.extern.packaging import utils as _packaging_utils
from pkg_resources.extern.packaging import version as _packaging_version
from pkg_resources.extern.platformdirs import user_cache_dir as _user_cache_dir
+if TYPE_CHECKING:
+ from typing_extensions import Self
+ from _typeshed import StrPath, StrOrBytesPath, BytesPath
warnings.warn(
"pkg_resources is deprecated as an API. "
@@ -93,7 +108,27 @@ warnings.warn(
stacklevel=2,
)
-T = TypeVar("T")
+
+_T = TypeVar("_T")
+# Type aliases
+_NestedStr = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]
+_InstallerType = Callable[["Requirement"], Union["Distribution", None]]
+_PkgReqType = Union[str, "Requirement"]
+_EPDistType = Union["Distribution", _PkgReqType]
+_MetadataType = Union["IResourceProvider", None]
+# Any object works, but let's indicate we expect something like a module (optionally has __loader__ or __file__)
+_ModuleLike = Union[object, types.ModuleType]
+_ProviderFactoryType = Callable[[_ModuleLike], "IResourceProvider"]
+_DistFinderType = Callable[[_T, str, bool], Iterable["Distribution"]]
+_NSHandlerType = Callable[[_T, str, str, types.ModuleType], Union[str, None]]
+_AdapterT = TypeVar(
+ "_AdapterT", _DistFinderType[Any], _ProviderFactoryType, _NSHandlerType[Any]
+)
+
+
+# Use _typeshed.importlib.LoaderProtocol once available https://github.com/python/typeshed/pull/11890
+class _LoaderProtocol(Protocol):
+ def load_module(self, fullname: str, /) -> types.ModuleType: ...
_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
@@ -109,15 +144,15 @@ class PEP440Warning(RuntimeWarning):
parse_version = _packaging_version.Version
-_state_vars: Dict[str, str] = {}
+_state_vars: dict[str, str] = {}
-def _declare_state(vartype: str, varname: str, initial_value: T) -> T:
+def _declare_state(vartype: str, varname: str, initial_value: _T) -> _T:
_state_vars[varname] = vartype
return initial_value
-def __getstate__():
+def __getstate__() -> dict[str, Any]:
state = {}
g = globals()
for k, v in _state_vars.items():
@@ -125,7 +160,7 @@ def __getstate__():
return state
-def __setstate__(state):
+def __setstate__(state: dict[str, Any]) -> dict[str, Any]:
g = globals()
for k, v in state.items():
g['_sset_' + _state_vars[k]](k, g[k], v)
@@ -280,17 +315,17 @@ class VersionConflict(ResolutionError):
_template = "{self.dist} is installed but {self.req} is required"
@property
- def dist(self):
+ def dist(self) -> Distribution:
return self.args[0]
@property
- def req(self):
+ def req(self) -> Requirement:
return self.args[1]
def report(self):
return self._template.format(**locals())
- def with_context(self, required_by):
+ def with_context(self, required_by: set[Distribution | str]):
"""
If required_by is non-empty, return a version of self that is a
ContextualVersionConflict.
@@ -310,7 +345,7 @@ class ContextualVersionConflict(VersionConflict):
_template = VersionConflict._template + ' by {self.required_by}'
@property
- def required_by(self):
+ def required_by(self) -> set[str]:
return self.args[2]
@@ -323,11 +358,11 @@ class DistributionNotFound(ResolutionError):
)
@property
- def req(self):
+ def req(self) -> Requirement:
return self.args[0]
@property
- def requirers(self):
+ def requirers(self) -> set[str] | None:
return self.args[1]
@property
@@ -347,7 +382,7 @@ class UnknownExtra(ResolutionError):
"""Distribution doesn't have an "extra feature" of the given name"""
-_provider_factories = {}
+_provider_factories: dict[type[_ModuleLike], _ProviderFactoryType] = {}
PY_MAJOR = '{}.{}'.format(*sys.version_info)
EGG_DIST = 3
@@ -357,7 +392,9 @@ CHECKOUT_DIST = 0
DEVELOP_DIST = -1
-def register_loader_type(loader_type, provider_factory):
+def register_loader_type(
+ loader_type: type[_ModuleLike], provider_factory: _ProviderFactoryType
+):
"""Register `provider_factory` to make providers for `loader_type`
`loader_type` is the type or class of a PEP 302 ``module.__loader__``,
@@ -367,7 +404,7 @@ def register_loader_type(loader_type, provider_factory):
_provider_factories[loader_type] = provider_factory
-def get_provider(moduleOrReq):
+def get_provider(moduleOrReq: str | Requirement):
"""Return an IResourceProvider for the named module or requirement"""
if isinstance(moduleOrReq, Requirement):
return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
@@ -429,7 +466,7 @@ darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform
-def compatible_platforms(provided, required):
+def compatible_platforms(provided: str | None, required: str | None):
"""Can code for the `provided` platform run on the `required` platform?
Returns true if either platform is ``None``, or the platforms are equal.
@@ -478,7 +515,7 @@ def compatible_platforms(provided, required):
return False
-def get_distribution(dist):
+def get_distribution(dist: _EPDistType):
"""Return a current distribution object for a Requirement or string"""
if isinstance(dist, str):
dist = Requirement.parse(dist)
@@ -489,78 +526,80 @@ def get_distribution(dist):
return dist
-def load_entry_point(dist, group, name):
+def load_entry_point(dist: _EPDistType, group: str, name: str):
"""Return `name` entry point of `group` for `dist` or raise ImportError"""
return get_distribution(dist).load_entry_point(group, name)
-def get_entry_map(dist, group=None):
+def get_entry_map(dist: _EPDistType, group: str | None = None):
"""Return the entry point map for `group`, or the full entry map"""
return get_distribution(dist).get_entry_map(group)
-def get_entry_info(dist, group, name):
+def get_entry_info(dist: _EPDistType, group: str, name: str):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return get_distribution(dist).get_entry_info(group, name)
class IMetadataProvider(Protocol):
- def has_metadata(self, name) -> bool:
+ def has_metadata(self, name: str) -> bool:
"""Does the package's distribution contain the named metadata?"""
- def get_metadata(self, name):
+ def get_metadata(self, name: str):
"""The named metadata resource as a string"""
- def get_metadata_lines(self, name):
+ def get_metadata_lines(self, name: str):
"""Yield named metadata resource as list of non-blank non-comment lines
Leading and trailing whitespace is stripped from each line, and lines
with ``#`` as the first non-blank character are omitted."""
- def metadata_isdir(self, name) -> bool:
+ def metadata_isdir(self, name: str) -> bool:
"""Is the named metadata a directory? (like ``os.path.isdir()``)"""
- def metadata_listdir(self, name):
+ def metadata_listdir(self, name: str):
"""List of metadata names in the directory (like ``os.listdir()``)"""
- def run_script(self, script_name, namespace):
+ def run_script(self, script_name: str, namespace: dict[str, Any]):
"""Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider, Protocol):
"""An object that provides access to package resources"""
- def get_resource_filename(self, manager, resource_name):
+ def get_resource_filename(self, manager: ResourceManager, resource_name: str):
"""Return a true filesystem path for `resource_name`
- `manager` must be an ``IResourceManager``"""
+ `manager` must be a ``ResourceManager``"""
- def get_resource_stream(self, manager, resource_name):
+ def get_resource_stream(self, manager: ResourceManager, resource_name: str):
"""Return a readable file-like object for `resource_name`
- `manager` must be an ``IResourceManager``"""
+ `manager` must be a ``ResourceManager``"""
- def get_resource_string(self, manager, resource_name) -> bytes:
+ def get_resource_string(
+ self, manager: ResourceManager, resource_name: str
+ ) -> bytes:
"""Return the contents of `resource_name` as :obj:`bytes`
- `manager` must be an ``IResourceManager``"""
+ `manager` must be a ``ResourceManager``"""
- def has_resource(self, resource_name):
+ def has_resource(self, resource_name: str):
"""Does the package contain the named resource?"""
- def resource_isdir(self, resource_name):
+ def resource_isdir(self, resource_name: str):
"""Is the named resource a directory? (like ``os.path.isdir()``)"""
- def resource_listdir(self, resource_name):
+ def resource_listdir(self, resource_name: str):
"""List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet:
"""A collection of active distributions on sys.path (or a similar list)"""
- def __init__(self, entries=None):
+ def __init__(self, entries: Iterable[str] | None = None):
"""Create working set from list of path entries (default=sys.path)"""
- self.entries = []
+ self.entries: list[str] = []
self.entry_keys = {}
self.by_key = {}
self.normalized_to_canonical_keys = {}
@@ -614,7 +653,7 @@ class WorkingSet:
sys.path[:] = ws.entries
return ws
- def add_entry(self, entry):
+ def add_entry(self, entry: str):
"""Add a path item to ``.entries``, finding any distributions on it
``find_distributions(entry, True)`` is used to find distributions
@@ -629,11 +668,11 @@ class WorkingSet:
for dist in find_distributions(entry, True):
self.add(dist, entry, False)
- def __contains__(self, dist):
+ def __contains__(self, dist: Distribution) -> bool:
"""True if `dist` is the active distribution for its project"""
return self.by_key.get(dist.key) == dist
- def find(self, req):
+ def find(self, req: Requirement) -> Distribution | None:
"""Find a distribution matching requirement `req`
If there is an active distribution for the requested project, this
@@ -657,7 +696,7 @@ class WorkingSet:
raise VersionConflict(dist, req)
return dist
- def iter_entry_points(self, group, name=None):
+ def iter_entry_points(self, group: str, name: str | None = None):
"""Yield entry point objects from `group` matching `name`
If `name` is None, yields all entry points in `group` from all
@@ -671,7 +710,7 @@ class WorkingSet:
if name is None or name == entry.name
)
- def run_script(self, requires, script_name):
+ def run_script(self, requires: str, script_name: str):
"""Locate distribution for `requires` and run `script_name` script"""
ns = sys._getframe(1).f_globals
name = ns['__name__']
@@ -679,7 +718,7 @@ class WorkingSet:
ns['__name__'] = name
self.require(requires)[0].run_script(script_name, ns)
- def __iter__(self):
+ def __iter__(self) -> Iterator[Distribution]:
"""Yield distributions for non-duplicate projects in the working set
The yield order is the order in which the items' path entries were
@@ -693,10 +732,16 @@ class WorkingSet:
for key in self.entry_keys[item]:
if key not in seen:
- seen[key] = 1
+ seen[key] = True
yield self.by_key[key]
- def add(self, dist, entry=None, insert=True, replace=False):
+ def add(
+ self,
+ dist: Distribution,
+ entry: str | None = None,
+ insert: bool = True,
+ replace: bool = False,
+ ):
"""Add `dist` to working set, associated with `entry`
If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
@@ -730,11 +775,11 @@ class WorkingSet:
def resolve(
self,
- requirements,
- env=None,
- installer=None,
- replace_conflicting=False,
- extras=None,
+ requirements: Iterable[Requirement],
+ env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ replace_conflicting: bool = False,
+ extras: tuple[str, ...] | None = None,
):
"""List all distributions needed to (recursively) meet `requirements`
@@ -804,7 +849,7 @@ class WorkingSet:
def _resolve_dist(
self, req, best, replace_conflicting, env, installer, required_by, to_activate
- ):
+ ) -> Distribution:
dist = best.get(req.key)
if dist is None:
# Find the best distribution and add it to the map
@@ -833,7 +878,13 @@ class WorkingSet:
raise VersionConflict(dist, req).with_context(dependent_req)
return dist
- def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True):
+ def find_plugins(
+ self,
+ plugin_env: Environment,
+ full_env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ fallback: bool = True,
+ ):
"""Find all activatable distributions in `plugin_env`
Example usage::
@@ -914,7 +965,7 @@ class WorkingSet:
return sorted_distributions, error_info
- def require(self, *requirements):
+ def require(self, *requirements: _NestedStr):
"""Ensure that distributions matching `requirements` are activated
`requirements` must be a string or a (possibly-nested) sequence
@@ -930,7 +981,9 @@ class WorkingSet:
return needed
- def subscribe(self, callback, existing=True):
+ def subscribe(
+ self, callback: Callable[[Distribution], object], existing: bool = True
+ ):
"""Invoke `callback` for all distributions
If `existing=True` (default),
@@ -966,12 +1019,12 @@ class WorkingSet:
self.callbacks = callbacks[:]
-class _ReqExtras(dict):
+class _ReqExtras(Dict["Requirement", Tuple[str, ...]]):
"""
Map each requirement to the extras that demanded it.
"""
- def markers_pass(self, req, extras=None):
+ def markers_pass(self, req: Requirement, extras: tuple[str, ...] | None = None):
"""
Evaluate markers for req against each extra that
demanded it.
@@ -990,7 +1043,10 @@ class Environment:
"""Searchable snapshot of distributions on a search path"""
def __init__(
- self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR
+ self,
+ search_path: Iterable[str] | None = None,
+ platform: str | None = get_supported_platform(),
+ python: str | None = PY_MAJOR,
):
"""Snapshot distributions available on a search path
@@ -1013,7 +1069,7 @@ class Environment:
self.python = python
self.scan(search_path)
- def can_add(self, dist):
+ def can_add(self, dist: Distribution):
"""Is distribution `dist` acceptable for this environment?
The distribution must match the platform and python version
@@ -1027,11 +1083,11 @@ class Environment:
)
return py_compat and compatible_platforms(dist.platform, self.platform)
- def remove(self, dist):
+ def remove(self, dist: Distribution):
"""Remove `dist` from the environment"""
self._distmap[dist.key].remove(dist)
- def scan(self, search_path=None):
+ def scan(self, search_path: Iterable[str] | None = None):
"""Scan `search_path` for distributions usable in this environment
Any distributions found are added to the environment.
@@ -1046,7 +1102,7 @@ class Environment:
for dist in find_distributions(item):
self.add(dist)
- def __getitem__(self, project_name):
+ def __getitem__(self, project_name: str) -> list[Distribution]:
"""Return a newest-to-oldest list of distributions for `project_name`
Uses case-insensitive `project_name` comparison, assuming all the
@@ -1057,7 +1113,7 @@ class Environment:
distribution_key = project_name.lower()
return self._distmap.get(distribution_key, [])
- def add(self, dist):
+ def add(self, dist: Distribution):
"""Add `dist` if we ``can_add()`` it and it has not already been added"""
if self.can_add(dist) and dist.has_version():
dists = self._distmap.setdefault(dist.key, [])
@@ -1065,7 +1121,13 @@ class Environment:
dists.append(dist)
dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
- def best_match(self, req, working_set, installer=None, replace_conflicting=False):
+ def best_match(
+ self,
+ req: Requirement,
+ working_set: WorkingSet,
+ installer: Callable[[Requirement], Any] | None = None,
+ replace_conflicting: bool = False,
+ ):
"""Find distribution best matching `req` and usable on `working_set`
This calls the ``find(req)`` method of the `working_set` to see if a
@@ -1092,7 +1154,11 @@ class Environment:
# try to download/install
return self.obtain(req, installer)
- def obtain(self, requirement, installer=None):
+ def obtain(
+ self,
+ requirement: Requirement,
+ installer: Callable[[Requirement], Any] | None = None,
+ ):
"""Obtain a distribution matching `requirement` (e.g. via download)
Obtain a distro that matches requirement (e.g. via download). In the
@@ -1103,13 +1169,13 @@ class Environment:
to the `installer` argument."""
return installer(requirement) if installer else None
- def __iter__(self):
+ def __iter__(self) -> Iterator[str]:
"""Yield the unique project names of the available distributions"""
for key in self._distmap.keys():
if self[key]:
yield key
- def __iadd__(self, other):
+ def __iadd__(self, other: Distribution | Environment):
"""In-place addition of a distribution or environment"""
if isinstance(other, Distribution):
self.add(other)
@@ -1121,7 +1187,7 @@ class Environment:
raise TypeError("Can't add %r to environment" % (other,))
return self
- def __add__(self, other):
+ def __add__(self, other: Distribution | Environment):
"""Add an environment or distribution to an environment"""
new = self.__class__([], platform=None, python=None)
for env in self, other:
@@ -1148,46 +1214,54 @@ class ExtractionError(RuntimeError):
The exception instance that caused extraction to fail
"""
+ manager: ResourceManager
+ cache_path: str
+ original_error: BaseException | None
+
class ResourceManager:
"""Manage resource extraction and packages"""
- extraction_path = None
+ extraction_path: str | None = None
def __init__(self):
self.cached_files = {}
- def resource_exists(self, package_or_requirement, resource_name):
+ def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str):
"""Does the named resource exist?"""
return get_provider(package_or_requirement).has_resource(resource_name)
- def resource_isdir(self, package_or_requirement, resource_name):
+ def resource_isdir(self, package_or_requirement: _PkgReqType, resource_name: str):
"""Is the named resource an existing directory?"""
return get_provider(package_or_requirement).resource_isdir(resource_name)
- def resource_filename(self, package_or_requirement, resource_name):
+ def resource_filename(
+ self, package_or_requirement: _PkgReqType, resource_name: str
+ ):
"""Return a true filesystem path for specified resource"""
return get_provider(package_or_requirement).get_resource_filename(
self, resource_name
)
- def resource_stream(self, package_or_requirement, resource_name):
+ def resource_stream(self, package_or_requirement: _PkgReqType, resource_name: str):
"""Return a readable file-like object for specified resource"""
return get_provider(package_or_requirement).get_resource_stream(
self, resource_name
)
- def resource_string(self, package_or_requirement, resource_name) -> bytes:
+ def resource_string(
+ self, package_or_requirement: _PkgReqType, resource_name: str
+ ) -> bytes:
"""Return specified resource as :obj:`bytes`"""
return get_provider(package_or_requirement).get_resource_string(
self, resource_name
)
- def resource_listdir(self, package_or_requirement, resource_name):
+ def resource_listdir(self, package_or_requirement: _PkgReqType, resource_name: str):
"""List the contents of the named resource directory"""
return get_provider(package_or_requirement).resource_listdir(resource_name)
- def extraction_error(self):
+ def extraction_error(self) -> NoReturn:
"""Give an error message for problems extracting file(s)"""
old_exc = sys.exc_info()[1]
@@ -1217,7 +1291,7 @@ class ResourceManager:
err.original_error = old_exc
raise err
- def get_cache_path(self, archive_name, names=()):
+ def get_cache_path(self, archive_name: str, names: Iterable[StrPath] = ()):
"""Return absolute location in cache for `archive_name` and `names`
The parent directory of the resulting path will be created if it does
@@ -1269,7 +1343,7 @@ class ResourceManager:
).format(**locals())
warnings.warn(msg, UserWarning)
- def postprocess(self, tempname, filename):
+ def postprocess(self, tempname: StrOrBytesPath, filename: StrOrBytesPath):
"""Perform any platform-specific postprocessing of `tempname`
This is where Mac header rewrites should be done; other platforms don't
@@ -1289,7 +1363,7 @@ class ResourceManager:
mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
os.chmod(tempname, mode)
- def set_extraction_path(self, path):
+ def set_extraction_path(self, path: str):
"""Set the base path where resources will be extracted to, if needed.
If you do not call this routine before any extractions take place, the
@@ -1313,7 +1387,7 @@ class ResourceManager:
self.extraction_path = path
- def cleanup_resources(self, force=False) -> List[str]:
+ def cleanup_resources(self, force: bool = False) -> list[str]:
"""
Delete all extracted resource files and directories, returning a list
of the file and directory names that could not be successfully removed.
@@ -1328,7 +1402,7 @@ class ResourceManager:
return []
-def get_default_cache():
+def get_default_cache() -> str:
"""
Return the ``PYTHON_EGG_CACHE`` environment variable
or a platform-relevant user cache dir for an app
@@ -1337,7 +1411,7 @@ def get_default_cache():
return os.environ.get('PYTHON_EGG_CACHE') or _user_cache_dir(appname='Python-Eggs')
-def safe_name(name):
+def safe_name(name: str):
"""Convert an arbitrary string to a standard distribution name
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
@@ -1345,7 +1419,7 @@ def safe_name(name):
return re.sub('[^A-Za-z0-9.]+', '-', name)
-def safe_version(version):
+def safe_version(version: str):
"""
Convert an arbitrary string to a standard version string
"""
@@ -1389,7 +1463,7 @@ def _safe_segment(segment):
return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
-def safe_extra(extra):
+def safe_extra(extra: str):
"""Convert an arbitrary string to a standard 'extra' name
Any runs of non-alphanumeric characters are replaced with a single '_',
@@ -1398,7 +1472,7 @@ def safe_extra(extra):
return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
-def to_filename(name):
+def to_filename(name: str):
"""Convert a project or version name to its filename-escaped form
Any '-' characters are currently replaced with '_'.
@@ -1406,7 +1480,7 @@ def to_filename(name):
return name.replace('-', '_')
-def invalid_marker(text):
+def invalid_marker(text: str):
"""
Validate text as a PEP 508 environment marker; return an exception
if invalid or False otherwise.
@@ -1420,7 +1494,7 @@ def invalid_marker(text):
return False
-def evaluate_marker(text, extra=None):
+def evaluate_marker(text: str, extra: str | None = None) -> bool:
"""
Evaluate a PEP 508 environment marker.
Return a boolean indicating the marker result in this environment.
@@ -1438,37 +1512,40 @@ def evaluate_marker(text, extra=None):
class NullProvider:
"""Try to implement resources and metadata for arbitrary PEP 302 loaders"""
- egg_name = None
- egg_info = None
- loader = None
+ egg_name: str | None = None
+ egg_info: str | None = None
+ loader: _LoaderProtocol | None = None
+ module_path: str | None # Some subclasses can have a None module_path
- def __init__(self, module):
+ def __init__(self, module: _ModuleLike):
self.loader = getattr(module, '__loader__', None)
self.module_path = os.path.dirname(getattr(module, '__file__', ''))
- def get_resource_filename(self, manager, resource_name):
+ def get_resource_filename(self, manager: ResourceManager, resource_name: str):
return self._fn(self.module_path, resource_name)
- def get_resource_stream(self, manager, resource_name):
+ def get_resource_stream(self, manager: ResourceManager, resource_name: str):
return io.BytesIO(self.get_resource_string(manager, resource_name))
- def get_resource_string(self, manager, resource_name) -> bytes:
+ def get_resource_string(
+ self, manager: ResourceManager, resource_name: str
+ ) -> bytes:
return self._get(self._fn(self.module_path, resource_name))
- def has_resource(self, resource_name):
+ def has_resource(self, resource_name: str):
return self._has(self._fn(self.module_path, resource_name))
def _get_metadata_path(self, name):
return self._fn(self.egg_info, name)
- def has_metadata(self, name) -> bool:
+ def has_metadata(self, name: str) -> bool:
if not self.egg_info:
return False
path = self._get_metadata_path(name)
return self._has(path)
- def get_metadata(self, name):
+ def get_metadata(self, name: str):
if not self.egg_info:
return ""
path = self._get_metadata_path(name)
@@ -1481,24 +1558,24 @@ class NullProvider:
exc.reason += ' in {} file at path: {}'.format(name, path)
raise
- def get_metadata_lines(self, name):
+ def get_metadata_lines(self, name: str):
return yield_lines(self.get_metadata(name))
- def resource_isdir(self, resource_name):
+ def resource_isdir(self, resource_name: str):
return self._isdir(self._fn(self.module_path, resource_name))
- def metadata_isdir(self, name) -> bool:
+ def metadata_isdir(self, name: str) -> bool:
return bool(self.egg_info and self._isdir(self._fn(self.egg_info, name)))
- def resource_listdir(self, resource_name):
+ def resource_listdir(self, resource_name: str):
return self._listdir(self._fn(self.module_path, resource_name))
- def metadata_listdir(self, name):
+ def metadata_listdir(self, name: str):
if self.egg_info:
return self._listdir(self._fn(self.egg_info, name))
return []
- def run_script(self, script_name, namespace):
+ def run_script(self, script_name: str, namespace: dict[str, Any]):
script = 'scripts/' + script_name
if not self.has_metadata(script):
raise ResolutionError(
@@ -1541,7 +1618,7 @@ class NullProvider:
"Can't perform this operation for unregistered loader type"
)
- def _fn(self, base, resource_name):
+ def _fn(self, base, resource_name: str):
self._validate_resource_path(resource_name)
if resource_name:
return os.path.join(base, *resource_name.split('/'))
@@ -1624,7 +1701,8 @@ is not allowed.
def _get(self, path) -> bytes:
if hasattr(self.loader, 'get_data') and self.loader:
- return self.loader.get_data(path)
+ # Already checked get_data exists
+ return self.loader.get_data(path) # type: ignore[attr-defined]
raise NotImplementedError(
"Can't perform this operation for loaders without 'get_data()'"
)
@@ -1647,7 +1725,7 @@ def _parents(path):
class EggProvider(NullProvider):
"""Provider based on a virtual filesystem"""
- def __init__(self, module):
+ def __init__(self, module: _ModuleLike):
super().__init__(module)
self._setup_prefix()
@@ -1658,7 +1736,7 @@ class EggProvider(NullProvider):
egg = next(eggs, None)
egg and self._set_egg(egg)
- def _set_egg(self, path):
+ def _set_egg(self, path: str):
self.egg_name = os.path.basename(path)
self.egg_info = os.path.join(path, 'EGG-INFO')
self.egg_root = path
@@ -1676,7 +1754,7 @@ class DefaultProvider(EggProvider):
def _listdir(self, path):
return os.listdir(path)
- def get_resource_stream(self, manager, resource_name):
+ def get_resource_stream(self, manager: object, resource_name: str):
return open(self._fn(self.module_path, resource_name), 'rb')
def _get(self, path) -> bytes:
@@ -1717,13 +1795,14 @@ class EmptyProvider(NullProvider):
empty_provider = EmptyProvider()
-class ZipManifests(dict):
+class ZipManifests(Dict[str, "MemoizedZipManifests.manifest_mod"]):
"""
zip manifest builder
"""
+ # `path` could be `StrPath | IO[bytes]` but that violates the LSP for `MemoizedZipManifests.load`
@classmethod
- def build(cls, path):
+ def build(cls, path: str):
"""
Build a dictionary similar to the zipimport directory
caches, except instead of tuples, store ZipInfo objects.
@@ -1749,9 +1828,11 @@ class MemoizedZipManifests(ZipManifests):
Memoized zipfile manifests.
"""
- manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
+ class manifest_mod(NamedTuple):
+ manifest: dict[str, zipfile.ZipInfo]
+ mtime: float
- def load(self, path):
+ def load(self, path: str) -> dict[str, zipfile.ZipInfo]: # type: ignore[override] # ZipManifests.load is a classmethod
"""
Load a manifest at path or return a suitable manifest already loaded.
"""
@@ -1768,10 +1849,12 @@ class MemoizedZipManifests(ZipManifests):
class ZipProvider(EggProvider):
"""Resource support for zips and eggs"""
- eagers = None
+ eagers: list[str] | None = None
_zip_manifests = MemoizedZipManifests()
+ # ZipProvider's loader should always be a zipimporter or equivalent
+ loader: zipimport.zipimporter
- def __init__(self, module):
+ def __init__(self, module: _ModuleLike):
super().__init__(module)
self.zip_pre = self.loader.archive + os.sep
@@ -1797,7 +1880,7 @@ class ZipProvider(EggProvider):
def zipinfo(self):
return self._zip_manifests.load(self.loader.archive)
- def get_resource_filename(self, manager, resource_name):
+ def get_resource_filename(self, manager: ResourceManager, resource_name: str):
if not self.egg_name:
raise NotImplementedError(
"resource_filename() only supported for .egg, not .zip"
@@ -1820,7 +1903,7 @@ class ZipProvider(EggProvider):
return timestamp, size
# FIXME: 'ZipProvider._extract_resource' is too complex (12)
- def _extract_resource(self, manager, zip_path): # noqa: C901
+ def _extract_resource(self, manager: ResourceManager, zip_path): # noqa: C901
if zip_path in self._index():
for name in self._index()[zip_path]:
last = self._extract_resource(manager, os.path.join(zip_path, name))
@@ -1834,6 +1917,10 @@ class ZipProvider(EggProvider):
'"os.rename" and "os.unlink" are not supported ' 'on this platform'
)
try:
+ if not self.egg_name:
+ raise OSError(
+ '"egg_name" is empty. This likely means no egg could be found from the "module_path".'
+ )
real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))
if self._is_current(real_path, zip_path):
@@ -1922,10 +2009,10 @@ class ZipProvider(EggProvider):
def _listdir(self, fspath):
return list(self._index().get(self._zipinfo_name(fspath), ()))
- def _eager_to_zip(self, resource_name):
+ def _eager_to_zip(self, resource_name: str):
return self._zipinfo_name(self._fn(self.egg_root, resource_name))
- def _resource_to_zip(self, resource_name):
+ def _resource_to_zip(self, resource_name: str):
return self._zipinfo_name(self._fn(self.module_path, resource_name))
@@ -1944,13 +2031,13 @@ class FileMetadata(EmptyProvider):
the provided location.
"""
- def __init__(self, path):
+ def __init__(self, path: StrPath):
self.path = path
def _get_metadata_path(self, name):
return self.path
- def has_metadata(self, name) -> bool:
+ def has_metadata(self, name: str) -> bool:
return name == 'PKG-INFO' and os.path.isfile(self.path)
def get_metadata(self, name):
@@ -1993,7 +2080,7 @@ class PathMetadata(DefaultProvider):
dist = Distribution.from_filename(egg_path, metadata=metadata)
"""
- def __init__(self, path, egg_info):
+ def __init__(self, path: str, egg_info: str):
self.module_path = path
self.egg_info = egg_info
@@ -2001,7 +2088,7 @@ class PathMetadata(DefaultProvider):
class EggMetadata(ZipProvider):
"""Metadata provider for .egg files"""
- def __init__(self, importer):
+ def __init__(self, importer: zipimport.zipimporter):
"""Create a metadata provider from a zipimporter"""
self.zip_pre = importer.archive + os.sep
@@ -2013,12 +2100,12 @@ class EggMetadata(ZipProvider):
self._setup_prefix()
-_distribution_finders: Dict[
- type, Callable[[object, str, bool], Iterable["Distribution"]]
-] = _declare_state('dict', '_distribution_finders', {})
+_distribution_finders: dict[type, _DistFinderType[Any]] = _declare_state(
+ 'dict', '_distribution_finders', {}
+)
-def register_finder(importer_type, distribution_finder):
+def register_finder(importer_type: type[_T], distribution_finder: _DistFinderType[_T]):
"""Register `distribution_finder` to find distributions in sys.path items
`importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2028,14 +2115,16 @@ def register_finder(importer_type, distribution_finder):
_distribution_finders[importer_type] = distribution_finder
-def find_distributions(path_item, only=False):
+def find_distributions(path_item: str, only: bool = False):
"""Yield distributions accessible via `path_item`"""
importer = get_importer(path_item)
finder = _find_adapter(_distribution_finders, importer)
return finder(importer, path_item, only)
-def find_eggs_in_zip(importer, path_item, only=False):
+def find_eggs_in_zip(
+ importer: zipimport.zipimporter, path_item: str, only: bool = False
+) -> Iterator[Distribution]:
"""
Find eggs in zip files; possibly multiple nested eggs.
"""
@@ -2064,14 +2153,16 @@ def find_eggs_in_zip(importer, path_item, only=False):
register_finder(zipimport.zipimporter, find_eggs_in_zip)
-def find_nothing(importer, path_item, only=False):
+def find_nothing(
+ importer: object | None, path_item: str | None, only: bool | None = False
+):
return ()
register_finder(object, find_nothing)
-def find_on_path(importer, path_item, only=False):
+def find_on_path(importer: object | None, path_item, only=False):
"""Yield distributions accessible on a sys.path directory"""
path_item = _normalize_cached(path_item)
@@ -2126,7 +2217,7 @@ class NoDists:
return iter(())
-def safe_listdir(path):
+def safe_listdir(path: StrOrBytesPath):
"""
Attempt to list contents of path, but suppress some exceptions.
"""
@@ -2142,13 +2233,13 @@ def safe_listdir(path):
return ()
-def distributions_from_metadata(path):
+def distributions_from_metadata(path: str):
root = os.path.dirname(path)
if os.path.isdir(path):
if len(os.listdir(path)) == 0:
# empty metadata dir; skip
return
- metadata = PathMetadata(root, path)
+ metadata: _MetadataType = PathMetadata(root, path)
else:
metadata = FileMetadata(path)
entry = os.path.basename(path)
@@ -2188,15 +2279,17 @@ if hasattr(pkgutil, 'ImpImporter'):
register_finder(importlib.machinery.FileFinder, find_on_path)
-_namespace_handlers: Dict[
- type, Callable[[object, str, str, types.ModuleType], Optional[str]]
-] = _declare_state('dict', '_namespace_handlers', {})
-_namespace_packages: Dict[Optional[str], List[str]] = _declare_state(
+_namespace_handlers: dict[type, _NSHandlerType[Any]] = _declare_state(
+ 'dict', '_namespace_handlers', {}
+)
+_namespace_packages: dict[str | None, list[str]] = _declare_state(
'dict', '_namespace_packages', {}
)
-def register_namespace_handler(importer_type, namespace_handler):
+def register_namespace_handler(
+ importer_type: type[_T], namespace_handler: _NSHandlerType[_T]
+):
"""Register `namespace_handler` to declare namespace packages
`importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2251,7 +2344,7 @@ def _handle_ns(packageName, path_item):
return subpath
-def _rebuild_mod_path(orig_path, package_name, module):
+def _rebuild_mod_path(orig_path, package_name, module: types.ModuleType):
"""
Rebuild module.__path__ ensuring that all entries are ordered
corresponding to their sys.path order
@@ -2285,7 +2378,7 @@ def _rebuild_mod_path(orig_path, package_name, module):
module.__path__ = new_path
-def declare_namespace(packageName):
+def declare_namespace(packageName: str):
"""Declare that package 'packageName' is a namespace package"""
msg = (
@@ -2302,7 +2395,7 @@ def declare_namespace(packageName):
if packageName in _namespace_packages:
return
- path = sys.path
+ path: MutableSequence[str] = sys.path
parent, _, _ = packageName.rpartition('.')
if parent:
@@ -2328,7 +2421,7 @@ def declare_namespace(packageName):
_imp.release_lock()
-def fixup_namespace_packages(path_item, parent=None):
+def fixup_namespace_packages(path_item: str, parent: str | None = None):
"""Ensure that previously-declared namespace packages include path_item"""
_imp.acquire_lock()
try:
@@ -2340,7 +2433,12 @@ def fixup_namespace_packages(path_item, parent=None):
_imp.release_lock()
-def file_ns_handler(importer, path_item, packageName, module):
+def file_ns_handler(
+ importer: object,
+ path_item: StrPath,
+ packageName: str,
+ module: types.ModuleType,
+):
"""Compute an ns-package subpath for a filesystem or zipfile importer"""
subpath = os.path.join(path_item, packageName.split('.')[-1])
@@ -2360,19 +2458,28 @@ register_namespace_handler(zipimport.zipimporter, file_ns_handler)
register_namespace_handler(importlib.machinery.FileFinder, file_ns_handler)
-def null_ns_handler(importer, path_item, packageName, module):
+def null_ns_handler(
+ importer: object,
+ path_item: str | None,
+ packageName: str | None,
+ module: _ModuleLike | None,
+):
return None
register_namespace_handler(object, null_ns_handler)
-def normalize_path(filename):
+@overload
+def normalize_path(filename: StrPath) -> str: ...
+@overload
+def normalize_path(filename: BytesPath) -> bytes: ...
+def normalize_path(filename: StrOrBytesPath):
"""Normalize a file/dir name for comparison purposes"""
return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
-def _cygwin_patch(filename): # pragma: nocover
+def _cygwin_patch(filename: StrOrBytesPath): # pragma: nocover
"""
Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
symlink components. Using
@@ -2383,9 +2490,19 @@ def _cygwin_patch(filename): # pragma: nocover
return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
-@functools.lru_cache(maxsize=None)
-def _normalize_cached(filename):
- return normalize_path(filename)
+if TYPE_CHECKING:
+ # https://github.com/python/mypy/issues/16261
+ # https://github.com/python/typeshed/issues/6347
+ @overload
+ def _normalize_cached(filename: StrPath) -> str: ...
+ @overload
+ def _normalize_cached(filename: BytesPath) -> bytes: ...
+ def _normalize_cached(filename: StrOrBytesPath) -> str | bytes: ...
+else:
+
+ @functools.lru_cache(maxsize=None)
+ def _normalize_cached(filename):
+ return normalize_path(filename)
def _is_egg_path(path):
@@ -2438,7 +2555,14 @@ EGG_NAME = re.compile(
class EntryPoint:
"""Object representing an advertised importable object"""
- def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+ def __init__(
+ self,
+ name: str,
+ module_name: str,
+ attrs: Iterable[str] = (),
+ extras: Iterable[str] = (),
+ dist: Distribution | None = None,
+ ):
if not MODULE(module_name):
raise ValueError("Invalid module name", module_name)
self.name = name
@@ -2458,7 +2582,12 @@ class EntryPoint:
def __repr__(self):
return "EntryPoint.parse(%r)" % str(self)
- def load(self, require=True, *args, **kwargs):
+ def load(
+ self,
+ require: bool = True,
+ *args: Environment | _InstallerType | None,
+ **kwargs: Environment | _InstallerType | None,
+ ):
"""
Require packages for this EntryPoint, then resolve it.
"""
@@ -2470,7 +2599,9 @@ class EntryPoint:
stacklevel=2,
)
if require:
- self.require(*args, **kwargs)
+ # We could pass `env` and `installer` directly,
+ # but keeping `*args` and `**kwargs` for backwards compatibility
+ self.require(*args, **kwargs) # type: ignore
return self.resolve()
def resolve(self):
@@ -2483,7 +2614,11 @@ class EntryPoint:
except AttributeError as exc:
raise ImportError(str(exc)) from exc
- def require(self, env=None, installer=None):
+ def require(
+ self,
+ env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ ):
if not self.dist:
error_cls = UnknownExtra if self.extras else AttributeError
raise error_cls("Can't require() without a distribution", self)
@@ -2507,7 +2642,7 @@ class EntryPoint:
)
@classmethod
- def parse(cls, src, dist=None):
+ def parse(cls, src: str, dist: Distribution | None = None):
"""Parse a single entry point from string `src`
Entry point syntax follows the form::
@@ -2536,11 +2671,16 @@ class EntryPoint:
return req.extras
@classmethod
- def parse_group(cls, group, lines, dist=None):
+ def parse_group(
+ cls,
+ group: str,
+ lines: _NestedStr,
+ dist: Distribution | None = None,
+ ):
"""Parse an entry point group"""
if not MODULE(group):
raise ValueError("Invalid group name", group)
- this = {}
+ this: dict[str, Self] = {}
for line in yield_lines(lines):
ep = cls.parse(line, dist)
if ep.name in this:
@@ -2549,13 +2689,18 @@ class EntryPoint:
return this
@classmethod
- def parse_map(cls, data, dist=None):
+ def parse_map(
+ cls,
+ data: str | Iterable[str] | dict[str, str | Iterable[str]],
+ dist: Distribution | None = None,
+ ):
"""Parse a map of entry point groups"""
+ _data: Iterable[tuple[str | None, str | Iterable[str]]]
if isinstance(data, dict):
_data = data.items()
else:
_data = split_sections(data)
- maps = {}
+ maps: dict[str, dict[str, Self]] = {}
for group, lines in _data:
if group is None:
if not lines:
@@ -2590,13 +2735,13 @@ class Distribution:
def __init__(
self,
- location=None,
- metadata=None,
- project_name=None,
- version=None,
- py_version=PY_MAJOR,
- platform=None,
- precedence=EGG_DIST,
+ location: str | None = None,
+ metadata: _MetadataType = None,
+ project_name: str | None = None,
+ version: str | None = None,
+ py_version: str | None = PY_MAJOR,
+ platform: str | None = None,
+ precedence: int = EGG_DIST,
):
self.project_name = safe_name(project_name or 'Unknown')
if version is not None:
@@ -2608,7 +2753,13 @@ class Distribution:
self._provider = metadata or empty_provider
@classmethod
- def from_location(cls, location, basename, metadata=None, **kw):
+ def from_location(
+ cls,
+ location: str,
+ basename: StrPath,
+ metadata: _MetadataType = None,
+ **kw: int, # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
+ ) -> Distribution:
project_name, version, py_version, platform = [None] * 4
basename, ext = os.path.splitext(basename)
if ext.lower() in _distributionImpl:
@@ -2646,25 +2797,25 @@ class Distribution:
def __hash__(self):
return hash(self.hashcmp)
- def __lt__(self, other):
+ def __lt__(self, other: Distribution):
return self.hashcmp < other.hashcmp
- def __le__(self, other):
+ def __le__(self, other: Distribution):
return self.hashcmp <= other.hashcmp
- def __gt__(self, other):
+ def __gt__(self, other: Distribution):
return self.hashcmp > other.hashcmp
- def __ge__(self, other):
+ def __ge__(self, other: Distribution):
return self.hashcmp >= other.hashcmp
- def __eq__(self, other):
+ def __eq__(self, other: object):
if not isinstance(other, self.__class__):
# It's not a Distribution, so they are not equal
return False
return self.hashcmp == other.hashcmp
- def __ne__(self, other):
+ def __ne__(self, other: object):
return not self == other
# These properties have to be lazy so that we don't have to load any
@@ -2747,14 +2898,14 @@ class Distribution:
return self.__dep_map
@staticmethod
- def _filter_extras(dm):
+ def _filter_extras(dm: dict[str | None, list[Requirement]]):
"""
Given a mapping of extras to dependencies, strip off
environment markers and filter out any dependencies
not matching the markers.
"""
for extra in list(filter(None, dm)):
- new_extra = extra
+ new_extra: str | None = extra
reqs = dm.pop(extra)
new_extra, _, marker = extra.partition(':')
fails_marker = marker and (
@@ -2774,10 +2925,10 @@ class Distribution:
dm.setdefault(extra, []).extend(parse_requirements(reqs))
return dm
- def requires(self, extras=()):
+ def requires(self, extras: Iterable[str] = ()):
"""List of Requirements needed for this distro if `extras` are used"""
dm = self._dep_map
- deps = []
+ deps: list[Requirement] = []
deps.extend(dm.get(None, ()))
for ext in extras:
try:
@@ -2813,7 +2964,7 @@ class Distribution:
lines = self._get_metadata(self.PKG_INFO)
return _version_from_file(lines)
- def activate(self, path=None, replace=False):
+ def activate(self, path: list[str] | None = None, replace: bool = False):
"""Ensure distribution is importable on `path` (default=sys.path)"""
if path is None:
path = sys.path
@@ -2863,7 +3014,12 @@ class Distribution:
)
@classmethod
- def from_filename(cls, filename, metadata=None, **kw):
+ def from_filename(
+ cls,
+ filename: StrPath,
+ metadata: _MetadataType = None,
+ **kw: int, # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
+ ):
return cls.from_location(
_normalize_cached(filename), os.path.basename(filename), metadata, **kw
)
@@ -2877,14 +3033,14 @@ class Distribution:
return Requirement.parse(spec)
- def load_entry_point(self, group, name):
+ def load_entry_point(self, group: str, name: str):
"""Return the `name` entry point of `group` or raise ImportError"""
ep = self.get_entry_info(group, name)
if ep is None:
raise ImportError("Entry point %r not found" % ((group, name),))
return ep.load()
- def get_entry_map(self, group=None):
+ def get_entry_map(self, group: str | None = None):
"""Return the entry point map for `group`, or the full entry map"""
if not hasattr(self, "_ep_map"):
self._ep_map = EntryPoint.parse_map(
@@ -2894,12 +3050,17 @@ class Distribution:
return self._ep_map.get(group, {})
return self._ep_map
- def get_entry_info(self, group, name):
+ def get_entry_info(self, group: str, name: str):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return self.get_entry_map(group).get(name)
# FIXME: 'Distribution.insert_on' is too complex (13)
- def insert_on(self, path, loc=None, replace=False): # noqa: C901
+ def insert_on( # noqa: C901
+ self,
+ path: list[str],
+ loc=None,
+ replace: bool = False,
+ ):
"""Ensure self.location is on path
If replace=False (default):
@@ -3004,13 +3165,14 @@ class Distribution:
return False
return True
- def clone(self, **kw):
+ def clone(self, **kw: str | int | IResourceProvider | None):
"""Copy this distribution, substituting in any changed keyword args"""
names = 'project_name version py_version platform location precedence'
for attr in names.split():
kw.setdefault(attr, getattr(self, attr, None))
kw.setdefault('metadata', self._provider)
- return self.__class__(**kw)
+ # Unsafely unpacking. But keeping **kw for backwards and subclassing compatibility
+ return self.__class__(**kw) # type:ignore[arg-type]
@property
def extras(self):
@@ -3063,11 +3225,11 @@ class DistInfoDistribution(Distribution):
self.__dep_map = self._compute_dependencies()
return self.__dep_map
- def _compute_dependencies(self):
+ def _compute_dependencies(self) -> dict[str | None, list[Requirement]]:
"""Recompute this distribution's dependencies."""
- dm = self.__dep_map = {None: []}
+ self.__dep_map: dict[str | None, list[Requirement]] = {None: []}
- reqs = []
+ reqs: list[Requirement] = []
# Including any condition expressions
for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
reqs.extend(parse_requirements(req))
@@ -3078,13 +3240,15 @@ class DistInfoDistribution(Distribution):
yield req
common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
- dm[None].extend(common)
+ self.__dep_map[None].extend(common)
for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
s_extra = safe_extra(extra.strip())
- dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common]
+ self.__dep_map[s_extra] = [
+ r for r in reqs_for_extra(extra) if r not in common
+ ]
- return dm
+ return self.__dep_map
_distributionImpl = {
@@ -3107,7 +3271,7 @@ def issue_warning(*args, **kw):
warnings.warn(stacklevel=level + 1, *args, **kw)
-def parse_requirements(strs):
+def parse_requirements(strs: _NestedStr):
"""
Yield ``Requirement`` objects for each specification in `strs`.
@@ -3121,14 +3285,15 @@ class RequirementParseError(_packaging_requirements.InvalidRequirement):
class Requirement(_packaging_requirements.Requirement):
- def __init__(self, requirement_string):
+ def __init__(self, requirement_string: str):
"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
super().__init__(requirement_string)
self.unsafe_name = self.name
project_name = safe_name(self.name)
self.project_name, self.key = project_name, project_name.lower()
self.specs = [(spec.operator, spec.version) for spec in self.specifier]
- self.extras = tuple(map(safe_extra, self.extras))
+ # packaging.requirements.Requirement uses a set for its extras. We use a variable-length tuple
+ self.extras: tuple[str] = tuple(map(safe_extra, self.extras))
self.hashCmp = (
self.key,
self.url,
@@ -3138,13 +3303,13 @@ class Requirement(_packaging_requirements.Requirement):
)
self.__hash = hash(self.hashCmp)
- def __eq__(self, other):
+ def __eq__(self, other: object):
return isinstance(other, Requirement) and self.hashCmp == other.hashCmp
def __ne__(self, other):
return not self == other
- def __contains__(self, item):
+ def __contains__(self, item: Distribution | str | tuple[str, ...]) -> bool:
if isinstance(item, Distribution):
if item.key != self.key:
return False
@@ -3163,7 +3328,7 @@ class Requirement(_packaging_requirements.Requirement):
return "Requirement.parse(%r)" % str(self)
@staticmethod
- def parse(s):
+ def parse(s: str | Iterable[str]):
(req,) = parse_requirements(s)
return req
@@ -3178,7 +3343,7 @@ def _always_object(classes):
return classes
-def _find_adapter(registry, ob):
+def _find_adapter(registry: Mapping[type, _AdapterT], ob: object) -> _AdapterT:
"""Return an adapter factory for `ob` from `registry`"""
types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
for t in types:
@@ -3189,7 +3354,7 @@ def _find_adapter(registry, ob):
raise TypeError(f"Could not find adapter for {registry} and {ob}")
-def ensure_directory(path):
+def ensure_directory(path: StrOrBytesPath):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
os.makedirs(dirname, exist_ok=True)
@@ -3208,7 +3373,7 @@ def _bypass_ensure_directory(path):
pass
-def split_sections(s):
+def split_sections(s: _NestedStr) -> Iterator[tuple[str | None, list[str]]]:
"""Split a string or iterable thereof into (section, content) pairs
Each ``section`` is a stripped version of the section header ("[section]")
@@ -3387,6 +3552,38 @@ class PkgResourcesDeprecationWarning(Warning):
"""
+# Ported from ``setuptools`` to avoid introducing an import inter-dependency:
+_LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
+
+
+def _read_utf8_with_fallback(file: str, fallback_encoding=_LOCALE_ENCODING) -> str:
+ """See setuptools.unicode_utils._read_utf8_with_fallback"""
+ try:
+ with open(file, "r", encoding="utf-8") as f:
+ return f.read()
+ except UnicodeDecodeError: # pragma: no cover
+ msg = f"""\
+ ********************************************************************************
+ `encoding="utf-8"` fails with {file!r}, trying `encoding={fallback_encoding!r}`.
+
+ This fallback behaviour is considered **deprecated** and future versions of
+ `setuptools/pkg_resources` may not implement it.
+
+ Please encode {file!r} with "utf-8" to ensure future builds will succeed.
+
+ If this file was produced by `setuptools` itself, cleaning up the cached files
+ and re-building/re-installing the package with a newer version of `setuptools`
+ (e.g. by updating `build-system.requires` in its `pyproject.toml`)
+ might solve the problem.
+ ********************************************************************************
+ """
+ # TODO: Add a deadline?
+ # See comment in setuptools.unicode_utils._Utf8EncodingNeeded
+ warnings.warn(msg, PkgResourcesDeprecationWarning, stacklevel=2)
+ with open(file, "r", encoding=fallback_encoding) as f:
+ return f.read()
+
+
# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
f(*args, **kwargs)
@@ -3459,35 +3656,3 @@ if TYPE_CHECKING:
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script
-
-
-# ---- Ported from ``setuptools`` to avoid introducing an import inter-dependency ----
-LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
-
-
-def _read_utf8_with_fallback(file: str, fallback_encoding=LOCALE_ENCODING) -> str:
- """See setuptools.unicode_utils._read_utf8_with_fallback"""
- try:
- with open(file, "r", encoding="utf-8") as f:
- return f.read()
- except UnicodeDecodeError: # pragma: no cover
- msg = f"""\
- ********************************************************************************
- `encoding="utf-8"` fails with {file!r}, trying `encoding={fallback_encoding!r}`.
-
- This fallback behaviour is considered **deprecated** and future versions of
- `setuptools/pkg_resources` may not implement it.
-
- Please encode {file!r} with "utf-8" to ensure future builds will succeed.
-
- If this file was produced by `setuptools` itself, cleaning up the cached files
- and re-building/re-installing the package with a newer version of `setuptools`
- (e.g. by updating `build-system.requires` in its `pyproject.toml`)
- might solve the problem.
- ********************************************************************************
- """
- # TODO: Add a deadline?
- # See comment in setuptools.unicode_utils._Utf8EncodingNeeded
- warnings.warn(msg, PkgResourcesDeprecationWarning, stacklevel=2)
- with open(file, "r", encoding=fallback_encoding) as f:
- return f.read()
diff --git a/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py b/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py
index bfb9eb8bdf..9b9ac10aa9 100644
--- a/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py
+++ b/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py
@@ -1,5 +1,9 @@
+from __future__ import annotations
+from importlib.machinery import ModuleSpec
import importlib.util
import sys
+from types import ModuleType
+from typing import Iterable, Sequence
class VendorImporter:
@@ -8,7 +12,12 @@ class VendorImporter:
or otherwise naturally-installed packages from root_name.
"""
- def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
+ def __init__(
+ self,
+ root_name: str,
+ vendored_names: Iterable[str] = (),
+ vendor_pkg: str | None = None,
+ ):
self.root_name = root_name
self.vendored_names = set(vendored_names)
self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')
@@ -26,7 +35,7 @@ class VendorImporter:
root, base, target = fullname.partition(self.root_name + '.')
return not root and any(map(target.startswith, self.vendored_names))
- def load_module(self, fullname):
+ def load_module(self, fullname: str):
"""
Iterate over the search path to locate and load fullname.
"""
@@ -48,16 +57,22 @@ class VendorImporter:
"distribution.".format(**locals())
)
- def create_module(self, spec):
+ def create_module(self, spec: ModuleSpec):
return self.load_module(spec.name)
- def exec_module(self, module):
+ def exec_module(self, module: ModuleType):
pass
- def find_spec(self, fullname, path=None, target=None):
+ def find_spec(
+ self,
+ fullname: str,
+ path: Sequence[str] | None = None,
+ target: ModuleType | None = None,
+ ):
"""Return a module spec for vendored names."""
return (
- importlib.util.spec_from_loader(fullname, self)
+ # This should fix itself next mypy release https://github.com/python/typeshed/pull/11890
+ importlib.util.spec_from_loader(fullname, self) # type: ignore[arg-type]
if self._module_matches_namespace(fullname)
else None
)
diff --git a/contrib/python/setuptools/py3/setuptools/__init__.py b/contrib/python/setuptools/py3/setuptools/__init__.py
index a59bbe1177..5ef0f7dbd8 100644
--- a/contrib/python/setuptools/py3/setuptools/__init__.py
+++ b/contrib/python/setuptools/py3/setuptools/__init__.py
@@ -216,7 +216,7 @@ class Command(_Command):
"'%s' must be a list of strings (got %r)" % (option, val)
)
- def reinitialize_command(self, command, reinit_subcommands=0, **kw):
+ def reinitialize_command(self, command, reinit_subcommands=False, **kw):
cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
vars(cmd).update(kw)
return cmd
diff --git a/contrib/python/setuptools/py3/setuptools/_core_metadata.py b/contrib/python/setuptools/py3/setuptools/_core_metadata.py
index 9b4f38ded2..f1de9c9ba6 100644
--- a/contrib/python/setuptools/py3/setuptools/_core_metadata.py
+++ b/contrib/python/setuptools/py3/setuptools/_core_metadata.py
@@ -4,13 +4,14 @@ Handling of Core Metadata for Python packages (including reading and writing).
See: https://packaging.python.org/en/latest/specifications/core-metadata/
"""
+from __future__ import annotations
+
import os
import stat
import textwrap
from email import message_from_file
from email.message import Message
from tempfile import NamedTemporaryFile
-from typing import Optional, List
from distutils.util import rfc822_escape
@@ -38,7 +39,7 @@ def rfc822_unescape(content: str) -> str:
return '\n'.join((lines[0].lstrip(), textwrap.dedent('\n'.join(lines[1:]))))
-def _read_field_from_msg(msg: Message, field: str) -> Optional[str]:
+def _read_field_from_msg(msg: Message, field: str) -> str | None:
"""Read Message header field."""
value = msg[field]
if value == 'UNKNOWN':
@@ -46,7 +47,7 @@ def _read_field_from_msg(msg: Message, field: str) -> Optional[str]:
return value
-def _read_field_unescaped_from_msg(msg: Message, field: str) -> Optional[str]:
+def _read_field_unescaped_from_msg(msg: Message, field: str) -> str | None:
"""Read Message header field and apply rfc822_unescape."""
value = _read_field_from_msg(msg, field)
if value is None:
@@ -54,7 +55,7 @@ def _read_field_unescaped_from_msg(msg: Message, field: str) -> Optional[str]:
return rfc822_unescape(value)
-def _read_list_from_msg(msg: Message, field: str) -> Optional[List[str]]:
+def _read_list_from_msg(msg: Message, field: str) -> list[str] | None:
"""Read Message header field and return all results as list."""
values = msg.get_all(field, None)
if values == []:
@@ -62,7 +63,7 @@ def _read_list_from_msg(msg: Message, field: str) -> Optional[List[str]]:
return values
-def _read_payload_from_msg(msg: Message) -> Optional[str]:
+def _read_payload_from_msg(msg: Message) -> str | None:
value = str(msg.get_payload()).strip()
if value == 'UNKNOWN' or not value:
return None
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/wheel/__init__.py b/contrib/python/setuptools/py3/setuptools/_vendor/wheel/__init__.py
new file mode 100644
index 0000000000..a773bbbcd7
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/wheel/__init__.py
@@ -0,0 +1,3 @@
+from __future__ import annotations
+
+__version__ = "0.43.0"
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/wheel/macosx_libfile.py b/contrib/python/setuptools/py3/setuptools/_vendor/wheel/macosx_libfile.py
new file mode 100644
index 0000000000..8953c3f805
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/wheel/macosx_libfile.py
@@ -0,0 +1,469 @@
+"""
+This module contains function to analyse dynamic library
+headers to extract system information
+
+Currently only for MacOSX
+
+A library file on a macOS system starts with either a Mach-O or a Fat header.
+The two can be distinguished by the first 32 bits, called the magic number.
+Constants with the _MAGIC suffix hold the native value of the magic number;
+the _CIGAM suffix means the byte order is reversed.
+Both header types come in two variants: 32-bit and 64-bit.
+
+A FAT header indicates that this library contains several versions of the
+library (typically for different architectures). It records where the
+Mach-O headers start.
+
+Each section started by a Mach-O header contains one library
+(so if the file starts with this header it contains only one version).
+
+After the Mach-O header there are load-command sections.
+Each of them starts with two fields:
+cmd - magic number for this command
+cmdsize - total size occupied by this section information.
+
+Here only the LC_VERSION_MIN_MACOSX sections (for macOS 10.13 and earlier)
+and LC_BUILD_VERSION sections (for macOS 10.14 and newer) are interesting,
+because they contain information about the minimal system version.
+
+Important remarks:
+- For fat files this implementation looks for the maximum version number.
+  It does not check whether a slice is 32- or 64-bit and does not compare it
+  with the currently built package, so it may falsely report a higher version
+  than needed.
+- All structures signatures are taken form macosx header files.
+- The binary format is likely more stable than `otool` output; if Apple
+  introduces changes, both implementations will need to be updated.
+- The system compile will set the deployment target no lower than
+ 11.0 for arm64 builds. For "Universal 2" builds use the x86_64 deployment
+ target when the arm64 target is 11.0.
+"""
+
+from __future__ import annotations
+
+import ctypes
+import os
+import sys
+
+"""here the needed const and struct from mach-o header files"""
+
+FAT_MAGIC = 0xCAFEBABE
+FAT_CIGAM = 0xBEBAFECA
+FAT_MAGIC_64 = 0xCAFEBABF
+FAT_CIGAM_64 = 0xBFBAFECA
+MH_MAGIC = 0xFEEDFACE
+MH_CIGAM = 0xCEFAEDFE
+MH_MAGIC_64 = 0xFEEDFACF
+MH_CIGAM_64 = 0xCFFAEDFE
+
+LC_VERSION_MIN_MACOSX = 0x24
+LC_BUILD_VERSION = 0x32
+
+CPU_TYPE_ARM64 = 0x0100000C
+
+mach_header_fields = [
+ ("magic", ctypes.c_uint32),
+ ("cputype", ctypes.c_int),
+ ("cpusubtype", ctypes.c_int),
+ ("filetype", ctypes.c_uint32),
+ ("ncmds", ctypes.c_uint32),
+ ("sizeofcmds", ctypes.c_uint32),
+ ("flags", ctypes.c_uint32),
+]
+"""
+struct mach_header {
+ uint32_t magic; /* mach magic number identifier */
+ cpu_type_t cputype; /* cpu specifier */
+ cpu_subtype_t cpusubtype; /* machine specifier */
+ uint32_t filetype; /* type of file */
+ uint32_t ncmds; /* number of load commands */
+ uint32_t sizeofcmds; /* the size of all the load commands */
+ uint32_t flags; /* flags */
+};
+typedef integer_t cpu_type_t;
+typedef integer_t cpu_subtype_t;
+"""
+
+mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
+"""
+struct mach_header_64 {
+ uint32_t magic; /* mach magic number identifier */
+ cpu_type_t cputype; /* cpu specifier */
+ cpu_subtype_t cpusubtype; /* machine specifier */
+ uint32_t filetype; /* type of file */
+ uint32_t ncmds; /* number of load commands */
+ uint32_t sizeofcmds; /* the size of all the load commands */
+ uint32_t flags; /* flags */
+ uint32_t reserved; /* reserved */
+};
+"""
+
+fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
+"""
+struct fat_header {
+ uint32_t magic; /* FAT_MAGIC or FAT_MAGIC_64 */
+ uint32_t nfat_arch; /* number of structs that follow */
+};
+"""
+
+fat_arch_fields = [
+ ("cputype", ctypes.c_int),
+ ("cpusubtype", ctypes.c_int),
+ ("offset", ctypes.c_uint32),
+ ("size", ctypes.c_uint32),
+ ("align", ctypes.c_uint32),
+]
+"""
+struct fat_arch {
+ cpu_type_t cputype; /* cpu specifier (int) */
+ cpu_subtype_t cpusubtype; /* machine specifier (int) */
+ uint32_t offset; /* file offset to this object file */
+ uint32_t size; /* size of this object file */
+ uint32_t align; /* alignment as a power of 2 */
+};
+"""
+
+fat_arch_64_fields = [
+ ("cputype", ctypes.c_int),
+ ("cpusubtype", ctypes.c_int),
+ ("offset", ctypes.c_uint64),
+ ("size", ctypes.c_uint64),
+ ("align", ctypes.c_uint32),
+ ("reserved", ctypes.c_uint32),
+]
+"""
+struct fat_arch_64 {
+ cpu_type_t cputype; /* cpu specifier (int) */
+ cpu_subtype_t cpusubtype; /* machine specifier (int) */
+ uint64_t offset; /* file offset to this object file */
+ uint64_t size; /* size of this object file */
+ uint32_t align; /* alignment as a power of 2 */
+ uint32_t reserved; /* reserved */
+};
+"""
+
+segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)]
+"""base for reading segment info"""
+
+segment_command_fields = [
+ ("cmd", ctypes.c_uint32),
+ ("cmdsize", ctypes.c_uint32),
+ ("segname", ctypes.c_char * 16),
+ ("vmaddr", ctypes.c_uint32),
+ ("vmsize", ctypes.c_uint32),
+ ("fileoff", ctypes.c_uint32),
+ ("filesize", ctypes.c_uint32),
+ ("maxprot", ctypes.c_int),
+ ("initprot", ctypes.c_int),
+ ("nsects", ctypes.c_uint32),
+ ("flags", ctypes.c_uint32),
+]
+"""
+struct segment_command { /* for 32-bit architectures */
+ uint32_t cmd; /* LC_SEGMENT */
+ uint32_t cmdsize; /* includes sizeof section structs */
+ char segname[16]; /* segment name */
+ uint32_t vmaddr; /* memory address of this segment */
+ uint32_t vmsize; /* memory size of this segment */
+ uint32_t fileoff; /* file offset of this segment */
+ uint32_t filesize; /* amount to map from the file */
+ vm_prot_t maxprot; /* maximum VM protection */
+ vm_prot_t initprot; /* initial VM protection */
+ uint32_t nsects; /* number of sections in segment */
+ uint32_t flags; /* flags */
+};
+typedef int vm_prot_t;
+"""
+
+segment_command_fields_64 = [
+ ("cmd", ctypes.c_uint32),
+ ("cmdsize", ctypes.c_uint32),
+ ("segname", ctypes.c_char * 16),
+ ("vmaddr", ctypes.c_uint64),
+ ("vmsize", ctypes.c_uint64),
+ ("fileoff", ctypes.c_uint64),
+ ("filesize", ctypes.c_uint64),
+ ("maxprot", ctypes.c_int),
+ ("initprot", ctypes.c_int),
+ ("nsects", ctypes.c_uint32),
+ ("flags", ctypes.c_uint32),
+]
+"""
+struct segment_command_64 { /* for 64-bit architectures */
+ uint32_t cmd; /* LC_SEGMENT_64 */
+ uint32_t cmdsize; /* includes sizeof section_64 structs */
+ char segname[16]; /* segment name */
+ uint64_t vmaddr; /* memory address of this segment */
+ uint64_t vmsize; /* memory size of this segment */
+ uint64_t fileoff; /* file offset of this segment */
+ uint64_t filesize; /* amount to map from the file */
+ vm_prot_t maxprot; /* maximum VM protection */
+ vm_prot_t initprot; /* initial VM protection */
+ uint32_t nsects; /* number of sections in segment */
+ uint32_t flags; /* flags */
+};
+"""
+
+version_min_command_fields = segment_base_fields + [
+ ("version", ctypes.c_uint32),
+ ("sdk", ctypes.c_uint32),
+]
+"""
+struct version_min_command {
+ uint32_t cmd; /* LC_VERSION_MIN_MACOSX or
+ LC_VERSION_MIN_IPHONEOS or
+ LC_VERSION_MIN_WATCHOS or
+ LC_VERSION_MIN_TVOS */
+ uint32_t cmdsize; /* sizeof(struct min_version_command) */
+ uint32_t version; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+ uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+};
+"""
+
+build_version_command_fields = segment_base_fields + [
+ ("platform", ctypes.c_uint32),
+ ("minos", ctypes.c_uint32),
+ ("sdk", ctypes.c_uint32),
+ ("ntools", ctypes.c_uint32),
+]
+"""
+struct build_version_command {
+ uint32_t cmd; /* LC_BUILD_VERSION */
+ uint32_t cmdsize; /* sizeof(struct build_version_command) plus */
+ /* ntools * sizeof(struct build_tool_version) */
+ uint32_t platform; /* platform */
+ uint32_t minos; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+ uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
+ uint32_t ntools; /* number of tool entries following this */
+};
+"""
+
+
def swap32(x):
    """Return the 32-bit value *x* with its byte order reversed."""
    b0 = (x >> 24) & 0xFF
    b1 = (x >> 16) & 0xFF
    b2 = (x >> 8) & 0xFF
    b3 = x & 0xFF
    return (b3 << 24) | (b2 << 16) | (b1 << 8) | b0
+
+
def get_base_class_and_magic_number(lib_file, seek=None):
    """Read the magic number at *seek* (or at the current position when
    *seek* is ``None``) and pick the ctypes structure base class matching
    the file's byte order.

    Returns a ``(base_class, magic_number)`` tuple.  The magic number is
    normalised to its native (``*_MAGIC``) form and the file position is
    restored to *seek* before returning.
    """
    if seek is None:
        seek = lib_file.tell()
    else:
        lib_file.seek(seek)

    raw = lib_file.read(ctypes.sizeof(ctypes.c_uint32))
    magic_number = ctypes.c_uint32.from_buffer_copy(raw).value

    reversed_magics = (FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64)
    if magic_number in reversed_magics:
        # The file uses the opposite byte order to the host, so all further
        # structures must be parsed with the non-native ctypes base class.
        base_class = (
            ctypes.BigEndianStructure
            if sys.byteorder == "little"
            else ctypes.LittleEndianStructure
        )
        magic_number = swap32(magic_number)
    else:
        base_class = ctypes.Structure

    lib_file.seek(seek)
    return base_class, magic_number
+
+
def read_data(struct_class, lib_file):
    """Read exactly one *struct_class* record from *lib_file* and return it."""
    raw = lib_file.read(ctypes.sizeof(struct_class))
    return struct_class.from_buffer_copy(raw)
+
+
def extract_macosx_min_system_version(path_to_lib):
    """Return the minimal macOS version required by the library at
    *path_to_lib* as an ``(x, y, z)`` tuple, or ``None`` (implicitly for
    non-Mach-O files) when the file cannot be parsed.
    """
    with open(path_to_lib, "rb") as lib_file:
        BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
        if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
            # Not a Mach-O or fat binary at all.
            return

        # NOTE(review): FAT_CIGAM_64 can never appear here because
        # get_base_class_and_magic_number already swaps *_CIGAM values to
        # their *_MAGIC forms; FAT_MAGIC_64 was presumably intended —
        # confirm against upstream wheel.
        if magic_number in [FAT_MAGIC, FAT_CIGAM_64]:

            class FatHeader(BaseClass):
                _fields_ = fat_header_fields

            fat_header = read_data(FatHeader, lib_file)
            if magic_number == FAT_MAGIC:

                class FatArch(BaseClass):
                    _fields_ = fat_arch_fields

            else:

                class FatArch(BaseClass):
                    _fields_ = fat_arch_64_fields

            # One fat_arch record per embedded architecture slice.
            fat_arch_list = [
                read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)
            ]

            versions_list = []
            for el in fat_arch_list:
                try:
                    version = read_mach_header(lib_file, el.offset)
                    if version is not None:
                        if el.cputype == CPU_TYPE_ARM64 and len(fat_arch_list) != 1:
                            # Xcode will not set the deployment target below 11.0.0
                            # for the arm64 architecture. Ignore the arm64 deployment
                            # in fat binaries when the target is 11.0.0, that way
                            # the other architectures can select a lower deployment
                            # target.
                            # This is safe because there is no arm64 variant for
                            # macOS 10.15 or earlier.
                            if version == (11, 0, 0):
                                continue
                        versions_list.append(version)
                except ValueError:
                    pass

            # Report the highest requirement among all parsable slices.
            if len(versions_list) > 0:
                return max(versions_list)
            else:
                return None

        else:
            try:
                return read_mach_header(lib_file, 0)
            except ValueError:
                """when some error during read library files"""
                return None
+
+
def read_mach_header(lib_file, seek=None):
    """
    This function parses a Mach-O header and extracts
    information about the minimal macOS version.

    :param lib_file: reference to opened library file with pointer
    :param seek: offset of the Mach-O header in the file; ``None`` means
        the current file position
    :return: an ``(x, y, z)`` version tuple, or ``None`` when no version
        load command is found
    """
    base_class, magic_number = get_base_class_and_magic_number(lib_file, seek)
    arch = "32" if magic_number == MH_MAGIC else "64"

    # Every load command begins with (cmd, cmdsize); read that prefix first
    # to decide how to interpret the rest of the command.
    class SegmentBase(base_class):
        _fields_ = segment_base_fields

    if arch == "32":

        class MachHeader(base_class):
            _fields_ = mach_header_fields

    else:

        class MachHeader(base_class):
            _fields_ = mach_header_fields_64

    mach_header = read_data(MachHeader, lib_file)
    for _i in range(mach_header.ncmds):
        pos = lib_file.tell()
        segment_base = read_data(SegmentBase, lib_file)
        # Rewind so the full command can be re-read with its concrete layout.
        lib_file.seek(pos)
        if segment_base.cmd == LC_VERSION_MIN_MACOSX:

            class VersionMinCommand(base_class):
                _fields_ = version_min_command_fields

            version_info = read_data(VersionMinCommand, lib_file)
            return parse_version(version_info.version)
        elif segment_base.cmd == LC_BUILD_VERSION:

            class VersionBuild(base_class):
                _fields_ = build_version_command_fields

            version_info = read_data(VersionBuild, lib_file)
            return parse_version(version_info.minos)
        else:
            # Not a version command: skip the whole command and continue.
            lib_file.seek(pos + segment_base.cmdsize)
            continue
+
+
def parse_version(version):
    """Decode a nibble-packed Mach-O version (xxxx.yy.zz) into a tuple."""
    major = (version >> 16) & 0xFFFF
    minor = (version >> 8) & 0xFF
    patch = version & 0xFF
    return major, minor, patch
+
+
def calculate_macosx_platform_tag(archive_root, platform_tag):
    """
    Calculate proper macosx platform tag basing on files which are included to wheel

    Example platform tag `macosx-10.14-x86_64`

    :param archive_root: directory tree scanned for .dylib/.so files whose
        embedded minimum-version requirements may raise the tag
    :param platform_tag: dash-separated tag ("prefix-version-suffix")
    :return: underscore-separated tag, e.g. ``macosx_10_14_x86_64``
    """
    prefix, base_version, suffix = platform_tag.split("-")
    base_version = tuple(int(x) for x in base_version.split("."))
    base_version = base_version[:2]
    # From macOS 11 on, only the major version is meaningful for the tag.
    if base_version[0] > 10:
        base_version = (base_version[0], 0)
    assert len(base_version) == 2
    if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
        deploy_target = tuple(
            int(x) for x in os.environ["MACOSX_DEPLOYMENT_TARGET"].split(".")
        )
        deploy_target = deploy_target[:2]
        if deploy_target[0] > 10:
            deploy_target = (deploy_target[0], 0)
        if deploy_target < base_version:
            sys.stderr.write(
                "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than "
                "the version on which the Python interpreter was compiled ({}), and "
                "will be ignored.\n".format(
                    ".".join(str(x) for x in deploy_target),
                    ".".join(str(x) for x in base_version),
                )
            )
        else:
            base_version = deploy_target

    assert len(base_version) == 2
    start_version = base_version
    # Map each native library to the minimum macOS version it requires.
    versions_dict = {}
    for dirpath, _dirnames, filenames in os.walk(archive_root):
        for filename in filenames:
            if filename.endswith(".dylib") or filename.endswith(".so"):
                lib_path = os.path.join(dirpath, filename)
                min_ver = extract_macosx_min_system_version(lib_path)
                if min_ver is not None:
                    min_ver = min_ver[0:2]
                    if min_ver[0] > 10:
                        min_ver = (min_ver[0], 0)
                    versions_dict[lib_path] = min_ver

    if len(versions_dict) > 0:
        base_version = max(base_version, max(versions_dict.values()))

    # macosx platform tag do not support minor bugfix release
    fin_base_version = "_".join([str(x) for x in base_version])
    if start_version < base_version:
        problematic_files = [k for k, v in versions_dict.items() if v > start_version]
        # FIX: choose the singular/plural form from the number of files
        # BEFORE joining the list into one string; the original checked
        # len() of the joined string, i.e. a character count.
        files_form = "this file" if len(problematic_files) == 1 else "these files"
        problematic_files = "\n".join(problematic_files)
        error_message = (
            "[WARNING] This wheel needs a higher macOS version than {} "
            "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least "
            + fin_base_version
            + " or recreate "
            + files_form
            + " with lower "
            "MACOSX_DEPLOYMENT_TARGET: \n" + problematic_files
        )

        if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
            error_message = error_message.format(
                "is set in MACOSX_DEPLOYMENT_TARGET variable."
            )
        else:
            error_message = error_message.format(
                "the version your Python interpreter is compiled against."
            )

        sys.stderr.write(error_message)

    platform_tag = prefix + "_" + fin_base_version + "_" + suffix
    return platform_tag
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/wheel/metadata.py b/contrib/python/setuptools/py3/setuptools/_vendor/wheel/metadata.py
new file mode 100644
index 0000000000..341f614ceb
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/wheel/metadata.py
@@ -0,0 +1,180 @@
+"""
+Tools for converting old- to new-style metadata.
+"""
+
+from __future__ import annotations
+
+import functools
+import itertools
+import os.path
+import re
+import textwrap
+from email.message import Message
+from email.parser import Parser
+from typing import Iterator
+
+from ..packaging.requirements import Requirement
+
+
+def _nonblank(str):
+ return str and not str.startswith("#")
+
+
@functools.singledispatch
def yield_lines(iterable):
    r"""
    Yield valid lines of a string or iterable.
    >>> list(yield_lines(''))
    []
    >>> list(yield_lines(['foo', 'bar']))
    ['foo', 'bar']
    >>> list(yield_lines('foo\nbar'))
    ['foo', 'bar']
    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
    ['foo', 'baz #comment']
    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
    ['foo', 'bar', 'baz', 'bing']
    """
    return itertools.chain.from_iterable(yield_lines(item) for item in iterable)


@yield_lines.register(str)
def _(text):
    # Strip each line, then drop empty lines and '#' comment lines.
    stripped = (line.strip() for line in text.splitlines())
    return (line for line in stripped if line and not line.startswith("#"))
+
+
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    current_section = None
    current_lines = []
    for line in yield_lines(s):
        if line.startswith("["):
            if not line.endswith("]"):
                raise ValueError("Invalid section heading", line)
            # Flush the previous segment before starting a new section.
            if current_section or current_lines:
                yield current_section, current_lines
            current_section = line[1:-1].strip()
            current_lines = []
        else:
            current_lines.append(line)

    # wrap up last segment
    yield current_section, current_lines
+
+
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    return re.sub("[^A-Za-z0-9.-]+", "_", extra.lower())
+
+
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    collapsed = re.sub("[^A-Za-z0-9.]+", "-", name)
    return collapsed
+
+
def requires_to_requires_dist(requirement: Requirement) -> str:
    """Return the version specifier for a requirement in PEP 345/566 fashion."""
    if getattr(requirement, "url", None):
        # Direct-reference requirement: emit the URL instead of a specifier.
        return " @ " + requirement.url

    specifiers = sorted(spec.operator + spec.version for spec in requirement.specifier)
    return " " + ",".join(specifiers) if specifiers else ""
+
+
def convert_requirements(requirements: list[str]) -> Iterator[str]:
    """Yield Requires-Dist: strings for parsed requirements strings."""
    for raw in requirements:
        parsed = Requirement(raw)
        spec = requires_to_requires_dist(parsed)
        extras = ",".join(sorted(safe_extra(extra) for extra in parsed.extras))
        bracketed = f"[{extras}]" if extras else ""
        yield safe_name(parsed.name) + bracketed + spec
+
+
def generate_requirements(
    extras_require: dict[str, list[str]],
) -> Iterator[tuple[str, str]]:
    """
    Convert requirements from a setup()-style dictionary to
    ('Requires-Dist', 'requirement') and ('Provides-Extra', 'extra') tuples.

    extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
    using the empty extra {'': [requirements]} to hold install_requires.
    """
    for extra, depends in extras_require.items():
        extra = extra or ""
        condition = ""
        if ":" in extra:  # setuptools extra:condition syntax
            extra, condition = extra.split(":", 1)

        extra = safe_extra(extra)
        if extra:
            yield "Provides-Extra", extra
            extra_marker = "extra == '%s'" % extra
            # Combine an existing environment-marker condition with the
            # extra marker, or use the extra marker on its own.
            condition = (
                f"({condition}) and {extra_marker}" if condition else extra_marker
            )

        marker = " ; " + condition if condition else ""
        for new_req in convert_requirements(depends):
            yield "Requires-Dist", new_req + marker
+
+
def pkginfo_to_metadata(egg_info_path: str, pkginfo_path: str) -> Message:
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
    """
    with open(pkginfo_path, encoding="utf-8") as headers:
        pkg_info = Parser().parse(headers)

    pkg_info.replace_header("Metadata-Version", "2.1")
    # Those will be regenerated from `requires.txt`.
    del pkg_info["Provides-Extra"]
    del pkg_info["Requires-Dist"]

    requires_path = os.path.join(egg_info_path, "requires.txt")
    if os.path.exists(requires_path):
        with open(requires_path, encoding="utf-8") as requires_file:
            sections = sorted(
                split_sections(requires_file.read()), key=lambda item: item[0] or ""
            )

        for extra, reqs in sections:
            for key, value in generate_requirements({extra: reqs}):
                # Avoid duplicating headers already present in PKG-INFO.
                if (key, value) not in pkg_info.items():
                    pkg_info[key] = value

    description = pkg_info["Description"]
    if description:
        # Move the long description into the message payload, dedenting the
        # continuation lines that email header folding indented.
        lines = description.splitlines()
        body = "\n".join(
            (
                lines[0].lstrip(),
                textwrap.dedent("\n".join(lines[1:])),
                "\n",
            )
        )
        pkg_info.set_payload(body)
        del pkg_info["Description"]

    return pkg_info
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/wheel/util.py b/contrib/python/setuptools/py3/setuptools/_vendor/wheel/util.py
new file mode 100644
index 0000000000..d98d98cb52
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/wheel/util.py
@@ -0,0 +1,26 @@
+from __future__ import annotations
+
+import base64
+import logging
+
+log = logging.getLogger("wheel")
+
+# ensure Python logging is configured
+try:
+ __import__("setuptools.logging")
+except ImportError:
+ # setuptools < ??
+ from . import _setuptools_logging
+
+ _setuptools_logging.configure()
+
+
def urlsafe_b64encode(data: bytes) -> bytes:
    """urlsafe_b64encode without padding"""
    encoded = base64.urlsafe_b64encode(data)
    return encoded.rstrip(b"=")
+
+
def urlsafe_b64decode(data: bytes) -> bytes:
    """urlsafe_b64decode without padding"""
    # 4 - (len & 3) matches the original pad formula exactly (it appends a
    # full "====" when the input length is already a multiple of four, which
    # the lenient binascii decoder accepts).
    pad_len = 4 - (len(data) & 3)
    return base64.urlsafe_b64decode(data + b"=" * pad_len)
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/wheel/wheelfile.py b/contrib/python/setuptools/py3/setuptools/_vendor/wheel/wheelfile.py
new file mode 100644
index 0000000000..83a31772bd
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/wheel/wheelfile.py
@@ -0,0 +1,199 @@
+from __future__ import annotations
+
+import csv
+import hashlib
+import os.path
+import re
+import stat
+import time
+from io import StringIO, TextIOWrapper
+from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo
+
+from .util import log, urlsafe_b64decode, urlsafe_b64encode
+
# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]+?))(-(?P<build>\d[^\s-]*))?
    -(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>\S+)\.whl$""",
    re.VERBOSE,
)
MINIMUM_TIMESTAMP = 315532800  # 1980-01-01 00:00:00 UTC


def get_zipinfo_datetime(timestamp=None):
    """Return a zip-compatible ``(Y, M, D, h, m, s)`` tuple for *timestamp*.

    SOURCE_DATE_EPOCH (if set) overrides the timestamp so that wheels can be
    built reproducibly, and anything before 1980-01-01 is clamped to the zip
    format's minimum representable date.
    """
    chosen = os.environ.get("SOURCE_DATE_EPOCH", timestamp or time.time())
    clamped = max(int(chosen), MINIMUM_TIMESTAMP)
    return time.gmtime(clamped)[0:6]
+
+
class WheelFile(ZipFile):
    """A ZipFile derivative class that also reads SHA-256 hashes from
    .dist-info/RECORD and checks any read files against those.
    """

    # Hash used for entries written to RECORD (PEP 427 requires sha256+).
    _default_algorithm = hashlib.sha256

    def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
        # The wheel filename encodes name/version/tags; reject anything that
        # does not parse, since dist_info_path is derived from it below.
        basename = os.path.basename(file)
        self.parsed_filename = WHEEL_INFO_RE.match(basename)
        if not basename.endswith(".whl") or self.parsed_filename is None:
            raise WheelError(f"Bad wheel filename {basename!r}")

        ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)

        self.dist_info_path = "{}.dist-info".format(
            self.parsed_filename.group("namever")
        )
        self.record_path = self.dist_info_path + "/RECORD"
        # arcname -> (algorithm name, expected raw digest) read from RECORD
        self._file_hashes = {}
        # arcname -> size in bytes, filled in while writing
        self._file_sizes = {}
        if mode == "r":
            # Ignore RECORD and any embedded wheel signatures
            self._file_hashes[self.record_path] = None, None
            self._file_hashes[self.record_path + ".jws"] = None, None
            self._file_hashes[self.record_path + ".p7s"] = None, None

            # Fill in the expected hashes by reading them from RECORD
            try:
                record = self.open(self.record_path)
            except KeyError:
                raise WheelError(f"Missing {self.record_path} file") from None

            with record:
                for line in csv.reader(
                    TextIOWrapper(record, newline="", encoding="utf-8")
                ):
                    # RECORD rows are "path,algorithm=b64digest,size".
                    path, hash_sum, size = line
                    if not hash_sum:
                        continue

                    algorithm, hash_sum = hash_sum.split("=")
                    try:
                        hashlib.new(algorithm)
                    except ValueError:
                        raise WheelError(
                            f"Unsupported hash algorithm: {algorithm}"
                        ) from None

                    if algorithm.lower() in {"md5", "sha1"}:
                        raise WheelError(
                            f"Weak hash algorithm ({algorithm}) is not permitted by "
                            f"PEP 427"
                        )

                    self._file_hashes[path] = (
                        algorithm,
                        urlsafe_b64decode(hash_sum.encode("ascii")),
                    )

    def open(self, name_or_info, mode="r", pwd=None):
        # Replacement for ZipExtFile._update_crc: verifies the RECORD hash
        # as a side effect of the normal CRC check while the member is read.
        def _update_crc(newdata):
            eof = ef._eof
            update_crc_orig(newdata)
            running_hash.update(newdata)
            if eof and running_hash.digest() != expected_hash:
                raise WheelError(f"Hash mismatch for file '{ef_name}'")

        ef_name = (
            name_or_info.filename if isinstance(name_or_info, ZipInfo) else name_or_info
        )
        # Every real (non-directory) member read from the wheel must have an
        # entry in RECORD; refuse to open unhashed files.
        if (
            mode == "r"
            and not ef_name.endswith("/")
            and ef_name not in self._file_hashes
        ):
            raise WheelError(f"No hash found for file '{ef_name}'")

        ef = ZipFile.open(self, name_or_info, mode, pwd)
        if mode == "r" and not ef_name.endswith("/"):
            algorithm, expected_hash = self._file_hashes[ef_name]
            if expected_hash is not None:
                # Monkey patch the _update_crc method to also check for the hash from
                # RECORD
                running_hash = hashlib.new(algorithm)
                update_crc_orig, ef._update_crc = ef._update_crc, _update_crc

        return ef

    def write_files(self, base_dir):
        # Walk deterministically and write .dist-info entries last so the
        # metadata lands at the end of the archive.
        log.info(f"creating '{self.filename}' and adding '{base_dir}' to it")
        deferred = []
        for root, dirnames, filenames in os.walk(base_dir):
            # Sort the directory names so that `os.walk` will walk them in a
            # defined order on the next iteration.
            dirnames.sort()
            for name in sorted(filenames):
                path = os.path.normpath(os.path.join(root, name))
                if os.path.isfile(path):
                    arcname = os.path.relpath(path, base_dir).replace(os.path.sep, "/")
                    if arcname == self.record_path:
                        # RECORD itself is regenerated in close().
                        pass
                    elif root.endswith(".dist-info"):
                        deferred.append((path, arcname))
                    else:
                        self.write(path, arcname)

        deferred.sort()
        for path, arcname in deferred:
            self.write(path, arcname)

    def write(self, filename, arcname=None, compress_type=None):
        # Read the whole file so writestr() can hash it for RECORD.
        with open(filename, "rb") as f:
            st = os.fstat(f.fileno())
            data = f.read()

        zinfo = ZipInfo(
            arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)
        )
        # Preserve the source file's permission bits and file type.
        zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
        zinfo.compress_type = compress_type or self.compression
        self.writestr(zinfo, data, compress_type)

    def writestr(self, zinfo_or_arcname, data, compress_type=None):
        if isinstance(zinfo_or_arcname, str):
            zinfo_or_arcname = ZipInfo(
                zinfo_or_arcname, date_time=get_zipinfo_datetime()
            )
            zinfo_or_arcname.compress_type = self.compression
            # Default to a regular file with rw-rw-r-- permissions.
            zinfo_or_arcname.external_attr = (0o664 | stat.S_IFREG) << 16

        if isinstance(data, str):
            data = data.encode("utf-8")

        ZipFile.writestr(self, zinfo_or_arcname, data, compress_type)
        fname = (
            zinfo_or_arcname.filename
            if isinstance(zinfo_or_arcname, ZipInfo)
            else zinfo_or_arcname
        )
        log.info(f"adding '{fname}'")
        # Record hash and size for RECORD generation in close().
        if fname != self.record_path:
            hash_ = self._default_algorithm(data)
            self._file_hashes[fname] = (
                hash_.name,
                urlsafe_b64encode(hash_.digest()).decode("ascii"),
            )
            self._file_sizes[fname] = len(data)

    def close(self):
        # Write RECORD
        if self.fp is not None and self.mode == "w" and self._file_hashes:
            data = StringIO()
            writer = csv.writer(data, delimiter=",", quotechar='"', lineterminator="\n")
            writer.writerows(
                (
                    (fname, algorithm + "=" + hash_, self._file_sizes[fname])
                    for fname, (algorithm, hash_) in self._file_hashes.items()
                )
            )
            # RECORD lists itself with empty hash and size fields.
            writer.writerow((format(self.record_path), "", ""))
            self.writestr(self.record_path, data.getvalue())

        ZipFile.close(self)
+
+
class WheelError(Exception):
    """Raised for invalid wheel filenames, contents, or hash mismatches."""
diff --git a/contrib/python/setuptools/py3/setuptools/build_meta.py b/contrib/python/setuptools/py3/setuptools/build_meta.py
index be2742d73d..c52c872fd0 100644
--- a/contrib/python/setuptools/py3/setuptools/build_meta.py
+++ b/contrib/python/setuptools/py3/setuptools/build_meta.py
@@ -26,6 +26,8 @@ bug reports or API stability):
Again, this is not a formal definition! Just a "taste" of the module.
"""
+from __future__ import annotations
+
import io
import os
import shlex
@@ -36,12 +38,12 @@ import contextlib
import tempfile
import warnings
from pathlib import Path
-from typing import Dict, Iterator, List, Optional, Union
+from typing import Dict, Iterator, List, Optional, Union, Iterable
import setuptools
import distutils
from . import errors
-from ._path import same_path
+from ._path import same_path, StrPath
from ._reqs import parse_strings
from .warnings import SetuptoolsDeprecationWarning
from distutils.util import strtobool
@@ -113,7 +115,7 @@ def _get_immediate_subdirectories(a_dir):
]
-def _file_with_extension(directory, extension):
+def _file_with_extension(directory: StrPath, extension: str | tuple[str, ...]):
matching = (f for f in os.listdir(directory) if f.endswith(extension))
try:
(file,) = matching
@@ -163,7 +165,7 @@ class _ConfigSettingsTranslator:
# See pypa/setuptools#1928 pypa/setuptools#2491
- def _get_config(self, key: str, config_settings: _ConfigSettings) -> List[str]:
+ def _get_config(self, key: str, config_settings: _ConfigSettings) -> list[str]:
"""
Get the value of a specific key in ``config_settings`` as a list of strings.
@@ -322,7 +324,7 @@ class _BuildMetaBackend(_ConfigSettingsTranslator):
)
def get_requires_for_build_wheel(self, config_settings=None):
- return self._get_build_requires(config_settings, requirements=['wheel'])
+ return self._get_build_requires(config_settings, requirements=[])
def get_requires_for_build_sdist(self, config_settings=None):
return self._get_build_requires(config_settings, requirements=[])
@@ -370,11 +372,11 @@ class _BuildMetaBackend(_ConfigSettingsTranslator):
def _build_with_temp_dir(
self,
- setup_command,
- result_extension,
- result_directory,
- config_settings,
- arbitrary_args=(),
+ setup_command: Iterable[str],
+ result_extension: str | tuple[str, ...],
+ result_directory: StrPath,
+ config_settings: _ConfigSettings,
+ arbitrary_args: Iterable[str] = (),
):
result_directory = os.path.abspath(result_directory)
@@ -404,7 +406,10 @@ class _BuildMetaBackend(_ConfigSettingsTranslator):
return result_basename
def build_wheel(
- self, wheel_directory, config_settings=None, metadata_directory=None
+ self,
+ wheel_directory: StrPath,
+ config_settings: _ConfigSettings = None,
+ metadata_directory: StrPath | None = None,
):
with suppress_known_deprecation():
return self._build_with_temp_dir(
@@ -415,12 +420,14 @@ class _BuildMetaBackend(_ConfigSettingsTranslator):
self._arbitrary_args(config_settings),
)
- def build_sdist(self, sdist_directory, config_settings=None):
+ def build_sdist(
+ self, sdist_directory: StrPath, config_settings: _ConfigSettings = None
+ ):
return self._build_with_temp_dir(
['sdist', '--formats', 'gztar'], '.tar.gz', sdist_directory, config_settings
)
- def _get_dist_info_dir(self, metadata_directory: Optional[str]) -> Optional[str]:
+ def _get_dist_info_dir(self, metadata_directory: StrPath | None) -> str | None:
if not metadata_directory:
return None
dist_info_candidates = list(Path(metadata_directory).glob("*.dist-info"))
@@ -433,7 +440,10 @@ class _BuildMetaBackend(_ConfigSettingsTranslator):
# get_requires_for_build_editable
# prepare_metadata_for_build_editable
def build_editable(
- self, wheel_directory, config_settings=None, metadata_directory=None
+ self,
+ wheel_directory: StrPath,
+ config_settings: _ConfigSettings = None,
+ metadata_directory: str | None = None,
):
# XXX can or should we hide our editable_wheel command normally?
info_dir = self._get_dist_info_dir(metadata_directory)
diff --git a/contrib/python/setuptools/py3/setuptools/command/_requirestxt.py b/contrib/python/setuptools/py3/setuptools/command/_requirestxt.py
index b0c2d7059a..1f1967e7aa 100644
--- a/contrib/python/setuptools/py3/setuptools/command/_requirestxt.py
+++ b/contrib/python/setuptools/py3/setuptools/command/_requirestxt.py
@@ -7,10 +7,12 @@ The ``requires.txt`` file has a specific format:
See https://setuptools.pypa.io/en/latest/deprecated/python_eggs.html#requires-txt
"""
+from __future__ import annotations
+
import io
from collections import defaultdict
from itertools import filterfalse
-from typing import Dict, List, Tuple, Mapping, TypeVar
+from typing import Dict, Mapping, TypeVar
from .. import _reqs
from ..extern.jaraco.text import yield_lines
@@ -26,7 +28,7 @@ _StrOrIter = _reqs._StrOrIter
def _prepare(
install_requires: _StrOrIter, extras_require: Mapping[str, _StrOrIter]
-) -> Tuple[List[str], Dict[str, List[str]]]:
+) -> tuple[list[str], dict[str, list[str]]]:
"""Given values for ``install_requires`` and ``extras_require``
create modified versions in a way that can be written in ``requires.txt``
"""
@@ -54,7 +56,7 @@ def _convert_extras_requirements(
def _move_install_requirements_markers(
install_requires: _StrOrIter, extras_require: Mapping[str, _Ordered[Requirement]]
-) -> Tuple[List[str], Dict[str, List[str]]]:
+) -> tuple[list[str], dict[str, list[str]]]:
"""
The ``requires.txt`` file has an specific format:
- Environment markers need to be part of the section headers and
diff --git a/contrib/python/setuptools/py3/setuptools/command/bdist_egg.py b/contrib/python/setuptools/py3/setuptools/command/bdist_egg.py
index adcb0a1ba1..73476e0cec 100644
--- a/contrib/python/setuptools/py3/setuptools/command/bdist_egg.py
+++ b/contrib/python/setuptools/py3/setuptools/command/bdist_egg.py
@@ -85,9 +85,9 @@ class bdist_egg(Command):
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
- self.keep_temp = 0
+ self.keep_temp = False
self.dist_dir = None
- self.skip_build = 0
+ self.skip_build = False
self.egg_output = None
self.exclude_source_files = None
@@ -136,7 +136,7 @@ class bdist_egg(Command):
try:
log.info("installing package data to %s", self.bdist_dir)
- self.call_command('install_data', force=0, root=None)
+ self.call_command('install_data', force=False, root=None)
finally:
self.distribution.data_files = old
@@ -164,7 +164,7 @@ class bdist_egg(Command):
instcmd.root = None
if self.distribution.has_c_libraries() and not self.skip_build:
self.run_command('build_clib')
- cmd = self.call_command('install_lib', warn_dir=0)
+ cmd = self.call_command('install_lib', warn_dir=False)
instcmd.root = old_root
all_outputs, ext_outputs = self.get_ext_outputs()
@@ -192,7 +192,7 @@ class bdist_egg(Command):
if self.distribution.scripts:
script_dir = os.path.join(egg_info, 'scripts')
log.info("installing scripts to %s", script_dir)
- self.call_command('install_scripts', install_dir=script_dir, no_ep=1)
+ self.call_command('install_scripts', install_dir=script_dir, no_ep=True)
self.copy_metadata_to(egg_info)
native_libs = os.path.join(egg_info, "native_libs.txt")
@@ -382,8 +382,9 @@ def scan_module(egg_dir, base, name, stubs):
for bad in [
'getsource',
'getabsfile',
+ 'getfile',
'getsourcefile',
- 'getfile' 'getsourcelines',
+ 'getsourcelines',
'findsource',
'getcomments',
'getframeinfo',
@@ -426,7 +427,9 @@ def can_scan():
INSTALL_DIRECTORY_ATTRS = ['install_lib', 'install_dir', 'install_data', 'install_base']
-def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, mode='w'):
+def make_zipfile(
+ zip_filename, base_dir, verbose=False, dry_run=False, compress=True, mode='w'
+):
"""Create a zip file from all the files under 'base_dir'. The output
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
Python module (if available) or the InfoZIP "zip" utility (if installed
diff --git a/contrib/python/setuptools/py3/setuptools/command/bdist_wheel.py b/contrib/python/setuptools/py3/setuptools/command/bdist_wheel.py
new file mode 100644
index 0000000000..a81187598a
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/command/bdist_wheel.py
@@ -0,0 +1,597 @@
+"""
+Create a wheel (.whl) distribution.
+
+A wheel is a built archive format.
+"""
+
+from __future__ import annotations
+
+import os
+import re
+import shutil
+import stat
+import struct
+import sys
+import sysconfig
+import warnings
+from email.generator import BytesGenerator, Generator
+from email.policy import EmailPolicy
+from distutils import log
+from glob import iglob
+from shutil import rmtree
+from typing import TYPE_CHECKING, Callable, Iterable, Literal, Sequence, cast
+from zipfile import ZIP_DEFLATED, ZIP_STORED
+
+from .. import Command, __version__
+from ..extern.wheel.metadata import pkginfo_to_metadata
+from ..extern.packaging import tags
+from ..extern.packaging import version as _packaging_version
+from ..extern.wheel.wheelfile import WheelFile
+
+if TYPE_CHECKING:
+ import types
+
+
+def safe_name(name: str) -> str:
+ """Convert an arbitrary string to a standard distribution name
+ Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+ """
+ return re.sub("[^A-Za-z0-9.]+", "-", name)
+
+
+def safe_version(version: str) -> str:
+ """
+ Convert an arbitrary string to a standard version string
+ """
+ try:
+ # normalize the version
+ return str(_packaging_version.Version(version))
+ except _packaging_version.InvalidVersion:
+ version = version.replace(" ", ".")
+ return re.sub("[^A-Za-z0-9.]+", "-", version)
+
+
+setuptools_major_version = int(__version__.split(".")[0])
+
+PY_LIMITED_API_PATTERN = r"cp3\d"
+
+
+def _is_32bit_interpreter() -> bool:
+ return struct.calcsize("P") == 4
+
+
+def python_tag() -> str:
+ return f"py{sys.version_info[0]}"
+
+
+def get_platform(archive_root: str | None) -> str:
+ """Return our platform name 'win32', 'linux_x86_64'"""
+ result = sysconfig.get_platform()
+ if result.startswith("macosx") and archive_root is not None:
+ from ..extern.wheel.macosx_libfile import calculate_macosx_platform_tag
+
+ result = calculate_macosx_platform_tag(archive_root, result)
+ elif _is_32bit_interpreter():
+ if result == "linux-x86_64":
+ # pip pull request #3497
+ result = "linux-i686"
+ elif result == "linux-aarch64":
+ # packaging pull request #234
+ # TODO armv8l, packaging pull request #690 => this did not land
+ # in pip/packaging yet
+ result = "linux-armv7l"
+
+ return result.replace("-", "_")
+
+
+def get_flag(
+ var: str, fallback: bool, expected: bool = True, warn: bool = True
+) -> bool:
+ """Use a fallback value for determining SOABI flags if the needed config
+ var is unset or unavailable."""
+ val = sysconfig.get_config_var(var)
+ if val is None:
+ if warn:
+ warnings.warn(
+ f"Config variable '{var}' is unset, Python ABI tag may be incorrect",
+ RuntimeWarning,
+ stacklevel=2,
+ )
+ return fallback
+ return val == expected
+
+
+def get_abi_tag() -> str | None:
+ """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
+ soabi: str = sysconfig.get_config_var("SOABI")
+ impl = tags.interpreter_name()
+ if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
+ d = ""
+ m = ""
+ u = ""
+ if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
+ d = "d"
+
+ if get_flag(
+ "WITH_PYMALLOC",
+ impl == "cp",
+ warn=(impl == "cp" and sys.version_info < (3, 8)),
+ ) and sys.version_info < (3, 8):
+ m = "m"
+
+ abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
+ elif soabi and impl == "cp" and soabi.startswith("cpython"):
+ # non-Windows
+ abi = "cp" + soabi.split("-")[1]
+ elif soabi and impl == "cp" and soabi.startswith("cp"):
+ # Windows
+ abi = soabi.split("-")[0]
+ elif soabi and impl == "pp":
+ # we want something like pypy36-pp73
+ abi = "-".join(soabi.split("-")[:2])
+ abi = abi.replace(".", "_").replace("-", "_")
+ elif soabi and impl == "graalpy":
+ abi = "-".join(soabi.split("-")[:3])
+ abi = abi.replace(".", "_").replace("-", "_")
+ elif soabi:
+ abi = soabi.replace(".", "_").replace("-", "_")
+ else:
+ abi = None
+
+ return abi
+
+
+def safer_name(name: str) -> str:
+ return safe_name(name).replace("-", "_")
+
+
+def safer_version(version: str) -> str:
+ return safe_version(version).replace("-", "_")
+
+
+def remove_readonly(
+ func: Callable[..., object],
+ path: str,
+ excinfo: tuple[type[Exception], Exception, types.TracebackType],
+) -> None:
+ remove_readonly_exc(func, path, excinfo[1])
+
+
+def remove_readonly_exc(func: Callable[..., object], path: str, exc: Exception) -> None:
+ os.chmod(path, stat.S_IWRITE)
+ func(path)
+
+
+class bdist_wheel(Command):
+ description = "create a wheel distribution"
+
+ supported_compressions = {
+ "stored": ZIP_STORED,
+ "deflated": ZIP_DEFLATED,
+ }
+
+ user_options = [
+ ("bdist-dir=", "b", "temporary directory for creating the distribution"),
+ (
+ "plat-name=",
+ "p",
+ "platform name to embed in generated filenames "
+ f"(default: {get_platform(None)})",
+ ),
+ (
+ "keep-temp",
+ "k",
+ "keep the pseudo-installation tree around after "
+ "creating the distribution archive",
+ ),
+ ("dist-dir=", "d", "directory to put final built distributions in"),
+ ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
+ (
+ "relative",
+ None,
+ "build the archive using relative paths (default: false)",
+ ),
+ (
+ "owner=",
+ "u",
+ "Owner name used when creating a tar file [default: current user]",
+ ),
+ (
+ "group=",
+ "g",
+ "Group name used when creating a tar file [default: current group]",
+ ),
+ ("universal", None, "make a universal wheel (default: false)"),
+ (
+ "compression=",
+ None,
+ "zipfile compression (one of: {}) (default: 'deflated')".format(
+ ", ".join(supported_compressions)
+ ),
+ ),
+ (
+ "python-tag=",
+ None,
+ f"Python implementation compatibility tag (default: '{python_tag()}')",
+ ),
+ (
+ "build-number=",
+ None,
+ "Build number for this particular version. "
+ "As specified in PEP-0427, this must start with a digit. "
+ "[default: None]",
+ ),
+ (
+ "py-limited-api=",
+ None,
+ "Python tag (cp32|cp33|cpNN) for abi3 wheel tag (default: false)",
+ ),
+ ]
+
+ boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
+
+ def initialize_options(self) -> None:
+ self.bdist_dir: str | None = None
+ self.data_dir = None
+ self.plat_name: str | None = None
+ self.plat_tag = None
+ self.format = "zip"
+ self.keep_temp = False
+ self.dist_dir: str | None = None
+ self.egginfo_dir = None
+ self.root_is_pure: bool | None = None
+ self.skip_build = None
+ self.relative = False
+ self.owner = None
+ self.group = None
+ self.universal: bool = False
+ self.compression: str | int = "deflated"
+ self.python_tag: str = python_tag()
+ self.build_number: str | None = None
+ self.py_limited_api: str | Literal[False] = False
+ self.plat_name_supplied = False
+
+ def finalize_options(self):
+ if self.bdist_dir is None:
+ bdist_base = self.get_finalized_command("bdist").bdist_base
+ self.bdist_dir = os.path.join(bdist_base, "wheel")
+
+ egg_info = self.distribution.get_command_obj("egg_info")
+ egg_info.ensure_finalized() # needed for correct `wheel_dist_name`
+
+ self.data_dir = self.wheel_dist_name + ".data"
+ self.plat_name_supplied = self.plat_name is not None
+
+ try:
+ self.compression = self.supported_compressions[self.compression]
+ except KeyError:
+ raise ValueError(f"Unsupported compression: {self.compression}") from None
+
+ need_options = ("dist_dir", "plat_name", "skip_build")
+
+ self.set_undefined_options("bdist", *zip(need_options, need_options))
+
+ self.root_is_pure = not (
+ self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
+ )
+
+ if self.py_limited_api and not re.match(
+ PY_LIMITED_API_PATTERN, self.py_limited_api
+ ):
+ raise ValueError(f"py-limited-api must match '{PY_LIMITED_API_PATTERN}'")
+
+ # Support legacy [wheel] section for setting universal
+ wheel = self.distribution.get_option_dict("wheel")
+ if "universal" in wheel:
+ # please don't define this in your global configs
+ log.warn("The [wheel] section is deprecated. Use [bdist_wheel] instead.")
+ val = wheel["universal"][1].strip()
+ if val.lower() in ("1", "true", "yes"):
+ self.universal = True
+
+ if self.build_number is not None and not self.build_number[:1].isdigit():
+ raise ValueError("Build tag (build-number) must start with a digit.")
+
+ @property
+ def wheel_dist_name(self):
+ """Return distribution full name with - replaced with _"""
+ components = (
+ safer_name(self.distribution.get_name()),
+ safer_version(self.distribution.get_version()),
+ )
+ if self.build_number:
+ components += (self.build_number,)
+ return "-".join(components)
+
+ def get_tag(self) -> tuple[str, str, str]:
+ # bdist sets self.plat_name if unset, we should only use it for purepy
+ # wheels if the user supplied it.
+ if self.plat_name_supplied:
+ plat_name = cast(str, self.plat_name)
+ elif self.root_is_pure:
+ plat_name = "any"
+ else:
+ # macosx contains system version in platform name so need special handle
+ if self.plat_name and not self.plat_name.startswith("macosx"):
+ plat_name = self.plat_name
+ else:
+ # on macosx always limit the platform name to comply with any
+ # c-extension modules in bdist_dir, since the user can specify
+ # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake
+
+ # on other platforms, and on macosx if there are no c-extension
+ # modules, use the default platform name.
+ plat_name = get_platform(self.bdist_dir)
+
+ if _is_32bit_interpreter():
+ if plat_name in ("linux-x86_64", "linux_x86_64"):
+ plat_name = "linux_i686"
+ if plat_name in ("linux-aarch64", "linux_aarch64"):
+ # TODO armv8l, packaging pull request #690 => this did not land
+ # in pip/packaging yet
+ plat_name = "linux_armv7l"
+
+ plat_name = (
+ plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
+ )
+
+ if self.root_is_pure:
+ if self.universal:
+ impl = "py2.py3"
+ else:
+ impl = self.python_tag
+ tag = (impl, "none", plat_name)
+ else:
+ impl_name = tags.interpreter_name()
+ impl_ver = tags.interpreter_version()
+ impl = impl_name + impl_ver
+ # We don't work on CPython 3.1, 3.0.
+ if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
+ impl = self.py_limited_api
+ abi_tag = "abi3"
+ else:
+ abi_tag = str(get_abi_tag()).lower()
+ tag = (impl, abi_tag, plat_name)
+ # issue gh-374: allow overriding plat_name
+ supported_tags = [
+ (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
+ ]
+ assert (
+ tag in supported_tags
+ ), f"would build wheel with unsupported tag {tag}"
+ return tag
+
+ def run(self):
+ build_scripts = self.reinitialize_command("build_scripts")
+ build_scripts.executable = "python"
+ build_scripts.force = True
+
+ build_ext = self.reinitialize_command("build_ext")
+ build_ext.inplace = False
+
+ if not self.skip_build:
+ self.run_command("build")
+
+ install = self.reinitialize_command("install", reinit_subcommands=True)
+ install.root = self.bdist_dir
+ install.compile = False
+ install.skip_build = self.skip_build
+ install.warn_dir = False
+
+ # A wheel without setuptools scripts is more cross-platform.
+ # Use the (undocumented) `no_ep` option to setuptools'
+ # install_scripts command to avoid creating entry point scripts.
+ install_scripts = self.reinitialize_command("install_scripts")
+ install_scripts.no_ep = True
+
+ # Use a custom scheme for the archive, because we have to decide
+ # at installation time which scheme to use.
+ for key in ("headers", "scripts", "data", "purelib", "platlib"):
+ setattr(install, "install_" + key, os.path.join(self.data_dir, key))
+
+ basedir_observed = ""
+
+ if os.name == "nt":
+ # win32 barfs if any of these are ''; could be '.'?
+ # (distutils.command.install:change_roots bug)
+ basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
+ self.install_libbase = self.install_lib = basedir_observed
+
+ setattr(
+ install,
+ "install_purelib" if self.root_is_pure else "install_platlib",
+ basedir_observed,
+ )
+
+ log.info(f"installing to {self.bdist_dir}")
+
+ self.run_command("install")
+
+ impl_tag, abi_tag, plat_tag = self.get_tag()
+ archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
+ if not self.relative:
+ archive_root = self.bdist_dir
+ else:
+ archive_root = os.path.join(
+ self.bdist_dir, self._ensure_relative(install.install_base)
+ )
+
+ self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
+ distinfo_dirname = (
+ f"{safer_name(self.distribution.get_name())}-"
+ f"{safer_version(self.distribution.get_version())}.dist-info"
+ )
+ distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
+ self.egg2dist(self.egginfo_dir, distinfo_dir)
+
+ self.write_wheelfile(distinfo_dir)
+
+ # Make the archive
+ if not os.path.exists(self.dist_dir):
+ os.makedirs(self.dist_dir)
+
+ wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
+ with WheelFile(wheel_path, "w", self.compression) as wf:
+ wf.write_files(archive_root)
+
+ # Add to 'Distribution.dist_files' so that the "upload" command works
+ getattr(self.distribution, "dist_files", []).append((
+ "bdist_wheel",
+ "{}.{}".format(*sys.version_info[:2]), # like 3.7
+ wheel_path,
+ ))
+
+ if not self.keep_temp:
+ log.info(f"removing {self.bdist_dir}")
+ if not self.dry_run:
+ if sys.version_info < (3, 12):
+ rmtree(self.bdist_dir, onerror=remove_readonly)
+ else:
+ rmtree(self.bdist_dir, onexc=remove_readonly_exc)
+
+ def write_wheelfile(
+ self, wheelfile_base: str, generator: str = f"setuptools ({__version__})"
+ ):
+ from email.message import Message
+
+ msg = Message()
+ msg["Wheel-Version"] = "1.0" # of the spec
+ msg["Generator"] = generator
+ msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
+ if self.build_number is not None:
+ msg["Build"] = self.build_number
+
+ # Doesn't work for bdist_wininst
+ impl_tag, abi_tag, plat_tag = self.get_tag()
+ for impl in impl_tag.split("."):
+ for abi in abi_tag.split("."):
+ for plat in plat_tag.split("."):
+ msg["Tag"] = "-".join((impl, abi, plat))
+
+ wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
+ log.info(f"creating {wheelfile_path}")
+ with open(wheelfile_path, "wb") as f:
+ BytesGenerator(f, maxheaderlen=0).flatten(msg)
+
+ def _ensure_relative(self, path: str) -> str:
+ # copied from dir_util, deleted
+ drive, path = os.path.splitdrive(path)
+ if path[0:1] == os.sep:
+ path = drive + path[1:]
+ return path
+
+ @property
+ def license_paths(self) -> Iterable[str]:
+ if setuptools_major_version >= 57:
+ # Setuptools has resolved any patterns to actual file names
+ return self.distribution.metadata.license_files or ()
+
+ files: set[str] = set()
+ metadata = self.distribution.get_option_dict("metadata")
+ if setuptools_major_version >= 42:
+ # Setuptools recognizes the license_files option but does not do globbing
+ patterns = cast(Sequence[str], self.distribution.metadata.license_files)
+ else:
+ # Prior to those, wheel is entirely responsible for handling license files
+ if "license_files" in metadata:
+ patterns = metadata["license_files"][1].split()
+ else:
+ patterns = ()
+
+ if "license_file" in metadata:
+ warnings.warn(
+ 'The "license_file" option is deprecated. Use "license_files" instead.',
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ files.add(metadata["license_file"][1])
+
+ if not files and not patterns and not isinstance(patterns, list):
+ patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")
+
+ for pattern in patterns:
+ for path in iglob(pattern):
+ if path.endswith("~"):
+ log.debug(
+ f'ignoring license file "{path}" as it looks like a backup'
+ )
+ continue
+
+ if path not in files and os.path.isfile(path):
+ log.info(
+ f'adding license file "{path}" (matched pattern "{pattern}")'
+ )
+ files.add(path)
+
+ return files
+
+ def egg2dist(self, egginfo_path: str, distinfo_path: str):
+ """Convert an .egg-info directory into a .dist-info directory"""
+
+ def adios(p: str) -> None:
+ """Appropriately delete directory, file or link."""
+ if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
+ shutil.rmtree(p)
+ elif os.path.exists(p):
+ os.unlink(p)
+
+ adios(distinfo_path)
+
+ if not os.path.exists(egginfo_path):
+ # There is no egg-info. This is probably because the egg-info
+ # file/directory is not named matching the distribution name used
+ # to name the archive file. Check for this case and report
+ # accordingly.
+ import glob
+
+ pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
+ possible = glob.glob(pat)
+ err = f"Egg metadata expected at {egginfo_path} but not found"
+ if possible:
+ alt = os.path.basename(possible[0])
+ err += f" ({alt} found - possible misnamed archive file?)"
+
+ raise ValueError(err)
+
+ if os.path.isfile(egginfo_path):
+ # .egg-info is a single file
+ pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
+ os.mkdir(distinfo_path)
+ else:
+ # .egg-info is a directory
+ pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
+ pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)
+
+ # ignore common egg metadata that is useless to wheel
+ shutil.copytree(
+ egginfo_path,
+ distinfo_path,
+ ignore=lambda x, y: {
+ "PKG-INFO",
+ "requires.txt",
+ "SOURCES.txt",
+ "not-zip-safe",
+ },
+ )
+
+ # delete dependency_links if it is only whitespace
+ dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
+ with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
+ dependency_links = dependency_links_file.read().strip()
+ if not dependency_links:
+ adios(dependency_links_path)
+
+ pkg_info_path = os.path.join(distinfo_path, "METADATA")
+ serialization_policy = EmailPolicy(
+ utf8=True,
+ mangle_from_=False,
+ max_line_length=0,
+ )
+ with open(pkg_info_path, "w", encoding="utf-8") as out:
+ Generator(out, policy=serialization_policy).flatten(pkg_info)
+
+ for license_path in self.license_paths:
+ filename = os.path.basename(license_path)
+ shutil.copy(license_path, os.path.join(distinfo_path, filename))
+
+ adios(egginfo_path)
diff --git a/contrib/python/setuptools/py3/setuptools/command/build.py b/contrib/python/setuptools/py3/setuptools/command/build.py
index 16c077b7cc..bc765a17ae 100644
--- a/contrib/python/setuptools/py3/setuptools/command/build.py
+++ b/contrib/python/setuptools/py3/setuptools/command/build.py
@@ -1,4 +1,6 @@
-from typing import Dict, List, Protocol
+from __future__ import annotations
+
+from typing import Protocol
from distutils.command.build import build as _build
_ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"}
@@ -87,7 +89,7 @@ class SubCommand(Protocol):
def run(self):
"""(Required by the original :class:`setuptools.Command` interface)"""
- def get_source_files(self) -> List[str]:
+ def get_source_files(self) -> list[str]:
"""
Return a list of all files that are used by the command to create the expected
outputs.
@@ -98,7 +100,7 @@ class SubCommand(Protocol):
All files should be strings relative to the project root directory.
"""
- def get_outputs(self) -> List[str]:
+ def get_outputs(self) -> list[str]:
"""
Return a list of files intended for distribution as they would have been
produced by the build.
@@ -111,7 +113,7 @@ class SubCommand(Protocol):
and don't correspond to any source file already present in the project.
"""
- def get_output_mapping(self) -> Dict[str, str]:
+ def get_output_mapping(self) -> dict[str, str]:
"""
Return a mapping between destination files as they would be produced by the
build (dict keys) into the respective existing (source) files (dict values).
diff --git a/contrib/python/setuptools/py3/setuptools/command/build_ext.py b/contrib/python/setuptools/py3/setuptools/command/build_ext.py
index 6056fe9b24..508704f3c0 100644
--- a/contrib/python/setuptools/py3/setuptools/command/build_ext.py
+++ b/contrib/python/setuptools/py3/setuptools/command/build_ext.py
@@ -1,9 +1,11 @@
+from __future__ import annotations
+
import os
import sys
import itertools
from importlib.machinery import EXTENSION_SUFFIXES
from importlib.util import cache_from_source as _compiled_file_name
-from typing import Dict, Iterator, List, Tuple
+from typing import Iterator
from pathlib import Path
from distutils.command.build_ext import build_ext as _du_build_ext
@@ -93,7 +95,7 @@ class build_ext(_build_ext):
if old_inplace:
self.copy_extensions_to_source()
- def _get_inplace_equivalent(self, build_py, ext: Extension) -> Tuple[str, str]:
+ def _get_inplace_equivalent(self, build_py, ext: Extension) -> tuple[str, str]:
fullname = self.get_ext_fullname(ext.name)
filename = self.get_ext_filename(fullname)
modpath = fullname.split('.')
@@ -125,7 +127,7 @@ class build_ext(_build_ext):
_, _, name = ext.name.rpartition(".")
return f"{os.path.join(dir_, name)}.py"
- def _get_output_mapping(self) -> Iterator[Tuple[str, str]]:
+ def _get_output_mapping(self) -> Iterator[tuple[str, str]]:
if not self.inplace:
return
@@ -265,7 +267,7 @@ class build_ext(_build_ext):
pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
return any(pkg + libname in libnames for libname in ext.libraries)
- def get_source_files(self) -> List[str]:
+ def get_source_files(self) -> list[str]:
return [*_build_ext.get_source_files(self), *self._get_internal_depends()]
def _get_internal_depends(self) -> Iterator[str]:
@@ -306,12 +308,12 @@ class build_ext(_build_ext):
yield path.as_posix()
- def get_outputs(self) -> List[str]:
+ def get_outputs(self) -> list[str]:
if self.inplace:
return list(self.get_output_mapping().keys())
return sorted(_build_ext.get_outputs(self) + self.__get_stubs_outputs())
- def get_output_mapping(self) -> Dict[str, str]:
+ def get_output_mapping(self) -> dict[str, str]:
"""See :class:`setuptools.commands.build.SubCommand`"""
mapping = self._get_output_mapping()
return dict(sorted(mapping, key=lambda x: x[0]))
@@ -399,7 +401,7 @@ if use_stubs or os.name == 'nt':
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -434,7 +436,7 @@ else:
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
diff --git a/contrib/python/setuptools/py3/setuptools/command/build_py.py b/contrib/python/setuptools/py3/setuptools/command/build_py.py
index 3f40b060b3..ab49874635 100644
--- a/contrib/python/setuptools/py3/setuptools/command/build_py.py
+++ b/contrib/python/setuptools/py3/setuptools/command/build_py.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from functools import partial
from glob import glob
from distutils.util import convert_path
@@ -9,7 +11,7 @@ import distutils.errors
import itertools
import stat
from pathlib import Path
-from typing import Dict, Iterable, Iterator, List, Optional, Tuple
+from typing import Iterable, Iterator
from ..extern.more_itertools import unique_everseen
from ..warnings import SetuptoolsDeprecationWarning
@@ -33,7 +35,7 @@ class build_py(orig.build_py):
"""
editable_mode: bool = False
- existing_egg_info_dir: Optional[str] = None #: Private API, internal use only.
+ existing_egg_info_dir: str | None = None #: Private API, internal use only.
def finalize_options(self):
orig.build_py.finalize_options(self)
@@ -44,7 +46,13 @@ class build_py(orig.build_py):
self.__updated_files = []
def copy_file(
- self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1
+ self,
+ infile,
+ outfile,
+ preserve_mode=True,
+ preserve_times=True,
+ link=None,
+ level=1,
):
# Overwrite base class to allow using links
if link:
@@ -68,7 +76,7 @@ class build_py(orig.build_py):
# Only compile actual .py files, using our base class' idea of what our
# output files are.
- self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
+ self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=False))
def __getattr__(self, attr):
"lazily compute data files"
@@ -130,13 +138,13 @@ class build_py(orig.build_py):
)
return self.exclude_data_files(package, src_dir, files)
- def get_outputs(self, include_bytecode=1) -> List[str]:
+ def get_outputs(self, include_bytecode=True) -> list[str]:
"""See :class:`setuptools.commands.build.SubCommand`"""
if self.editable_mode:
return list(self.get_output_mapping().keys())
return super().get_outputs(include_bytecode)
- def get_output_mapping(self) -> Dict[str, str]:
+ def get_output_mapping(self) -> dict[str, str]:
"""See :class:`setuptools.commands.build.SubCommand`"""
mapping = itertools.chain(
self._get_package_data_output_mapping(),
@@ -144,14 +152,14 @@ class build_py(orig.build_py):
)
return dict(sorted(mapping, key=lambda x: x[0]))
- def _get_module_mapping(self) -> Iterator[Tuple[str, str]]:
+ def _get_module_mapping(self) -> Iterator[tuple[str, str]]:
"""Iterate over all modules producing (dest, src) pairs."""
for package, module, module_file in self.find_all_modules():
package = package.split('.')
filename = self.get_module_outfile(self.build_lib, package, module)
yield (filename, module_file)
- def _get_package_data_output_mapping(self) -> Iterator[Tuple[str, str]]:
+ def _get_package_data_output_mapping(self) -> Iterator[tuple[str, str]]:
"""Iterate over package data producing (dest, src) pairs."""
for package, src_dir, build_dir, filenames in self.data_files:
for filename in filenames:
diff --git a/contrib/python/setuptools/py3/setuptools/command/develop.py b/contrib/python/setuptools/py3/setuptools/command/develop.py
index d07736a005..55f24f396c 100644
--- a/contrib/python/setuptools/py3/setuptools/command/develop.py
+++ b/contrib/python/setuptools/py3/setuptools/command/develop.py
@@ -109,7 +109,7 @@ class develop(namespaces.DevelopInstaller, easy_install):
self.run_command('egg_info')
# Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
+ self.reinitialize_command('build_ext', inplace=True)
self.run_command('build_ext')
if setuptools.bootstrap_install_from:
diff --git a/contrib/python/setuptools/py3/setuptools/command/easy_install.py b/contrib/python/setuptools/py3/setuptools/command/easy_install.py
index 41ff382fe4..eb6ba1025f 100644
--- a/contrib/python/setuptools/py3/setuptools/command/easy_install.py
+++ b/contrib/python/setuptools/py3/setuptools/command/easy_install.py
@@ -10,6 +10,8 @@ __ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html
"""
+from __future__ import annotations
+
from glob import glob
from distutils.util import get_platform
from distutils.util import convert_path, subst_vars
@@ -25,7 +27,6 @@ from distutils.spawn import find_executable
from distutils.command import install
import sys
import os
-from typing import Dict, List
import zipimport
import shutil
import tempfile
@@ -170,7 +171,7 @@ class easy_install(Command):
# the --user option seems to be an opt-in one,
# so the default should be False.
- self.user = 0
+ self.user = False
self.zip_ok = self.local_snapshots_ok = None
self.install_dir = self.script_dir = self.exclude_scripts = None
self.index_url = None
@@ -1058,10 +1059,10 @@ class easy_install(Command):
dl = dst.lower()
if dl.endswith('.pyd') or dl.endswith('.dll'):
parts[-1] = bdist_egg.strip_module(parts[-1])
- top_level[os.path.splitext(parts[0])[0]] = 1
+ top_level[os.path.splitext(parts[0])[0]] = True
native_libs.append(src)
elif dl.endswith('.py') and old != 'SCRIPTS/':
- top_level[os.path.splitext(parts[0])[0]] = 1
+ top_level[os.path.splitext(parts[0])[0]] = True
to_compile.append(dst)
return dst
if not src.endswith('.pth'):
@@ -1317,12 +1318,12 @@ class easy_install(Command):
# try to make the byte compile messages quieter
log.set_verbosity(self.verbose - 1)
- byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
+ byte_compile(to_compile, optimize=0, force=True, dry_run=self.dry_run)
if self.optimize:
byte_compile(
to_compile,
optimize=self.optimize,
- force=1,
+ force=True,
dry_run=self.dry_run,
)
finally:
@@ -1490,7 +1491,7 @@ def expand_paths(inputs): # noqa: C901 # is too complex (11) # FIXME
if dirname in seen:
continue
- seen[dirname] = 1
+ seen[dirname] = True
if not os.path.isdir(dirname):
continue
@@ -1519,7 +1520,7 @@ def expand_paths(inputs): # noqa: C901 # is too complex (11) # FIXME
if line in seen:
continue
- seen[line] = 1
+ seen[line] = True
if not os.path.isdir(line):
continue
@@ -1642,7 +1643,7 @@ class PthDistributions(Environment):
dirty = True
paths.pop()
continue
- seen[normalized_path] = 1
+ seen[normalized_path] = True
f.close()
# remove any trailing empty/blank line
while paths and not paths[-1].strip():
@@ -2038,8 +2039,8 @@ class CommandSpec(list):
those passed to Popen.
"""
- options: List[str] = []
- split_args: Dict[str, bool] = dict()
+ options: list[str] = []
+ split_args: dict[str, bool] = dict()
@classmethod
def best(cls):
diff --git a/contrib/python/setuptools/py3/setuptools/command/editable_wheel.py b/contrib/python/setuptools/py3/setuptools/command/editable_wheel.py
index b8ed84750a..ae31bb4c79 100644
--- a/contrib/python/setuptools/py3/setuptools/command/editable_wheel.py
+++ b/contrib/python/setuptools/py3/setuptools/command/editable_wheel.py
@@ -10,6 +10,8 @@ Create a wheel that, when installed, will make the source package 'editable'
*auxiliary build directory* or ``auxiliary_dir``.
"""
+from __future__ import annotations
+
import logging
import io
import os
@@ -23,14 +25,10 @@ from pathlib import Path
from tempfile import TemporaryDirectory
from typing import (
TYPE_CHECKING,
- Dict,
Iterable,
Iterator,
- List,
Mapping,
- Optional,
Protocol,
- Tuple,
TypeVar,
cast,
)
@@ -59,7 +57,7 @@ from .install import install as install_cls
from .install_scripts import install_scripts as install_scripts_cls
if TYPE_CHECKING:
- from wheel.wheelfile import WheelFile # type:ignore[import-untyped] # noqa
+ from .._vendor.wheel.wheelfile import WheelFile
_P = TypeVar("_P", bound=StrPath)
_logger = logging.getLogger(__name__)
@@ -78,7 +76,7 @@ class _EditableMode(Enum):
COMPAT = "compat" # TODO: Remove `compat` after Dec/2022.
@classmethod
- def convert(cls, mode: Optional[str]) -> "_EditableMode":
+ def convert(cls, mode: str | None) -> _EditableMode:
if not mode:
return _EditableMode.LENIENT # default
@@ -180,7 +178,7 @@ class editable_wheel(Command):
installer = _NamespaceInstaller(dist, installation_dir, pth_prefix, src_root)
installer.install_namespaces()
- def _find_egg_info_dir(self) -> Optional[str]:
+ def _find_egg_info_dir(self) -> str | None:
parent_dir = Path(self.dist_info_dir).parent if self.dist_info_dir else Path()
candidates = map(str, parent_dir.glob("*.egg-info"))
return next(candidates, None)
@@ -255,9 +253,9 @@ class editable_wheel(Command):
elif hasattr(cmd, "inplace"):
cmd.inplace = True # backward compatibility with distutils
- def _collect_build_outputs(self) -> Tuple[List[str], Dict[str, str]]:
- files: List[str] = []
- mapping: Dict[str, str] = {}
+ def _collect_build_outputs(self) -> tuple[list[str], dict[str, str]]:
+ files: list[str] = []
+ mapping: dict[str, str] = {}
build = self.get_finalized_command("build")
for cmd_name in build.get_sub_commands():
@@ -275,7 +273,7 @@ class editable_wheel(Command):
unpacked_wheel: StrPath,
build_lib: StrPath,
tmp_dir: StrPath,
- ) -> Tuple[List[str], Dict[str, str]]:
+ ) -> tuple[list[str], dict[str, str]]:
self._configure_build(dist_name, unpacked_wheel, build_lib, tmp_dir)
self._run_build_subcommands()
files, mapping = self._collect_build_outputs()
@@ -335,7 +333,7 @@ class editable_wheel(Command):
)
def _create_wheel_file(self, bdist_wheel):
- from wheel.wheelfile import WheelFile
+ from ..extern.wheel.wheelfile import WheelFile
dist_info = self.get_finalized_command("dist_info")
dist_name = dist_info.name
@@ -373,7 +371,7 @@ class editable_wheel(Command):
name: str,
tag: str,
build_lib: StrPath,
- ) -> "EditableStrategy":
+ ) -> EditableStrategy:
"""Decides which strategy to use to implement an editable installation."""
build_name = f"__editable__.{name}-{tag}"
project_dir = Path(self.project_dir)
@@ -396,9 +394,7 @@ class editable_wheel(Command):
class EditableStrategy(Protocol):
- def __call__(
- self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]
- ): ...
+ def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]): ...
def __enter__(self): ...
@@ -406,12 +402,12 @@ class EditableStrategy(Protocol):
class _StaticPth:
- def __init__(self, dist: Distribution, name: str, path_entries: List[Path]):
+ def __init__(self, dist: Distribution, name: str, path_entries: list[Path]):
self.dist = dist
self.name = name
self.path_entries = path_entries
- def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+ def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
entries = "\n".join(str(p.resolve()) for p in self.path_entries)
contents = _encode_pth(f"{entries}\n")
wheel.writestr(f"__editable__.{self.name}.pth", contents)
@@ -451,11 +447,11 @@ class _LinkTree(_StaticPth):
self._file = dist.get_command_obj("build_py").copy_file # type: ignore[union-attr]
super().__init__(dist, name, [self.auxiliary_dir])
- def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+ def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
self._create_links(files, mapping)
super().__call__(wheel, files, mapping)
- def _normalize_output(self, file: str) -> Optional[str]:
+ def _normalize_output(self, file: str) -> str | None:
# Files relative to build_lib will be normalized to None
with suppress(ValueError):
path = Path(file).resolve().relative_to(self.build_lib)
@@ -505,13 +501,13 @@ class _TopLevelFinder:
self.dist = dist
self.name = name
- def template_vars(self) -> Tuple[str, str, Dict[str, str], Dict[str, List[str]]]:
+ def template_vars(self) -> tuple[str, str, dict[str, str], dict[str, list[str]]]:
src_root = self.dist.src_root or os.curdir
top_level = chain(_find_packages(self.dist), _find_top_level_modules(self.dist))
package_dir = self.dist.package_dir or {}
roots = _find_package_roots(top_level, package_dir, src_root)
- namespaces_: Dict[str, List[str]] = dict(
+ namespaces_: dict[str, list[str]] = dict(
chain(
_find_namespaces(self.dist.packages or [], roots),
((ns, []) for ns in _find_virtual_namespaces(roots)),
@@ -519,7 +515,7 @@ class _TopLevelFinder:
)
legacy_namespaces = {
- cast(str, pkg): find_package_path(pkg, roots, self.dist.src_root or "")
+ pkg: find_package_path(pkg, roots, self.dist.src_root or "")
for pkg in self.dist.namespace_packages or []
}
@@ -532,7 +528,7 @@ class _TopLevelFinder:
finder = _normalization.safe_identifier(name)
return finder, name, mapping, namespaces_
- def get_implementation(self) -> Iterator[Tuple[str, bytes]]:
+ def get_implementation(self) -> Iterator[tuple[str, bytes]]:
finder, name, mapping, namespaces_ = self.template_vars()
content = bytes(_finder_template(name, mapping, namespaces_), "utf-8")
@@ -541,7 +537,7 @@ class _TopLevelFinder:
content = _encode_pth(f"import {finder}; {finder}.install()")
yield (f"__editable__.{self.name}.pth", content)
- def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+ def __call__(self, wheel: WheelFile, files: list[str], mapping: dict[str, str]):
for file, content in self.get_implementation():
wheel.writestr(file, content)
@@ -597,7 +593,7 @@ def _can_symlink_files(base_dir: Path) -> bool:
def _simple_layout(
- packages: Iterable[str], package_dir: Dict[str, str], project_dir: StrPath
+ packages: Iterable[str], package_dir: dict[str, str], project_dir: StrPath
) -> bool:
"""Return ``True`` if:
- all packages are contained by the same parent directory, **and**
@@ -680,8 +676,8 @@ def _find_package_roots(
packages: Iterable[str],
package_dir: Mapping[str, str],
src_root: StrPath,
-) -> Dict[str, str]:
- pkg_roots: Dict[str, str] = {
+) -> dict[str, str]:
+ pkg_roots: dict[str, str] = {
pkg: _absolute_root(find_package_path(pkg, package_dir, src_root))
for pkg in sorted(packages)
}
@@ -700,7 +696,7 @@ def _absolute_root(path: StrPath) -> str:
return str(parent.resolve() / path_.name)
-def _find_virtual_namespaces(pkg_roots: Dict[str, str]) -> Iterator[str]:
+def _find_virtual_namespaces(pkg_roots: dict[str, str]) -> Iterator[str]:
"""By carefully designing ``package_dir``, it is possible to implement the logical
structure of PEP 420 in a package without the corresponding directories.
@@ -725,15 +721,15 @@ def _find_virtual_namespaces(pkg_roots: Dict[str, str]) -> Iterator[str]:
def _find_namespaces(
- packages: List[str], pkg_roots: Dict[str, str]
-) -> Iterator[Tuple[str, List[str]]]:
+ packages: list[str], pkg_roots: dict[str, str]
+) -> Iterator[tuple[str, list[str]]]:
for pkg in packages:
path = find_package_path(pkg, pkg_roots, "")
if Path(path).exists() and not Path(path, "__init__.py").exists():
yield (pkg, [path])
-def _remove_nested(pkg_roots: Dict[str, str]) -> Dict[str, str]:
+def _remove_nested(pkg_roots: dict[str, str]) -> dict[str, str]:
output = dict(pkg_roots.copy())
for pkg, path in reversed(list(pkg_roots.items())):
@@ -883,7 +879,7 @@ def install():
def _finder_template(
- name: str, mapping: Mapping[str, str], namespaces: Dict[str, List[str]]
+ name: str, mapping: Mapping[str, str], namespaces: dict[str, list[str]]
) -> str:
"""Create a string containing the code for the``MetaPathFinder`` and
``PathEntryFinder``.
diff --git a/contrib/python/setuptools/py3/setuptools/command/egg_info.py b/contrib/python/setuptools/py3/setuptools/command/egg_info.py
index 62d2feea9b..ccc2db8972 100644
--- a/contrib/python/setuptools/py3/setuptools/command/egg_info.py
+++ b/contrib/python/setuptools/py3/setuptools/command/egg_info.py
@@ -534,10 +534,10 @@ class manifest_maker(sdist):
template = "MANIFEST.in"
def initialize_options(self):
- self.use_defaults = 1
- self.prune = 1
- self.manifest_only = 1
- self.force_manifest = 1
+ self.use_defaults = True
+ self.prune = True
+ self.manifest_only = True
+ self.force_manifest = True
self.ignore_egg_info_dir = False
def finalize_options(self):
@@ -623,7 +623,7 @@ class manifest_maker(sdist):
self.filelist.prune(base_dir)
sep = re.escape(os.sep)
self.filelist.exclude_pattern(
- r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=1
+ r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, is_regex=True
)
def _safe_data_files(self, build_py):
diff --git a/contrib/python/setuptools/py3/setuptools/command/install_lib.py b/contrib/python/setuptools/py3/setuptools/command/install_lib.py
index 32ff65e783..5e74be247e 100644
--- a/contrib/python/setuptools/py3/setuptools/command/install_lib.py
+++ b/contrib/python/setuptools/py3/setuptools/command/install_lib.py
@@ -2,6 +2,7 @@ import os
import sys
from itertools import product, starmap
import distutils.command.install_lib as orig
+from .._path import StrPath
class install_lib(orig.install_lib):
@@ -85,18 +86,18 @@ class install_lib(orig.install_lib):
def copy_tree(
self,
- infile,
- outfile,
- preserve_mode=1,
- preserve_times=1,
- preserve_symlinks=0,
+ infile: StrPath,
+ outfile: str,
+ preserve_mode=True,
+ preserve_times=True,
+ preserve_symlinks=False,
level=1,
):
assert preserve_mode and preserve_times and not preserve_symlinks
exclude = self.get_exclusions()
if not exclude:
- return orig.install_lib.copy_tree(self, infile, outfile)
+ return orig.install_lib.copy_tree(self, infile, outfile) # type: ignore[arg-type] # Fixed upstream
# Exclude namespace package __init__.py* files from the output
diff --git a/contrib/python/setuptools/py3/setuptools/command/install_scripts.py b/contrib/python/setuptools/py3/setuptools/command/install_scripts.py
index d79a4ab7b0..f44281b49b 100644
--- a/contrib/python/setuptools/py3/setuptools/command/install_scripts.py
+++ b/contrib/python/setuptools/py3/setuptools/command/install_scripts.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from distutils import log
import distutils.command.install_scripts as orig
import os
@@ -13,12 +15,12 @@ class install_scripts(orig.install_scripts):
orig.install_scripts.initialize_options(self)
self.no_ep = False
- def run(self):
+ def run(self) -> None:
self.run_command("egg_info")
if self.distribution.scripts:
orig.install_scripts.run(self) # run first to set up self.outfiles
else:
- self.outfiles = []
+ self.outfiles: list[str] = []
if self.no_ep:
# don't install entry point scripts into .egg file!
return
diff --git a/contrib/python/setuptools/py3/setuptools/command/rotate.py b/contrib/python/setuptools/py3/setuptools/command/rotate.py
index 6f73721c70..064d7959ff 100644
--- a/contrib/python/setuptools/py3/setuptools/command/rotate.py
+++ b/contrib/python/setuptools/py3/setuptools/command/rotate.py
@@ -1,9 +1,10 @@
+from __future__ import annotations
+
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
import os
import shutil
-from typing import List
from setuptools import Command
@@ -18,7 +19,7 @@ class rotate(Command):
('keep=', 'k', "number of matching distributions to keep"),
]
- boolean_options: List[str] = []
+ boolean_options: list[str] = []
def initialize_options(self):
self.match = None
diff --git a/contrib/python/setuptools/py3/setuptools/command/test.py b/contrib/python/setuptools/py3/setuptools/command/test.py
index 0a128f2a7a..af1349e1c6 100644
--- a/contrib/python/setuptools/py3/setuptools/command/test.py
+++ b/contrib/python/setuptools/py3/setuptools/command/test.py
@@ -136,7 +136,7 @@ class test(Command):
self.run_command('egg_info')
# Build extensions in-place
- self.reinitialize_command('build_ext', inplace=1)
+ self.reinitialize_command('build_ext', inplace=True)
self.run_command('build_ext')
ei_cmd = self.get_finalized_command("egg_info")
diff --git a/contrib/python/setuptools/py3/setuptools/compat/py311.py b/contrib/python/setuptools/py3/setuptools/compat/py311.py
index 28175b1f75..5069c441c4 100644
--- a/contrib/python/setuptools/py3/setuptools/compat/py311.py
+++ b/contrib/python/setuptools/py3/setuptools/compat/py311.py
@@ -1,12 +1,26 @@
-import sys
+from __future__ import annotations
+
import shutil
+import sys
+from typing import Any, Callable, TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from _typeshed import StrOrBytesPath, ExcInfo
+
+# Same as shutil._OnExcCallback from typeshed
+_OnExcCallback = Callable[[Callable[..., Any], str, BaseException], object]
-def shutil_rmtree(path, ignore_errors=False, onexc=None):
+def shutil_rmtree(
+ path: StrOrBytesPath,
+ ignore_errors: bool = False,
+ onexc: _OnExcCallback | None = None,
+) -> None:
if sys.version_info >= (3, 12):
return shutil.rmtree(path, ignore_errors, onexc=onexc)
- def _handler(fn, path, excinfo):
- return onexc(fn, path, excinfo[1])
+ def _handler(fn: Callable[..., Any], path: str, excinfo: ExcInfo) -> None:
+ if onexc:
+ onexc(fn, path, excinfo[1])
return shutil.rmtree(path, ignore_errors, onerror=_handler)
diff --git a/contrib/python/setuptools/py3/setuptools/config/_apply_pyprojecttoml.py b/contrib/python/setuptools/py3/setuptools/config/_apply_pyprojecttoml.py
index 3626282a79..5a8700051e 100644
--- a/contrib/python/setuptools/py3/setuptools/config/_apply_pyprojecttoml.py
+++ b/contrib/python/setuptools/py3/setuptools/config/_apply_pyprojecttoml.py
@@ -8,9 +8,10 @@ need to be processed before being applied.
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
"""
+from __future__ import annotations
+
import logging
import os
-from collections.abc import Mapping
from email.headerregistry import Address
from functools import partial, reduce
from inspect import cleandoc
@@ -21,13 +22,8 @@ from typing import (
Any,
Callable,
Dict,
- List,
- Optional,
- Set,
- Tuple,
- Type,
+ Mapping,
Union,
- cast,
)
from .._path import StrPath
from ..errors import RemovedConfigError
@@ -39,14 +35,14 @@ if TYPE_CHECKING:
from setuptools.dist import Distribution # noqa
EMPTY: Mapping = MappingProxyType({}) # Immutable dict-like
-_DictOrStr = Union[dict, str]
+_ProjectReadmeValue = Union[str, Dict[str, str]]
_CorrespFn = Callable[["Distribution", Any, StrPath], None]
_Correspondence = Union[str, _CorrespFn]
_logger = logging.getLogger(__name__)
-def apply(dist: "Distribution", config: dict, filename: StrPath) -> "Distribution":
+def apply(dist: Distribution, config: dict, filename: StrPath) -> Distribution:
"""Apply configuration dict read with :func:`read_configuration`"""
if not config:
@@ -68,7 +64,7 @@ def apply(dist: "Distribution", config: dict, filename: StrPath) -> "Distributio
return dist
-def _apply_project_table(dist: "Distribution", config: dict, root_dir: StrPath):
+def _apply_project_table(dist: Distribution, config: dict, root_dir: StrPath):
project_table = config.get("project", {}).copy()
if not project_table:
return # short-circuit
@@ -85,7 +81,7 @@ def _apply_project_table(dist: "Distribution", config: dict, root_dir: StrPath):
_set_config(dist, corresp, value)
-def _apply_tool_table(dist: "Distribution", config: dict, filename: StrPath):
+def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
tool_table = config.get("tool", {}).get("setuptools", {})
if not tool_table:
return # short-circuit
@@ -107,7 +103,7 @@ def _apply_tool_table(dist: "Distribution", config: dict, filename: StrPath):
_copy_command_options(config, dist, filename)
-def _handle_missing_dynamic(dist: "Distribution", project_table: dict):
+def _handle_missing_dynamic(dist: Distribution, project_table: dict):
"""Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``"""
dynamic = set(project_table.get("dynamic", []))
for field, getter in _PREVIOUSLY_DEFINED.items():
@@ -123,7 +119,7 @@ def json_compatible_key(key: str) -> str:
return key.lower().replace("-", "_")
-def _set_config(dist: "Distribution", field: str, value: Any):
+def _set_config(dist: Distribution, field: str, value: Any):
setter = getattr(dist.metadata, f"set_{field}", None)
if setter:
setter(value)
@@ -140,7 +136,7 @@ _CONTENT_TYPES = {
}
-def _guess_content_type(file: str) -> Optional[str]:
+def _guess_content_type(file: str) -> str | None:
_, ext = os.path.splitext(file.lower())
if not ext:
return None
@@ -153,15 +149,16 @@ def _guess_content_type(file: str) -> Optional[str]:
raise ValueError(f"Undefined content type for {file}, {msg}")
-def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: StrPath):
+def _long_description(dist: Distribution, val: _ProjectReadmeValue, root_dir: StrPath):
from setuptools.config import expand
+ file: str | tuple[()]
if isinstance(val, str):
- file: Union[str, list] = val
+ file = val
text = expand.read_files(file, root_dir)
- ctype = _guess_content_type(val)
+ ctype = _guess_content_type(file)
else:
- file = val.get("file") or []
+ file = val.get("file") or ()
text = val.get("text") or expand.read_files(file, root_dir)
ctype = val["content-type"]
@@ -171,10 +168,10 @@ def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: StrPath):
_set_config(dist, "long_description_content_type", ctype)
if file:
- dist._referenced_files.add(cast(str, file))
+ dist._referenced_files.add(file)
-def _license(dist: "Distribution", val: dict, root_dir: StrPath):
+def _license(dist: Distribution, val: dict, root_dir: StrPath):
from setuptools.config import expand
if "file" in val:
@@ -184,7 +181,7 @@ def _license(dist: "Distribution", val: dict, root_dir: StrPath):
_set_config(dist, "license", val["text"])
-def _people(dist: "Distribution", val: List[dict], _root_dir: StrPath, kind: str):
+def _people(dist: Distribution, val: list[dict], _root_dir: StrPath, kind: str):
field = []
email_field = []
for person in val:
@@ -202,24 +199,24 @@ def _people(dist: "Distribution", val: List[dict], _root_dir: StrPath, kind: str
_set_config(dist, f"{kind}_email", ", ".join(email_field))
-def _project_urls(dist: "Distribution", val: dict, _root_dir):
+def _project_urls(dist: Distribution, val: dict, _root_dir):
_set_config(dist, "project_urls", val)
-def _python_requires(dist: "Distribution", val: dict, _root_dir):
+def _python_requires(dist: Distribution, val: dict, _root_dir):
from setuptools.extern.packaging.specifiers import SpecifierSet
_set_config(dist, "python_requires", SpecifierSet(val))
-def _dependencies(dist: "Distribution", val: list, _root_dir):
+def _dependencies(dist: Distribution, val: list, _root_dir):
if getattr(dist, "install_requires", []):
msg = "`install_requires` overwritten in `pyproject.toml` (dependencies)"
SetuptoolsWarning.emit(msg)
dist.install_requires = val
-def _optional_dependencies(dist: "Distribution", val: dict, _root_dir):
+def _optional_dependencies(dist: Distribution, val: dict, _root_dir):
existing = getattr(dist, "extras_require", None) or {}
dist.extras_require = {**existing, **val}
@@ -244,7 +241,7 @@ def _unify_entry_points(project_table: dict):
# intentional (for resetting configurations that are missing `dynamic`).
-def _copy_command_options(pyproject: dict, dist: "Distribution", filename: StrPath):
+def _copy_command_options(pyproject: dict, dist: Distribution, filename: StrPath):
tool_table = pyproject.get("tool", {})
cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {})
valid_options = _valid_command_options(cmdclass)
@@ -263,7 +260,7 @@ def _copy_command_options(pyproject: dict, dist: "Distribution", filename: StrPa
_logger.warning(f"Command option {cmd}.{key} is not defined")
-def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]:
+def _valid_command_options(cmdclass: Mapping = EMPTY) -> dict[str, set[str]]:
from .._importlib import metadata
from setuptools.dist import Distribution
@@ -280,7 +277,7 @@ def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]:
return valid_options
-def _load_ep(ep: "metadata.EntryPoint") -> Optional[Tuple[str, Type]]:
+def _load_ep(ep: metadata.EntryPoint) -> tuple[str, type] | None:
# Ignore all the errors
try:
return (ep.name, ep.load())
@@ -294,22 +291,22 @@ def _normalise_cmd_option_key(name: str) -> str:
return json_compatible_key(name).strip("_=")
-def _normalise_cmd_options(desc: "_OptionsList") -> Set[str]:
+def _normalise_cmd_options(desc: _OptionsList) -> set[str]:
return {_normalise_cmd_option_key(fancy_option[0]) for fancy_option in desc}
-def _get_previous_entrypoints(dist: "Distribution") -> Dict[str, list]:
+def _get_previous_entrypoints(dist: Distribution) -> dict[str, list]:
ignore = ("console_scripts", "gui_scripts")
value = getattr(dist, "entry_points", None) or {}
return {k: v for k, v in value.items() if k not in ignore}
-def _get_previous_scripts(dist: "Distribution") -> Optional[list]:
+def _get_previous_scripts(dist: Distribution) -> list | None:
value = getattr(dist, "entry_points", None) or {}
return value.get("console_scripts")
-def _get_previous_gui_scripts(dist: "Distribution") -> Optional[list]:
+def _get_previous_gui_scripts(dist: Distribution) -> list | None:
value = getattr(dist, "entry_points", None) or {}
return value.get("gui_scripts")
@@ -349,7 +346,7 @@ def _some_attrgetter(*items):
return _acessor
-PYPROJECT_CORRESPONDENCE: Dict[str, _Correspondence] = {
+PYPROJECT_CORRESPONDENCE: dict[str, _Correspondence] = {
"readme": _long_description,
"license": _license,
"authors": partial(_people, kind="author"),
diff --git a/contrib/python/setuptools/py3/setuptools/config/expand.py b/contrib/python/setuptools/py3/setuptools/config/expand.py
index 0d8d58add8..6ea6cf6d0e 100644
--- a/contrib/python/setuptools/py3/setuptools/config/expand.py
+++ b/contrib/python/setuptools/py3/setuptools/config/expand.py
@@ -18,6 +18,8 @@ functions among several configuration file formats.
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
"""
+from __future__ import annotations
+
import ast
import importlib
import os
@@ -25,21 +27,15 @@ import pathlib
import sys
from glob import iglob
from configparser import ConfigParser
-from importlib.machinery import ModuleSpec
+from importlib.machinery import ModuleSpec, all_suffixes
from itertools import chain
from typing import (
TYPE_CHECKING,
Callable,
- Dict,
Iterable,
Iterator,
- List,
Mapping,
- Optional,
- Tuple,
TypeVar,
- Union,
- cast,
)
from pathlib import Path
from types import ModuleType
@@ -47,14 +43,12 @@ from types import ModuleType
from distutils.errors import DistutilsOptionError
from .._path import same_path as _same_path, StrPath
+from ..discovery import find_package_path
from ..warnings import SetuptoolsWarning
if TYPE_CHECKING:
from setuptools.dist import Distribution # noqa
- from setuptools.discovery import ConfigDiscovery # noqa
- from distutils.dist import DistributionMetadata # noqa
-chain_iter = chain.from_iterable
_K = TypeVar("_K")
_V = TypeVar("_V", covariant=True)
@@ -67,7 +61,7 @@ class StaticModule:
vars(self).update(locals())
del self.self
- def _find_assignments(self) -> Iterator[Tuple[ast.AST, ast.AST]]:
+ def _find_assignments(self) -> Iterator[tuple[ast.AST, ast.AST]]:
for statement in self.module.body:
if isinstance(statement, ast.Assign):
yield from ((target, statement.value) for target in statement.targets)
@@ -87,8 +81,8 @@ class StaticModule:
def glob_relative(
- patterns: Iterable[str], root_dir: Optional[StrPath] = None
-) -> List[str]:
+ patterns: Iterable[str], root_dir: StrPath | None = None
+) -> list[str]:
"""Expand the list of glob patterns, but preserving relative paths.
:param list[str] patterns: List of glob patterns
@@ -119,7 +113,9 @@ def glob_relative(
return expanded_values
-def read_files(filepaths: Union[str, bytes, Iterable[StrPath]], root_dir=None) -> str:
+def read_files(
+ filepaths: StrPath | Iterable[StrPath], root_dir: StrPath | None = None
+) -> str:
"""Return the content of the files concatenated using ``\n`` as str
This function is sandboxed and won't reach anything outside ``root_dir``
@@ -145,7 +141,7 @@ def _filter_existing_files(filepaths: Iterable[StrPath]) -> Iterator[StrPath]:
SetuptoolsWarning.emit(f"File {path!r} cannot be found")
-def _read_file(filepath: Union[bytes, StrPath]) -> str:
+def _read_file(filepath: bytes | StrPath) -> str:
with open(filepath, encoding='utf-8') as f:
return f.read()
@@ -160,8 +156,8 @@ def _assert_local(filepath: StrPath, root_dir: str):
def read_attr(
attr_desc: str,
- package_dir: Optional[Mapping[str, str]] = None,
- root_dir: Optional[StrPath] = None,
+ package_dir: Mapping[str, str] | None = None,
+ root_dir: StrPath | None = None,
):
"""Reads the value of an attribute from a module.
@@ -185,7 +181,7 @@ def read_attr(
attr_name = attrs_path.pop()
module_name = '.'.join(attrs_path)
module_name = module_name or '__init__'
- _parent_path, path, module_name = _find_module(module_name, package_dir, root_dir)
+ path = _find_module(module_name, package_dir, root_dir)
spec = _find_spec(module_name, path)
try:
@@ -196,7 +192,7 @@ def read_attr(
return getattr(module, attr_name)
-def _find_spec(module_name: str, module_path: Optional[StrPath]) -> ModuleSpec:
+def _find_spec(module_name: str, module_path: StrPath | None) -> ModuleSpec:
spec = importlib.util.spec_from_file_location(module_name, module_path)
spec = spec or importlib.util.find_spec(module_name)
@@ -217,43 +213,32 @@ def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
def _find_module(
- module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: StrPath
-) -> Tuple[StrPath, Optional[str], str]:
- """Given a module (that could normally be imported by ``module_name``
- after the build is complete), find the path to the parent directory where
- it is contained and the canonical name that could be used to import it
- considering the ``package_dir`` in the build configuration and ``root_dir``
+ module_name: str, package_dir: Mapping[str, str] | None, root_dir: StrPath
+) -> str | None:
+ """Find the path to the module named ``module_name``,
+ considering the ``package_dir`` in the build configuration and ``root_dir``.
+
+ >>> tmp = getfixture('tmpdir')
+ >>> _ = tmp.ensure("a/b/c.py")
+ >>> _ = tmp.ensure("a/b/d/__init__.py")
+ >>> r = lambda x: x.replace(str(tmp), "tmp").replace(os.sep, "/")
+ >>> r(_find_module("a.b.c", None, tmp))
+ 'tmp/a/b/c.py'
+ >>> r(_find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, tmp))
+ 'tmp/a/b/d/__init__.py'
"""
- parent_path = root_dir
- module_parts = module_name.split('.')
- if package_dir:
- if module_parts[0] in package_dir:
- # A custom path was specified for the module we want to import
- custom_path = package_dir[module_parts[0]]
- parts = custom_path.rsplit('/', 1)
- if len(parts) > 1:
- parent_path = os.path.join(root_dir, parts[0])
- parent_module = parts[1]
- else:
- parent_module = custom_path
- module_name = ".".join([parent_module, *module_parts[1:]])
- elif '' in package_dir:
- # A custom parent directory was specified for all root modules
- parent_path = os.path.join(root_dir, package_dir[''])
-
- path_start = os.path.join(parent_path, *module_name.split("."))
- candidates = chain(
- (f"{path_start}.py", os.path.join(path_start, "__init__.py")),
- iglob(f"{path_start}.*"),
+ path_start = find_package_path(module_name, package_dir or {}, root_dir)
+ candidates = chain.from_iterable(
+ (f"{path_start}{ext}", os.path.join(path_start, f"__init__{ext}"))
+ for ext in all_suffixes()
)
- module_path = next((x for x in candidates if os.path.isfile(x)), None)
- return parent_path, module_path, module_name
+ return next((x for x in candidates if os.path.isfile(x)), None)
def resolve_class(
qualified_class_name: str,
- package_dir: Optional[Mapping[str, str]] = None,
- root_dir: Optional[StrPath] = None,
+ package_dir: Mapping[str, str] | None = None,
+ root_dir: StrPath | None = None,
) -> Callable:
"""Given a qualified class name, return the associated class object"""
root_dir = root_dir or os.getcwd()
@@ -261,16 +246,16 @@ def resolve_class(
class_name = qualified_class_name[idx + 1 :]
pkg_name = qualified_class_name[:idx]
- _parent_path, path, module_name = _find_module(pkg_name, package_dir, root_dir)
- module = _load_spec(_find_spec(module_name, path), module_name)
+ path = _find_module(pkg_name, package_dir, root_dir)
+ module = _load_spec(_find_spec(pkg_name, path), pkg_name)
return getattr(module, class_name)
def cmdclass(
- values: Dict[str, str],
- package_dir: Optional[Mapping[str, str]] = None,
- root_dir: Optional[StrPath] = None,
-) -> Dict[str, Callable]:
+ values: dict[str, str],
+ package_dir: Mapping[str, str] | None = None,
+ root_dir: StrPath | None = None,
+) -> dict[str, Callable]:
"""Given a dictionary mapping command names to strings for qualified class
names, apply :func:`resolve_class` to the dict values.
"""
@@ -280,10 +265,10 @@ def cmdclass(
def find_packages(
*,
namespaces=True,
- fill_package_dir: Optional[Dict[str, str]] = None,
- root_dir: Optional[StrPath] = None,
+ fill_package_dir: dict[str, str] | None = None,
+ root_dir: StrPath | None = None,
**kwargs,
-) -> List[str]:
+) -> list[str]:
"""Works similarly to :func:`setuptools.find_packages`, but with all
arguments given as keyword arguments. Moreover, ``where`` can be given
as a list (the results will be simply concatenated).
@@ -311,7 +296,7 @@ def find_packages(
root_dir = root_dir or os.curdir
where = kwargs.pop('where', ['.'])
- packages: List[str] = []
+ packages: list[str] = []
fill_package_dir = {} if fill_package_dir is None else fill_package_dir
search = list(unique_everseen(always_iterable(where)))
@@ -335,22 +320,17 @@ def _nest_path(parent: StrPath, path: StrPath) -> str:
return os.path.normpath(path)
-def version(value: Union[Callable, Iterable[Union[str, int]], str]) -> str:
+def version(value: Callable | Iterable[str | int] | str) -> str:
"""When getting the version directly from an attribute,
it should be normalised to string.
"""
- if callable(value):
- value = value()
-
- value = cast(Iterable[Union[str, int]], value)
-
- if not isinstance(value, str):
- if hasattr(value, '__iter__'):
- value = '.'.join(map(str, value))
- else:
- value = '%s' % value
+ _value = value() if callable(value) else value
- return value
+ if isinstance(_value, str):
+ return _value
+ if hasattr(_value, '__iter__'):
+ return '.'.join(map(str, _value))
+ return '%s' % _value
def canonic_package_data(package_data: dict) -> dict:
@@ -360,8 +340,8 @@ def canonic_package_data(package_data: dict) -> dict:
def canonic_data_files(
- data_files: Union[list, dict], root_dir: Optional[StrPath] = None
-) -> List[Tuple[str, List[str]]]:
+ data_files: list | dict, root_dir: StrPath | None = None
+) -> list[tuple[str, list[str]]]:
"""For compatibility with ``setup.py``, ``data_files`` should be a list
of pairs instead of a dict.
@@ -376,7 +356,7 @@ def canonic_data_files(
]
-def entry_points(text: str, text_source="entry-points") -> Dict[str, dict]:
+def entry_points(text: str, text_source="entry-points") -> dict[str, dict]:
"""Given the contents of entry-points file,
process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
The first level keys are entry-point groups, the second level keys are
@@ -401,7 +381,7 @@ class EnsurePackagesDiscovered:
and those might not have been processed yet.
"""
- def __init__(self, distribution: "Distribution"):
+ def __init__(self, distribution: Distribution):
self._dist = distribution
self._called = False
@@ -445,7 +425,7 @@ class LazyMappingProxy(Mapping[_K, _V]):
def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]):
self._obtain = obtain_mapping_value
- self._value: Optional[Mapping[_K, _V]] = None
+ self._value: Mapping[_K, _V] | None = None
def _target(self) -> Mapping[_K, _V]:
if self._value is None:
diff --git a/contrib/python/setuptools/py3/setuptools/config/pyprojecttoml.py b/contrib/python/setuptools/py3/setuptools/config/pyprojecttoml.py
index d379405595..c8dae5f751 100644
--- a/contrib/python/setuptools/py3/setuptools/config/pyprojecttoml.py
+++ b/contrib/python/setuptools/py3/setuptools/config/pyprojecttoml.py
@@ -9,11 +9,13 @@ For simple scenarios, you can also try parsing the file directly
with the help of ``tomllib`` or ``tomli``.
"""
+from __future__ import annotations
+
import logging
import os
from contextlib import contextmanager
from functools import partial
-from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Set
+from typing import TYPE_CHECKING, Callable, Mapping
from .._path import StrPath
from ..errors import FileError, InvalidConfigError
@@ -58,10 +60,10 @@ def validate(config: dict, filepath: StrPath) -> bool:
def apply_configuration(
- dist: "Distribution",
+ dist: Distribution,
filepath: StrPath,
ignore_option_errors=False,
-) -> "Distribution":
+) -> Distribution:
"""Apply the configuration from a ``pyproject.toml`` file into an existing
distribution object.
"""
@@ -73,7 +75,7 @@ def read_configuration(
filepath: StrPath,
expand=True,
ignore_option_errors=False,
- dist: Optional["Distribution"] = None,
+ dist: Distribution | None = None,
):
"""Read given configuration file and returns options from it as a dict.
@@ -141,9 +143,9 @@ def read_configuration(
def expand_configuration(
config: dict,
- root_dir: Optional[StrPath] = None,
+ root_dir: StrPath | None = None,
ignore_option_errors: bool = False,
- dist: Optional["Distribution"] = None,
+ dist: Distribution | None = None,
) -> dict:
"""Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
find their final values.
@@ -166,9 +168,9 @@ class _ConfigExpander:
def __init__(
self,
config: dict,
- root_dir: Optional[StrPath] = None,
+ root_dir: StrPath | None = None,
ignore_option_errors: bool = False,
- dist: Optional["Distribution"] = None,
+ dist: Distribution | None = None,
):
self.config = config
self.root_dir = root_dir or os.getcwd()
@@ -178,9 +180,9 @@ class _ConfigExpander:
self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {})
self.ignore_option_errors = ignore_option_errors
self._dist = dist
- self._referenced_files: Set[str] = set()
+ self._referenced_files: set[str] = set()
- def _ensure_dist(self) -> "Distribution":
+ def _ensure_dist(self) -> Distribution:
from setuptools.dist import Distribution
attrs = {"src_root": self.root_dir, "name": self.project_cfg.get("name", None)}
@@ -233,7 +235,7 @@ class _ConfigExpander:
cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir)
self._process_field(self.setuptools_cfg, "cmdclass", cmdclass)
- def _expand_all_dynamic(self, dist: "Distribution", package_dir: Mapping[str, str]):
+ def _expand_all_dynamic(self, dist: Distribution, package_dir: Mapping[str, str]):
special = ( # need special handling
"version",
"readme",
@@ -263,7 +265,7 @@ class _ConfigExpander:
updates = {k: v for k, v in obtained_dynamic.items() if v is not None}
self.project_cfg.update(updates)
- def _ensure_previously_set(self, dist: "Distribution", field: str):
+ def _ensure_previously_set(self, dist: Distribution, field: str):
previous = _PREVIOUSLY_DEFINED[field](dist)
if previous is None and not self.ignore_option_errors:
msg = (
@@ -288,7 +290,7 @@ class _ConfigExpander:
raise ValueError(f"invalid `{specifier}`: {directive!r}")
return None
- def _obtain(self, dist: "Distribution", field: str, package_dir: Mapping[str, str]):
+ def _obtain(self, dist: Distribution, field: str, package_dir: Mapping[str, str]):
if field in self.dynamic_cfg:
return self._expand_directive(
f"tool.setuptools.dynamic.{field}",
@@ -298,13 +300,13 @@ class _ConfigExpander:
self._ensure_previously_set(dist, field)
return None
- def _obtain_version(self, dist: "Distribution", package_dir: Mapping[str, str]):
+ def _obtain_version(self, dist: Distribution, package_dir: Mapping[str, str]):
# Since plugins can set version, let's silently skip if it cannot be obtained
if "version" in self.dynamic and "version" in self.dynamic_cfg:
return _expand.version(self._obtain(dist, "version", package_dir))
return None
- def _obtain_readme(self, dist: "Distribution") -> Optional[Dict[str, str]]:
+ def _obtain_readme(self, dist: Distribution) -> dict[str, str] | None:
if "readme" not in self.dynamic:
return None
@@ -319,8 +321,8 @@ class _ConfigExpander:
return None
def _obtain_entry_points(
- self, dist: "Distribution", package_dir: Mapping[str, str]
- ) -> Optional[Dict[str, dict]]:
+ self, dist: Distribution, package_dir: Mapping[str, str]
+ ) -> dict[str, dict] | None:
fields = ("entry-points", "scripts", "gui-scripts")
if not any(field in self.dynamic for field in fields):
return None
@@ -344,21 +346,21 @@ class _ConfigExpander:
return expanded
- def _obtain_classifiers(self, dist: "Distribution"):
+ def _obtain_classifiers(self, dist: Distribution):
if "classifiers" in self.dynamic:
value = self._obtain(dist, "classifiers", {})
if value:
return value.splitlines()
return None
- def _obtain_dependencies(self, dist: "Distribution"):
+ def _obtain_dependencies(self, dist: Distribution):
if "dependencies" in self.dynamic:
value = self._obtain(dist, "dependencies", {})
if value:
return _parse_requirements_list(value)
return None
- def _obtain_optional_dependencies(self, dist: "Distribution"):
+ def _obtain_optional_dependencies(self, dist: Distribution):
if "optional-dependencies" not in self.dynamic:
return None
if "optional-dependencies" in self.dynamic_cfg:
@@ -400,18 +402,18 @@ def _ignore_errors(ignore_option_errors: bool):
class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
def __init__(
- self, distribution: "Distribution", project_cfg: dict, setuptools_cfg: dict
+ self, distribution: Distribution, project_cfg: dict, setuptools_cfg: dict
):
super().__init__(distribution)
self._project_cfg = project_cfg
self._setuptools_cfg = setuptools_cfg
- def __enter__(self) -> "Self":
+ def __enter__(self) -> Self:
"""When entering the context, the values of ``packages``, ``py_modules`` and
``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``.
"""
dist, cfg = self._dist, self._setuptools_cfg
- package_dir: Dict[str, str] = cfg.setdefault("package-dir", {})
+ package_dir: dict[str, str] = cfg.setdefault("package-dir", {})
package_dir.update(dist.package_dir or {})
dist.package_dir = package_dir # needs to be the same object
diff --git a/contrib/python/setuptools/py3/setuptools/config/setupcfg.py b/contrib/python/setuptools/py3/setuptools/config/setupcfg.py
index 2912d3e143..0a7a42eb09 100644
--- a/contrib/python/setuptools/py3/setuptools/config/setupcfg.py
+++ b/contrib/python/setuptools/py3/setuptools/config/setupcfg.py
@@ -9,6 +9,8 @@ For simple scenarios, you can also try parsing the file directly
with the help of ``configparser``.
"""
+from __future__ import annotations
+
import contextlib
import functools
import os
@@ -22,9 +24,6 @@ from typing import (
Dict,
Generic,
Iterable,
- List,
- Optional,
- Set,
Tuple,
TypeVar,
Union,
@@ -80,7 +79,7 @@ def read_configuration(
return configuration_to_dict(handlers)
-def apply_configuration(dist: "Distribution", filepath: StrPath) -> "Distribution":
+def apply_configuration(dist: Distribution, filepath: StrPath) -> Distribution:
"""Apply the configuration from a ``setup.cfg`` file into an existing
distribution object.
"""
@@ -90,11 +89,11 @@ def apply_configuration(dist: "Distribution", filepath: StrPath) -> "Distributio
def _apply(
- dist: "Distribution",
+ dist: Distribution,
filepath: StrPath,
other_files: Iterable[StrPath] = (),
ignore_option_errors: bool = False,
-) -> Tuple["ConfigHandler", ...]:
+) -> tuple[ConfigHandler, ...]:
"""Read configuration from ``filepath`` and applies to the ``dist`` object."""
from setuptools.dist import _Distribution
@@ -108,7 +107,7 @@ def _apply(
filenames = [*other_files, filepath]
try:
- _Distribution.parse_config_files(dist, filenames=filenames) # type: ignore[arg-type] # TODO: fix in disutils stubs
+ _Distribution.parse_config_files(dist, filenames=filenames) # type: ignore[arg-type] # TODO: fix in distutils stubs
handlers = parse_configuration(
dist, dist.command_options, ignore_option_errors=ignore_option_errors
)
@@ -131,7 +130,7 @@ def _get_option(target_obj: Target, key: str):
return getter()
-def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict:
+def configuration_to_dict(handlers: tuple[ConfigHandler, ...]) -> dict:
"""Returns configuration data gathered by given handlers as a dict.
:param list[ConfigHandler] handlers: Handlers list,
@@ -150,10 +149,10 @@ def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict:
def parse_configuration(
- distribution: "Distribution",
+ distribution: Distribution,
command_options: AllCommandOptions,
ignore_option_errors=False,
-) -> Tuple["ConfigMetadataHandler", "ConfigOptionsHandler"]:
+) -> tuple[ConfigMetadataHandler, ConfigOptionsHandler]:
"""Performs additional parsing of configuration options
for a distribution.
@@ -236,7 +235,7 @@ class ConfigHandler(Generic[Target]):
"""
- aliases: Dict[str, str] = {}
+ aliases: dict[str, str] = {}
"""Options aliases.
For compatibility with various packages. E.g.: d2to1 and pbr.
Note: `-` in keys is replaced with `_` by config parser.
@@ -253,9 +252,9 @@ class ConfigHandler(Generic[Target]):
self.ignore_option_errors = ignore_option_errors
self.target_obj = target_obj
self.sections = dict(self._section_options(options))
- self.set_options: List[str] = []
+ self.set_options: list[str] = []
self.ensure_discovered = ensure_discovered
- self._referenced_files: Set[str] = set()
+ self._referenced_files: set[str] = set()
"""After parsing configurations, this property will enumerate
all files referenced by the "file:" directive. Private API for setuptools only.
"""
@@ -485,7 +484,7 @@ class ConfigHandler(Generic[Target]):
if section_name: # [section.option] variant
method_postfix = '_%s' % section_name
- section_parser_method: Optional[Callable] = getattr(
+ section_parser_method: Callable | None = getattr(
self,
# Dots in section names are translated into dunderscores.
('parse_section%s' % method_postfix).replace('.', '__'),
@@ -534,11 +533,11 @@ class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
def __init__(
self,
- target_obj: "DistributionMetadata",
+ target_obj: DistributionMetadata,
options: AllCommandOptions,
ignore_option_errors: bool,
ensure_discovered: expand.EnsurePackagesDiscovered,
- package_dir: Optional[dict] = None,
+ package_dir: dict | None = None,
root_dir: StrPath = os.curdir,
):
super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
@@ -598,14 +597,14 @@ class ConfigOptionsHandler(ConfigHandler["Distribution"]):
def __init__(
self,
- target_obj: "Distribution",
+ target_obj: Distribution,
options: AllCommandOptions,
ignore_option_errors: bool,
ensure_discovered: expand.EnsurePackagesDiscovered,
):
super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
self.root_dir = target_obj.src_root
- self.package_dir: Dict[str, str] = {} # To be filled by `find_packages`
+ self.package_dir: dict[str, str] = {} # To be filled by `find_packages`
@classmethod
def _parse_list_semicolon(cls, value):
diff --git a/contrib/python/setuptools/py3/setuptools/depends.py b/contrib/python/setuptools/py3/setuptools/depends.py
index c0ca84d404..b6af51c410 100644
--- a/contrib/python/setuptools/py3/setuptools/depends.py
+++ b/contrib/python/setuptools/py3/setuptools/depends.py
@@ -9,7 +9,7 @@ from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
from .extern.packaging.version import Version
-__all__ = ['Require', 'find_module', 'get_module_constant', 'extract_constant']
+__all__ = ['Require', 'find_module']
class Require:
@@ -95,86 +95,73 @@ def maybe_close(f):
return contextlib.closing(f)
-def get_module_constant(module, symbol, default=-1, paths=None):
- """Find 'module' by searching 'paths', and extract 'symbol'
-
- Return 'None' if 'module' does not exist on 'paths', or it does not define
- 'symbol'. If the module defines 'symbol' as a constant, return the
- constant. Otherwise, return 'default'."""
-
- try:
- f, path, (suffix, mode, kind) = info = find_module(module, paths)
- except ImportError:
- # Module doesn't exist
- return None
-
- with maybe_close(f):
- if kind == PY_COMPILED:
- f.read(8) # skip magic & date
- code = marshal.load(f)
- elif kind == PY_FROZEN:
- code = _imp.get_frozen_object(module, paths)
- elif kind == PY_SOURCE:
- code = compile(f.read(), path, 'exec')
- else:
- # Not something we can parse; we'll have to import it. :(
- imported = _imp.get_module(module, paths, info)
- return getattr(imported, symbol, None)
-
- return extract_constant(code, symbol, default)
-
-
-def extract_constant(code, symbol, default=-1):
- """Extract the constant value of 'symbol' from 'code'
-
- If the name 'symbol' is bound to a constant value by the Python code
- object 'code', return that value. If 'symbol' is bound to an expression,
- return 'default'. Otherwise, return 'None'.
-
- Return value is based on the first assignment to 'symbol'. 'symbol' must
- be a global, or at least a non-"fast" local in the code block. That is,
- only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
- must be present in 'code.co_names'.
- """
- if symbol not in code.co_names:
- # name's not there, can't possibly be an assignment
- return None
-
- name_idx = list(code.co_names).index(symbol)
-
- STORE_NAME = dis.opmap['STORE_NAME']
- STORE_GLOBAL = dis.opmap['STORE_GLOBAL']
- LOAD_CONST = dis.opmap['LOAD_CONST']
-
- const = default
-
- for byte_code in dis.Bytecode(code):
- op = byte_code.opcode
- arg = byte_code.arg
-
- if op == LOAD_CONST:
- const = code.co_consts[arg]
- elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
- return const
- else:
- const = default
+# Some objects are not available on some platforms.
+# XXX it'd be better to test assertions about bytecode instead.
+if not sys.platform.startswith('java') and sys.platform != 'cli':
+
+ def get_module_constant(module, symbol, default=-1, paths=None):
+ """Find 'module' by searching 'paths', and extract 'symbol'
+
+ Return 'None' if 'module' does not exist on 'paths', or it does not define
+ 'symbol'. If the module defines 'symbol' as a constant, return the
+ constant. Otherwise, return 'default'."""
+
+ try:
+ f, path, (suffix, mode, kind) = info = find_module(module, paths)
+ except ImportError:
+ # Module doesn't exist
+ return None
+
+ with maybe_close(f):
+ if kind == PY_COMPILED:
+ f.read(8) # skip magic & date
+ code = marshal.load(f)
+ elif kind == PY_FROZEN:
+ code = _imp.get_frozen_object(module, paths)
+ elif kind == PY_SOURCE:
+ code = compile(f.read(), path, 'exec')
+ else:
+ # Not something we can parse; we'll have to import it. :(
+ imported = _imp.get_module(module, paths, info)
+ return getattr(imported, symbol, None)
+
+ return extract_constant(code, symbol, default)
+
+ def extract_constant(code, symbol, default=-1):
+ """Extract the constant value of 'symbol' from 'code'
+
+ If the name 'symbol' is bound to a constant value by the Python code
+ object 'code', return that value. If 'symbol' is bound to an expression,
+ return 'default'. Otherwise, return 'None'.
+
+ Return value is based on the first assignment to 'symbol'. 'symbol' must
+ be a global, or at least a non-"fast" local in the code block. That is,
+ only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
+ must be present in 'code.co_names'.
+ """
+ if symbol not in code.co_names:
+ # name's not there, can't possibly be an assignment
+ return None
- return None
+ name_idx = list(code.co_names).index(symbol)
+ STORE_NAME = dis.opmap['STORE_NAME']
+ STORE_GLOBAL = dis.opmap['STORE_GLOBAL']
+ LOAD_CONST = dis.opmap['LOAD_CONST']
-def _update_globals():
- """
- Patch the globals to remove the objects not available on some platforms.
+ const = default
- XXX it'd be better to test assertions about bytecode instead.
- """
+ for byte_code in dis.Bytecode(code):
+ op = byte_code.opcode
+ arg = byte_code.arg
- if not sys.platform.startswith('java') and sys.platform != 'cli':
- return
- incompatible = 'extract_constant', 'get_module_constant'
- for name in incompatible:
- del globals()[name]
- __all__.remove(name)
+ if op == LOAD_CONST:
+ const = code.co_consts[arg]
+ elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
+ return const
+ else:
+ const = default
+ return None
-_update_globals()
+ __all__ += ['get_module_constant', 'extract_constant']
diff --git a/contrib/python/setuptools/py3/setuptools/discovery.py b/contrib/python/setuptools/py3/setuptools/discovery.py
index 571be12bf4..880d414033 100644
--- a/contrib/python/setuptools/py3/setuptools/discovery.py
+++ b/contrib/python/setuptools/py3/setuptools/discovery.py
@@ -37,6 +37,8 @@ For the purposes of this module, the following nomenclature is used:
"""
+from __future__ import annotations
+
import itertools
import os
from fnmatch import fnmatchcase
@@ -44,13 +46,9 @@ from glob import glob
from pathlib import Path
from typing import (
TYPE_CHECKING,
- Dict,
Iterable,
Iterator,
- List,
Mapping,
- Optional,
- Tuple,
)
import _distutils_hack.override # noqa: F401
@@ -91,8 +89,8 @@ class _Filter:
class _Finder:
"""Base class that exposes functionality for module/package finders"""
- ALWAYS_EXCLUDE: Tuple[str, ...] = ()
- DEFAULT_EXCLUDE: Tuple[str, ...] = ()
+ ALWAYS_EXCLUDE: tuple[str, ...] = ()
+ DEFAULT_EXCLUDE: tuple[str, ...] = ()
@classmethod
def find(
@@ -100,7 +98,7 @@ class _Finder:
where: StrPath = '.',
exclude: Iterable[str] = (),
include: Iterable[str] = ('*',),
- ) -> List[str]:
+ ) -> list[str]:
"""Return a list of all Python items (packages or modules, depending on
the finder implementation) found within directory 'where'.
@@ -291,7 +289,7 @@ class FlatLayoutModuleFinder(ModuleFinder):
"""Reserved top-level module names"""
-def _find_packages_within(root_pkg: str, pkg_dir: StrPath) -> List[str]:
+def _find_packages_within(root_pkg: str, pkg_dir: StrPath) -> list[str]:
nested = PEP420PackageFinder.find(pkg_dir)
return [root_pkg] + [".".join((root_pkg, n)) for n in nested]
@@ -301,7 +299,7 @@ class ConfigDiscovery:
(from other metadata/options, the file system or conventions)
"""
- def __init__(self, distribution: "Distribution"):
+ def __init__(self, distribution: Distribution):
self.dist = distribution
self._called = False
self._disabled = False
@@ -329,7 +327,7 @@ class ConfigDiscovery:
return self.dist.src_root or os.curdir
@property
- def _package_dir(self) -> Dict[str, str]:
+ def _package_dir(self) -> dict[str, str]:
if self.dist.package_dir is None:
return {}
return self.dist.package_dir
@@ -455,7 +453,7 @@ class ConfigDiscovery:
self._ensure_no_accidental_inclusion(self.dist.py_modules, "modules")
return bool(self.dist.py_modules)
- def _ensure_no_accidental_inclusion(self, detected: List[str], kind: str):
+ def _ensure_no_accidental_inclusion(self, detected: list[str], kind: str):
if len(detected) > 1:
from inspect import cleandoc
@@ -495,7 +493,7 @@ class ConfigDiscovery:
if name:
self.dist.metadata.name = name
- def _find_name_single_package_or_module(self) -> Optional[str]:
+ def _find_name_single_package_or_module(self) -> str | None:
"""Exactly one module or package"""
for field in ('packages', 'py_modules'):
items = getattr(self.dist, field, None) or []
@@ -505,7 +503,7 @@ class ConfigDiscovery:
return None
- def _find_name_from_packages(self) -> Optional[str]:
+ def _find_name_from_packages(self) -> str | None:
"""Try to find the root package that is not a PEP 420 namespace"""
if not self.dist.packages:
return None
@@ -522,7 +520,7 @@ class ConfigDiscovery:
return None
-def remove_nested_packages(packages: List[str]) -> List[str]:
+def remove_nested_packages(packages: list[str]) -> list[str]:
"""Remove nested packages from a list of packages.
>>> remove_nested_packages(["a", "a.b1", "a.b2", "a.b1.c1"])
@@ -540,7 +538,7 @@ def remove_nested_packages(packages: List[str]) -> List[str]:
return top_level
-def remove_stubs(packages: List[str]) -> List[str]:
+def remove_stubs(packages: list[str]) -> list[str]:
"""Remove type stubs (:pep:`561`) from a list of packages.
>>> remove_stubs(["a", "a.b", "a-stubs", "a-stubs.b.c", "b", "c-stubs"])
@@ -550,8 +548,8 @@ def remove_stubs(packages: List[str]) -> List[str]:
def find_parent_package(
- packages: List[str], package_dir: Mapping[str, str], root_dir: StrPath
-) -> Optional[str]:
+ packages: list[str], package_dir: Mapping[str, str], root_dir: StrPath
+) -> str | None:
"""Find the parent package that is not a namespace."""
packages = sorted(packages, key=len)
common_ancestors = []
@@ -607,7 +605,7 @@ def find_package_path(
return os.path.join(root_dir, *parent.split("/"), *parts)
-def construct_package_dir(packages: List[str], package_path: StrPath) -> Dict[str, str]:
+def construct_package_dir(packages: list[str], package_path: StrPath) -> dict[str, str]:
parent_pkgs = remove_nested_packages(packages)
prefix = Path(package_path).parts
return {pkg: "/".join([*prefix, *pkg.split(".")]) for pkg in parent_pkgs}
diff --git a/contrib/python/setuptools/py3/setuptools/dist.py b/contrib/python/setuptools/py3/setuptools/dist.py
index 03f6c0398b..43762960ba 100644
--- a/contrib/python/setuptools/py3/setuptools/dist.py
+++ b/contrib/python/setuptools/py3/setuptools/dist.py
@@ -1,5 +1,4 @@
-__all__ = ['Distribution']
-
+from __future__ import annotations
import io
import itertools
@@ -10,7 +9,7 @@ import sys
from contextlib import suppress
from glob import iglob
from pathlib import Path
-from typing import TYPE_CHECKING, Dict, List, MutableMapping, Optional, Set, Tuple
+from typing import TYPE_CHECKING, MutableMapping
import distutils.cmd
import distutils.command
@@ -38,6 +37,7 @@ from .discovery import ConfigDiscovery
from .monkey import get_unpatched
from .warnings import InformationOnly, SetuptoolsDeprecationWarning
+__all__ = ['Distribution']
sequence = tuple, list
@@ -272,6 +272,8 @@ class Distribution(_Distribution):
}
_patched_dist = None
+ # Used by build_py, editable_wheel and install_lib commands for legacy namespaces
+ namespace_packages: list[str] #: :meta private: DEPRECATED
def patch_missing_pkg_info(self, attrs):
# Fake up a replacement for the data that would normally come from
@@ -287,12 +289,12 @@ class Distribution(_Distribution):
dist._version = _normalization.safe_version(str(attrs['version']))
self._patched_dist = dist
- def __init__(self, attrs: Optional[MutableMapping] = None) -> None:
+ def __init__(self, attrs: MutableMapping | None = None) -> None:
have_package_data = hasattr(self, "package_data")
if not have_package_data:
- self.package_data: Dict[str, List[str]] = {}
+ self.package_data: dict[str, list[str]] = {}
attrs = attrs or {}
- self.dist_files: List[Tuple[str, str, str]] = []
+ self.dist_files: list[tuple[str, str, str]] = []
# Filter-out setuptools' specific options.
self.src_root = attrs.pop("src_root", None)
self.patch_missing_pkg_info(attrs)
@@ -309,7 +311,7 @@ class Distribution(_Distribution):
# Private API (setuptools-use only, not restricted to Distribution)
# Stores files that are referenced by the configuration and need to be in the
# sdist (e.g. `version = file: VERSION.txt`)
- self._referenced_files: Set[str] = set()
+ self._referenced_files: set[str] = set()
self.set_defaults = ConfigDiscovery(self)
@@ -387,10 +389,10 @@ class Distribution(_Distribution):
def _finalize_license_files(self) -> None:
"""Compute names of all license files which should be included."""
- license_files: Optional[List[str]] = self.metadata.license_files
- patterns: List[str] = license_files if license_files else []
+ license_files: list[str] | None = self.metadata.license_files
+ patterns: list[str] = license_files if license_files else []
- license_file: Optional[str] = self.metadata.license_file
+ license_file: str | None = self.metadata.license_file
if license_file and license_file not in patterns:
patterns.append(license_file)
@@ -409,8 +411,8 @@ class Distribution(_Distribution):
"""
>>> list(Distribution._expand_patterns(['LICENSE']))
['LICENSE']
- >>> list(Distribution._expand_patterns(['setup.cfg', 'LIC*']))
- ['setup.cfg', 'LICENSE']
+ >>> list(Distribution._expand_patterns(['pyproject.toml', 'LIC*']))
+ ['pyproject.toml', 'LICENSE']
"""
return (
path
@@ -710,6 +712,12 @@ class Distribution(_Distribution):
if command in self.cmdclass:
return self.cmdclass[command]
+ # Special case bdist_wheel so it's never loaded from "wheel"
+ if command == 'bdist_wheel':
+ from .command.bdist_wheel import bdist_wheel
+
+ return bdist_wheel
+
eps = metadata.entry_points(group='distutils.commands', name=command)
for ep in eps:
self.cmdclass[command] = cmdclass = ep.load()
diff --git a/contrib/python/setuptools/py3/setuptools/extern/__init__.py b/contrib/python/setuptools/py3/setuptools/extern/__init__.py
index 8eb02ac6d3..5ad7169e3b 100644
--- a/contrib/python/setuptools/py3/setuptools/extern/__init__.py
+++ b/contrib/python/setuptools/py3/setuptools/extern/__init__.py
@@ -85,6 +85,7 @@ names = (
'ordered_set',
'packaging',
'tomli',
+ 'wheel',
'zipp',
)
# [[[end]]]
diff --git a/contrib/python/setuptools/py3/setuptools/monkey.py b/contrib/python/setuptools/py3/setuptools/monkey.py
index 1f8d8ffe0f..e513f95245 100644
--- a/contrib/python/setuptools/py3/setuptools/monkey.py
+++ b/contrib/python/setuptools/py3/setuptools/monkey.py
@@ -2,20 +2,22 @@
Monkey patching of distutils.
"""
+from __future__ import annotations
+
import functools
import inspect
import platform
import sys
import types
from importlib import import_module
-from typing import List, TypeVar
+from typing import TypeVar
import distutils.filelist
_T = TypeVar("_T")
-__all__: List[str] = []
+__all__: list[str] = []
"""
Everything is private. Contact the project team
if you think you need this functionality.
diff --git a/contrib/python/setuptools/py3/setuptools/msvc.py b/contrib/python/setuptools/py3/setuptools/msvc.py
index b2a0f2bebb..f86c480d18 100644
--- a/contrib/python/setuptools/py3/setuptools/msvc.py
+++ b/contrib/python/setuptools/py3/setuptools/msvc.py
@@ -11,6 +11,8 @@ Microsoft Visual C++ 14.X:
This may also support compilers shipped with compatible Visual Studio versions.
"""
+from __future__ import annotations
+
import json
from os import listdir, pathsep
from os.path import join, isfile, isdir, dirname
@@ -20,7 +22,7 @@ import platform
import itertools
import subprocess
import distutils.errors
-from typing import Dict, TYPE_CHECKING
+from typing import TYPE_CHECKING
from setuptools.extern.more_itertools import unique_everseen
# https://github.com/python/mypy/issues/8166
@@ -36,7 +38,7 @@ else:
HKEY_LOCAL_MACHINE = None
HKEY_CLASSES_ROOT = None
- environ: Dict[str, str] = dict()
+ environ: dict[str, str] = dict()
def _msvc14_find_vc2015():
diff --git a/contrib/python/setuptools/py3/setuptools/package_index.py b/contrib/python/setuptools/py3/setuptools/package_index.py
index c3ffee41a7..c8789e279f 100644
--- a/contrib/python/setuptools/py3/setuptools/package_index.py
+++ b/contrib/python/setuptools/py3/setuptools/package_index.py
@@ -642,7 +642,7 @@ class PackageIndex(Environment):
"Skipping development or system egg: %s",
dist,
)
- skipped[dist] = 1
+ skipped[dist] = True
continue
test = dist in req and (dist.precedence <= SOURCE_DIST or not source)
diff --git a/contrib/python/setuptools/py3/setuptools/sandbox.py b/contrib/python/setuptools/py3/setuptools/sandbox.py
index e5da9d86f0..147b26749e 100644
--- a/contrib/python/setuptools/py3/setuptools/sandbox.py
+++ b/contrib/python/setuptools/py3/setuptools/sandbox.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import os
import sys
import tempfile
@@ -9,7 +11,6 @@ import contextlib
import pickle
import textwrap
import builtins
-from typing import Union, List
import pkg_resources
from distutils.errors import DistutilsError
@@ -425,7 +426,7 @@ class DirectorySandbox(AbstractSandbox):
"tempnam",
])
- _exception_patterns: List[Union[str, re.Pattern]] = []
+ _exception_patterns: list[str | re.Pattern] = []
"exempt writing to paths that match the pattern"
def __init__(self, sandbox, exceptions=_EXCEPTIONS):
diff --git a/contrib/python/setuptools/py3/setuptools/warnings.py b/contrib/python/setuptools/py3/setuptools/warnings.py
index b3e252ca57..5d9cca6c37 100644
--- a/contrib/python/setuptools/py3/setuptools/warnings.py
+++ b/contrib/python/setuptools/py3/setuptools/warnings.py
@@ -5,12 +5,14 @@ Using custom classes (other than ``UserWarning``) allow users to set
setuptools.
"""
+from __future__ import annotations
+
import os
import warnings
from datetime import date
from inspect import cleandoc
from textwrap import indent
-from typing import Optional, Tuple
+from typing import Tuple
_DueDate = Tuple[int, int, int] # time tuple
_INDENT = 8 * " "
@@ -23,11 +25,11 @@ class SetuptoolsWarning(UserWarning):
@classmethod
def emit(
cls,
- summary: Optional[str] = None,
- details: Optional[str] = None,
- due_date: Optional[_DueDate] = None,
- see_docs: Optional[str] = None,
- see_url: Optional[str] = None,
+ summary: str | None = None,
+ details: str | None = None,
+ due_date: _DueDate | None = None,
+ see_docs: str | None = None,
+ see_url: str | None = None,
stacklevel: int = 2,
**kwargs,
):
@@ -51,9 +53,9 @@ class SetuptoolsWarning(UserWarning):
cls,
summary: str,
details: str,
- due_date: Optional[date] = None,
- see_url: Optional[str] = None,
- format_args: Optional[dict] = None,
+ due_date: date | None = None,
+ see_url: str | None = None,
+ format_args: dict | None = None,
):
"""Private: reserved for ``setuptools`` internal use only"""
today = date.today()
diff --git a/contrib/python/setuptools/py3/ya.make b/contrib/python/setuptools/py3/ya.make
index e3a563cd6d..29bd4687af 100644
--- a/contrib/python/setuptools/py3/ya.make
+++ b/contrib/python/setuptools/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(70.0.0)
+VERSION(70.1.0)
LICENSE(MIT)
@@ -191,6 +191,11 @@ PY_SRCS(
setuptools/_vendor/tomli/_parser.py
setuptools/_vendor/tomli/_re.py
setuptools/_vendor/tomli/_types.py
+ setuptools/_vendor/wheel/__init__.py
+ setuptools/_vendor/wheel/macosx_libfile.py
+ setuptools/_vendor/wheel/metadata.py
+ setuptools/_vendor/wheel/util.py
+ setuptools/_vendor/wheel/wheelfile.py
setuptools/_vendor/zipp.py
setuptools/archive_util.py
setuptools/build_meta.py
@@ -199,6 +204,7 @@ PY_SRCS(
setuptools/command/alias.py
setuptools/command/bdist_egg.py
setuptools/command/bdist_rpm.py
+ setuptools/command/bdist_wheel.py
setuptools/command/build.py
setuptools/command/build_clib.py
setuptools/command/build_ext.py