aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorrobot-piglet <robot-piglet@yandex-team.com>2024-07-17 16:49:25 +0300
committerrobot-piglet <robot-piglet@yandex-team.com>2024-07-17 16:59:34 +0300
commit61640a2297d94a7c75442e8397e517ded0c784a0 (patch)
treec58ad8592f7fa2de16b4821d06e23bd74499d175
parentd59b288b13385d748ec8d9087c7c2e4ebfd36b1e (diff)
downloadydb-61640a2297d94a7c75442e8397e517ded0c784a0.tar.gz
Intermediate changes
-rw-r--r--contrib/python/setuptools/py3/.dist-info/METADATA88
-rw-r--r--contrib/python/setuptools/py3/pkg_resources/__init__.py4
-rw-r--r--contrib/python/setuptools/py3/pkg_resources/extern/__init__.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/__init__.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/_core_metadata.py22
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_modified.py4
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_msvccompiler.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/__init__.py0
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/__init__.py15
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_elffile.py108
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_manylinux.py260
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_musllinux.py83
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_parser.py356
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_structures.py61
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_tokenizer.py192
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/markers.py252
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/metadata.py825
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/py.typed0
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/requirements.py90
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/specifiers.py1017
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/tags.py571
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/utils.py172
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/version.py563
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/archive_util.py27
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/bcppcompiler.py20
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/ccompiler.py109
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/cmd.py36
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/__init__.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/bdist.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_dumb.py19
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_rpm.py38
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/build.py13
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/build_clib.py14
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/build_ext.py26
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/build_py.py18
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/build_scripts.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/check.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/clean.py8
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/config.py16
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/install.py31
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/install_data.py6
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/install_headers.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/install_lib.py4
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/install_scripts.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/register.py17
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/sdist.py37
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/command/upload.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/compat/__init__.py4
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/compat/py38.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/compat/py39.py (renamed from contrib/python/setuptools/py3/setuptools/_distutils/py39compat.py)0
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/config.py6
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/core.py11
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/cygwinccompiler.py34
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/dir_util.py30
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/dist.py94
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/extension.py16
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py24
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/file_util.py18
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/filelist.py30
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/msvc9compiler.py30
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/msvccompiler.py28
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/py38compat.py8
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/spawn.py73
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/sysconfig.py28
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/text_file.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/unixccompiler.py11
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/util.py50
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/version.py6
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/versionpredicate.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/_distutils/zosccompiler.py4
-rw-r--r--contrib/python/setuptools/py3/setuptools/_importlib.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/bdist_egg.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/bdist_wheel.py12
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/dist_info.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/easy_install.py32
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/egg_info.py8
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/register.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/sdist.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/command/upload.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/config/_apply_pyprojecttoml.py4
-rw-r--r--contrib/python/setuptools/py3/setuptools/config/_validate_pyproject/formats.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/config/expand.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/config/pyprojecttoml.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/config/setupcfg.py4
-rw-r--r--contrib/python/setuptools/py3/setuptools/depends.py6
-rw-r--r--contrib/python/setuptools/py3/setuptools/discovery.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/dist.py4
-rw-r--r--contrib/python/setuptools/py3/setuptools/extension.py5
-rw-r--r--contrib/python/setuptools/py3/setuptools/extern/__init__.py10
-rw-r--r--contrib/python/setuptools/py3/setuptools/msvc.py4
-rw-r--r--contrib/python/setuptools/py3/setuptools/namespaces.py2
-rw-r--r--contrib/python/setuptools/py3/setuptools/package_index.py6
-rw-r--r--contrib/python/setuptools/py3/ya.make21
93 files changed, 5251 insertions, 604 deletions
diff --git a/contrib/python/setuptools/py3/.dist-info/METADATA b/contrib/python/setuptools/py3/.dist-info/METADATA
index da4720019f..6500c47bd1 100644
--- a/contrib/python/setuptools/py3/.dist-info/METADATA
+++ b/contrib/python/setuptools/py3/.dist-info/METADATA
@@ -1,9 +1,9 @@
Metadata-Version: 2.1
Name: setuptools
-Version: 70.1.1
+Version: 70.2.0
Summary: Easily download, build, install, upgrade, and uninstall Python packages
Author-email: Python Packaging Authority <distutils-sig@python.org>
-Project-URL: Homepage, https://github.com/pypa/setuptools
+Project-URL: Source, https://github.com/pypa/setuptools
Project-URL: Documentation, https://setuptools.pypa.io/
Project-URL: Changelog, https://setuptools.pypa.io/en/stable/history.html
Keywords: CPAN PyPI distutils eggs package management
@@ -20,49 +20,49 @@ Requires-Python: >=3.8
Description-Content-Type: text/x-rst
License-File: LICENSE
Provides-Extra: certs
-Provides-Extra: docs
-Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
-Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
-Requires-Dist: furo ; extra == 'docs'
-Requires-Dist: sphinx-lint ; extra == 'docs'
-Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
-Requires-Dist: pygments-github-lexers ==0.0.5 ; extra == 'docs'
-Requires-Dist: sphinx-favicon ; extra == 'docs'
-Requires-Dist: sphinx-inline-tabs ; extra == 'docs'
-Requires-Dist: sphinx-reredirects ; extra == 'docs'
-Requires-Dist: sphinxcontrib-towncrier ; extra == 'docs'
-Requires-Dist: sphinx-notfound-page <2,>=1 ; extra == 'docs'
-Requires-Dist: pyproject-hooks !=1.1 ; extra == 'docs'
+Provides-Extra: doc
+Requires-Dist: sphinx >=3.5 ; extra == 'doc'
+Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
+Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
+Requires-Dist: furo ; extra == 'doc'
+Requires-Dist: sphinx-lint ; extra == 'doc'
+Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
+Requires-Dist: pygments-github-lexers ==0.0.5 ; extra == 'doc'
+Requires-Dist: sphinx-favicon ; extra == 'doc'
+Requires-Dist: sphinx-inline-tabs ; extra == 'doc'
+Requires-Dist: sphinx-reredirects ; extra == 'doc'
+Requires-Dist: sphinxcontrib-towncrier ; extra == 'doc'
+Requires-Dist: sphinx-notfound-page <2,>=1 ; extra == 'doc'
+Requires-Dist: pyproject-hooks !=1.1 ; extra == 'doc'
Provides-Extra: ssl
-Provides-Extra: testing
-Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
-Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
-Requires-Dist: pytest-mypy ; extra == 'testing'
-Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
-Requires-Dist: virtualenv >=13.0.0 ; extra == 'testing'
-Requires-Dist: wheel ; extra == 'testing'
-Requires-Dist: pip >=19.1 ; extra == 'testing'
-Requires-Dist: packaging >=23.2 ; extra == 'testing'
-Requires-Dist: jaraco.envs >=2.2 ; extra == 'testing'
-Requires-Dist: pytest-xdist >=3 ; extra == 'testing'
-Requires-Dist: jaraco.path >=3.2.0 ; extra == 'testing'
-Requires-Dist: build[virtualenv] >=1.0.3 ; extra == 'testing'
-Requires-Dist: filelock >=3.4.0 ; extra == 'testing'
-Requires-Dist: ini2toml[lite] >=0.14 ; extra == 'testing'
-Requires-Dist: tomli-w >=1.0.0 ; extra == 'testing'
-Requires-Dist: pytest-timeout ; extra == 'testing'
-Requires-Dist: pytest-home >=0.5 ; extra == 'testing'
-Requires-Dist: mypy ==1.10.0 ; extra == 'testing'
-Requires-Dist: tomli ; extra == 'testing'
-Requires-Dist: importlib-metadata ; extra == 'testing'
-Requires-Dist: pytest-subprocess ; extra == 'testing'
-Requires-Dist: pyproject-hooks !=1.1 ; extra == 'testing'
-Requires-Dist: jaraco.test ; extra == 'testing'
-Requires-Dist: pytest-cov ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: jaraco.develop >=7.21 ; (python_version >= "3.9" and sys_platform != "cygwin") and extra == 'testing'
-Requires-Dist: pytest-ruff >=0.3.2 ; (sys_platform != "cygwin") and extra == 'testing'
-Requires-Dist: pytest-perf ; (sys_platform != "cygwin") and extra == 'testing'
+Provides-Extra: test
+Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
+Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'test'
+Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: pytest-mypy ; extra == 'test'
+Requires-Dist: pytest-enabler >=2.2 ; extra == 'test'
+Requires-Dist: virtualenv >=13.0.0 ; extra == 'test'
+Requires-Dist: wheel ; extra == 'test'
+Requires-Dist: pip >=19.1 ; extra == 'test'
+Requires-Dist: packaging >=23.2 ; extra == 'test'
+Requires-Dist: jaraco.envs >=2.2 ; extra == 'test'
+Requires-Dist: pytest-xdist >=3 ; extra == 'test'
+Requires-Dist: jaraco.path >=3.2.0 ; extra == 'test'
+Requires-Dist: build[virtualenv] >=1.0.3 ; extra == 'test'
+Requires-Dist: filelock >=3.4.0 ; extra == 'test'
+Requires-Dist: ini2toml[lite] >=0.14 ; extra == 'test'
+Requires-Dist: tomli-w >=1.0.0 ; extra == 'test'
+Requires-Dist: pytest-timeout ; extra == 'test'
+Requires-Dist: pytest-home >=0.5 ; extra == 'test'
+Requires-Dist: mypy ==1.10.0 ; extra == 'test'
+Requires-Dist: tomli ; extra == 'test'
+Requires-Dist: importlib-metadata ; extra == 'test'
+Requires-Dist: pytest-subprocess ; extra == 'test'
+Requires-Dist: pyproject-hooks !=1.1 ; extra == 'test'
+Requires-Dist: jaraco.test ; extra == 'test'
+Requires-Dist: jaraco.develop >=7.21 ; (python_version >= "3.9" and sys_platform != "cygwin") and extra == 'test'
+Requires-Dist: pytest-ruff >=0.3.2 ; (sys_platform != "cygwin") and extra == 'test'
+Requires-Dist: pytest-perf ; (sys_platform != "cygwin") and extra == 'test'
.. |pypi-version| image:: https://img.shields.io/pypi/v/setuptools.svg
:target: https://pypi.org/project/setuptools
diff --git a/contrib/python/setuptools/py3/pkg_resources/__init__.py b/contrib/python/setuptools/py3/pkg_resources/__init__.py
index 8e01d93476..bb8cbf88fd 100644
--- a/contrib/python/setuptools/py3/pkg_resources/__init__.py
+++ b/contrib/python/setuptools/py3/pkg_resources/__init__.py
@@ -2041,7 +2041,7 @@ class ZipProvider(EggProvider):
if not WRITE_SUPPORT:
raise OSError(
- '"os.rename" and "os.unlink" are not supported ' 'on this platform'
+ '"os.rename" and "os.unlink" are not supported on this platform'
)
try:
if not self.egg_name:
@@ -2808,7 +2808,7 @@ class EntryPoint:
return ()
req = Requirement.parse('x' + extras_spec)
if req.specs:
- raise ValueError()
+ raise ValueError
return req.extras
@classmethod
diff --git a/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py b/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py
index 9b9ac10aa9..daa978ff72 100644
--- a/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py
+++ b/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py
@@ -41,14 +41,14 @@ class VendorImporter:
"""
root, base, target = fullname.partition(self.root_name + '.')
for prefix in self.search_path:
+ extant = prefix + target
try:
- extant = prefix + target
__import__(extant)
- mod = sys.modules[extant]
- sys.modules[fullname] = mod
- return mod
except ImportError:
- pass
+ continue
+ mod = sys.modules[extant]
+ sys.modules[fullname] = mod
+ return mod
else:
raise ImportError(
"The '{target}' package is required; "
diff --git a/contrib/python/setuptools/py3/setuptools/__init__.py b/contrib/python/setuptools/py3/setuptools/__init__.py
index 5ef0f7dbd8..bf03f37b77 100644
--- a/contrib/python/setuptools/py3/setuptools/__init__.py
+++ b/contrib/python/setuptools/py3/setuptools/__init__.py
@@ -57,9 +57,9 @@ def _install_setup_requires(attrs):
"""Ignore ``pyproject.toml``, they are not related to setup_requires"""
try:
cfg, toml = super()._split_standard_project_metadata(filenames)
- return cfg, ()
except Exception:
return filenames, ()
+ return cfg, ()
def finalize_options(self):
"""
diff --git a/contrib/python/setuptools/py3/setuptools/_core_metadata.py b/contrib/python/setuptools/py3/setuptools/_core_metadata.py
index f1de9c9ba6..45aae7d70b 100644
--- a/contrib/python/setuptools/py3/setuptools/_core_metadata.py
+++ b/contrib/python/setuptools/py3/setuptools/_core_metadata.py
@@ -18,7 +18,7 @@ from distutils.util import rfc822_escape
from . import _normalization, _reqs
from .extern.packaging.markers import Marker
from .extern.packaging.requirements import Requirement
-from .extern.packaging.utils import canonicalize_name
+from .extern.packaging.utils import canonicalize_name, canonicalize_version
from .extern.packaging.version import Version
from .warnings import SetuptoolsDeprecationWarning
@@ -263,7 +263,23 @@ def _write_provides_extra(file, processed_extras, safe, unsafe):
# from pypa/distutils#244; needed only until that logic is always available
def get_fullname(self):
+ return _distribution_fullname(self.get_name(), self.get_version())
+
+
+def _distribution_fullname(name: str, version: str) -> str:
+ """
+ >>> _distribution_fullname('setup.tools', '1.0-2')
+ 'setup_tools-1.0.post2'
+ >>> _distribution_fullname('setup-tools', '1.2post2')
+ 'setup_tools-1.2.post2'
+ >>> _distribution_fullname('setup-tools', '1.0-r2')
+ 'setup_tools-1.0.post2'
+ >>> _distribution_fullname('setup.tools', '1.0.post')
+ 'setup_tools-1.0.post0'
+ >>> _distribution_fullname('setup.tools', '1.0+ubuntu-1')
+ 'setup_tools-1.0+ubuntu.1'
+ """
return "{}-{}".format(
- canonicalize_name(self.get_name()).replace('-', '_'),
- self.get_version(),
+ canonicalize_name(name).replace('-', '_'),
+ canonicalize_version(version, strip_trailing_zero=False),
)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_modified.py b/contrib/python/setuptools/py3/setuptools/_distutils/_modified.py
index 78485dc25e..6532aa1073 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/_modified.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_modified.py
@@ -4,8 +4,8 @@ import functools
import os.path
from ._functools import splat
+from .compat.py39 import zip_strict
from .errors import DistutilsFileError
-from .py39compat import zip_strict
def _newer(source, target):
@@ -24,7 +24,7 @@ def newer(source, target):
Raises DistutilsFileError if 'source' does not exist.
"""
if not os.path.exists(source):
- raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source))
+ raise DistutilsFileError(f"file '{os.path.abspath(source)}' does not exist")
return _newer(source, target)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_msvccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/_msvccompiler.py
index a2159fef83..b0322410c5 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/_msvccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_msvccompiler.py
@@ -218,7 +218,7 @@ class MSVCCompiler(CCompiler):
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
- def __init__(self, verbose=0, dry_run=0, force=0):
+ def __init__(self, verbose=False, dry_run=False, force=False):
super().__init__(verbose, dry_run, force)
# target platform (.plat_name is consistent with 'bdist')
self.plat_name = None
@@ -334,7 +334,7 @@ class MSVCCompiler(CCompiler):
output_dir=None,
macros=None,
include_dirs=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
depends=None,
@@ -423,7 +423,7 @@ class MSVCCompiler(CCompiler):
return objects
def create_static_lib(
- self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+ self, objects, output_libname, output_dir=None, debug=False, target_lang=None
):
if not self.initialized:
self.initialize()
@@ -452,7 +452,7 @@ class MSVCCompiler(CCompiler):
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -551,7 +551,7 @@ class MSVCCompiler(CCompiler):
def library_option(self, lib):
return self.library_filename(lib)
- def find_library_file(self, dirs, lib, debug=0):
+ def find_library_file(self, dirs, lib, debug=False):
# Prefer a debugging library if found (and requested), but deal
# with it if we don't have one.
if debug:
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/__init__.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/__init__.py
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/__init__.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/__init__.py
new file mode 100644
index 0000000000..e7c0aa12ca
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/__init__.py
@@ -0,0 +1,15 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "24.0"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = "2014 %s" % __author__
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_elffile.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_elffile.py
new file mode 100644
index 0000000000..6fb19b30bb
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_elffile.py
@@ -0,0 +1,108 @@
+"""
+ELF file parser.
+
+This provides a class ``ELFFile`` that parses an ELF executable in a similar
+interface to ``ZipFile``. Only the read interface is implemented.
+
+Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+"""
+
+import enum
+import os
+import struct
+from typing import IO, Optional, Tuple
+
+
+class ELFInvalid(ValueError):
+ pass
+
+
+class EIClass(enum.IntEnum):
+ C32 = 1
+ C64 = 2
+
+
+class EIData(enum.IntEnum):
+ Lsb = 1
+ Msb = 2
+
+
+class EMachine(enum.IntEnum):
+ I386 = 3
+ S390 = 22
+ Arm = 40
+ X8664 = 62
+ AArc64 = 183
+
+
+class ELFFile:
+ """
+ Representation of an ELF executable.
+ """
+
+ def __init__(self, f: IO[bytes]) -> None:
+ self._f = f
+
+ try:
+ ident = self._read("16B")
+ except struct.error:
+ raise ELFInvalid("unable to parse identification")
+ magic = bytes(ident[:4])
+ if magic != b"\x7fELF":
+ raise ELFInvalid(f"invalid magic: {magic!r}")
+
+ self.capacity = ident[4] # Format for program header (bitness).
+ self.encoding = ident[5] # Data structure encoding (endianness).
+
+ try:
+ # e_fmt: Format for program header.
+ # p_fmt: Format for section header.
+ # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+ e_fmt, self._p_fmt, self._p_idx = {
+ (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)), # 32-bit LSB.
+ (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB.
+ (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)), # 64-bit LSB.
+ (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB.
+ }[(self.capacity, self.encoding)]
+ except KeyError:
+ raise ELFInvalid(
+ f"unrecognized capacity ({self.capacity}) or "
+ f"encoding ({self.encoding})"
+ )
+
+ try:
+ (
+ _,
+ self.machine, # Architecture type.
+ _,
+ _,
+ self._e_phoff, # Offset of program header.
+ _,
+ self.flags, # Processor-specific flags.
+ _,
+ self._e_phentsize, # Size of section.
+ self._e_phnum, # Number of sections.
+ ) = self._read(e_fmt)
+ except struct.error as e:
+ raise ELFInvalid("unable to parse machine and section information") from e
+
+ def _read(self, fmt: str) -> Tuple[int, ...]:
+ return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
+
+ @property
+ def interpreter(self) -> Optional[str]:
+ """
+ The path recorded in the ``PT_INTERP`` section header.
+ """
+ for index in range(self._e_phnum):
+ self._f.seek(self._e_phoff + self._e_phentsize * index)
+ try:
+ data = self._read(self._p_fmt)
+ except struct.error:
+ continue
+ if data[self._p_idx[0]] != 3: # Not PT_INTERP.
+ continue
+ self._f.seek(data[self._p_idx[1]])
+ return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
+ return None
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_manylinux.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_manylinux.py
new file mode 100644
index 0000000000..ad62505f3f
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_manylinux.py
@@ -0,0 +1,260 @@
+import collections
+import contextlib
+import functools
+import os
+import re
+import sys
+import warnings
+from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
+
+from ._elffile import EIClass, EIData, ELFFile, EMachine
+
+EF_ARM_ABIMASK = 0xFF000000
+EF_ARM_ABI_VER5 = 0x05000000
+EF_ARM_ABI_FLOAT_HARD = 0x00000400
+
+
+# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
+# as the type for `path` until then.
+@contextlib.contextmanager
+def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
+ try:
+ with open(path, "rb") as f:
+ yield ELFFile(f)
+ except (OSError, TypeError, ValueError):
+ yield None
+
+
+def _is_linux_armhf(executable: str) -> bool:
+ # hard-float ABI can be detected from the ELF header of the running
+ # process
+ # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
+ with _parse_elf(executable) as f:
+ return (
+ f is not None
+ and f.capacity == EIClass.C32
+ and f.encoding == EIData.Lsb
+ and f.machine == EMachine.Arm
+ and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
+ and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
+ )
+
+
+def _is_linux_i686(executable: str) -> bool:
+ with _parse_elf(executable) as f:
+ return (
+ f is not None
+ and f.capacity == EIClass.C32
+ and f.encoding == EIData.Lsb
+ and f.machine == EMachine.I386
+ )
+
+
+def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
+ if "armv7l" in archs:
+ return _is_linux_armhf(executable)
+ if "i686" in archs:
+ return _is_linux_i686(executable)
+ allowed_archs = {
+ "x86_64",
+ "aarch64",
+ "ppc64",
+ "ppc64le",
+ "s390x",
+ "loongarch64",
+ "riscv64",
+ }
+ return any(arch in allowed_archs for arch in archs)
+
+
+# If glibc ever changes its major version, we need to know what the last
+# minor version was, so we can build the complete list of all versions.
+# For now, guess what the highest minor version might be, assume it will
+# be 50 for testing. Once this actually happens, update the dictionary
+# with the actual value.
+_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
+
+
+class _GLibCVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+def _glibc_version_string_confstr() -> Optional[str]:
+ """
+ Primary implementation of glibc_version_string using os.confstr.
+ """
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+ # to be broken or missing. This strategy is used in the standard library
+ # platform module.
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+ try:
+ # Should be a string like "glibc 2.17".
+ version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
+ assert version_string is not None
+ _, version = version_string.rsplit()
+ except (AssertionError, AttributeError, OSError, ValueError):
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+ return None
+ return version
+
+
+def _glibc_version_string_ctypes() -> Optional[str]:
+ """
+ Fallback implementation of glibc_version_string using ctypes.
+ """
+ try:
+ import ctypes
+ except ImportError:
+ return None
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ #
+ # We must also handle the special case where the executable is not a
+ # dynamically linked executable. This can occur when using musl libc,
+ # for example. In this situation, dlopen() will error, leading to an
+ # OSError. Interestingly, at least in the case of musl, there is no
+ # errno set on the OSError. The single string argument used to construct
+ # OSError comes from libc itself and is therefore not portable to
+ # hard code here. In any case, failure to call dlopen() means we
+ # can proceed, so we bail on our attempt.
+ try:
+ process_namespace = ctypes.CDLL(None)
+ except OSError:
+ return None
+
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str: str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+def _glibc_version_string() -> Optional[str]:
+ """Returns glibc version string, or None if not using glibc."""
+ return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
+ """Parse glibc version.
+
+ We use a regexp instead of str.split because we want to discard any
+ random junk that might come after the minor version -- this might happen
+ in patched/forked versions of glibc (e.g. Linaro's version of glibc
+ uses version strings like "2.20-2014.11"). See gh-3588.
+ """
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+ if not m:
+ warnings.warn(
+ f"Expected glibc version with 2 components major.minor,"
+ f" got: {version_str}",
+ RuntimeWarning,
+ )
+ return -1, -1
+ return int(m.group("major")), int(m.group("minor"))
+
+
+@functools.lru_cache()
+def _get_glibc_version() -> Tuple[int, int]:
+ version_str = _glibc_version_string()
+ if version_str is None:
+ return (-1, -1)
+ return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
+ sys_glibc = _get_glibc_version()
+ if sys_glibc < version:
+ return False
+ # Check for presence of _manylinux module.
+ try:
+ import _manylinux
+ except ImportError:
+ return True
+ if hasattr(_manylinux, "manylinux_compatible"):
+ result = _manylinux.manylinux_compatible(version[0], version[1], arch)
+ if result is not None:
+ return bool(result)
+ return True
+ if version == _GLibCVersion(2, 5):
+ if hasattr(_manylinux, "manylinux1_compatible"):
+ return bool(_manylinux.manylinux1_compatible)
+ if version == _GLibCVersion(2, 12):
+ if hasattr(_manylinux, "manylinux2010_compatible"):
+ return bool(_manylinux.manylinux2010_compatible)
+ if version == _GLibCVersion(2, 17):
+ if hasattr(_manylinux, "manylinux2014_compatible"):
+ return bool(_manylinux.manylinux2014_compatible)
+ return True
+
+
+_LEGACY_MANYLINUX_MAP = {
+ # CentOS 7 w/ glibc 2.17 (PEP 599)
+ (2, 17): "manylinux2014",
+ # CentOS 6 w/ glibc 2.12 (PEP 571)
+ (2, 12): "manylinux2010",
+ # CentOS 5 w/ glibc 2.5 (PEP 513)
+ (2, 5): "manylinux1",
+}
+
+
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+ """Generate manylinux tags compatible to the current platform.
+
+ :param archs: Sequence of compatible architectures.
+ The first one shall be the closest to the actual architecture and be the part of
+ platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+ The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+ be manylinux-compatible.
+
+ :returns: An iterator of compatible manylinux tags.
+ """
+ if not _have_compatible_abi(sys.executable, archs):
+ return
+ # Oldest glibc to be supported regardless of architecture is (2, 17).
+ too_old_glibc2 = _GLibCVersion(2, 16)
+ if set(archs) & {"x86_64", "i686"}:
+ # On x86/i686 also oldest glibc to be supported is (2, 5).
+ too_old_glibc2 = _GLibCVersion(2, 4)
+ current_glibc = _GLibCVersion(*_get_glibc_version())
+ glibc_max_list = [current_glibc]
+ # We can assume compatibility across glibc major versions.
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
+ #
+ # Build a list of maximum glibc versions so that we can
+ # output the canonical list of all glibc from current_glibc
+ # down to too_old_glibc2, including all intermediary versions.
+ for glibc_major in range(current_glibc.major - 1, 1, -1):
+ glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
+ glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
+ for arch in archs:
+ for glibc_max in glibc_max_list:
+ if glibc_max.major == too_old_glibc2.major:
+ min_minor = too_old_glibc2.minor
+ else:
+ # For other glibc major versions oldest supported is (x, 0).
+ min_minor = -1
+ for glibc_minor in range(glibc_max.minor, min_minor, -1):
+ glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+ tag = "manylinux_{}_{}".format(*glibc_version)
+ if _is_compatible(arch, glibc_version):
+ yield f"{tag}_{arch}"
+ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+ if glibc_version in _LEGACY_MANYLINUX_MAP:
+ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+ if _is_compatible(arch, glibc_version):
+ yield f"{legacy_tag}_{arch}"
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_musllinux.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_musllinux.py
new file mode 100644
index 0000000000..86419df9d7
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_musllinux.py
@@ -0,0 +1,83 @@
+"""PEP 656 support.
+
+This module implements logic to detect if the currently running Python is
+linked against musl, and what musl version is used.
+"""
+
+import functools
+import re
+import subprocess
+import sys
+from typing import Iterator, NamedTuple, Optional, Sequence
+
+from ._elffile import ELFFile
+
+
class _MuslVersion(NamedTuple):
    # Parsed musl runtime version, e.g. (1, 2) for a loader reporting
    # "Version 1.2.2" (the patch component is ignored by the parser).
    major: int
    minor: int
+
+
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
    """Extract the musl version from the loader's stderr output.

    Returns ``None`` when the output does not look like musl's banner
    (fewer than two non-blank lines, a first line not starting with
    ``musl``, or a second line without a ``Version X.Y`` prefix).
    """
    lines = [line.strip() for line in output.splitlines() if line.strip()]
    if len(lines) < 2 or not lines[0].startswith("musl"):
        return None
    match = re.match(r"Version (\d+)\.(\d+)", lines[1])
    if match is None:
        return None
    return _MuslVersion(major=int(match.group(1)), minor=int(match.group(2)))
+
+
@functools.lru_cache()
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
    """Detect currently-running musl runtime version.

    This is done by checking the specified executable's dynamic linking
    information, and invoking the loader to parse its output for a version
    string. If the loader is musl, the output would be something like::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader

    :param executable: Path to an ELF binary, typically ``sys.executable``.
    :returns: The detected musl version, or ``None`` if the file cannot be
        read, is not a valid ELF image, or is not linked against musl.
    """
    try:
        with open(executable, "rb") as f:
            ld = ELFFile(f).interpreter
    except (OSError, TypeError, ValueError):
        # Unreadable path, or not a parseable ELF file.
        return None
    if ld is None or "musl" not in ld:
        # Statically linked, or dynamically linked against a non-musl libc.
        return None
    # Invoking the musl loader with no arguments prints its banner to stderr.
    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
    return _parse_musl_version(proc.stderr)
+
+
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate musllinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be musllinux-compatible.

    :returns: An iterator of compatible musllinux tags.
    """
    musl_version = _get_musl_version(sys.executable)
    if musl_version is None:
        # Interpreter is not dynamically linked against musl: no tags at all.
        return
    for arch in archs:
        # Newest supported minor first, counting down to _0.
        minor = musl_version.minor
        while minor >= 0:
            yield f"musllinux_{musl_version.major}_{minor}_{arch}"
            minor -= 1
+
+
if __name__ == "__main__":  # pragma: no cover
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    # platform_tags() takes a *sequence* of architecture strings. Passing the
    # bare string would iterate over its characters and print one bogus tag
    # per character (e.g. "musllinux_1_2_x"), so wrap the arch in a list.
    for t in platform_tags([re.sub(r"[.-]", "_", plat.split("-", 1)[-1])]):
        print(t, end="\n ")
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_parser.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_parser.py
new file mode 100644
index 0000000000..684df75457
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_parser.py
@@ -0,0 +1,356 @@
+"""Handwritten parser of dependency specifiers.
+
+The docstring for each __parse_* function contains ENBF-inspired grammar representing
+the implementation.
+"""
+
+import ast
+from typing import Any, List, NamedTuple, Optional, Tuple, Union
+
+from ._tokenizer import DEFAULT_RULES, Tokenizer
+
+
class Node:
    """Base class for a single node of a parsed requirement/marker tree."""

    def __init__(self, value: str) -> None:
        self.value = value

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return f"<{type(self).__name__}('{self.value}')>"

    def serialize(self) -> str:
        # Each concrete subclass defines how it renders back to source text.
        raise NotImplementedError
+
+
class Variable(Node):
    # An environment-marker variable name, e.g. ``python_version``.
    def serialize(self) -> str:
        """Render the variable back to source form (its bare name)."""
        return str(self)
+
+
class Value(Node):
    # A literal value, e.g. the string compared against a marker variable.
    def serialize(self) -> str:
        """Render the value back to source form, re-adding double quotes."""
        return f'"{self}"'
+
+
class Op(Node):
    # A comparison or containment operator, e.g. ``==`` or ``not in``.
    def serialize(self) -> str:
        """Render the operator back to its source form."""
        return str(self)
+
+
# Type aliases describing the shape of a parsed marker tree.
MarkerVar = Union[Variable, Value]
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
# mypy does not support recursive type definition
# https://github.com/python/mypy/issues/731
MarkerAtom = Any
MarkerList = List[Any]
+
+
class ParsedRequirement(NamedTuple):
    """Structured result of parsing a dependency specifier string."""

    name: str  # distribution name
    url: str  # direct-reference URL ("" when not given)
    extras: List[str]  # requested extras, in source order
    specifier: str  # raw version-specifier text ("" when not given)
    marker: Optional[MarkerList]  # parsed environment marker, if any
+
+
+# --------------------------------------------------------------------------------------
+# Recursive descent parser for dependency specifier
+# --------------------------------------------------------------------------------------
def parse_requirement(source: str) -> ParsedRequirement:
    """Parse a dependency specifier string into a ParsedRequirement."""
    tokenizer = Tokenizer(source, rules=DEFAULT_RULES)
    return _parse_requirement(tokenizer)
+
+
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
    """
    requirement = WS? IDENTIFIER WS? extras WS? requirement_details
    """
    tokenizer.consume("WS")

    # A specifier must begin with the distribution name.
    name_token = tokenizer.expect(
        "IDENTIFIER", expected="package name at the start of dependency specifier"
    )
    name = name_token.text
    tokenizer.consume("WS")

    extras = _parse_extras(tokenizer)
    tokenizer.consume("WS")

    url, specifier, marker = _parse_requirement_details(tokenizer)
    # Any trailing, unconsumed input is a syntax error.
    tokenizer.expect("END", expected="end of dependency specifier")

    return ParsedRequirement(name, url, extras, specifier, marker)
+
+
def _parse_requirement_details(
    tokenizer: Tokenizer,
) -> Tuple[str, str, Optional[MarkerList]]:
    """
    requirement_details = AT URL (WS requirement_marker?)?
                        | specifier WS? (requirement_marker)?
    """

    specifier = ""
    url = ""
    marker = None

    if tokenizer.check("AT"):
        # Direct reference form: "name @ <url> [; marker]".
        tokenizer.read()
        tokenizer.consume("WS")

        url_start = tokenizer.position
        url = tokenizer.expect("URL", expected="URL after @").text
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        # The URL token greedily consumes non-whitespace, so a marker must be
        # separated from it by explicit whitespace.
        tokenizer.expect("WS", expected="whitespace after URL")

        # The input might end after whitespace.
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer, span_start=url_start, after="URL and whitespace"
        )
    else:
        # Plain form: "name [version specifier] [; marker]".
        specifier_start = tokenizer.position
        specifier = _parse_specifier(tokenizer)
        tokenizer.consume("WS")

        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer,
            span_start=specifier_start,
            after=(
                "version specifier"
                if specifier
                else "name and no valid version specifier"
            ),
        )

    return (url, specifier, marker)
+
+
def _parse_requirement_marker(
    tokenizer: Tokenizer, *, span_start: int, after: str
) -> MarkerList:
    """
    requirement_marker = SEMICOLON marker WS?

    :param span_start: Start offset used to highlight the offending span in
        the error message when the semicolon is missing.
    :param after: Human-readable description of what preceded the marker.
    """

    if not tokenizer.check("SEMICOLON"):
        tokenizer.raise_syntax_error(
            f"Expected end or semicolon (after {after})",
            span_start=span_start,
        )
    tokenizer.read()

    marker = _parse_marker(tokenizer)
    tokenizer.consume("WS")

    return marker
+
+
def _parse_extras(tokenizer: Tokenizer) -> List[str]:
    """
    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
    """
    # Extras are optional; nothing to do unless a "[" follows.
    if not tokenizer.check("LEFT_BRACKET", peek=True):
        return []

    with tokenizer.enclosing_tokens(
        "LEFT_BRACKET",
        "RIGHT_BRACKET",
        around="extras",
    ):
        tokenizer.consume("WS")
        extras = _parse_extras_list(tokenizer)
        tokenizer.consume("WS")

    return extras
+
+
def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
    """
    extras_list = identifier (wsp* ',' wsp* identifier)*
    """
    extras: List[str] = []

    # An empty extras list ("[]") is allowed.
    if not tokenizer.check("IDENTIFIER"):
        return extras

    extras.append(tokenizer.read().text)

    while True:
        tokenizer.consume("WS")
        if tokenizer.check("IDENTIFIER", peek=True):
            # Two adjacent names with no separating comma, e.g. "[a b]".
            tokenizer.raise_syntax_error("Expected comma between extra names")
        elif not tokenizer.check("COMMA"):
            break

        tokenizer.read()
        tokenizer.consume("WS")

        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
        extras.append(extra_token.text)

    return extras
+
+
def _parse_specifier(tokenizer: Tokenizer) -> str:
    """
    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
              | WS? version_many WS?
    """
    # Parentheses are optional: enclosing_tokens only requires the closing
    # parenthesis when an opening one was actually consumed.
    with tokenizer.enclosing_tokens(
        "LEFT_PARENTHESIS",
        "RIGHT_PARENTHESIS",
        around="version specifier",
    ):
        tokenizer.consume("WS")
        parsed_specifiers = _parse_version_many(tokenizer)
        tokenizer.consume("WS")

    return parsed_specifiers
+
+
def _parse_version_many(tokenizer: Tokenizer) -> str:
    """
    version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?

    Returns the raw, concatenated specifier text (including commas).
    """
    parsed_specifiers = ""
    while tokenizer.check("SPECIFIER"):
        span_start = tokenizer.position
        parsed_specifiers += tokenizer.read().text
        # Reject trailing constructs that are only valid after == / != and
        # which the SPECIFIER rule therefore did not consume.
        if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                ".* suffix can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position + 1,
            )
        if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                "Local version label can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position,
            )
        tokenizer.consume("WS")
        if not tokenizer.check("COMMA"):
            break
        # Keep the comma in the returned text.
        parsed_specifiers += tokenizer.read().text
        tokenizer.consume("WS")

    return parsed_specifiers
+
+
+# --------------------------------------------------------------------------------------
+# Recursive descent parser for marker expression
+# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
    """Parse an environment-marker expression string into a marker list."""
    tokenizer = Tokenizer(source, rules=DEFAULT_RULES)
    return _parse_full_marker(tokenizer)
+
+
def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
    """Parse a marker and require that the whole input is consumed."""
    marker = _parse_marker(tokenizer)
    tokenizer.expect("END", expected="end of marker expression")
    return marker
+
+
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
    """
    marker = marker_atom (BOOLOP marker_atom)+

    Produces a flat list alternating atoms with "and"/"or" strings.
    """
    expression = [_parse_marker_atom(tokenizer)]
    while tokenizer.check("BOOLOP"):
        boolop_token = tokenizer.read()
        expression.append(boolop_token.text)
        expression.append(_parse_marker_atom(tokenizer))
    return expression
+
+
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
    """
    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                | WS? marker_item WS?
    """

    tokenizer.consume("WS")
    if tokenizer.check("LEFT_PARENTHESIS", peek=True):
        # Parenthesized sub-expression: recurse into the full marker grammar;
        # the nested list preserves the grouping in the result.
        with tokenizer.enclosing_tokens(
            "LEFT_PARENTHESIS",
            "RIGHT_PARENTHESIS",
            around="marker expression",
        ):
            tokenizer.consume("WS")
            marker: MarkerAtom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    else:
        marker = _parse_marker_item(tokenizer)
    tokenizer.consume("WS")
    return marker
+
+
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
    """
    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
    """
    tokenizer.consume("WS")
    marker_var_left = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    marker_op = _parse_marker_op(tokenizer)
    tokenizer.consume("WS")
    marker_var_right = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    # A single comparison triple: (lhs, operator, rhs).
    return (marker_var_left, marker_op, marker_var_right)
+
+
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
    """
    marker_var = VARIABLE | QUOTED_STRING
    """
    if tokenizer.check("VARIABLE"):
        # Dotted spellings (e.g. os.name) are normalized to underscores.
        return process_env_var(tokenizer.read().text.replace(".", "_"))
    elif tokenizer.check("QUOTED_STRING"):
        return process_python_str(tokenizer.read().text)
    else:
        tokenizer.raise_syntax_error(
            message="Expected a marker variable or quoted string"
        )
+
+
def process_env_var(env_var: str) -> Variable:
    """Wrap an environment-marker variable name as a Variable node.

    The legacy spelling ``python_implementation`` is folded into
    ``platform_python_implementation``.
    """
    if env_var in {"platform_python_implementation", "python_implementation"}:
        return Variable("platform_python_implementation")
    return Variable(env_var)
+
+
def process_python_str(python_str: str) -> Value:
    """Evaluate a quoted Python string literal and wrap it as a Value node."""
    literal = ast.literal_eval(python_str)
    return Value(str(literal))
+
+
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
    """
    marker_op = IN | NOT IN | OP
    """
    if tokenizer.check("IN"):
        tokenizer.read()
        return Op("in")
    elif tokenizer.check("NOT"):
        # "not" must be followed by whitespace and then "in".
        tokenizer.read()
        tokenizer.expect("WS", expected="whitespace after 'not'")
        tokenizer.expect("IN", expected="'in' after 'not'")
        return Op("not in")
    elif tokenizer.check("OP"):
        return Op(tokenizer.read().text)
    else:
        # raise_syntax_error always raises; "return" keeps mypy satisfied.
        return tokenizer.raise_syntax_error(
            "Expected marker operator, one of "
            "<=, <, !=, ==, >=, >, ~=, ===, in, not in"
        )
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_structures.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_structures.py
new file mode 100644
index 0000000000..90a6465f96
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_structures.py
@@ -0,0 +1,61 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+
class InfinityType:
    """Sentinel that orders after every other object.

    Instances compare greater than (never less than) anything, are equal only
    to other ``InfinityType`` instances, and negate to ``NegativeInfinity``.
    """

    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


# Module-level singleton; compare against this instance.
Infinity = InfinityType()
+
+
class NegativeInfinityType:
    """Sentinel that orders before every other object.

    Instances compare less than (never greater than) anything, are equal only
    to other ``NegativeInfinityType`` instances, and negate to ``Infinity``.
    """

    def __repr__(self) -> str:
        return "-Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return True

    def __le__(self, other: object) -> bool:
        return True

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return False

    def __ge__(self, other: object) -> bool:
        return False

    def __neg__(self: object) -> InfinityType:
        return Infinity


# Module-level singleton; compare against this instance.
NegativeInfinity = NegativeInfinityType()
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_tokenizer.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_tokenizer.py
new file mode 100644
index 0000000000..dd0d648d49
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/_tokenizer.py
@@ -0,0 +1,192 @@
+import contextlib
+import re
+from dataclasses import dataclass
+from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
+
+from .specifiers import Specifier
+
+
@dataclass
class Token:
    # A single lexed token: the rule name that matched, the matched text,
    # and the offset of the match within the source string.
    name: str
    text: str
    position: int
+
+
class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: Tuple[int, int],
    ) -> None:
        self.span = span
        self.message = message
        self.source = source
        super().__init__()

    def __str__(self) -> str:
        # Underline the offending span of the source with "~" and point at
        # its end with "^".
        start, end = self.span
        underline = " " * start + "~" * (end - start) + "^"
        return "\n ".join([self.message, self.source, underline])
+
+
# Token rules used by the requirement and marker parsers. Order is irrelevant:
# the Tokenizer matches a named rule on demand at the current position.
DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
    "LEFT_PARENTHESIS": r"\(",
    "RIGHT_PARENTHESIS": r"\)",
    "LEFT_BRACKET": r"\[",
    "RIGHT_BRACKET": r"\]",
    "SEMICOLON": r";",
    "COMMA": r",",
    # A single- or double-quoted string with no escapes.
    "QUOTED_STRING": re.compile(
        r"""
        (
        ('[^']*')
        |
        ("[^"]*")
        )
        """,
        re.VERBOSE,
    ),
    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
    "BOOLOP": r"\b(or|and)\b",
    "IN": r"\bin\b",
    "NOT": r"\bnot\b",
    # The closed set of PEP 508 marker variable names (dotted legacy
    # spellings included; the parser normalizes "." to "_").
    "VARIABLE": re.compile(
        r"""
        \b(
        python_version
        |python_full_version
        |os[._]name
        |sys[._]platform
        |platform_(release|system)
        |platform[._](version|machine|python_implementation)
        |python_implementation
        |implementation_(name|version)
        |extra
        )\b
        """,
        re.VERBOSE,
    ),
    # Reuse the operator/version fragments from Specifier so both parsers
    # accept exactly the same specifier syntax.
    "SPECIFIER": re.compile(
        Specifier._operator_regex_str + Specifier._version_regex_str,
        re.VERBOSE | re.IGNORECASE,
    ),
    "AT": r"\@",
    "URL": r"[^ \t]+",
    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
    "VERSION_PREFIX_TRAIL": r"\.\*",
    "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
    "WS": r"[ \t]+",
    "END": r"$",
}
+
+
class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: "Dict[str, Union[str, re.Pattern[str]]]",
    ) -> None:
        self.source = source
        # Pre-compile every rule; re.compile() passes through already-compiled
        # patterns unchanged.
        self.rules: Dict[str, re.Pattern[str]] = {
            name: re.compile(pattern) for name, pattern in rules.items()
        }
        # Token loaded by a successful non-peek check(); cleared by read().
        self.next_token: Optional[Token] = None
        # Current scan offset into ``source``.
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        expression = self.rules[name]

        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        On success the token is read (consumed) and returned.
        """
        if not self.check(name):
            # raise_syntax_error() always raises; the outer ``raise`` is never
            # reached but keeps the control flow explicit.
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: Optional[int] = None,
        span_end: Optional[int] = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position."""
        # Both ends of the span default to the current position.
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Iterator[None]:
        """Parse an optional ``open_token`` ... ``close_token`` pair.

        The opening token is optional; the closing token is only required
        (and consumed) when the opening one was actually present.
        """
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )

        self.read()
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/markers.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/markers.py
new file mode 100644
index 0000000000..8b98fca723
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/markers.py
@@ -0,0 +1,252 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import operator
+import os
+import platform
+import sys
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
+from ._parser import (
+ MarkerAtom,
+ MarkerList,
+ Op,
+ Value,
+ Variable,
+ parse_marker as _parse_marker,
+)
+from ._tokenizer import ParserSyntaxError
+from .specifiers import InvalidSpecifier, Specifier
+from .utils import canonicalize_name
+
+__all__ = [
+ "InvalidMarker",
+ "UndefinedComparison",
+ "UndefinedEnvironmentName",
+ "Marker",
+ "default_environment",
+]
+
+Operator = Callable[[str, str], bool]
+
+
class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.

    Raised by :class:`Marker` when the marker string cannot be parsed.
    """
+
+
class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.

    Raised during evaluation when a marker uses an operator that is not
    defined for its operands.
    """
+
+
class UndefinedEnvironmentName(ValueError):
    """
    A name was used that does not exist inside of the environment.
    """
+
+
def _normalize_extra_values(results: Any) -> Any:
    """
    Normalize extra values.

    If the first parsed item compares the ``extra`` variable against a
    literal, replace the literal with its canonicalized form so that extra
    names compare canonically.
    """
    if isinstance(results[0], tuple):
        lhs, op, rhs = results[0]
        if isinstance(lhs, Variable) and lhs.value == "extra":
            normalized_extra = canonicalize_name(rhs.value)
            rhs = Value(normalized_extra)
        elif isinstance(rhs, Variable) and rhs.value == "extra":
            normalized_extra = canonicalize_name(lhs.value)
            lhs = Value(normalized_extra)
        results[0] = lhs, op, rhs
    return results
+
+
def _format_marker(
    marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
) -> str:
    """Render a parsed marker structure back into a marker string."""

    assert isinstance(marker, (list, tuple, str))

    if isinstance(marker, str):
        return marker

    if isinstance(marker, tuple):
        # A single comparison: serialize each node, space separated.
        return " ".join(node.serialize() for node in marker)

    # A [[...]] wrapper is a single-item list whose item is itself a
    # list/tuple; unwrap it so we don't emit extraneous parentheses on the
    # outside.
    if len(marker) == 1 and isinstance(marker[0], (list, tuple)):
        return _format_marker(marker[0])

    rendered = " ".join(_format_marker(part, first=False) for part in marker)
    # Only nested (non-outermost) lists get parenthesized.
    return rendered if first else f"({rendered})"
+
+
# String-comparison fallbacks used by _eval_op() when the right-hand side is
# not a valid version specifier.
_operators: Dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
+
+
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    """Evaluate one marker comparison between two string operands.

    If ``op`` + ``rhs`` forms a valid version specifier, delegate the
    comparison to Specifier (pre-releases allowed); otherwise fall back to
    plain string operators.

    :raises UndefinedComparison: if the operator has no string fallback
        (e.g. ``~=`` on a non-version operand).
    """
    try:
        spec = Specifier("".join([op.serialize(), rhs]))
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs, prereleases=True)

    oper: Optional[Operator] = _operators.get(op.serialize())
    if oper is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")

    return oper(lhs, rhs)
+
+
def _normalize(*values: str, key: str) -> Tuple[str, ...]:
    """Normalize operand values before comparison.

    PEP 685 -- Comparison of extra names for optional distribution
    dependencies (https://peps.python.org/pep-0685/) requires tools to
    normalize extra names using the PEP 503 semantics before comparing.
    Other environment markers have no such standard and pass through
    unchanged.
    """
    if key != "extra":
        return values
    return tuple(canonicalize_name(v) for v in values)
+
+
def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
    """Evaluate a parsed marker list against *environment*.

    ``markers`` alternates atoms with "and"/"or" strings. Consecutive atoms
    joined by "and" accumulate into one group; "or" starts a new group, so
    the result is an OR over groups of AND-ed atoms ("and" binds tighter).
    """
    groups: List[List[bool]] = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, str))

        if isinstance(marker, list):
            # Parenthesized sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker

            # Whichever side is the Variable is looked up in the environment;
            # the other side contributes its literal value.
            if isinstance(lhs, Variable):
                environment_key = lhs.value
                lhs_value = environment[environment_key]
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                environment_key = rhs.value
                rhs_value = environment[environment_key]

            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])

    return any(all(item) for item in groups)
+
+
def format_full_version(info: "sys._version_info") -> str:
    """Format a version_info-like object as a full version string.

    Produces "major.minor.micro", with the first letter of the release
    level and the serial appended for non-final releases (e.g. "3.12.0b4").
    """
    version = f"{info.major}.{info.minor}.{info.micro}"
    if info.releaselevel != "final":
        version += info.releaselevel[0] + str(info.serial)
    return version
+
+
def default_environment() -> Dict[str, str]:
    """Return the PEP 508 marker environment of the running interpreter."""
    implementation_version = format_full_version(sys.implementation.version)
    return dict(
        implementation_name=sys.implementation.name,
        implementation_version=implementation_version,
        os_name=os.name,
        platform_machine=platform.machine(),
        platform_release=platform.release(),
        platform_system=platform.system(),
        platform_version=platform.version(),
        python_full_version=platform.python_version(),
        platform_python_implementation=platform.python_implementation(),
        python_version=".".join(platform.python_version_tuple()[:2]),
        sys_platform=sys.platform,
    )
+
+
class Marker:
    """A parsed PEP 508 environment marker that can be evaluated."""

    def __init__(self, marker: str) -> None:
        # Note: We create a Marker object without calling this constructor in
        # packaging.requirements.Requirement. If any additional logic is
        # added here, make sure to mirror/adapt Requirement.
        try:
            self._markers = _normalize_extra_values(_parse_marker(marker))
            # The attribute `_markers` can be described in terms of a recursive type:
            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
            #
            # For example, the following expression:
            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
            #
            # is parsed into:
            # [
            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
            #     'or',
            #     [
            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
            #         'and',
            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
            #     ]
            # ]
        except ParserSyntaxError as e:
            raise InvalidMarker(str(e)) from e

    def __str__(self) -> str:
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def __hash__(self) -> int:
        # Hash on the canonical string form, consistent with __eq__.
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Marker):
            return NotImplemented

        return str(self) == str(other)

    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        # "extra" is not part of the interpreter environment; default it to
        # the empty string so markers mentioning it can still evaluate.
        current_environment["extra"] = ""
        if environment is not None:
            current_environment.update(environment)
            # The API used to allow setting extra to None. We need to handle this
            # case for backwards compatibility.
            if current_environment["extra"] is None:
                current_environment["extra"] = ""

        return _evaluate_markers(self._markers, current_environment)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/metadata.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/metadata.py
new file mode 100644
index 0000000000..fb27493079
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/metadata.py
@@ -0,0 +1,825 @@
+import email.feedparser
+import email.header
+import email.message
+import email.parser
+import email.policy
+import sys
+import typing
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generic,
+ List,
+ Optional,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+from . import requirements, specifiers, utils, version as version_module
+
T = typing.TypeVar("T")
# Literal and TypedDict are in the stdlib from Python 3.8; on older versions
# fall back to typing_extensions, and finally to inert stubs so this module
# can still be imported without the backport installed.
if sys.version_info[:2] >= (3, 8):  # pragma: no cover
    from typing import Literal, TypedDict
else:  # pragma: no cover
    if typing.TYPE_CHECKING:
        from typing_extensions import Literal, TypedDict
    else:
        try:
            from typing_extensions import Literal, TypedDict
        except ImportError:

            class Literal:
                # Stub: only needs to tolerate being subclassed.
                def __init_subclass__(*_args, **_kwargs):
                    pass

            class TypedDict:
                # Stub: lets RawMetadata below be defined without the backport.
                def __init_subclass__(*_args, **_kwargs):
                    pass
+
+
try:
    ExceptionGroup
except NameError:  # pragma: no cover
    # Python < 3.11: provide a minimal stand-in.

    class ExceptionGroup(Exception):  # noqa: N818
        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.

        If :external:exc:`ExceptionGroup` is already defined by Python itself,
        that version is used instead.
        """

        message: str
        exceptions: List[Exception]

        def __init__(self, message: str, exceptions: List[Exception]) -> None:
            self.message = message
            self.exceptions = exceptions

        def __repr__(self) -> str:
            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"

else:  # pragma: no cover
    # Re-bind the builtin so it is importable from this module either way.
    ExceptionGroup = ExceptionGroup
+
+
class InvalidMetadata(ValueError):
    """A metadata field contains invalid data."""

    field: str
    """The name of the field that contains invalid data."""

    def __init__(self, field: str, message: str) -> None:
        super().__init__(message)
        self.field = field
+
+
+# The RawMetadata class attempts to make as few assumptions about the underlying
+# serialization formats as possible. The idea is that as long as a serialization
+# formats offer some very basic primitives in *some* way then we can support
+# serializing to and from that format.
# The RawMetadata class attempts to make as few assumptions about the underlying
# serialization formats as possible. The idea is that as long as a serialization
# formats offer some very basic primitives in *some* way then we can support
# serializing to and from that format.
class RawMetadata(TypedDict, total=False):
    """A dictionary of raw core metadata.

    Each field in core metadata maps to a key of this dictionary (when data is
    provided). The key is lower-case and underscores are used instead of dashes
    compared to the equivalent core metadata field. Any core metadata field that
    can be specified multiple times or can hold multiple values in a single
    field have a key with a plural name. See :class:`Metadata` whose attributes
    match the keys of this dictionary.

    Core metadata fields that can be specified multiple times are stored as a
    list or dict depending on which is appropriate for the field. Any fields
    which hold multiple values in a single field are stored as a list.

    """

    # Metadata 1.0 - PEP 241
    metadata_version: str
    name: str
    version: str
    platforms: List[str]
    summary: str
    description: str
    keywords: List[str]
    home_page: str
    author: str
    author_email: str
    license: str

    # Metadata 1.1 - PEP 314
    supported_platforms: List[str]
    download_url: str
    classifiers: List[str]
    requires: List[str]
    provides: List[str]
    obsoletes: List[str]

    # Metadata 1.2 - PEP 345
    maintainer: str
    maintainer_email: str
    requires_dist: List[str]
    provides_dist: List[str]
    obsoletes_dist: List[str]
    requires_python: str
    requires_external: List[str]
    project_urls: Dict[str, str]

    # Metadata 2.0
    # PEP 426 attempted to completely revamp the metadata format
    # but got stuck without ever being able to build consensus on
    # it and ultimately ended up withdrawn.
    #
    # However, a number of tools had started emitting METADATA with
    # `2.0` Metadata-Version, so for historical reasons, this version
    # was skipped.

    # Metadata 2.1 - PEP 566
    description_content_type: str
    provides_extra: List[str]

    # Metadata 2.2 - PEP 643
    dynamic: List[str]

    # Metadata 2.3 - PEP 685
    # No new fields were added in PEP 685, just some edge cases were
    # tightened up to provide better interoperability.
+
+
# RawMetadata keys that hold a single string value.
_STRING_FIELDS = {
    "author",
    "author_email",
    "description",
    "description_content_type",
    "download_url",
    "home_page",
    "license",
    "maintainer",
    "maintainer_email",
    "metadata_version",
    "name",
    "requires_python",
    "summary",
    "version",
}

# RawMetadata keys that hold a list of values.
_LIST_FIELDS = {
    "classifiers",
    "dynamic",
    "obsoletes",
    "obsoletes_dist",
    "platforms",
    "provides",
    "provides_dist",
    "provides_extra",
    "requires",
    "requires_dist",
    "requires_external",
    "supported_platforms",
}

# RawMetadata keys that hold a mapping (label -> value).
_DICT_FIELDS = {
    "project_urls",
}
+
+
def _parse_keywords(data: str) -> List[str]:
    """Split a comma-separated keywords string into a list of keywords."""
    return [keyword.strip() for keyword in data.split(",")]
+
+
def _parse_project_urls(data: List[str]) -> Dict[str, str]:
    """Parse a list of label/URL string pairings separated by a comma.

    :raises KeyError: if the same label occurs more than once.
    """
    urls = {}
    for pair in data:
        # We want to do *something* reasonable with malformed data.
        #
        # When there is no comma at all, the whole entry becomes the label and
        # the URL is the empty string: the missing value has to be an empty
        # str in a dict[str, str], and making the *key* empty instead would
        # let multiple malformed entries silently overwrite each other.
        label, _, url = pair.partition(",")
        label = label.strip()
        url = url.strip()

        # TODO: The spec doesn't say anything about if the keys should be
        #       considered case sensitive or not... logically they should
        #       be case-preserving and case-insensitive, but doing that
        #       would open up more cases where we might have duplicate
        #       entries.
        if label in urls:
            # A repeated label is ambiguous with no "right" answer, so the
            # caller treats the whole field as unparseable.
            raise KeyError("duplicate labels in project urls")
        urls[label] = url

    return urls
+
+
+def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
+ """Get the body of the message."""
+ # If our source is a str, then our caller has managed encodings for us,
+ # and we don't need to deal with it.
+ if isinstance(source, str):
+ payload: str = msg.get_payload()
+ return payload
+ # If our source is a bytes, then we're managing the encoding and we need
+ # to deal with it.
+ else:
+ bpayload: bytes = msg.get_payload(decode=True)
+ try:
+ return bpayload.decode("utf8", "strict")
+ except UnicodeDecodeError:
+ raise ValueError("payload in an invalid encoding")
+
+
+# The various parse_FORMAT functions here are intended to be as lenient as
+# possible in their parsing, while still returning a correctly typed
+# RawMetadata.
+#
+# To aid in this, we also generally want to do as little touching of the
+# data as possible, except where there are possibly some historic holdovers
+# that make valid data awkward to work with.
+#
+# While this is a lower level, intermediate format than our ``Metadata``
+# class, some light touch ups can make a massive difference in usability.
+
# Map METADATA fields to RawMetadata.
# Keys are the (lower-cased) email header names as they appear in a METADATA
# file; values are the corresponding RawMetadata attribute names.
_EMAIL_TO_RAW_MAPPING = {
    "author": "author",
    "author-email": "author_email",
    "classifier": "classifiers",
    "description": "description",
    "description-content-type": "description_content_type",
    "download-url": "download_url",
    "dynamic": "dynamic",
    "home-page": "home_page",
    "keywords": "keywords",
    "license": "license",
    "maintainer": "maintainer",
    "maintainer-email": "maintainer_email",
    "metadata-version": "metadata_version",
    "name": "name",
    "obsoletes": "obsoletes",
    "obsoletes-dist": "obsoletes_dist",
    "platform": "platforms",
    "project-url": "project_urls",
    "provides": "provides",
    "provides-dist": "provides_dist",
    "provides-extra": "provides_extra",
    "requires": "requires",
    "requires-dist": "requires_dist",
    "requires-external": "requires_external",
    "requires-python": "requires_python",
    "summary": "summary",
    "supported-platform": "supported_platforms",
    "version": "version",
}
# Inverse mapping: RawMetadata attribute name -> METADATA header name.
_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
+
+
def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
    """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).

    This function returns a two-item tuple of dicts. The first dict is of
    recognized fields from the core metadata specification. Fields that can be
    parsed and translated into Python's built-in types are converted
    appropriately. All other fields are left as-is. Fields that are allowed to
    appear multiple times are stored as lists.

    The second dict contains all other fields from the metadata. This includes
    any unrecognized fields. It also includes any fields which are expected to
    be parsed into a built-in type but were not formatted appropriately. Finally,
    any fields that are expected to appear only once but are repeated are
    included in this dict.

    """
    raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {}
    unparsed: Dict[str, List[str]] = {}

    if isinstance(data, str):
        parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
    else:
        parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)

    # We have to wrap parsed.keys() in a set, because in the case of multiple
    # values for a key (a list), the key will appear multiple times in the
    # list of keys, but we're avoiding that by using get_all().
    for name in frozenset(parsed.keys()):
        # Header names in RFC are case insensitive, so we'll normalize to all
        # lower case to make comparisons easier.
        name = name.lower()

        # We use get_all() here, even for fields that aren't multiple use,
        # because otherwise someone could have e.g. two Name fields, and we
        # would just silently ignore it rather than doing something about it.
        headers = parsed.get_all(name) or []

        # The way the email module works when parsing bytes is that it
        # unconditionally decodes the bytes as ascii using the surrogateescape
        # handler. When you pull that data back out (such as with get_all()),
        # it looks to see if the str has any surrogate escapes, and if it does
        # it wraps it in a Header object instead of returning the string.
        #
        # As such, we'll look for those Header objects, and fix up the encoding.
        value = []
        # Flag if we have run into any issues processing the headers, thus
        # signalling that the data belongs in 'unparsed'.
        valid_encoding = True
        for h in headers:
            # It's unclear if this can return more types than just a Header or
            # a str, so we'll just assert here to make sure.
            assert isinstance(h, (email.header.Header, str))

            # If it's a header object, we need to do our little dance to get
            # the real data out of it. In cases where there is invalid data
            # we're going to end up with mojibake, but there's no obvious, good
            # way around that without reimplementing parts of the Header object
            # ourselves.
            #
            # That should be fine since, if mojibake happens, this key is
            # going into the unparsed dict anyways.
            if isinstance(h, email.header.Header):
                # The Header object stores it's data as chunks, and each chunk
                # can be independently encoded, so we'll need to check each
                # of them.
                chunks: List[Tuple[bytes, Optional[str]]] = []
                for bin, encoding in email.header.decode_header(h):
                    try:
                        bin.decode("utf8", "strict")
                    except UnicodeDecodeError:
                        # Enable mojibake.
                        encoding = "latin1"
                        valid_encoding = False
                    else:
                        encoding = "utf8"
                    chunks.append((bin, encoding))

                # Turn our chunks back into a Header object, then let that
                # Header object do the right thing to turn them into a
                # string for us.
                value.append(str(email.header.make_header(chunks)))
            # This is already a string, so just add it.
            else:
                value.append(h)

        # We've processed all of our values to get them into a list of str,
        # but we may have mojibake data, in which case this is an unparsed
        # field.
        if not valid_encoding:
            unparsed[name] = value
            continue

        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
        if raw_name is None:
            # This is a bit of a weird situation, we've encountered a key that
            # we don't know what it means, so we don't know whether it's meant
            # to be a list or not.
            #
            # Since we can't really tell one way or another, we'll just leave it
            # as a list, even though it may be a single item list, because that's
            # what makes the most sense for email headers.
            unparsed[name] = value
            continue

        # If this is one of our string fields, then we'll check to see if our
        # value is a list of a single item. If it is then we'll assume that
        # it was emitted as a single string, and unwrap the str from inside
        # the list.
        #
        # If it's any other kind of data, then we haven't the faintest clue
        # what we should parse it as, and we have to just add it to our list
        # of unparsed stuff.
        if raw_name in _STRING_FIELDS and len(value) == 1:
            raw[raw_name] = value[0]
        # If this is one of our list of string fields, then we can just assign
        # the value, since email *only* has strings, and our get_all() call
        # above ensures that this is a list.
        elif raw_name in _LIST_FIELDS:
            raw[raw_name] = value
        # Special Case: Keywords
        # The keywords field is implemented in the metadata spec as a str,
        # but it conceptually is a list of strings, and is serialized using
        # ", ".join(keywords), so we'll do some light data massaging to turn
        # this into what it logically is.
        elif raw_name == "keywords" and len(value) == 1:
            raw[raw_name] = _parse_keywords(value[0])
        # Special Case: Project-URL
        # The project urls is implemented in the metadata spec as a list of
        # specially-formatted strings that represent a key and a value, which
        # is fundamentally a mapping, however the email format doesn't support
        # mappings in a sane way, so it was crammed into a list of strings
        # instead.
        #
        # We will do a little light data massaging to turn this into a map as
        # it logically should be.
        elif raw_name == "project_urls":
            try:
                raw[raw_name] = _parse_project_urls(value)
            except KeyError:
                unparsed[name] = value
        # Nothing that we've done has managed to parse this, so it'll just
        # throw it in our unparseable data and move on.
        else:
            unparsed[name] = value

    # We need to support getting the Description from the message payload in
    # addition to getting it from the headers. This does mean, though, there
    # is the possibility of it being set both ways, in which case we put both
    # in 'unparsed' since we don't know which is right.
    try:
        payload = _get_payload(parsed, data)
    except ValueError:
        unparsed.setdefault("description", []).append(
            parsed.get_payload(decode=isinstance(data, bytes))
        )
    else:
        if payload:
            # Check to see if we've already got a description, if so then both
            # it, and this body move to unparseable.
            if "description" in raw:
                description_header = cast(str, raw.pop("description"))
                unparsed.setdefault("description", []).extend(
                    [description_header, payload]
                )
            elif "description" in unparsed:
                unparsed["description"].append(payload)
            else:
                raw["description"] = payload

    # We need to cast our `raw` to a metadata, because a TypedDict only supports
    # literal key names, but we're computing our key names on purpose, but the
    # way this function is implemented, our `TypedDict` can only have valid key
    # names.
    return cast(RawMetadata, raw), unparsed
+
+
# Sentinel distinguishing "no cached value" from a cached None.
_NOT_FOUND = object()


# Keep the two values in sync.
_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]

# Fields that must be present in valid metadata.
_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
+
+
class _Validator(Generic[T]):
    """Validate a metadata field.

    All _process_*() methods correspond to a core metadata field. The method is
    called with the field's raw value. If the raw value is valid it is returned
    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
    as appropriate).
    """

    # Attribute name on Metadata (set by __set_name__).
    name: str
    # Corresponding METADATA header name.
    raw_name: str
    # Metadata version the field was introduced in.
    added: _MetadataVersion

    def __init__(
        self,
        *,
        added: _MetadataVersion = "1.0",
    ) -> None:
        self.added = added

    def __set_name__(self, _owner: "Metadata", name: str) -> None:
        self.name = name
        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]

    def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
        # With Python 3.8, the caching can be replaced with functools.cached_property().
        # No need to check the cache as attribute lookup will resolve into the
        # instance's __dict__ before __get__ is called.
        cache = instance.__dict__
        value = instance._raw.get(self.name)

        # To make the _process_* methods easier, we'll check if the value is None
        # and if this field is NOT a required attribute, and if both of those
        # things are true, we'll skip the converter. This will mean that the
        # converters never have to deal with the None union.
        if self.name in _REQUIRED_ATTRS or value is not None:
            try:
                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
            except AttributeError:
                pass
            else:
                value = converter(value)

        # Cache the enriched value and drop the raw entry so it is only
        # processed once.
        cache[self.name] = value
        try:
            del instance._raw[self.name]  # type: ignore[misc]
        except KeyError:
            pass

        return cast(T, value)

    def _invalid_metadata(
        self, msg: str, cause: Optional[Exception] = None
    ) -> InvalidMetadata:
        """Build an InvalidMetadata for this field, substituting {field}."""
        exc = InvalidMetadata(
            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
        )
        exc.__cause__ = cause
        return exc

    def _process_metadata_version(self, value: str) -> _MetadataVersion:
        # Implicitly makes Metadata-Version required.
        if value not in _VALID_METADATA_VERSIONS:
            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
        return cast(_MetadataVersion, value)

    def _process_name(self, value: str) -> str:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        # Validate the name as a side-effect.
        try:
            utils.canonicalize_name(value, validate=True)
        except utils.InvalidName as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            )
        else:
            return value

    def _process_version(self, value: str) -> version_module.Version:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        try:
            return version_module.parse(value)
        except version_module.InvalidVersion as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            )

    def _process_summary(self, value: str) -> str:
        """Check the field contains no newlines."""
        if "\n" in value:
            raise self._invalid_metadata("{field} must be a single line")
        return value

    def _process_description_content_type(self, value: str) -> str:
        content_types = {"text/plain", "text/x-rst", "text/markdown"}
        message = email.message.EmailMessage()
        message["content-type"] = value

        content_type, parameters = (
            # Defaults to `text/plain` if parsing failed.
            message.get_content_type().lower(),
            message["content-type"].params,
        )
        # Check if content-type is valid or defaulted to `text/plain` and thus was
        # not parseable.
        if content_type not in content_types or content_type not in value.lower():
            raise self._invalid_metadata(
                f"{{field}} must be one of {list(content_types)}, not {value!r}"
            )

        charset = parameters.get("charset", "UTF-8")
        if charset != "UTF-8":
            # Bug fix: report the charset itself (e.g. 'latin1'), not
            # list(charset) which would explode the string into a list of
            # its characters.
            raise self._invalid_metadata(
                f"{{field}} can only specify the UTF-8 charset, not {charset!r}"
            )

        markdown_variants = {"GFM", "CommonMark"}
        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
        if content_type == "text/markdown" and variant not in markdown_variants:
            raise self._invalid_metadata(
                f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
                f"not {variant!r}",
            )
        return value

    def _process_dynamic(self, value: List[str]) -> List[str]:
        for dynamic_field in map(str.lower, value):
            if dynamic_field in {"name", "version", "metadata-version"}:
                # Bug fix: report the offending field, not the whole list.
                raise self._invalid_metadata(
                    f"{dynamic_field!r} is not allowed as a dynamic field"
                )
            elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
                raise self._invalid_metadata(
                    f"{dynamic_field!r} is not a valid dynamic field"
                )
        return list(map(str.lower, value))

    def _process_provides_extra(
        self,
        value: List[str],
    ) -> List[utils.NormalizedName]:
        normalized_names = []
        try:
            for name in value:
                normalized_names.append(utils.canonicalize_name(name, validate=True))
        except utils.InvalidName as exc:
            raise self._invalid_metadata(
                f"{name!r} is invalid for {{field}}", cause=exc
            )
        else:
            return normalized_names

    def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
        try:
            return specifiers.SpecifierSet(value)
        except specifiers.InvalidSpecifier as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            )

    def _process_requires_dist(
        self,
        value: List[str],
    ) -> List[requirements.Requirement]:
        reqs = []
        try:
            for req in value:
                reqs.append(requirements.Requirement(req))
        except requirements.InvalidRequirement as exc:
            raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
        else:
            return reqs
+
+
class Metadata:
    """Representation of distribution metadata.

    Compared to :class:`RawMetadata`, this class provides objects representing
    metadata fields instead of only using built-in types. Any invalid metadata
    will cause :exc:`InvalidMetadata` to be raised (with a
    :py:attr:`~BaseException.__cause__` attribute as appropriate).
    """

    _raw: RawMetadata

    @classmethod
    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
        """Create an instance from :class:`RawMetadata`.

        If *validate* is true, all metadata will be validated. All exceptions
        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
        """
        ins = cls()
        ins._raw = data.copy()  # Mutations occur due to caching enriched values.

        if validate:
            exceptions: List[Exception] = []
            try:
                metadata_version = ins.metadata_version
                metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
            except InvalidMetadata as metadata_version_exc:
                exceptions.append(metadata_version_exc)
                metadata_version = None

            # Make sure to check for the fields that are present, the required
            # fields (so their absence can be reported).
            fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
            # Remove fields that have already been checked.
            fields_to_check -= {"metadata_version"}

            for key in fields_to_check:
                try:
                    if metadata_version:
                        # Can't use getattr() as that triggers descriptor protocol which
                        # will fail due to no value for the instance argument.
                        try:
                            field_metadata_version = cls.__dict__[key].added
                        except KeyError:
                            exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
                            exceptions.append(exc)
                            continue
                        field_age = _VALID_METADATA_VERSIONS.index(
                            field_metadata_version
                        )
                        if field_age > metadata_age:
                            field = _RAW_TO_EMAIL_MAPPING[key]
                            # Bug fix: this message was previously a plain
                            # string literal, so the {field} placeholders were
                            # never substituted; use f-strings instead.
                            exc = InvalidMetadata(
                                field,
                                f"{field} introduced in metadata version "
                                f"{field_metadata_version}, not {metadata_version}",
                            )
                            exceptions.append(exc)
                            continue
                    getattr(ins, key)
                except InvalidMetadata as exc:
                    exceptions.append(exc)

            if exceptions:
                raise ExceptionGroup("invalid metadata", exceptions)

        return ins

    @classmethod
    def from_email(
        cls, data: Union[bytes, str], *, validate: bool = True
    ) -> "Metadata":
        """Parse metadata from email headers.

        If *validate* is true, the metadata will be validated. All exceptions
        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
        """
        raw, unparsed = parse_email(data)

        if validate:
            # Use typing.List for consistency with the rest of the module.
            exceptions: List[Exception] = []
            for unparsed_key in unparsed:
                if unparsed_key in _EMAIL_TO_RAW_MAPPING:
                    message = f"{unparsed_key!r} has invalid data"
                else:
                    message = f"unrecognized field: {unparsed_key!r}"
                exceptions.append(InvalidMetadata(unparsed_key, message))

            if exceptions:
                raise ExceptionGroup("unparsed", exceptions)

        try:
            return cls.from_raw(raw, validate=validate)
        except ExceptionGroup as exc_group:
            raise ExceptionGroup(
                "invalid or unparsed metadata", exc_group.exceptions
            ) from None

    metadata_version: _Validator[_MetadataVersion] = _Validator()
    """:external:ref:`core-metadata-metadata-version`
    (required; validated to be a valid metadata version)"""
    name: _Validator[str] = _Validator()
    """:external:ref:`core-metadata-name`
    (required; validated using :func:`~packaging.utils.canonicalize_name` and its
    *validate* parameter)"""
    version: _Validator[version_module.Version] = _Validator()
    """:external:ref:`core-metadata-version` (required)"""
    dynamic: _Validator[Optional[List[str]]] = _Validator(
        added="2.2",
    )
    """:external:ref:`core-metadata-dynamic`
    (validated against core metadata field names and lowercased)"""
    platforms: _Validator[Optional[List[str]]] = _Validator()
    """:external:ref:`core-metadata-platform`"""
    supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
    """:external:ref:`core-metadata-supported-platform`"""
    summary: _Validator[Optional[str]] = _Validator()
    """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
    description: _Validator[Optional[str]] = _Validator()  # TODO 2.1: can be in body
    """:external:ref:`core-metadata-description`"""
    description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
    """:external:ref:`core-metadata-description-content-type` (validated)"""
    keywords: _Validator[Optional[List[str]]] = _Validator()
    """:external:ref:`core-metadata-keywords`"""
    home_page: _Validator[Optional[str]] = _Validator()
    """:external:ref:`core-metadata-home-page`"""
    download_url: _Validator[Optional[str]] = _Validator(added="1.1")
    """:external:ref:`core-metadata-download-url`"""
    author: _Validator[Optional[str]] = _Validator()
    """:external:ref:`core-metadata-author`"""
    author_email: _Validator[Optional[str]] = _Validator()
    """:external:ref:`core-metadata-author-email`"""
    maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
    """:external:ref:`core-metadata-maintainer`"""
    maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
    """:external:ref:`core-metadata-maintainer-email`"""
    license: _Validator[Optional[str]] = _Validator()
    """:external:ref:`core-metadata-license`"""
    classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
    """:external:ref:`core-metadata-classifier`"""
    requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
        added="1.2"
    )
    """:external:ref:`core-metadata-requires-dist`"""
    requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
        added="1.2"
    )
    """:external:ref:`core-metadata-requires-python`"""
    # Because `Requires-External` allows for non-PEP 440 version specifiers, we
    # don't do any processing on the values.
    requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
    """:external:ref:`core-metadata-requires-external`"""
    project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
    """:external:ref:`core-metadata-project-url`"""
    # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
    # regardless of metadata version.
    provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
        added="2.1",
    )
    """:external:ref:`core-metadata-provides-extra`"""
    provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
    """:external:ref:`core-metadata-provides-dist`"""
    obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
    """:external:ref:`core-metadata-obsoletes-dist`"""
    requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
    """``Requires`` (deprecated)"""
    provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
    """``Provides`` (deprecated)"""
    obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
    """``Obsoletes`` (deprecated)"""
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/py.typed b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/py.typed
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/requirements.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/requirements.py
new file mode 100644
index 0000000000..bdc43a7e98
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/requirements.py
@@ -0,0 +1,90 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from typing import Any, Iterator, Optional, Set
+
+from ._parser import parse_requirement as _parse_requirement
+from ._tokenizer import ParserSyntaxError
+from .markers import Marker, _normalize_extra_values
+from .specifiers import SpecifierSet
+from .utils import canonicalize_name
+
+
class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.

    Subclasses :exc:`ValueError` so callers may catch either type.
    """
+
+
class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        """Parse *requirement_string*; raise InvalidRequirement on bad input."""
        try:
            parsed = _parse_requirement(requirement_string)
        except ParserSyntaxError as e:
            # Re-raise the low-level parser error as the public exception type.
            raise InvalidRequirement(str(e)) from e

        self.name: str = parsed.name
        # Normalize an empty-string URL to None.
        self.url: Optional[str] = parsed.url or None
        self.extras: Set[str] = set(parsed.extras or [])
        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
        self.marker: Optional[Marker] = None
        if parsed.marker is not None:
            # Build the Marker without re-parsing: bypass Marker.__init__ and
            # install the already-parsed (and extra-normalized) marker data.
            self.marker = Marker.__new__(Marker)
            self.marker._markers = _normalize_extra_values(parsed.marker)

    def _iter_parts(self, name: str) -> Iterator[str]:
        """Yield the string pieces of this requirement, starting from *name*.

        Parameterized on *name* so __str__ can use the original name while
        __hash__ uses the canonicalized one.
        """
        yield name

        if self.extras:
            formatted_extras = ",".join(sorted(self.extras))
            yield f"[{formatted_extras}]"

        if self.specifier:
            yield str(self.specifier)

        if self.url:
            yield f"@ {self.url}"
            if self.marker:
                # Keep a space between the URL and the marker separator so the
                # two parts remain distinguishable when joined.
                yield " "

        if self.marker:
            yield f"; {self.marker}"

    def __str__(self) -> str:
        return "".join(self._iter_parts(self.name))

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"

    def __hash__(self) -> int:
        # Hash on the canonicalized name so equal requirements (per __eq__,
        # which canonicalizes names) hash equally.
        return hash(
            (
                self.__class__.__name__,
                *self._iter_parts(canonicalize_name(self.name)),
            )
        )

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Requirement):
            return NotImplemented

        return (
            canonicalize_name(self.name) == canonicalize_name(other.name)
            and self.extras == other.extras
            and self.specifier == other.specifier
            and self.url == other.url
            and self.marker == other.marker
        )
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/specifiers.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/specifiers.py
new file mode 100644
index 0000000000..2d015bab59
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/specifiers.py
@@ -0,0 +1,1017 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+ from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
+ from packaging.version import Version
+"""
+
+import abc
+import itertools
+import re
+from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
+
+from .utils import canonicalize_version
+from .version import Version
+
+UnparsedVersion = Union[Version, str]
+UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
+CallableOperator = Callable[[Version, str], bool]
+
+
+def _coerce_version(version: UnparsedVersion) -> Version:
+    """Return *version* as a :class:`Version`, parsing it if it is a str."""
+    if not isinstance(version, Version):
+        version = Version(version)
+    return version
+
+
+class InvalidSpecifier(ValueError):
+    """
+    Raised when attempting to create a :class:`Specifier` with a specifier
+    string that is invalid.
+
+    >>> Specifier("lolwat")
+    Traceback (most recent call last):
+        ...
+    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
+    """
+
+    # No extra state: the exception message carries the invalid text.
+
+
+class BaseSpecifier(metaclass=abc.ABCMeta):
+    """Abstract interface shared by :class:`Specifier` and :class:`SpecifierSet`."""
+
+    @abc.abstractmethod
+    def __str__(self) -> str:
+        """
+        Returns the str representation of this Specifier-like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self) -> int:
+        """
+        Returns a hash value for this Specifier-like object.
+        """
+
+    @abc.abstractmethod
+    def __eq__(self, other: object) -> bool:
+        """
+        Returns a boolean representing whether or not the two Specifier-like
+        objects are equal.
+
+        :param other: The other object to check against.
+        """
+
+    @property
+    @abc.abstractmethod
+    def prereleases(self) -> Optional[bool]:
+        """Whether or not pre-releases as a whole are allowed.
+
+        This can be set to either ``True`` or ``False`` to explicitly enable or disable
+        prereleases or it can be set to ``None`` (the default) to use default semantics.
+        """
+
+    # NOTE: the setter is intentionally not abstract; subclasses inherit the
+    # docstring but must still provide a concrete setter to be usable.
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        """Setter for :attr:`prereleases`.
+
+        :param value: The value to set.
+        """
+
+    @abc.abstractmethod
+    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
+        """
+        Determines if the given item is contained within this specifier.
+        """
+
+    @abc.abstractmethod
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """
+        Takes an iterable of items and filters them so that only items which
+        are contained within this specifier are allowed in it.
+        """
+
+
+class Specifier(BaseSpecifier):
+    """This class abstracts handling of version specifiers.
+
+    .. tip::
+
+        It is generally not required to instantiate this manually. You should instead
+        prefer to work with :class:`SpecifierSet` instead, which can parse
+        comma-separated version specifiers (which is what package metadata contains).
+    """
+
+    _operator_regex_str = r"""
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        """
+    _version_regex_str = r"""
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \s*
+                [^\s;)]*  # The arbitrary version can be just about anything,
+                          # we match everything except for whitespace, a
+                          # semi-colon for marker support, and a closing paren
+                          # since versions can be enclosed in them.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+
+                # You cannot use a wild card and a pre-release, post-release, a dev or
+                # local version together so group them with a | and make them optional.
+                (?:
+                    \.\*  # Wild card syntax of .*
+                    |
+                    (?:                                  # pre release
+                        [-_\.]?
+                        (alpha|beta|preview|pre|a|b|c|rc)
+                        [-_\.]?
+                        [0-9]*
+                    )?
+                    (?:                                  # post release
+                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                    )?
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a sub set of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (alpha|beta|preview|pre|a|b|c|rc)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+        )
+        """
+
+    _regex = re.compile(
+        r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    # Maps each operator token to the suffix of its _compare_* method.
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
+        """Initialize a Specifier instance.
+
+        :param spec:
+            The string representation of a specifier which will be parsed and
+            normalized before use.
+        :param prereleases:
+            This tells the specifier if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+        :raises InvalidSpecifier:
+            If the given specifier is invalid (i.e. bad syntax).
+        """
+        match = self._regex.search(spec)
+        if not match:
+            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
+
+        self._spec: Tuple[str, str] = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+    # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
+    @property  # type: ignore[override]
+    def prereleases(self) -> bool:
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and if they are if they are including an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*, if it does we
+            # want to remove it before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
+            if Version(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    @property
+    def operator(self) -> str:
+        """The operator of this specifier.
+
+        >>> Specifier("==1.2.3").operator
+        '=='
+        """
+        return self._spec[0]
+
+    @property
+    def version(self) -> str:
+        """The version of this specifier.
+
+        >>> Specifier("==1.2.3").version
+        '1.2.3'
+        """
+        return self._spec[1]
+
+    def __repr__(self) -> str:
+        """A representation of the Specifier that shows all internal state.
+
+        >>> Specifier('>=1.0.0')
+        <Specifier('>=1.0.0')>
+        >>> Specifier('>=1.0.0', prereleases=False)
+        <Specifier('>=1.0.0', prereleases=False)>
+        >>> Specifier('>=1.0.0', prereleases=True)
+        <Specifier('>=1.0.0', prereleases=True)>
+        """
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the Specifier that can be round-tripped.
+
+        >>> str(Specifier('>=1.0.0'))
+        '>=1.0.0'
+        >>> str(Specifier('>=1.0.0', prereleases=False))
+        '>=1.0.0'
+        """
+        return "{}{}".format(*self._spec)
+
+    @property
+    def _canonical_spec(self) -> Tuple[str, str]:
+        # Canonical form used for hashing/equality; trailing zeros are kept
+        # for ~= because they change the meaning of the compatible release.
+        canonical_version = canonicalize_version(
+            self._spec[1],
+            strip_trailing_zero=(self._spec[0] != "~="),
+        )
+        return self._spec[0], canonical_version
+
+    def __hash__(self) -> int:
+        return hash(self._canonical_spec)
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two Specifier-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
+        True
+        >>> (Specifier("==1.2.3", prereleases=False) ==
+        ...  Specifier("==1.2.3", prereleases=True))
+        True
+        >>> Specifier("==1.2.3") == "==1.2.3"
+        True
+        >>> Specifier("==1.2.3") == Specifier("==1.2.4")
+        False
+        >>> Specifier("==1.2.3") == Specifier("~=1.2.3")
+        False
+        """
+        if isinstance(other, str):
+            try:
+                other = self.__class__(str(other))
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._canonical_spec == other._canonical_spec
+
+    def _get_operator(self, op: str) -> CallableOperator:
+        # Dispatch an operator token ("==", "~=", ...) to its _compare_* method.
+        operator_callable: CallableOperator = getattr(
+            self, f"_compare_{self._operators[op]}"
+        )
+        return operator_callable
+
+    def _compare_compatible(self, prospective: Version, spec: str) -> bool:
+
+        # Compatible releases have an equivalent combination of >= and ==. That
+        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+        # implement this in terms of the other specifiers instead of
+        # implementing it ourselves. The only thing we need to do is construct
+        # the other specifiers.
+
+        # We want everything but the last item in the version, but we want to
+        # ignore suffix segments.
+        prefix = _version_join(
+            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
+        )
+
+        # Add the prefix notation to the end of our string
+        prefix += ".*"
+
+        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
+            prospective, prefix
+        )
+
+    def _compare_equal(self, prospective: Version, spec: str) -> bool:
+
+        # We need special logic to handle prefix matching
+        if spec.endswith(".*"):
+            # In the case of prefix matching we want to ignore local segment.
+            normalized_prospective = canonicalize_version(
+                prospective.public, strip_trailing_zero=False
+            )
+            # Get the normalized version string ignoring the trailing .*
+            normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
+            # Split the spec out by bangs and dots, and pretend that there is
+            # an implicit dot in between a release segment and a pre-release segment.
+            split_spec = _version_split(normalized_spec)
+
+            # Split the prospective version out by bangs and dots, and pretend
+            # that there is an implicit dot in between a release segment and
+            # a pre-release segment.
+            split_prospective = _version_split(normalized_prospective)
+
+            # 0-pad the prospective version before shortening it to get the correct
+            # shortened version.
+            padded_prospective, _ = _pad_version(split_prospective, split_spec)
+
+            # Shorten the prospective version to be the same length as the spec
+            # so that we can determine if the specifier is a prefix of the
+            # prospective version or not.
+            shortened_prospective = padded_prospective[: len(split_spec)]
+
+            return shortened_prospective == split_spec
+        else:
+            # Convert our spec string into a Version
+            spec_version = Version(spec)
+
+            # If the specifier does not have a local segment, then we want to
+            # act as if the prospective version also does not have a local
+            # segment.
+            if not spec_version.local:
+                prospective = Version(prospective.public)
+
+            return prospective == spec_version
+
+    def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
+        # != is simply the negation of ==, including its prefix-match logic.
+        return not self._compare_equal(prospective, spec)
+
+    def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
+
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) <= Version(spec)
+
+    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
+
+        # NB: Local version identifiers are NOT permitted in the version
+        # specifier, so local version labels can be universally removed from
+        # the prospective version.
+        return Version(prospective.public) >= Version(spec)
+
+    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
+
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1 should
+        # not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # less than the spec version *and* it's not a pre-release of the same
+        # version in the spec.
+        return True
+
+    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
+
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept
+        # post-release versions for the version mentioned in the specifier
+        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that prospective version is both
+        # greater than the spec version *and* it's not a pre-release of the
+        # same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+        # === is a plain case-insensitive string comparison, no PEP 440 semantics.
+        return str(prospective).lower() == str(spec).lower()
+
+    def __contains__(self, item: Union[str, Version]) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in Specifier(">=1.2.3")
+        True
+        >>> Version("1.2.3") in Specifier(">=1.2.3")
+        True
+        >>> "1.0.0" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3")
+        False
+        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self, item: UnparsedVersion, prereleases: Optional[bool] = None
+    ) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this Specifier. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+
+        >>> Specifier(">=1.2.3").contains("1.2.3")
+        True
+        >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
+        True
+        >>> Specifier(">=1.2.3").contains("1.0.0")
+        False
+        >>> Specifier(">=1.2.3").contains("1.3.0a1")
+        False
+        >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
+        True
+        >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
+        True
+        """
+
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version, this allows us to have a shortcut for
+        # "2.0" in Specifier(">=2")
+        normalized_item = _coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not, if we do not support prereleases then we can short circuit
+        # the logic if this version is a prerelease.
+        if normalized_item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        operator_callable: CallableOperator = self._get_operator(self.operator)
+        return operator_callable(normalized_item, self.version)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable, that match the specifier.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(Specifier().contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
+        ['1.2.3', '1.3', <Version('1.4')>]
+        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
+        ['1.5a1']
+        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        """
+
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = _coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later in case nothing
+                # else matches this specifier.
+                if parsed_version.is_prerelease and not (
+                    prereleases or self.prereleases
+                ):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+# Matches a release digit run immediately followed by a pre-release tag,
+# e.g. "2rc1" -> groups ("2", "rc1"); used by _version_split below.
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version: str) -> List[str]:
+    """Split version into components.
+
+    The split components are intended for version comparison. The logic does
+    not attempt to retain the original version string, so joining the
+    components back with :func:`_version_join` may not produce the original
+    version string.
+    """
+    result: List[str] = []
+
+    epoch, _, rest = version.rpartition("!")
+    # A missing epoch is recorded as "0" so all results start with an epoch.
+    result.append(epoch or "0")
+
+    for item in rest.split("."):
+        match = _prefix_regex.search(item)
+        if match:
+            # Split e.g. "2rc1" into "2" and "rc1" (implicit dot).
+            result.extend(match.groups())
+        else:
+            result.append(item)
+    return result
+
+
+def _version_join(components: List[str]) -> str:
+    """Join split version components into a version string.
+
+    This function assumes the input came from :func:`_version_split`, where the
+    first component must be the epoch (either empty or numeric), and all other
+    components numeric.
+    """
+    # The "!" epoch separator is always emitted; Version parsing accepts it.
+    epoch, *rest = components
+    return f"{epoch}!{'.'.join(rest)}"
+
+
+def _is_not_suffix(segment: str) -> bool:
+    """Return True if *segment* does not start a pre/post/dev suffix tag."""
+    return not any(
+        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
+    )
+
+
+def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
+    """0-pad the shorter release segment of two split versions.
+
+    Zeros are inserted after the leading digit-only (release) components of
+    the shorter side so both releases have equal length; the non-digit
+    (suffix) components are appended unchanged. Returns both as flat lists.
+    """
+    left_split, right_split = [], []
+
+    # Get the release segment of our versions
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+    # Get the rest of our versions
+    left_split.append(left[len(left_split[0]) :])
+    right_split.append(right[len(right_split[0]) :])
+
+    # Insert our padding
+    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
+    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
+
+    return (
+        list(itertools.chain.from_iterable(left_split)),
+        list(itertools.chain.from_iterable(right_split)),
+    )
+
+
+class SpecifierSet(BaseSpecifier):
+    """This class abstracts handling of a set of version specifiers.
+
+    It can be passed a single specifier (``>=3.0``), a comma-separated list of
+    specifiers (``>=3.0,!=3.1``), or no specifier at all.
+    """
+
+    def __init__(
+        self, specifiers: str = "", prereleases: Optional[bool] = None
+    ) -> None:
+        """Initialize a SpecifierSet instance.
+
+        :param specifiers:
+            The string representation of a specifier or a comma-separated list of
+            specifiers which will be parsed and normalized before use.
+        :param prereleases:
+            This tells the SpecifierSet if it should accept prerelease versions if
+            applicable or not. The default of ``None`` will autodetect it from the
+            given specifiers.
+
+        :raises InvalidSpecifier:
+            If the given ``specifiers`` are not parseable, then this exception
+            will be raised.
+        """
+
+        # Split on `,` to break each individual specifier into its own item, and
+        # strip each item to remove leading/trailing whitespace.
+        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+        # Make each individual specifier a Specifier and save in a frozen set for later.
+        self._specs = frozenset(map(Specifier, split_specifiers))
+
+        # Store our prereleases value so we can use it later to determine if
+        # we accept prereleases or not.
+        self._prereleases = prereleases
+
+    @property
+    def prereleases(self) -> Optional[bool]:
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # If we don't have any specifiers, and we don't have a forced value,
+        # then we'll just return None since we don't know if this should have
+        # pre-releases or not.
+        if not self._specs:
+            return None
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value: bool) -> None:
+        self._prereleases = value
+
+    def __repr__(self) -> str:
+        """A representation of the specifier set that shows all internal state.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> SpecifierSet('>=1.0.0,!=2.0.0')
+        <SpecifierSet('!=2.0.0,>=1.0.0')>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
+        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
+        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
+        """
+        pre = (
+            f", prereleases={self.prereleases!r}"
+            if self._prereleases is not None
+            else ""
+        )
+
+        return f"<SpecifierSet({str(self)!r}{pre})>"
+
+    def __str__(self) -> str:
+        """A string representation of the specifier set that can be round-tripped.
+
+        Note that the ordering of the individual specifiers within the set may not
+        match the input string.
+
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
+        '!=1.0.1,>=1.0.0'
+        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
+        '!=1.0.1,>=1.0.0'
+        """
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self) -> int:
+        return hash(self._specs)
+
+    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
+        """Return a SpecifierSet which is a combination of the two sets.
+
+        :param other: The other object to combine with.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
+        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
+        """
+        if isinstance(other, str):
+            other = SpecifierSet(other)
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        specifier = SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+
+        # Prerelease handling: inherit whichever side has an explicit value;
+        # conflicting explicit values cannot be reconciled and raise.
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine SpecifierSets with True and False prerelease "
+                "overrides."
+            )
+
+        return specifier
+
+    def __eq__(self, other: object) -> bool:
+        """Whether or not the two SpecifierSet-like objects are equal.
+
+        :param other: The other object to check against.
+
+        The value of :attr:`prereleases` is ignored.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
+        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
+        False
+        """
+        if isinstance(other, (str, Specifier)):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs == other._specs
+
+    def __len__(self) -> int:
+        """Returns the number of specifiers in this specifier set."""
+        return len(self._specs)
+
+    def __iter__(self) -> Iterator[Specifier]:
+        """
+        Returns an iterator over all the underlying :class:`Specifier` instances
+        in this specifier set.
+
+        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
+        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
+        """
+        return iter(self._specs)
+
+    def __contains__(self, item: UnparsedVersion) -> bool:
+        """Return whether or not the item is contained in this specifier.
+
+        :param item: The item to check for.
+
+        This is used for the ``in`` operator and behaves the same as
+        :meth:`contains` with no ``prereleases`` argument passed.
+
+        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
+        True
+        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
+        False
+        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
+        True
+        """
+        return self.contains(item)
+
+    def contains(
+        self,
+        item: UnparsedVersion,
+        prereleases: Optional[bool] = None,
+        installed: Optional[bool] = None,
+    ) -> bool:
+        """Return whether or not the item is contained in this SpecifierSet.
+
+        :param item:
+            The item to check for, which can be a version string or a
+            :class:`Version` instance.
+        :param prereleases:
+            Whether or not to match prereleases with this SpecifierSet. If set to
+            ``None`` (the default), it uses :attr:`prereleases` to determine
+            whether or not prereleases are allowed.
+        :param installed:
+            If truthy, a prerelease *item* is compared by its base version
+            (see the note in the body) — presumably for checking
+            already-installed distributions; confirm against callers.
+
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
+        False
+        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
+        True
+        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
+        True
+        """
+        # Ensure that our item is a Version instance.
+        if not isinstance(item, Version):
+            item = Version(item)
+
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items supports them. If none of them do
+        # and this item is a pre-release then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        # like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
+        if not prereleases and item.is_prerelease:
+            return False
+
+        # NOTE(review): with installed=True a prerelease item is reduced to
+        # its base version before the comparison below.
+        if installed and item.is_prerelease:
+            item = Version(item.base_version)
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        # will always return True, this is an explicit design decision.
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+    def filter(
+        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
+    ) -> Iterator[UnparsedVersionVar]:
+        """Filter items in the given iterable, that match the specifiers in this set.
+
+        :param iterable:
+            An iterable that can contain version strings and :class:`Version` instances.
+            The items in the iterable will be filtered according to the specifier.
+        :param prereleases:
+            Whether or not to allow prereleases in the returned iterator. If set to
+            ``None`` (the default), it will intelligently decide whether to allow
+            prereleases or not (based on the :attr:`prereleases` attribute, and
+            whether the only versions matching are prereleases).
+
+        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
+        because it implements the rule from :pep:`440` that a prerelease item
+        SHOULD be accepted if no other versions match the given specifier.
+
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
+        ['1.3', <Version('1.4')>]
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
+        []
+        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+
+        An "empty" SpecifierSet will filter items based on the presence of prerelease
+        versions in the set.
+
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
+        ['1.3']
+        >>> list(SpecifierSet("").filter(["1.5a1"]))
+        ['1.5a1']
+        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
+        ['1.3', '1.5a1']
+        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
+        ['1.3', '1.5a1']
+        """
+        # Determine if we're forcing a prerelease or not, if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one, this will act as a logical AND amongst
+        # each specifier.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iter(iterable)
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases.
+        else:
+            filtered: List[UnparsedVersionVar] = []
+            found_prereleases: List[UnparsedVersionVar] = []
+
+            for item in iterable:
+                parsed_version = _coerce_version(item)
+
+                # Store any item which is a pre-release for later unless we've
+                # already found a final version or we are accepting prereleases
+                if parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+
+            # If we've found no items except for pre-releases, then we'll go
+            # ahead and use the pre-releases
+            if not filtered and found_prereleases and prereleases is None:
+                return iter(found_prereleases)
+
+            return iter(filtered)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/tags.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/tags.py
new file mode 100644
index 0000000000..89f1926137
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/tags.py
@@ -0,0 +1,571 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import logging
+import platform
+import re
+import struct
+import subprocess
+import sys
+import sysconfig
+from importlib.machinery import EXTENSION_SUFFIXES
+from typing import (
+ Dict,
+ FrozenSet,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Sequence,
+ Tuple,
+ Union,
+ cast,
+)
+
+from . import _manylinux, _musllinux
+
logger = logging.getLogger(__name__)

# A Python version as a sequence of ints, e.g. (3, 11); typically only the
# first two items are used.
PythonVersion = Sequence[int]
# A macOS version as a (major, minor) pair, e.g. (11, 0).
MacVersion = Tuple[int, int]

# Maps sys.implementation.name to the short interpreter abbreviation used in
# wheel tags.
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


# True when the running interpreter uses 32-bit pointers
# (struct.calcsize("P") is the pointer size in bytes).
_32_BIT_INTERPRETER = struct.calcsize("P") == 4
+
+
class Tag:
    """
    A single wheel tag triple (interpreter, abi, platform).

    Instances are immutable value objects: they support equality checking and
    hashing, so they can be stored in sets and used as dict keys.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # Tags are hashed extremely often (e.g. set.isdisjoint over large
        # Set[Tag] collections while scanning package links), so the hash is
        # computed once here instead of on every __hash__ call.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented
        # Compare the cached hashes first: a mismatch rules out equality
        # without touching the three string fields.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return "-".join((self._interpreter, self._abi, self._platform))

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
+
+
def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Parse a (possibly compressed) tag string such as ``py3-none-any``.

    Within each of the three dash-separated components, '.' separates
    alternatives, so a single compressed tag string may expand to several
    Tag instances — hence the frozenset return type.
    """
    interpreters, abis, platforms = tag.split("-")
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
+
+
+def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
+ value: Union[int, str, None] = sysconfig.get_config_var(name)
+ if value is None and warn:
+ logger.debug(
+ "Config variable '%s' is unset, Python ABI tag may be incorrect", name
+ )
+ return value
+
+
+def _normalize_string(string: str) -> str:
+ return string.replace(".", "_").replace("-", "_").replace(" ", "_")
+
+
+def _is_threaded_cpython(abis: List[str]) -> bool:
+ """
+ Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
+
+ The threaded builds are indicated by a "t" in the abiflags.
+ """
+ if len(abis) == 0:
+ return False
+ # expect e.g., cp313
+ m = re.match(r"cp\d+(.*)", abis[0])
+ if not m:
+ return False
+ abiflags = m.group(1)
+ return "t" in abiflags
+
+
def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
    """
    Determine whether this Python version supports the abi3 stable ABI.

    PEP 384 (abi3) first shipped in CPython 3.2; threaded (`--disable-gil`)
    builds do not support abi3 at all.
    """
    if threading:
        return False
    # Major-only versions (e.g. (3,)) cannot be compared meaningfully.
    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+
+
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    """Return the ABI tags for this CPython build, most specific first.

    The primary tag has the shape cp<ver><threading><debug><pymalloc><ucs4>
    and is inserted at position 0. On debug builds of CPython >= 3.8 the
    plain cp<ver> tag is appended as well, since such builds can also load
    "normal" extension modules.
    """
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    threading = debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    # Free-threaded builds (PEP 703) exist from CPython 3.13 onwards.
    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
        threading = "t"
    # The pymalloc ("m") and wide-unicode ("u") flags only exist on older
    # CPython versions.
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}{threading}")
    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
    return abis
+
+
def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.

    :param warn: forwarded to the sysconfig lookups done by _cpython_abis.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    # Materialize once: the platform list is iterated for every ABI.
    platforms = list(platforms or platform_tags())
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)

    # abi3 only applies to non-threaded builds of CPython >= 3.2.
    threading = _is_threaded_cpython(abis)
    use_abi3 = _abi3_applies(python_version, threading)
    if use_abi3:
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if use_abi3:
        # abi3 wheels built for older minor versions (down to 3.2) still load.
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)
+
+
def _generic_abi() -> List[str]:
    """
    Return the ABI tag based on EXT_SUFFIX.

    :raises SystemError: if sysconfig's EXT_SUFFIX is missing or malformed
        (not a string starting with '.').
    """
    # The following are examples of `EXT_SUFFIX`.
    # We want to keep the parts which are related to the ABI and remove the
    # parts which are related to the platform:
    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
    # - mac:     '.cpython-310-darwin.so'           => cp310
    # - win:     '.cp310-win_amd64.pyd'             => cp310
    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
    #                                               => graalpy_38_native

    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    parts = ext_suffix.split(".")
    if len(parts) < 3:
        # CPython3.7 and earlier uses ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    # parts[0] is the empty string left of the leading dot; parts[1] is the
    # SOABI portion and parts[-1] the shared-library extension.
    soabi = parts[1]
    if soabi.startswith("cpython"):
        # non-windows
        abi = "cp" + soabi.split("-")[1]
    elif soabi.startswith("cp"):
        # windows
        abi = soabi.split("-")[0]
    elif soabi.startswith("pypy"):
        abi = "-".join(soabi.split("-")[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(soabi.split("-")[:3])
    elif soabi:
        # pyston, ironpython, others?
        abi = soabi
    else:
        return []
    return [_normalize_string(abi)]
+
+
def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.

    :param warn: forwarded to the sysconfig lookup done by interpreter_version.
    """
    if not interpreter:
        # Default interpreter tag, e.g. "cp311": short name + nodot version.
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    else:
        abis = list(abis)
    platforms = list(platforms or platform_tags())
    # Every interpreter can at least load pure-ABI ("none") wheels, so make
    # sure that tag is always yielded (last, unless the caller positioned it).
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
+
+
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yield generic ``pyXY`` interpreter tags in descending order.

    The exact version comes first (when a minor version is known), then the
    major-only tag, then every earlier minor version of the same major,
    counting down to 0.
    """
    major = py_version[0]
    has_minor = len(py_version) > 1
    if has_minor:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{major}"
    if has_minor:
        for minor in range(py_version[1] - 1, -1, -1):
            yield f"py{_version_nodot((major, minor))}"
+
+
def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    # Materialize once: the platform list is iterated for every py* version.
    platforms = list(platforms or platform_tags())
    for version in _py_interpreter_range(python_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    for version in _py_interpreter_range(python_version):
        yield Tag(version, "none", "any")
+
+
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """Map a macOS CPU arch to the one a 32-bit interpreter effectively runs."""
    if not is_32bit:
        return arch
    # 32-bit interpreters on PowerPC hardware run as "ppc"; on everything
    # else they run as "i386".
    return "ppc" if arch.startswith("ppc") else "i386"
+
+
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
    """Return the wheel binary formats a macOS version/arch pair can run.

    The architecture itself always comes first, followed by the fat-binary
    formats that bundle it; an empty list means no binaries are supported for
    that version/arch combination.
    """
    # Per-architecture (minimum version, maximum version, extra fat formats).
    # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
    arch_rules = {
        "x86_64": ((10, 4), None, ["intel", "fat64", "fat32"]),
        "i386": ((10, 4), None, ["intel", "fat32", "fat"]),
        "ppc64": ((10, 4), (10, 5), ["fat64"]),
        "ppc": (None, (10, 6), ["fat32", "fat"]),
    }

    formats = [cpu_arch]
    if cpu_arch in arch_rules:
        minimum, maximum, extra = arch_rules[cpu_arch]
        if minimum is not None and version < minimum:
            return []
        if maximum is not None and version > maximum:
            return []
        formats.extend(extra)

    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")
    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")

    return formats
+
+
def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # When built against an older macOS SDK, Python will report macOS 10.16
            # instead of the real version. Re-query in a subprocess with
            # SYSTEM_VERSION_COMPAT disabled to get the true version.
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                text=True,
            ).stdout
            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    if arch is None:
        arch = _mac_arch(cpu_arch)

    if (10, 0) <= version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number.  The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number.   The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

    if version >= (11, 0):
        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )
+
+
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """Yield Linux platform tags: manylinux, musllinux, then plain linux_*."""
    linux = _normalize_string(sysconfig.get_platform())
    if not linux.startswith("linux_"):
        # we should never be here, just yield the sysconfig one and return
        yield linux
        return
    if is_32bit:
        # A 32-bit interpreter on a 64-bit kernel reports the kernel's
        # architecture; map it to the corresponding 32-bit userland arch.
        if linux == "linux_x86_64":
            linux = "linux_i686"
        elif linux == "linux_aarch64":
            linux = "linux_armv8l"
    _, arch = linux.split("_", 1)
    # armv8l is additionally treated as able to run armv7l wheels.
    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
    yield from _manylinux.platform_tags(archs)
    yield from _musllinux.platform_tags(archs)
    for arch in archs:
        yield f"linux_{arch}"
+
+
def _generic_platforms() -> Iterator[str]:
    # Fallback for non-macOS/non-Linux systems: the normalized sysconfig
    # platform string (e.g. "win_amd64").
    yield _normalize_string(sysconfig.get_platform())
+
+
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    # Dispatch on the OS; anything that is not macOS or Linux falls back to
    # the generic sysconfig-based tag.
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
+
+
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.

    Some implementations have a reserved, two-letter abbreviation which will
    be returned when appropriate.
    """
    name = sys.implementation.name
    # Fall back to the full implementation name when no short form is known.
    return INTERPRETER_SHORT_NAMES.get(name) or name
+
+
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter.

    :param warn: forwarded to the sysconfig lookup; logs when the config
        variable is unset.
    """
    # Prefer sysconfig's pre-computed value; fall back to sys.version_info.
    version = _get_config_var("py_version_nodot", warn=warn)
    if version:
        version = str(version)
    else:
        version = _version_nodot(sys.version_info[:2])
    return version
+
+
def _version_nodot(version: PythonVersion) -> str:
    """Join version components without separators, e.g. (3, 11) -> "311"."""
    return "".join(str(part) for part in version)
+
+
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """

    interp_name = interpreter_name()
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    # The trailing compatible (py*) tags carry an interpreter-specific
    # "<interp>-none-any" tag only for PyPy and CPython.
    if interp_name == "pp":
        interp = "pp3"
    elif interp_name == "cp":
        interp = "cp" + interpreter_version(warn=warn)
    else:
        interp = None
    yield from compatible_tags(interpreter=interp)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/utils.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/utils.py
new file mode 100644
index 0000000000..c2c2f75aa8
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/utils.py
@@ -0,0 +1,172 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+import re
+from typing import FrozenSet, NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version
+
# A wheel build tag: either empty, or a (build_number, build_suffix) pair.
BuildTag = Union[Tuple[()], Tuple[int, str]]
# A project name normalized by canonicalize_name().
NormalizedName = NewType("NormalizedName", str)
+
+
class InvalidName(ValueError):
    """
    An invalid distribution name; users should refer to the packaging user guide.
    """

    # Raised by canonicalize_name(..., validate=True).
+
+
class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found, users should refer to PEP 427.
    """

    # Raised by parse_wheel_filename().
+
+
class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found, users should refer to the packaging user guide.
    """

    # Raised by parse_sdist_filename().
+
+
# Core metadata spec for `Name`
_validate_regex = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
# PEP 503: runs of '-', '_' and '.' collapse to a single '-' when
# canonicalizing a name.
_canonicalize_regex = re.compile(r"[-_.]+")
# Matches names already in the fully normalized form produced by
# canonicalize_name (lowercase alphanumerics, single dashes).
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
    """Normalize a project name per PEP 503 (lowercase, -_. runs become -).

    :param validate: when True, first check *name* against the core-metadata
        ``Name`` rules and raise :class:`InvalidName` if it fails.
    """
    if validate and not _validate_regex.match(name):
        raise InvalidName(f"name is invalid: {name!r}")
    # This is taken from PEP 503.
    value = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, value)
+
+
def is_normalized_name(name: str) -> bool:
    """Return True if *name* is already in the canonical form produced by
    canonicalize_name()."""
    return _normalized_regex.match(name) is not None
+
+
def canonicalize_version(
    version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
    """
    Return a normalized string form of *version*.

    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment: trailing ``.0`` components
    are stripped unless *strip_trailing_zero* is False. Strings that are not
    valid PEP 440 versions are returned unchanged.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    release = ".".join(str(component) for component in parsed.release)
    if strip_trailing_zero:
        # NB: This strips trailing '.0's to normalize
        release = re.sub(r"(\.0)+$", "", release)

    pieces = []

    # Epoch
    if parsed.epoch != 0:
        pieces.append(f"{parsed.epoch}!")

    # Release segment
    pieces.append(release)

    # Pre-release
    if parsed.pre is not None:
        pieces.append("".join(str(component) for component in parsed.pre))

    # Post-release
    if parsed.post is not None:
        pieces.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        pieces.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        pieces.append(f"+{parsed.local}")

    return "".join(pieces)
+
+
def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    """Parse a PEP 427 wheel filename into (name, version, build tag, tags).

    :raises InvalidWheelFilename: when the extension, part count, project
        name, version, or build number is malformed. The offending filename
        is included in the message.
    """
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    # Split off name/version/(build) but keep the compressed tag set intact.
    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name.
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)

    try:
        version = Version(parts[1])
    except InvalidVersion as e:
        raise InvalidWheelFilename(
            f"Invalid wheel filename (invalid version): {filename}"
        ) from e

    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
+
+
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    """Parse an sdist filename (``<name>-<version>.tar.gz`` or ``.zip``).

    :return: the canonicalized project name and the parsed version.
    :raises InvalidSdistFilename: when the extension, name, or version is
        malformed. The offending filename is included in the message.
    """
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)

    try:
        version = Version(version_part)
    except InvalidVersion as e:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (invalid version): {filename}"
        ) from e

    return (name, version)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/version.py b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/version.py
new file mode 100644
index 0000000000..5faab9bd0d
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_vendor/packaging/version.py
@@ -0,0 +1,563 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+ from packaging.version import parse, Version
+"""
+
+import itertools
+import re
+from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]

# A parsed local version segment: a tuple of int and str components.
LocalType = Tuple[Union[int, str], ...]

# Types used inside the comparison key. The Infinity/NegativeInfinity
# sentinels come from ._structures and may stand in for pre/post/dev/local
# segments in the key.
CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
CmpLocalType = Union[
    NegativeInfinityType,
    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
]
# The full comparison key shape: (epoch, release, pre, post, dev, local).
CmpKey = Tuple[
    int,
    Tuple[int, ...],
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpLocalType,
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
+
+
class _Version(NamedTuple):
    # The structured, parsed fields of a version string; populated by
    # Version.__init__ from the VERSION_PATTERN match groups.
    epoch: int
    release: Tuple[int, ...]
    dev: Optional[Tuple[str, int]]
    pre: Optional[Tuple[str, int]]
    post: Optional[Tuple[str, int]]
    local: Optional[LocalType]
+
+
def parse(version: str) -> "Version":
    """Parse the given version string.

    >>> parse('1.0.dev1')
    <Version('1.0.dev1')>

    :param version: The version string to parse.
    :raises InvalidVersion: When the version string is not a valid version.
    """
    # Thin wrapper: all parsing lives in Version.__init__.
    return Version(version)
+
+
class InvalidVersion(ValueError):
    """Raised when a version string is not a valid version.

    >>> Version("invalid")
    Traceback (most recent call last):
        ...
    packaging.version.InvalidVersion: Invalid version: 'invalid'
    """

    # No extra behavior: the exception type itself is the API.
+
+
class _BaseVersion:
    """Mixin that provides ordering, equality, and hashing based on a
    precomputed ``_key`` tuple, which subclasses must set."""

    _key: Tuple[Any, ...]

    def __hash__(self) -> int:
        return hash(self._key)

    # Please keep the duplicated `isinstance` check
    # in the six comparisons hereunder
    # unless you find a way to avoid adding overhead function calls.
    def __lt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key < other._key

    def __le__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key <= other._key

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key == other._key

    def __ge__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key >= other._key

    def __gt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key > other._key

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key != other._key
+
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

# Public, documented alias of the private pattern above.
VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""
+
+
class Version(_BaseVersion):
    """This class abstracts handling of a project's versions.

    A :class:`Version` instance is comparison aware and can be compared and
    sorted using the standard Python interfaces.

    >>> v1 = Version("1.0a5")
    >>> v2 = Version("1.0")
    >>> v1
    <Version('1.0a5')>
    >>> v2
    <Version('1.0')>
    >>> v1 < v2
    True
    >>> v1 == v2
    False
    >>> v1 > v2
    False
    >>> v1 >= v2
    False
    >>> v1 <= v2
    True
    """

    # Anchored form of VERSION_PATTERN: the entire string (modulo surrounding
    # whitespace) must be a valid version.
    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
    _key: CmpKey

    def __init__(self, version: str) -> None:
        """Initialize a Version object.

        :param version:
            The string representation of a version which will be parsed and normalized
            before use.
        :raises InvalidVersion:
            If the ``version`` does not conform to PEP 440 in any way then this
            exception will be raised.
        """

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        """A representation of the Version that shows all internal state.

        >>> Version('1.0.0')
        <Version('1.0.0')>
        """
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """A string representation of the version that can be rounded-tripped.

        >>> str(Version("1.0a5"))
        '1.0a5'
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        """The epoch of the version.

        >>> Version("2.0.0").epoch
        0
        >>> Version("1!2.0.0").epoch
        1
        """
        return self._version.epoch

    @property
    def release(self) -> Tuple[int, ...]:
        """The components of the "release" segment of the version.

        >>> Version("1.2.3").release
        (1, 2, 3)
        >>> Version("2.0.0").release
        (2, 0, 0)
        >>> Version("1!2.0.0.post0").release
        (2, 0, 0)

        Includes trailing zeroes but not the epoch or any pre-release / development /
        post-release suffixes.
        """
        return self._version.release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        """The pre-release segment of the version.

        >>> print(Version("1.2.3").pre)
        None
        >>> Version("1.2.3a1").pre
        ('a', 1)
        >>> Version("1.2.3b1").pre
        ('b', 1)
        >>> Version("1.2.3rc1").pre
        ('rc', 1)
        """
        return self._version.pre

    @property
    def post(self) -> Optional[int]:
        """The post-release number of the version.

        >>> print(Version("1.2.3").post)
        None
        >>> Version("1.2.3.post1").post
        1
        """
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        """The development number of the version.

        >>> print(Version("1.2.3").dev)
        None
        >>> Version("1.2.3.dev1").dev
        1
        """
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        """The local version segment of the version.

        >>> print(Version("1.2.3").local)
        None
        >>> Version("1.2.3+abc").local
        'abc'
        """
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        """The public portion of the version.

        >>> Version("1.2.3").public
        '1.2.3'
        >>> Version("1.2.3+abc").public
        '1.2.3'
        >>> Version("1.2.3+abc.dev1").public
        '1.2.3'
        """
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        """The "base version" of the version.

        >>> Version("1.2.3").base_version
        '1.2.3'
        >>> Version("1.2.3+abc").base_version
        '1.2.3'
        >>> Version("1!1.2.3+abc.dev1").base_version
        '1!1.2.3'

        The "base version" is the public version of the project without any pre or post
        release markers.
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        """Whether this version is a pre-release.

        >>> Version("1.2.3").is_prerelease
        False
        >>> Version("1.2.3a1").is_prerelease
        True
        >>> Version("1.2.3b1").is_prerelease
        True
        >>> Version("1.2.3rc1").is_prerelease
        True
        >>> Version("1.2.3dev1").is_prerelease
        True
        """
        # Note: dev releases count as pre-releases too.
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        """Whether this version is a post-release.

        >>> Version("1.2.3").is_postrelease
        False
        >>> Version("1.2.3.post1").is_postrelease
        True
        """
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        """Whether this version is a development release.

        >>> Version("1.2.3").is_devrelease
        False
        >>> Version("1.2.3.dev1").is_devrelease
        True
        """
        return self.dev is not None

    @property
    def major(self) -> int:
        """The first item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").major
        1
        """
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        """The second item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").minor
        2
        >>> Version("1").minor
        0
        """
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        """The third item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").micro
        3
        >>> Version("1").micro
        0
        """
        return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+ letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
+) -> Optional[Tuple[str, int]]:
+
+ if letter:
+ # We consider there to be an implicit 0 in a pre-release if there is
+ # not a numeral associated with it.
+ if number is None:
+ number = 0
+
+ # We normalize any letters to their lower case form
+ letter = letter.lower()
+
+ # We consider some words to be alternate spellings of other words and
+ # in those cases we want to normalize the spellings to our preferred
+ # spelling.
+ if letter == "alpha":
+ letter = "a"
+ elif letter == "beta":
+ letter = "b"
+ elif letter in ["c", "pre", "preview"]:
+ letter = "rc"
+ elif letter in ["rev", "r"]:
+ letter = "post"
+
+ return letter, int(number)
+ if not letter and number:
+ # We assume if we are given a number, but we are not given a letter
+ # then this is using the implicit post release syntax (e.g. 1.0-1)
+ letter = "post"
+
+ return letter, int(number)
+
+ return None
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
+ """
+ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+ """
+ if local is not None:
+ return tuple(
+ part.lower() if not part.isdigit() else int(part)
+ for part in _local_version_separators.split(local)
+ )
+ return None
+
+
+def _cmpkey(
+ epoch: int,
+ release: Tuple[int, ...],
+ pre: Optional[Tuple[str, int]],
+ post: Optional[Tuple[str, int]],
+ dev: Optional[Tuple[str, int]],
+ local: Optional[LocalType],
+) -> CmpKey:
+
+ # When we compare a release version, we want to compare it with all of the
+ # trailing zeros removed. So we'll use a reverse the list, drop all the now
+ # leading zeros until we come to something non zero, then take the rest
+ # re-reverse it back into the correct order and make it a tuple and use
+ # that for our sorting key.
+ _release = tuple(
+ reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+ )
+
+ # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+ # We'll do this by abusing the pre segment, but we _only_ want to do this
+ # if there is not a pre or a post segment. If we have one of those then
+ # the normal sorting rules will handle this case correctly.
+ if pre is None and post is None and dev is not None:
+ _pre: CmpPrePostDevType = NegativeInfinity
+ # Versions without a pre-release (except as noted above) should sort after
+ # those with one.
+ elif pre is None:
+ _pre = Infinity
+ else:
+ _pre = pre
+
+ # Versions without a post segment should sort before those with one.
+ if post is None:
+ _post: CmpPrePostDevType = NegativeInfinity
+
+ else:
+ _post = post
+
+ # Versions without a development segment should sort after those with one.
+ if dev is None:
+ _dev: CmpPrePostDevType = Infinity
+
+ else:
+ _dev = dev
+
+ if local is None:
+ # Versions without a local segment should sort before those with one.
+ _local: CmpLocalType = NegativeInfinity
+ else:
+ # Versions with a local segment need that segment parsed to implement
+ # the sorting rules in PEP440.
+ # - Alpha numeric segments sort before numeric segments
+ # - Alpha numeric segments sort lexicographically
+ # - Numeric segments sort numerically
+ # - Shorter versions sort before longer versions when the prefixes
+ # match exactly
+ _local = tuple(
+ (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+ )
+
+ return epoch, _release, _pre, _post, _dev, _local
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/archive_util.py b/contrib/python/setuptools/py3/setuptools/_distutils/archive_util.py
index 052f6e4646..07cd97f4d0 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/archive_util.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/archive_util.py
@@ -56,7 +56,13 @@ def _get_uid(name):
def make_tarball(
- base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None
+ base_name,
+ base_dir,
+ compress="gzip",
+ verbose=False,
+ dry_run=False,
+ owner=None,
+ group=None,
):
"""Create a (possibly compressed) tar file from all the files under
'base_dir'.
@@ -113,7 +119,7 @@ def make_tarball(
return tarinfo
if not dry_run:
- tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
+ tar = tarfile.open(archive_name, f'w|{tar_compression[compress]}')
try:
tar.add(base_dir, filter=_set_uid_gid)
finally:
@@ -134,7 +140,7 @@ def make_tarball(
return archive_name
-def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): # noqa: C901
+def make_zipfile(base_name, base_dir, verbose=False, dry_run=False): # noqa: C901
"""Create a zip file from all the files under 'base_dir'.
The output zip file will be named 'base_name' + ".zip". Uses either the
@@ -160,12 +166,9 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): # noqa: C901
# XXX really should distinguish between "couldn't find
# external 'zip' command" and "zip failed".
raise DistutilsExecError(
- (
- "unable to create zip file '%s': "
- "could neither import the 'zipfile' module nor "
- "find a standalone zip utility"
- )
- % zip_filename
+ f"unable to create zip file '{zip_filename}': "
+ "could neither import the 'zipfile' module nor "
+ "find a standalone zip utility"
)
else:
@@ -224,8 +227,8 @@ def make_archive(
format,
root_dir=None,
base_dir=None,
- verbose=0,
- dry_run=0,
+ verbose=False,
+ dry_run=False,
owner=None,
group=None,
):
@@ -260,7 +263,7 @@ def make_archive(
try:
format_info = ARCHIVE_FORMATS[format]
except KeyError:
- raise ValueError("unknown archive format '%s'" % format)
+ raise ValueError(f"unknown archive format '{format}'")
func = format_info[0]
for arg, val in format_info[1]:
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/bcppcompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/bcppcompiler.py
index c1341e43cb..e47dca5d09 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/bcppcompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/bcppcompiler.py
@@ -61,7 +61,7 @@ class BCPPCompiler(CCompiler):
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
- def __init__(self, verbose=0, dry_run=0, force=0):
+ def __init__(self, verbose=False, dry_run=False, force=False):
super().__init__(verbose, dry_run, force)
# These executables are assumed to all be in the path.
@@ -84,13 +84,13 @@ class BCPPCompiler(CCompiler):
# -- Worker methods ------------------------------------------------
- def compile( # noqa: C901
+ def compile(
self,
sources,
output_dir=None,
macros=None,
include_dirs=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
depends=None,
@@ -161,7 +161,7 @@ class BCPPCompiler(CCompiler):
# compile ()
def create_static_lib(
- self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+ self, objects, output_libname, output_dir=None, debug=False, target_lang=None
):
(objects, output_dir) = self._fix_object_args(objects, output_dir)
output_filename = self.library_filename(output_libname, output_dir=output_dir)
@@ -189,7 +189,7 @@ class BCPPCompiler(CCompiler):
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -234,11 +234,11 @@ class BCPPCompiler(CCompiler):
head, tail = os.path.split(output_filename)
modname, ext = os.path.splitext(tail)
temp_dir = os.path.dirname(objects[0]) # preserve tree structure
- def_file = os.path.join(temp_dir, '%s.def' % modname)
+ def_file = os.path.join(temp_dir, f'{modname}.def')
contents = ['EXPORTS']
for sym in export_symbols or []:
contents.append(f' {sym}=_{sym}')
- self.execute(write_file, (def_file, contents), "writing %s" % def_file)
+ self.execute(write_file, (def_file, contents), f"writing {def_file}")
# Borland C++ has problems with '/' in paths
objects2 = map(os.path.normpath, objects)
@@ -254,7 +254,7 @@ class BCPPCompiler(CCompiler):
objects.append(file)
for ell in library_dirs:
- ld_args.append("/L%s" % os.path.normpath(ell))
+ ld_args.append(f"/L{os.path.normpath(ell)}")
ld_args.append("/L.") # we sometimes use relative paths
# list of object files
@@ -313,7 +313,7 @@ class BCPPCompiler(CCompiler):
# -- Miscellaneous methods -----------------------------------------
- def find_library_file(self, dirs, lib, debug=0):
+ def find_library_file(self, dirs, lib, debug=False):
# List of effective library names to try, in order of preference:
# xxx_bcpp.lib is better than xxx.lib
# and xxx_d.lib is better than xxx.lib if debug is set
@@ -339,7 +339,7 @@ class BCPPCompiler(CCompiler):
return None
# overwrite the one from CCompiler to support rc and res-files
- def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+ def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
if output_dir is None:
output_dir = ''
obj_names = []
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/ccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/ccompiler.py
index 8876d73098..9d5297b944 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/ccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/ccompiler.py
@@ -6,6 +6,7 @@ for the Distutils compiler abstraction model."""
import os
import re
import sys
+import types
import warnings
from ._itertools import always_iterable
@@ -21,7 +22,7 @@ from .errors import (
)
from .file_util import move_file
from .spawn import spawn
-from .util import execute, split_quoted
+from .util import execute, split_quoted, is_mingw
class CCompiler:
@@ -104,7 +105,7 @@ class CCompiler:
library dirs specific to this compiler class
"""
- def __init__(self, verbose=0, dry_run=0, force=0):
+ def __init__(self, verbose=False, dry_run=False, force=False):
self.dry_run = dry_run
self.force = force
self.verbose = verbose
@@ -188,24 +189,28 @@ class CCompiler:
return None
def _check_macro_definitions(self, definitions):
- """Ensures that every element of 'definitions' is a valid macro
- definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do
- nothing if all definitions are OK, raise TypeError otherwise.
- """
+ """Ensure that every element of 'definitions' is valid."""
for defn in definitions:
- if not (
- isinstance(defn, tuple)
- and (
- len(defn) in (1, 2)
- and (isinstance(defn[1], str) or defn[1] is None)
- )
- and isinstance(defn[0], str)
- ):
- raise TypeError(
- ("invalid macro definition '%s': " % defn)
- + "must be tuple (string,), (string, string), or "
- + "(string, None)"
- )
+ self._check_macro_definition(*defn)
+
+ def _check_macro_definition(self, defn):
+ """
+ Raise a TypeError if defn is not valid.
+
+ A valid definition is either a (name, value) 2-tuple or a (name,) tuple.
+ """
+ if not isinstance(defn, tuple) or not self._is_valid_macro(*defn):
+ raise TypeError(
+ f"invalid macro definition '{defn}': "
+ "must be tuple (string,), (string, string), or (string, None)"
+ )
+
+ @staticmethod
+ def _is_valid_macro(name, value=None):
+ """
+ A valid macro is a ``name : str`` and a ``value : str | None``.
+ """
+ return isinstance(name, str) and isinstance(value, (str, types.NoneType))
# -- Bookkeeping methods -------------------------------------------
@@ -342,7 +347,7 @@ class CCompiler:
extra = []
# Get the list of expected output (object) files
- objects = self.object_filenames(sources, strip_dir=0, output_dir=outdir)
+ objects = self.object_filenames(sources, strip_dir=False, output_dir=outdir)
assert len(objects) == len(sources)
pp_opts = gen_preprocess_options(macros, incdirs)
@@ -532,7 +537,7 @@ class CCompiler:
output_dir=None,
macros=None,
include_dirs=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
depends=None,
@@ -609,7 +614,7 @@ class CCompiler:
pass
def create_static_lib(
- self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+ self, objects, output_libname, output_dir=None, debug=False, target_lang=None
):
"""Link a bunch of stuff together to create a static library file.
The "bunch of stuff" consists of the list of object files supplied
@@ -650,7 +655,7 @@ class CCompiler:
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -712,7 +717,7 @@ class CCompiler:
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -743,7 +748,7 @@ class CCompiler:
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -773,7 +778,7 @@ class CCompiler:
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
target_lang=None,
@@ -859,7 +864,7 @@ class CCompiler:
fd, fname = tempfile.mkstemp(".c", funcname, text=True)
with os.fdopen(fd, "w", encoding='utf-8') as f:
for incl in includes:
- f.write("""#include "%s"\n""" % incl)
+ f.write(f"""#include "{incl}"\n""")
if not includes:
# Use "char func(void);" as the prototype to follow
# what autoconf does. This prototype does not match
@@ -869,22 +874,20 @@ class CCompiler:
# know the exact argument types, and the has_function
# interface does not provide that level of information.
f.write(
- """\
+ f"""\
#ifdef __cplusplus
extern "C"
#endif
-char %s(void);
+char {funcname}(void);
"""
- % funcname
)
f.write(
- """\
-int main (int argc, char **argv) {
- %s();
+ f"""\
+int main (int argc, char **argv) {{
+ {funcname}();
return 0;
-}
+}}
"""
- % funcname
)
try:
@@ -909,7 +912,7 @@ int main (int argc, char **argv) {
os.remove(fn)
return True
- def find_library_file(self, dirs, lib, debug=0):
+ def find_library_file(self, dirs, lib, debug=False):
"""Search the specified list of directories for a static or shared
library file 'lib' and return the full path to that file. If
'debug' true, look for a debugging version (if that makes sense on
@@ -952,7 +955,7 @@ int main (int argc, char **argv) {
# * exe_extension -
# extension for executable files, eg. '' or '.exe'
- def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+ def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
if output_dir is None:
output_dir = ''
return list(
@@ -987,13 +990,13 @@ int main (int argc, char **argv) {
# If abs, chop off leading /
return no_drive[os.path.isabs(no_drive) :]
- def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
+ def shared_object_filename(self, basename, strip_dir=False, output_dir=''):
assert output_dir is not None
if strip_dir:
basename = os.path.basename(basename)
return os.path.join(output_dir, basename + self.shared_lib_extension)
- def executable_filename(self, basename, strip_dir=0, output_dir=''):
+ def executable_filename(self, basename, strip_dir=False, output_dir=''):
assert output_dir is not None
if strip_dir:
basename = os.path.basename(basename)
@@ -1003,7 +1006,7 @@ int main (int argc, char **argv) {
self,
libname,
lib_type='static',
- strip_dir=0,
+ strip_dir=False,
output_dir='', # or 'shared'
):
assert output_dir is not None
@@ -1032,7 +1035,7 @@ int main (int argc, char **argv) {
print(msg)
def warn(self, msg):
- sys.stderr.write("warning: %s\n" % msg)
+ sys.stderr.write(f"warning: {msg}\n")
def execute(self, func, args, msg=None, level=1):
execute(func, args, msg, self.dry_run)
@@ -1077,6 +1080,10 @@ def get_default_compiler(osname=None, platform=None):
osname = os.name
if platform is None:
platform = sys.platform
+ # Mingw is a special case where sys.platform is 'win32' but we
+ # want to use the 'mingw32' compiler, so check it first
+ if is_mingw():
+ return 'mingw32'
for pattern, compiler in _default_compilers:
if (
re.match(pattern, platform) is not None
@@ -1125,7 +1132,7 @@ def show_compilers():
pretty_printer.print_help("List of available compilers:")
-def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
+def new_compiler(plat=None, compiler=None, verbose=False, dry_run=False, force=False):
"""Generate an instance of some CCompiler subclass for the supplied
platform/compiler combination. 'plat' defaults to 'os.name'
(eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
@@ -1145,9 +1152,9 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
(module_name, class_name, long_description) = compiler_class[compiler]
except KeyError:
- msg = "don't know how to compile C/C++ code on platform '%s'" % plat
+ msg = f"don't know how to compile C/C++ code on platform '{plat}'"
if compiler is not None:
- msg = msg + " with '%s' compiler" % compiler
+ msg = msg + f" with '{compiler}' compiler"
raise DistutilsPlatformError(msg)
try:
@@ -1157,7 +1164,7 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
klass = vars(module)[class_name]
except ImportError:
raise DistutilsModuleError(
- "can't compile C/C++ code: unable to load module '%s'" % module_name
+ f"can't compile C/C++ code: unable to load module '{module_name}'"
)
except KeyError:
raise DistutilsModuleError(
@@ -1196,15 +1203,15 @@ def gen_preprocess_options(macros, include_dirs):
for macro in macros:
if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2):
raise TypeError(
- "bad macro definition '%s': "
- "each element of 'macros' list must be a 1- or 2-tuple" % macro
+ f"bad macro definition '{macro}': "
+ "each element of 'macros' list must be a 1- or 2-tuple"
)
if len(macro) == 1: # undefine this macro
- pp_opts.append("-U%s" % macro[0])
+ pp_opts.append(f"-U{macro[0]}")
elif len(macro) == 2:
if macro[1] is None: # define with no explicit value
- pp_opts.append("-D%s" % macro[0])
+ pp_opts.append(f"-D{macro[0]}")
else:
# XXX *don't* need to be clever about quoting the
# macro value here, because we're going to avoid the
@@ -1212,7 +1219,7 @@ def gen_preprocess_options(macros, include_dirs):
pp_opts.append("-D{}={}".format(*macro))
for dir in include_dirs:
- pp_opts.append("-I%s" % dir)
+ pp_opts.append(f"-I{dir}")
return pp_opts
@@ -1245,7 +1252,7 @@ def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
lib_opts.append(lib_file)
else:
compiler.warn(
- "no library file corresponding to '%s' found (skipping)" % lib
+ f"no library file corresponding to '{lib}' found (skipping)"
)
else:
lib_opts.append(compiler.library_option(lib))
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/cmd.py b/contrib/python/setuptools/py3/setuptools/_distutils/cmd.py
index 02dbf165f5..2bb97956ab 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/cmd.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/cmd.py
@@ -87,13 +87,13 @@ class Command:
# The 'help' flag is just used for command-line parsing, so
# none of that complicated bureaucracy is needed.
- self.help = 0
+ self.help = False
# 'finalized' records whether or not 'finalize_options()' has been
# called. 'finalize_options()' itself should not pay attention to
# this flag: it is the business of 'ensure_finalized()', which
# always calls 'finalize_options()', to respect/update it.
- self.finalized = 0
+ self.finalized = False
# XXX A more explicit way to customize dry_run would be better.
def __getattr__(self, attr):
@@ -109,7 +109,7 @@ class Command:
def ensure_finalized(self):
if not self.finalized:
self.finalize_options()
- self.finalized = 1
+ self.finalized = True
# Subclasses must define:
# initialize_options()
@@ -135,7 +135,7 @@ class Command:
This method must be implemented by all command classes.
"""
raise RuntimeError(
- "abstract method -- subclass %s must override" % self.__class__
+ f"abstract method -- subclass {self.__class__} must override"
)
def finalize_options(self):
@@ -150,14 +150,14 @@ class Command:
This method must be implemented by all command classes.
"""
raise RuntimeError(
- "abstract method -- subclass %s must override" % self.__class__
+ f"abstract method -- subclass {self.__class__} must override"
)
def dump_options(self, header=None, indent=""):
from distutils.fancy_getopt import longopt_xlate
if header is None:
- header = "command options for '%s':" % self.get_command_name()
+ header = f"command options for '{self.get_command_name()}':"
self.announce(indent + header, level=logging.INFO)
indent = indent + " "
for option, _, _ in self.user_options:
@@ -178,7 +178,7 @@ class Command:
This method must be implemented by all command classes.
"""
raise RuntimeError(
- "abstract method -- subclass %s must override" % self.__class__
+ f"abstract method -- subclass {self.__class__} must override"
)
def announce(self, msg, level=logging.DEBUG):
@@ -293,7 +293,7 @@ class Command:
if getattr(self, dst_option) is None:
setattr(self, dst_option, getattr(src_cmd_obj, src_option))
- def get_finalized_command(self, command, create=1):
+ def get_finalized_command(self, command, create=True):
"""Wrapper around Distribution's 'get_command_obj()' method: find
(create if necessary and 'create' is true) the command object for
'command', call its 'ensure_finalized()' method, and return the
@@ -305,7 +305,7 @@ class Command:
# XXX rename to 'get_reinitialized_command()'? (should do the
# same in dist.py, if so)
- def reinitialize_command(self, command, reinit_subcommands=0):
+ def reinitialize_command(self, command, reinit_subcommands=False):
return self.distribution.reinitialize_command(command, reinit_subcommands)
def run_command(self, command):
@@ -340,7 +340,13 @@ class Command:
dir_util.mkpath(name, mode, dry_run=self.dry_run)
def copy_file(
- self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1
+ self,
+ infile,
+ outfile,
+ preserve_mode=True,
+ preserve_times=True,
+ link=None,
+ level=1,
):
"""Copy a file respecting verbose, dry-run and force flags. (The
former two default to whatever is in the Distribution object, and
@@ -359,9 +365,9 @@ class Command:
self,
infile,
outfile,
- preserve_mode=1,
- preserve_times=1,
- preserve_symlinks=0,
+ preserve_mode=True,
+ preserve_times=True,
+ preserve_symlinks=False,
level=1,
):
"""Copy an entire directory tree respecting verbose, dry-run,
@@ -381,7 +387,7 @@ class Command:
"""Move a file respecting dry-run flag."""
return file_util.move_file(src, dst, dry_run=self.dry_run)
- def spawn(self, cmd, search_path=1, level=1):
+ def spawn(self, cmd, search_path=True, level=1):
"""Spawn an external command respecting dry-run flag."""
from distutils.spawn import spawn
@@ -412,7 +418,7 @@ class Command:
timestamp checks.
"""
if skip_msg is None:
- skip_msg = "skipping %s (inputs unchanged)" % outfile
+ skip_msg = f"skipping {outfile} (inputs unchanged)"
# Allow 'infiles' to be a single string
if isinstance(infiles, str):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/__init__.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/__init__.py
index 028dcfa0fc..1e8fbe60c2 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/__init__.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/__init__.py
@@ -3,7 +3,7 @@
Package containing implementation of all the standard Distutils
commands."""
-__all__ = [ # noqa: F822
+__all__ = [
'build',
'build_py',
'build_ext',
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist.py
index ade98445ba..1738f4e56b 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist.py
@@ -41,7 +41,7 @@ class bdist(Command):
'plat-name=',
'p',
"platform name to embed in generated filenames "
- "(default: %s)" % get_platform(),
+ f"[default: {get_platform()}]",
),
('formats=', None, "formats for distribution (comma-separated list)"),
(
@@ -94,7 +94,7 @@ class bdist(Command):
self.plat_name = None
self.formats = None
self.dist_dir = None
- self.skip_build = 0
+ self.skip_build = False
self.group = None
self.owner = None
@@ -120,7 +120,7 @@ class bdist(Command):
except KeyError:
raise DistutilsPlatformError(
"don't know how to create built distributions "
- "on platform %s" % os.name
+ f"on platform {os.name}"
)
if self.dist_dir is None:
@@ -133,7 +133,7 @@ class bdist(Command):
try:
commands.append(self.format_commands[format][0])
except KeyError:
- raise DistutilsOptionError("invalid format '%s'" % format)
+ raise DistutilsOptionError(f"invalid format '{format}'")
# Reinitialize and run each command.
for i in range(len(self.formats)):
@@ -150,5 +150,5 @@ class bdist(Command):
# If we're going to need to run this command again, tell it to
# keep its temporary files around so subsequent runs go faster.
if cmd_name in commands[i + 1 :]:
- sub_cmd.keep_temp = 1
+ sub_cmd.keep_temp = True
self.run_command(cmd_name)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_dumb.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_dumb.py
index 06502d201e..67b0c8cce9 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_dumb.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_dumb.py
@@ -23,7 +23,7 @@ class bdist_dumb(Command):
'plat-name=',
'p',
"platform name to embed in generated filenames "
- "(default: %s)" % get_platform(),
+ f"[default: {get_platform()}]",
),
(
'format=',
@@ -33,15 +33,14 @@ class bdist_dumb(Command):
(
'keep-temp',
'k',
- "keep the pseudo-installation tree around after "
- + "creating the distribution archive",
+ "keep the pseudo-installation tree around after creating the distribution archive",
),
('dist-dir=', 'd', "directory to put final built distributions in"),
('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
(
'relative',
None,
- "build the archive using relative paths (default: false)",
+ "build the archive using relative paths [default: false]",
),
(
'owner=',
@@ -63,10 +62,10 @@ class bdist_dumb(Command):
self.bdist_dir = None
self.plat_name = None
self.format = None
- self.keep_temp = 0
+ self.keep_temp = False
self.dist_dir = None
self.skip_build = None
- self.relative = 0
+ self.relative = False
self.owner = None
self.group = None
@@ -81,7 +80,7 @@ class bdist_dumb(Command):
except KeyError:
raise DistutilsPlatformError(
"don't know how to create dumb built distributions "
- "on platform %s" % os.name
+ f"on platform {os.name}"
)
self.set_undefined_options(
@@ -95,10 +94,10 @@ class bdist_dumb(Command):
if not self.skip_build:
self.run_command('build')
- install = self.reinitialize_command('install', reinit_subcommands=1)
+ install = self.reinitialize_command('install', reinit_subcommands=True)
install.root = self.bdist_dir
install.skip_build = self.skip_build
- install.warn_dir = 0
+ install.warn_dir = False
log.info("installing to %s", self.bdist_dir)
self.run_command('install')
@@ -116,7 +115,7 @@ class bdist_dumb(Command):
):
raise DistutilsPlatformError(
"can't make a dumb built distribution where "
- f"base and platbase are different ({repr(install.install_base)}, {repr(install.install_platbase)})"
+ f"base and platbase are different ({install.install_base!r}, {install.install_platbase!r})"
)
else:
archive_root = os.path.join(
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_rpm.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_rpm.py
index 649968a5eb..d443eb09b5 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_rpm.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_rpm.py
@@ -40,7 +40,7 @@ class bdist_rpm(Command):
'python=',
None,
"path to Python interpreter to hard-code in the .spec file "
- "(default: \"python\")",
+ "[default: \"python\"]",
),
(
'fix-python',
@@ -187,13 +187,13 @@ class bdist_rpm(Command):
self.build_requires = None
self.obsoletes = None
- self.keep_temp = 0
- self.use_rpm_opt_flags = 1
- self.rpm3_mode = 1
- self.no_autoreq = 0
+ self.keep_temp = False
+ self.use_rpm_opt_flags = True
+ self.rpm3_mode = True
+ self.no_autoreq = False
self.force_arch = None
- self.quiet = 0
+ self.quiet = False
def finalize_options(self):
self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
@@ -214,7 +214,7 @@ class bdist_rpm(Command):
if os.name != 'posix':
raise DistutilsPlatformError(
- "don't know how to create RPM distributions on platform %s" % os.name
+ f"don't know how to create RPM distributions on platform {os.name}"
)
if self.binary_only and self.source_only:
raise DistutilsOptionError(
@@ -223,7 +223,7 @@ class bdist_rpm(Command):
# don't pass CFLAGS to pure python distributions
if not self.distribution.has_ext_modules():
- self.use_rpm_opt_flags = 0
+ self.use_rpm_opt_flags = False
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
self.finalize_package_data()
@@ -295,9 +295,9 @@ class bdist_rpm(Command):
# Spec file goes into 'dist_dir' if '--spec-only specified',
# build/rpm.<plat> otherwise.
- spec_path = os.path.join(spec_dir, "%s.spec" % self.distribution.get_name())
+ spec_path = os.path.join(spec_dir, f"{self.distribution.get_name()}.spec")
self.execute(
- write_file, (spec_path, self._make_spec_file()), "writing '%s'" % spec_path
+ write_file, (spec_path, self._make_spec_file()), f"writing '{spec_path}'"
)
if self.spec_only: # stop if requested
@@ -322,7 +322,7 @@ class bdist_rpm(Command):
if os.path.exists(self.icon):
self.copy_file(self.icon, source_dir)
else:
- raise DistutilsFileError("icon file '%s' does not exist" % self.icon)
+ raise DistutilsFileError(f"icon file '{self.icon}' does not exist")
# build package
log.info("building RPMs")
@@ -334,9 +334,9 @@ class bdist_rpm(Command):
rpm_cmd.append('-bb')
else:
rpm_cmd.append('-ba')
- rpm_cmd.extend(['--define', '__python %s' % self.python])
+ rpm_cmd.extend(['--define', f'__python {self.python}'])
if self.rpm3_mode:
- rpm_cmd.extend(['--define', '_topdir %s' % os.path.abspath(self.rpm_base)])
+ rpm_cmd.extend(['--define', f'_topdir {os.path.abspath(self.rpm_base)}'])
if not self.keep_temp:
rpm_cmd.append('--clean')
@@ -370,7 +370,7 @@ class bdist_rpm(Command):
status = out.close()
if status:
- raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
+ raise DistutilsExecError(f"Failed to execute: {q_cmd!r}")
finally:
out.close()
@@ -426,7 +426,7 @@ class bdist_rpm(Command):
# normalizing the whitespace to simplify the test for whether the
# invocation of brp-python-bytecompile passes in __python):
vendor_hook = '\n'.join([
- ' %s \\' % line.strip() for line in vendor_hook.splitlines()
+ f' {line.strip()} \\' for line in vendor_hook.splitlines()
])
problem = "brp-python-bytecompile \\\n"
fixed = "brp-python-bytecompile %{__python} \\\n"
@@ -468,7 +468,7 @@ class bdist_rpm(Command):
if not self.distribution.has_ext_modules():
spec_file.append('BuildArch: noarch')
else:
- spec_file.append('BuildArch: %s' % self.force_arch)
+ spec_file.append(f'BuildArch: {self.force_arch}')
for field in (
'Vendor',
@@ -518,7 +518,7 @@ class bdist_rpm(Command):
# rpm scripts
# figure out default build script
def_setup_call = f"{self.python} {os.path.basename(sys.argv[0])}"
- def_build = "%s build" % def_setup_call
+ def_build = f"{def_setup_call} build"
if self.use_rpm_opt_flags:
def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
@@ -528,9 +528,7 @@ class bdist_rpm(Command):
# that we open and interpolate into the spec file, but the defaults
# are just text that we drop in as-is. Hmmm.
- install_cmd = (
- '%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES'
- ) % def_setup_call
+ install_cmd = f'{def_setup_call} install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES'
script_options = [
('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/build.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/build.py
index d18ed503e3..caf55073af 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/build.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/build.py
@@ -4,6 +4,7 @@ Implements the Distutils 'build' command."""
import os
import sys
+import sysconfig
from ..core import Command
from ..errors import DistutilsOptionError
@@ -26,16 +27,14 @@ class build(Command):
(
'build-lib=',
None,
- "build directory for all distribution (defaults to either "
- + "build-purelib or build-platlib",
+ "build directory for all distribution (defaults to either build-purelib or build-platlib",
),
('build-scripts=', None, "build directory for scripts"),
('build-temp=', 't', "temporary build directory"),
(
'plat-name=',
'p',
- "platform name to build for, if supported "
- "(default: %s)" % get_platform(),
+ f"platform name to build for, if supported [default: {get_platform()}]",
),
('compiler=', 'c', "specify the compiler type"),
('parallel=', 'j', "number of parallel build jobs"),
@@ -62,7 +61,7 @@ class build(Command):
self.compiler = None
self.plat_name = None
self.debug = None
- self.force = 0
+ self.force = False
self.executable = None
self.parallel = None
@@ -81,6 +80,10 @@ class build(Command):
plat_specifier = f".{self.plat_name}-{sys.implementation.cache_tag}"
+ # Python 3.13+ with --disable-gil shouldn't share build directories
+ if sysconfig.get_config_var('Py_GIL_DISABLED'):
+ plat_specifier += 't'
+
# Make it so Python 2.x and Python 2.x with --with-pydebug don't
# share the same build directories. Doing so confuses the build
# process for C modules
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_clib.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_clib.py
index 360575d0cb..a600d09373 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_clib.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_clib.py
@@ -57,7 +57,7 @@ class build_clib(Command):
self.define = None
self.undef = None
self.debug = None
- self.force = 0
+ self.force = False
self.compiler = None
def finalize_options(self):
@@ -138,8 +138,8 @@ class build_clib(Command):
if '/' in name or (os.sep != '/' and os.sep in name):
raise DistutilsSetupError(
- "bad library name '%s': "
- "may not contain directory separators" % lib[0]
+ f"bad library name '{lib[0]}': "
+ "may not contain directory separators"
)
if not isinstance(build_info, dict):
@@ -166,9 +166,9 @@ class build_clib(Command):
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
- "in 'libraries' option (library '%s'), "
+ f"in 'libraries' option (library '{lib_name}'), "
"'sources' must be present and must be "
- "a list of source filenames" % lib_name
+ "a list of source filenames"
)
filenames.extend(sources)
@@ -179,9 +179,9 @@ class build_clib(Command):
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
- "in 'libraries' option (library '%s'), "
+ f"in 'libraries' option (library '{lib_name}'), "
"'sources' must be present and must be "
- "a list of source filenames" % lib_name
+ "a list of source filenames"
)
sources = list(sources)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_ext.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_ext.py
index 06d949aff1..18e1601a28 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_ext.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_ext.py
@@ -23,7 +23,7 @@ from ..errors import (
)
from ..extension import Extension
from ..sysconfig import customize_compiler, get_config_h_filename, get_python_version
-from ..util import get_platform
+from ..util import get_platform, is_mingw
# An extension name is just a dot-separated list of Python NAMEs (ie.
# the same as a fully-qualified module name).
@@ -57,7 +57,7 @@ class build_ext(Command):
# takes care of both command-line and client options
# in between initialize_options() and finalize_options())
- sep_by = " (separated by '%s')" % os.pathsep
+ sep_by = f" (separated by '{os.pathsep}')"
user_options = [
('build-lib=', 'b', "directory for compiled extension modules"),
('build-temp=', 't', "directory for temporary files (build by-products)"),
@@ -65,13 +65,13 @@ class build_ext(Command):
'plat-name=',
'p',
"platform name to cross-compile for, if supported "
- "(default: %s)" % get_platform(),
+ f"[default: {get_platform()}]",
),
(
'inplace',
'i',
"ignore build-lib and put compiled extensions into the source "
- + "directory alongside your pure Python modules",
+ "directory alongside your pure Python modules",
),
(
'include-dirs=',
@@ -109,7 +109,7 @@ class build_ext(Command):
self.build_lib = None
self.plat_name = None
self.build_temp = None
- self.inplace = 0
+ self.inplace = False
self.package = None
self.include_dirs = None
@@ -175,7 +175,7 @@ class build_ext(Command):
# Make sure Python's include directories (for Python.h, pyconfig.h,
# etc.) are in the include search path.
py_include = sysconfig.get_python_inc()
- plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+ plat_py_include = sysconfig.get_python_inc(plat_specific=True)
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
@@ -212,7 +212,7 @@ class build_ext(Command):
# for extensions under windows use different directories
# for Release and Debug builds.
# also Python's library directory must be appended to library_dirs
- if os.name == 'nt':
+ if os.name == 'nt' and not is_mingw():
# the 'libs' directory is for binary installs - we assume that
# must be the *native* platform. But we don't really support
# cross-compiling via a binary install anyway, so we let it go.
@@ -517,9 +517,9 @@ class build_ext(Command):
sources = ext.sources
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
- "in 'ext_modules' option (extension '%s'), "
+ f"in 'ext_modules' option (extension '{ext.name}'), "
"'sources' must be present and must be "
- "a list of source filenames" % ext.name
+ "a list of source filenames"
)
# sort to make the resulting .so file build reproducible
sources = sorted(sources)
@@ -663,7 +663,7 @@ class build_ext(Command):
# Windows (or so I presume!). If we find it there, great;
# if not, act like Unix and assume it's in the PATH.
for vers in ("1.3", "1.2", "1.1"):
- fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
+ fn = os.path.join(f"c:\\swig{vers}", "swig.exe")
if os.path.isfile(fn):
return fn
else:
@@ -671,7 +671,7 @@ class build_ext(Command):
else:
raise DistutilsPlatformError(
"I don't know how to find (much less run) SWIG "
- "on platform '%s'" % os.name
+ f"on platform '{os.name}'"
)
# -- Name generators -----------------------------------------------
@@ -754,7 +754,7 @@ class build_ext(Command):
# pyconfig.h that MSVC groks. The other Windows compilers all seem
# to need it mentioned explicitly, though, so that's what we do.
# Append '_d' to the python import library on debug builds.
- if sys.platform == "win32":
+ if sys.platform == "win32" and not is_mingw():
from .._msvccompiler import MSVCCompiler
if not isinstance(self.compiler, MSVCCompiler):
@@ -784,7 +784,7 @@ class build_ext(Command):
# A native build on an Android device or on Cygwin
if hasattr(sys, 'getandroidapilevel'):
link_libpython = True
- elif sys.platform == 'cygwin':
+ elif sys.platform == 'cygwin' or is_mingw():
link_libpython = True
elif '_PYTHON_HOST_PLATFORM' in os.environ:
# We are cross-compiling for one of the relevant platforms
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_py.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_py.py
index 56e6fa2e66..49d710346e 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_py.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_py.py
@@ -38,7 +38,7 @@ class build_py(Command):
self.package = None
self.package_data = None
self.package_dir = None
- self.compile = 0
+ self.compile = False
self.optimize = 0
self.force = None
@@ -95,7 +95,7 @@ class build_py(Command):
self.build_packages()
self.build_package_data()
- self.byte_compile(self.get_outputs(include_bytecode=0))
+ self.byte_compile(self.get_outputs(include_bytecode=False))
def get_data_files(self):
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
@@ -191,12 +191,12 @@ class build_py(Command):
if package_dir != "":
if not os.path.exists(package_dir):
raise DistutilsFileError(
- "package directory '%s' does not exist" % package_dir
+ f"package directory '{package_dir}' does not exist"
)
if not os.path.isdir(package_dir):
raise DistutilsFileError(
- "supposed package directory '%s' exists, "
- "but is not a directory" % package_dir
+ f"supposed package directory '{package_dir}' exists, "
+ "but is not a directory"
)
# Directories without __init__.py are namespace packages (PEP 420).
@@ -228,7 +228,7 @@ class build_py(Command):
module = os.path.splitext(os.path.basename(f))[0]
modules.append((package, module, f))
else:
- self.debug_print("excluding %s" % setup_script)
+ self.debug_print(f"excluding {setup_script}")
return modules
def find_modules(self):
@@ -264,7 +264,7 @@ class build_py(Command):
(package_dir, checked) = packages[package]
except KeyError:
package_dir = self.get_package_dir(package)
- checked = 0
+ checked = False
if not checked:
init_py = self.check_package(package, package_dir)
@@ -306,7 +306,7 @@ class build_py(Command):
outfile_path = [build_dir] + list(package) + [module + ".py"]
return os.path.join(*outfile_path)
- def get_outputs(self, include_bytecode=1):
+ def get_outputs(self, include_bytecode=True):
modules = self.find_all_modules()
outputs = []
for package, module, _module_file in modules:
@@ -347,7 +347,7 @@ class build_py(Command):
outfile = self.get_module_outfile(self.build_lib, package, module)
dir = os.path.dirname(outfile)
self.mkpath(dir)
- return self.copy_file(module_file, outfile, preserve_mode=0)
+ return self.copy_file(module_file, outfile, preserve_mode=False)
def build_modules(self):
modules = self.find_modules()
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_scripts.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_scripts.py
index 5f3902a027..9e5963c243 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_scripts.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_scripts.py
@@ -96,7 +96,7 @@ class build_scripts(Command):
else:
first_line = f.readline()
if not first_line:
- self.warn("%s is an empty file (skipping)" % script)
+ self.warn(f"{script} is an empty file (skipping)")
return
shebang_match = shebang_pattern.match(first_line)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/check.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/check.py
index 28599e109c..58b3f949f9 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/check.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/check.py
@@ -21,7 +21,7 @@ with contextlib.suppress(ImportError):
report_level,
halt_level,
stream=None,
- debug=0,
+ debug=False,
encoding='ascii',
error_handler='replace',
):
@@ -58,9 +58,9 @@ class check(Command):
def initialize_options(self):
"""Sets default values for options."""
- self.restructuredtext = 0
+ self.restructuredtext = False
self.metadata = 1
- self.strict = 0
+ self.strict = False
self._warnings = 0
def finalize_options(self):
@@ -106,7 +106,7 @@ class check(Command):
missing.append(attr)
if missing:
- self.warn("missing required meta-data: %s" % ', '.join(missing))
+ self.warn("missing required meta-data: {}".format(', '.join(missing)))
def check_restructuredtext(self):
"""Checks if the long string fields are reST-compliant."""
@@ -147,7 +147,7 @@ class check(Command):
except AttributeError as e:
reporter.messages.append((
-1,
- 'Could not finish the parsing: %s.' % e,
+ f'Could not finish the parsing: {e}.',
'',
{},
))
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/clean.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/clean.py
index 4167a83fb3..fb54a60ed4 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/clean.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/clean.py
@@ -14,17 +14,17 @@ from ..dir_util import remove_tree
class clean(Command):
description = "clean up temporary files from 'build' command"
user_options = [
- ('build-base=', 'b', "base build directory (default: 'build.build-base')"),
+ ('build-base=', 'b', "base build directory [default: 'build.build-base']"),
(
'build-lib=',
None,
- "build directory for all modules (default: 'build.build-lib')",
+ "build directory for all modules [default: 'build.build-lib']",
),
- ('build-temp=', 't', "temporary build directory (default: 'build.build-temp')"),
+ ('build-temp=', 't', "temporary build directory [default: 'build.build-temp']"),
(
'build-scripts=',
None,
- "build directory for scripts (default: 'build.build-scripts')",
+ "build directory for scripts [default: 'build.build-scripts']",
),
('bdist-base=', None, "temporary directory for built distributions"),
('all', 'a', "remove all build output, not just temporary by-products"),
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/config.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/config.py
index d4b2b0a362..fe83c2924d 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/config.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/config.py
@@ -94,7 +94,7 @@ class config(Command):
if not isinstance(self.compiler, CCompiler):
self.compiler = new_compiler(
- compiler=self.compiler, dry_run=self.dry_run, force=1
+ compiler=self.compiler, dry_run=self.dry_run, force=True
)
customize_compiler(self.compiler)
if self.include_dirs:
@@ -109,7 +109,7 @@ class config(Command):
with open(filename, "w", encoding='utf-8') as file:
if headers:
for header in headers:
- file.write("#include <%s>\n" % header)
+ file.write(f"#include <{header}>\n")
file.write("\n")
file.write(body)
if body[-1] != "\n":
@@ -126,7 +126,7 @@ class config(Command):
def _compile(self, body, headers, include_dirs, lang):
src = self._gen_temp_sourcefile(body, headers, lang)
if self.dump_source:
- dump_file(src, "compiling '%s':" % src)
+ dump_file(src, f"compiling '{src}':")
(obj,) = self.compiler.object_filenames([src])
self.temp_files.extend([src, obj])
self.compiler.compile([src], include_dirs=include_dirs)
@@ -292,8 +292,8 @@ class config(Command):
include_dirs=None,
libraries=None,
library_dirs=None,
- decl=0,
- call=0,
+ decl=False,
+ call=False,
):
"""Determine if function 'func' is available by constructing a
source file that refers to 'func', and compiles and links it.
@@ -311,12 +311,12 @@ class config(Command):
self._check_compiler()
body = []
if decl:
- body.append("int %s ();" % func)
+ body.append(f"int {func} ();")
body.append("int main () {")
if call:
- body.append(" %s();" % func)
+ body.append(f" {func}();")
else:
- body.append(" %s;" % func)
+ body.append(f" {func};")
body.append("}")
body = "\n".join(body) + "\n"
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/install.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/install.py
index 8e920be4de..1fc09eef89 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/install.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/install.py
@@ -193,8 +193,7 @@ class install(Command):
(
'install-platbase=',
None,
- "base installation directory for platform-specific files "
- + "(instead of --exec-prefix or --home)",
+ "base installation directory for platform-specific files (instead of --exec-prefix or --home)",
),
('root=', None, "install everything relative to this alternate root directory"),
# Or, explicitly set the installation scheme
@@ -211,8 +210,7 @@ class install(Command):
(
'install-lib=',
None,
- "installation directory for all module distributions "
- + "(overrides --install-purelib and --install-platlib)",
+ "installation directory for all module distributions (overrides --install-purelib and --install-platlib)",
),
('install-headers=', None, "installation directory for C/C++ headers"),
('install-scripts=', None, "installation directory for Python scripts"),
@@ -245,7 +243,7 @@ class install(Command):
user_options.append((
'user',
None,
- "install in user site-package '%s'" % USER_SITE,
+ f"install in user site-package '{USER_SITE}'",
))
boolean_options.append('user')
@@ -258,7 +256,7 @@ class install(Command):
self.prefix = None
self.exec_prefix = None
self.home = None
- self.user = 0
+ self.user = False
# These select only the installation base; it's up to the user to
# specify the installation scheme (currently, that means supplying
@@ -293,7 +291,7 @@ class install(Command):
# 'install_path_file' is always true unless some outsider meddles
# with it.
self.extra_path = None
- self.install_path_file = 1
+ self.install_path_file = True
# 'force' forces installation, even if target files are not
# out-of-date. 'skip_build' skips running the "build" command,
@@ -301,9 +299,9 @@ class install(Command):
# a user option, it's just there so the bdist_* commands can turn
# it off) determines whether we warn about installing to a
# directory not in sys.path.
- self.force = 0
- self.skip_build = 0
- self.warn_dir = 1
+ self.force = False
+ self.skip_build = False
+ self.warn_dir = True
# These are only here as a conduit from the 'build' command to the
# 'install_*' commands that do the real work. ('build_base' isn't
@@ -348,8 +346,7 @@ class install(Command):
self.install_base or self.install_platbase
):
raise DistutilsOptionError(
- "must supply either prefix/exec-prefix/home or "
- + "install-base/install-platbase -- not both"
+ "must supply either prefix/exec-prefix/home or install-base/install-platbase -- not both"
)
if self.home and (self.prefix or self.exec_prefix):
@@ -600,7 +597,7 @@ class install(Command):
self.select_scheme(os.name)
except KeyError:
raise DistutilsPlatformError(
- "I don't know how to install stuff on '%s'" % os.name
+ f"I don't know how to install stuff on '{os.name}'"
)
def select_scheme(self, name):
@@ -685,7 +682,7 @@ class install(Command):
home = convert_path(os.path.expanduser("~"))
for _name, path in self.config_vars.items():
if str(path).startswith(home) and not os.path.isdir(path):
- self.debug_print("os.makedirs('%s', 0o700)" % path)
+ self.debug_print(f"os.makedirs('{path}', 0o700)")
os.makedirs(path, 0o700)
# -- Command execution methods -------------------------------------
@@ -720,7 +717,7 @@ class install(Command):
self.execute(
write_file,
(self.record, outputs),
- "writing list of installed files to '%s'" % self.record,
+ f"writing list of installed files to '{self.record}'",
)
sys_path = map(os.path.normpath, sys.path)
@@ -745,10 +742,10 @@ class install(Command):
filename = os.path.join(self.install_libbase, self.path_file + ".pth")
if self.install_path_file:
self.execute(
- write_file, (filename, [self.extra_dirs]), "creating %s" % filename
+ write_file, (filename, [self.extra_dirs]), f"creating {filename}"
)
else:
- self.warn("path file '%s' not created" % filename)
+ self.warn(f"path file '{filename}' not created")
# -- Reporting methods ---------------------------------------------
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_data.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_data.py
index b63a1af25e..624c0b901b 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_data.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_data.py
@@ -19,7 +19,7 @@ class install_data(Command):
'install-dir=',
'd',
"base directory for installing data files "
- "(default: installation base dir)",
+ "[default: installation base dir]",
),
('root=', None, "install everything relative to this alternate root directory"),
('force', 'f', "force installation (overwrite existing files)"),
@@ -31,9 +31,9 @@ class install_data(Command):
self.install_dir = None
self.outfiles = []
self.root = None
- self.force = 0
+ self.force = False
self.data_files = self.distribution.data_files
- self.warn_dir = 1
+ self.warn_dir = True
def finalize_options(self):
self.set_undefined_options(
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_headers.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_headers.py
index 085272c1a2..fbb3b242ea 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_headers.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_headers.py
@@ -19,7 +19,7 @@ class install_headers(Command):
def initialize_options(self):
self.install_dir = None
- self.force = 0
+ self.force = False
self.outfiles = []
def finalize_options(self):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_lib.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_lib.py
index b1f346f018..54a12d38a8 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_lib.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_lib.py
@@ -54,7 +54,7 @@ class install_lib(Command):
# let the 'install' command dictate our installation directory
self.install_dir = None
self.build_dir = None
- self.force = 0
+ self.force = False
self.compile = None
self.optimize = None
self.skip_build = None
@@ -114,7 +114,7 @@ class install_lib(Command):
outfiles = self.copy_tree(self.build_dir, self.install_dir)
else:
self.warn(
- "'%s' does not exist -- no Python modules to install" % self.build_dir
+ f"'{self.build_dir}' does not exist -- no Python modules to install"
)
return
return outfiles
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_scripts.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_scripts.py
index e66b13a16d..bb43387fb8 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_scripts.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_scripts.py
@@ -26,7 +26,7 @@ class install_scripts(Command):
def initialize_options(self):
self.install_dir = None
- self.force = 0
+ self.force = False
self.build_dir = None
self.skip_build = None
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/register.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/register.py
index ee6c54daba..c1acd27b54 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/register.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/register.py
@@ -37,8 +37,8 @@ class register(PyPIRCCommand):
def initialize_options(self):
PyPIRCCommand.initialize_options(self)
- self.list_classifiers = 0
- self.strict = 0
+ self.list_classifiers = False
+ self.strict = False
def finalize_options(self):
PyPIRCCommand.finalize_options(self)
@@ -74,7 +74,7 @@ class register(PyPIRCCommand):
check = self.distribution.get_command_obj('check')
check.ensure_finalized()
check.strict = self.strict
- check.restructuredtext = 1
+ check.restructuredtext = True
check.run()
def _set_config(self):
@@ -88,7 +88,7 @@ class register(PyPIRCCommand):
self.has_config = True
else:
if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
- raise ValueError('%s not found in .pypirc' % self.repository)
+ raise ValueError(f'{self.repository} not found in .pypirc')
if self.repository == 'pypi':
self.repository = self.DEFAULT_REPOSITORY
self.has_config = False
@@ -192,7 +192,7 @@ Your selection [default 1]: """,
logging.INFO,
)
self.announce(
- '(the login will be stored in %s)' % self._get_rc_file(),
+ f'(the login will be stored in {self._get_rc_file()})',
logging.INFO,
)
choice = 'X'
@@ -225,7 +225,7 @@ Your selection [default 1]: """,
log.info('Server response (%s): %s', code, result)
else:
log.info('You will receive an email shortly.')
- log.info('Follow the instructions in it to ' 'complete registration.')
+ log.info('Follow the instructions in it to complete registration.')
elif choice == '3':
data = {':action': 'password_reset'}
data['email'] = ''
@@ -277,7 +277,7 @@ Your selection [default 1]: """,
for key, values in data.items():
for value in map(str, make_iterable(values)):
body.write(sep_boundary)
- body.write('\nContent-Disposition: form-data; name="%s"' % key)
+ body.write(f'\nContent-Disposition: form-data; name="{key}"')
body.write("\n\n")
body.write(value)
if value and value[-1] == '\r':
@@ -288,8 +288,7 @@ Your selection [default 1]: """,
# build the Request
headers = {
- 'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'
- % boundary,
+ 'Content-type': f'multipart/form-data; boundary={boundary}; charset=utf-8',
'Content-length': str(len(body)),
}
req = urllib.request.Request(self.repository, body, headers)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/sdist.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/sdist.py
index 387d27c90b..04333dd214 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/sdist.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/sdist.py
@@ -125,14 +125,14 @@ class sdist(Command):
# 'use_defaults': if true, we will include the default file set
# in the manifest
- self.use_defaults = 1
- self.prune = 1
+ self.use_defaults = True
+ self.prune = True
- self.manifest_only = 0
- self.force_manifest = 0
+ self.manifest_only = False
+ self.force_manifest = False
self.formats = ['gztar']
- self.keep_temp = 0
+ self.keep_temp = False
self.dist_dir = None
self.archive_files = None
@@ -150,7 +150,7 @@ class sdist(Command):
bad_format = archive_util.check_archive_formats(self.formats)
if bad_format:
- raise DistutilsOptionError("unknown archive format '%s'" % bad_format)
+ raise DistutilsOptionError(f"unknown archive format '{bad_format}'")
if self.dist_dir is None:
self.dist_dir = "dist"
@@ -288,7 +288,7 @@ class sdist(Command):
if self._cs_path_exists(fn):
self.filelist.append(fn)
else:
- self.warn("standard file '%s' not found" % fn)
+ self.warn(f"standard file '{fn}' not found")
def _add_defaults_optional(self):
optional = ['tests/test*.py', 'test/test*.py', 'setup.cfg']
@@ -353,12 +353,12 @@ class sdist(Command):
log.info("reading manifest template '%s'", self.template)
template = TextFile(
self.template,
- strip_comments=1,
- skip_blanks=1,
- join_lines=1,
- lstrip_ws=1,
- rstrip_ws=1,
- collapse_join=1,
+ strip_comments=True,
+ skip_blanks=True,
+ join_lines=True,
+ lstrip_ws=True,
+ rstrip_ws=True,
+ collapse_join=True,
)
try:
@@ -401,7 +401,7 @@ class sdist(Command):
vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs']
vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps)
- self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
+ self.filelist.exclude_pattern(vcs_ptrn, is_regex=True)
def write_manifest(self):
"""Write the file list in 'self.filelist' (presumably as filled in
@@ -410,8 +410,7 @@ class sdist(Command):
"""
if self._manifest_is_not_generated():
log.info(
- "not writing to manually maintained "
- "manifest file '%s'" % self.manifest
+ f"not writing to manually maintained manifest file '{self.manifest}'"
)
return
@@ -420,7 +419,7 @@ class sdist(Command):
self.execute(
file_util.write_file,
(self.manifest, content),
- "writing manifest file '%s'" % self.manifest,
+ f"writing manifest file '{self.manifest}'",
)
def _manifest_is_not_generated(self):
@@ -468,10 +467,10 @@ class sdist(Command):
if hasattr(os, 'link'): # can make hard links on this system
link = 'hard'
- msg = "making hard links in %s..." % base_dir
+ msg = f"making hard links in {base_dir}..."
else: # nope, have to copy
link = None
- msg = "copying files to %s..." % base_dir
+ msg = f"copying files to {base_dir}..."
if not files:
log.warning("no files to distribute -- empty manifest?")
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/upload.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/upload.py
index cf541f8a82..a2461e089f 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/upload.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/upload.py
@@ -41,7 +41,7 @@ class upload(PyPIRCCommand):
PyPIRCCommand.initialize_options(self)
self.username = ''
self.password = ''
- self.show_response = 0
+ self.show_response = False
self.sign = False
self.identity = None
@@ -75,7 +75,7 @@ class upload(PyPIRCCommand):
# Makes sure the repository URL is compliant
schema, netloc, url, params, query, fragments = urlparse(self.repository)
if params or query or fragments:
- raise AssertionError("Incompatible url %s" % self.repository)
+ raise AssertionError(f"Incompatible url {self.repository}")
if schema not in ('http', 'https'):
raise AssertionError("unsupported schema " + schema)
@@ -153,10 +153,10 @@ class upload(PyPIRCCommand):
end_boundary = sep_boundary + b'--\r\n'
body = io.BytesIO()
for key, values in data.items():
- title = '\r\nContent-Disposition: form-data; name="%s"' % key
+ title = f'\r\nContent-Disposition: form-data; name="{key}"'
for value in make_iterable(values):
if type(value) is tuple:
- title += '; filename="%s"' % value[0]
+ title += f'; filename="{value[0]}"'
value = value[1]
else:
value = str(value).encode('utf-8')
@@ -172,7 +172,7 @@ class upload(PyPIRCCommand):
# build the Request
headers = {
- 'Content-type': 'multipart/form-data; boundary=%s' % boundary,
+ 'Content-type': f'multipart/form-data; boundary={boundary}',
'Content-length': str(len(body)),
'Authorization': auth,
}
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/compat/__init__.py b/contrib/python/setuptools/py3/setuptools/_distutils/compat/__init__.py
index b1ee3fe8b0..e12534a32c 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/compat/__init__.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/compat/__init__.py
@@ -3,11 +3,11 @@ from __future__ import annotations
from .py38 import removeprefix
-def consolidate_linker_args(args: list[str]) -> str:
+def consolidate_linker_args(args: list[str]) -> list[str] | str:
"""
Ensure the return value is a string for backward compatibility.
- Retain until at least 2024-04-31. See pypa/distutils#246
+ Retain until at least 2025-04-31. See pypa/distutils#246
"""
if not all(arg.startswith('-Wl,') for arg in args):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/compat/py38.py b/contrib/python/setuptools/py3/setuptools/_distutils/compat/py38.py
index 0af3814017..2d44211147 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/compat/py38.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/compat/py38.py
@@ -21,3 +21,13 @@ else:
def removeprefix(self, prefix):
return self.removeprefix(prefix)
+
+
+def aix_platform(osname, version, release):
+ try:
+ import _aix_support # type: ignore
+
+ return _aix_support.aix_platform()
+ except ImportError:
+ pass
+ return f"{osname}-{version}.{release}"
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/py39compat.py b/contrib/python/setuptools/py3/setuptools/_distutils/compat/py39.py
index 1b436d7658..1b436d7658 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/py39compat.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/compat/py39.py
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/config.py b/contrib/python/setuptools/py3/setuptools/_distutils/config.py
index 83f96a9eec..ebd2e11da3 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/config.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/config.py
@@ -30,7 +30,7 @@ class PyPIRCCommand(Command):
realm = None
user_options = [
- ('repository=', 'r', "url of repository [default: %s]" % DEFAULT_REPOSITORY),
+ ('repository=', 'r', f"url of repository [default: {DEFAULT_REPOSITORY}]"),
('show-response', None, 'display full response text from server'),
]
@@ -51,7 +51,7 @@ class PyPIRCCommand(Command):
"""Reads the .pypirc file."""
rc = self._get_rc_file()
if os.path.exists(rc):
- self.announce('Using PyPI login from %s' % rc)
+ self.announce(f'Using PyPI login from {rc}')
repository = self.repository or self.DEFAULT_REPOSITORY
config = RawConfigParser()
@@ -129,7 +129,7 @@ class PyPIRCCommand(Command):
"""Initialize options."""
self.repository = None
self.realm = None
- self.show_response = 0
+ self.show_response = False
def finalize_options(self):
"""Finalizes options."""
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/core.py b/contrib/python/setuptools/py3/setuptools/_distutils/core.py
index 309ce696fa..82113c47c1 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/core.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/core.py
@@ -146,7 +146,7 @@ def setup(**attrs): # noqa: C901
_setup_distribution = dist = klass(attrs)
except DistutilsSetupError as msg:
if 'name' not in attrs:
- raise SystemExit("error in setup command: %s" % msg)
+ raise SystemExit(f"error in setup command: {msg}")
else:
raise SystemExit("error in {} setup command: {}".format(attrs['name'], msg))
@@ -170,7 +170,7 @@ def setup(**attrs): # noqa: C901
try:
ok = dist.parse_command_line()
except DistutilsArgError as msg:
- raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)
+ raise SystemExit(gen_usage(dist.script_name) + f"\nerror: {msg}")
if DEBUG:
print("options (after parsing command line):")
@@ -274,11 +274,8 @@ def run_setup(script_name, script_args=None, stop_after="run"):
if _setup_distribution is None:
raise RuntimeError(
- (
- "'distutils.core.setup()' was never called -- "
- "perhaps '%s' is not a Distutils setup script?"
- )
- % script_name
+ "'distutils.core.setup()' was never called -- "
+ f"perhaps '{script_name}' is not a Distutils setup script?"
)
# I wonder if the setup script's namespace -- g and l -- would be of
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/cygwinccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/cygwinccompiler.py
index 539f09d8f3..7b812fd055 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/cygwinccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/cygwinccompiler.py
@@ -57,11 +57,11 @@ def get_msvcr():
try:
msc_ver = int(match.group(1))
except AttributeError:
- return
+ return []
try:
return _msvcr_lookup[msc_ver]
except KeyError:
- raise ValueError("Unknown MS Compiler version %s " % msc_ver)
+ raise ValueError(f"Unknown MS Compiler version {msc_ver} ")
_runtime_library_dirs_msg = (
@@ -83,7 +83,7 @@ class CygwinCCompiler(UnixCCompiler):
dylib_lib_format = "cyg%s%s"
exe_extension = ".exe"
- def __init__(self, verbose=0, dry_run=0, force=0):
+ def __init__(self, verbose=False, dry_run=False, force=False):
super().__init__(verbose, dry_run, force)
status, details = check_config_h()
@@ -91,8 +91,8 @@ class CygwinCCompiler(UnixCCompiler):
if status is not CONFIG_H_OK:
self.warn(
"Python's pyconfig.h doesn't seem to support your compiler. "
- "Reason: %s. "
- "Compiling may fail because of undefined preprocessor macros." % details
+ f"Reason: {details}. "
+ "Compiling may fail because of undefined preprocessor macros."
)
self.cc = os.environ.get('CC', 'gcc')
@@ -102,10 +102,10 @@ class CygwinCCompiler(UnixCCompiler):
shared_option = "-shared"
self.set_executables(
- compiler='%s -mcygwin -O -Wall' % self.cc,
- compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc,
- compiler_cxx='%s -mcygwin -O -Wall' % self.cxx,
- linker_exe='%s -mcygwin' % self.cc,
+ compiler=f'{self.cc} -mcygwin -O -Wall',
+ compiler_so=f'{self.cc} -mcygwin -mdll -O -Wall',
+ compiler_cxx=f'{self.cxx} -mcygwin -O -Wall',
+ linker_exe=f'{self.cc} -mcygwin',
linker_so=(f'{self.linker_dll} -mcygwin {shared_option}'),
)
@@ -154,7 +154,7 @@ class CygwinCCompiler(UnixCCompiler):
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -195,10 +195,10 @@ class CygwinCCompiler(UnixCCompiler):
def_file = os.path.join(temp_dir, dll_name + ".def")
# Generate .def file
- contents = ["LIBRARY %s" % os.path.basename(output_filename), "EXPORTS"]
+ contents = [f"LIBRARY {os.path.basename(output_filename)}", "EXPORTS"]
for sym in export_symbols:
contents.append(sym)
- self.execute(write_file, (def_file, contents), "writing %s" % def_file)
+ self.execute(write_file, (def_file, contents), f"writing {def_file}")
# next add options for def-file
@@ -265,7 +265,7 @@ class Mingw32CCompiler(CygwinCCompiler):
compiler_type = 'mingw32'
- def __init__(self, verbose=0, dry_run=0, force=0):
+ def __init__(self, verbose=False, dry_run=False, force=False):
super().__init__(verbose, dry_run, force)
shared_option = "-shared"
@@ -274,10 +274,10 @@ class Mingw32CCompiler(CygwinCCompiler):
raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32')
self.set_executables(
- compiler='%s -O -Wall' % self.cc,
- compiler_so='%s -mdll -O -Wall' % self.cc,
- compiler_cxx='%s -O -Wall' % self.cxx,
- linker_exe='%s' % self.cc,
+ compiler=f'{self.cc} -O -Wall',
+ compiler_so=f'{self.cc} -shared -O -Wall',
+ compiler_cxx=f'{self.cxx} -O -Wall',
+ linker_exe=f'{self.cc}',
linker_so=f'{self.linker_dll} {shared_option}',
)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/dir_util.py b/contrib/python/setuptools/py3/setuptools/_distutils/dir_util.py
index 370c6ffd49..724afeff6f 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/dir_util.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/dir_util.py
@@ -10,10 +10,10 @@ from .errors import DistutilsFileError, DistutilsInternalError
# cache for by mkpath() -- in addition to cheapening redundant calls,
# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
-_path_created = {}
+_path_created = set()
-def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901
+def mkpath(name, mode=0o777, verbose=True, dry_run=False): # noqa: C901
"""Create a directory and any missing ancestor directories.
If the directory already exists (or if 'name' is the empty string, which
@@ -45,7 +45,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901
created_dirs = []
if os.path.isdir(name) or name == '':
return created_dirs
- if _path_created.get(os.path.abspath(name)):
+ if os.path.abspath(name) in _path_created:
return created_dirs
(head, tail) = os.path.split(name)
@@ -63,7 +63,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901
head = os.path.join(head, d)
abs_head = os.path.abspath(head)
- if _path_created.get(abs_head):
+ if abs_head in _path_created:
continue
if verbose >= 1:
@@ -79,11 +79,11 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901
)
created_dirs.append(head)
- _path_created[abs_head] = 1
+ _path_created.add(abs_head)
return created_dirs
-def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
+def create_tree(base_dir, files, mode=0o777, verbose=True, dry_run=False):
"""Create all the empty directories under 'base_dir' needed to put 'files'
there.
@@ -104,12 +104,12 @@ def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
def copy_tree( # noqa: C901
src,
dst,
- preserve_mode=1,
- preserve_times=1,
- preserve_symlinks=0,
- update=0,
- verbose=1,
- dry_run=0,
+ preserve_mode=True,
+ preserve_times=True,
+ preserve_symlinks=False,
+ update=False,
+ verbose=True,
+ dry_run=False,
):
"""Copy an entire directory tree 'src' to a new location 'dst'.
@@ -133,7 +133,7 @@ def copy_tree( # noqa: C901
from distutils.file_util import copy_file
if not dry_run and not os.path.isdir(src):
- raise DistutilsFileError("cannot copy tree '%s': not a directory" % src)
+ raise DistutilsFileError(f"cannot copy tree '{src}': not a directory")
try:
names = os.listdir(src)
except OSError as e:
@@ -202,7 +202,7 @@ def _build_cmdtuple(path, cmdtuples):
cmdtuples.append((os.rmdir, path))
-def remove_tree(directory, verbose=1, dry_run=0):
+def remove_tree(directory, verbose=True, dry_run=False):
"""Recursively remove an entire directory tree.
Any errors are ignored (apart from being reported to stdout if 'verbose'
@@ -222,7 +222,7 @@ def remove_tree(directory, verbose=1, dry_run=0):
# remove dir from cache if it's already there
abspath = os.path.abspath(cmd[1])
if abspath in _path_created:
- _path_created.pop(abspath)
+ _path_created.remove(abspath)
except OSError as exc:
log.warning("error removing %s: %s", directory, exc)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/dist.py b/contrib/python/setuptools/py3/setuptools/_distutils/dist.py
index 668ce7eb0a..d7d4ca8fc8 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/dist.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/dist.py
@@ -13,6 +13,8 @@ import sys
from collections.abc import Iterable
from email import message_from_file
+from ._vendor.packaging.utils import canonicalize_name, canonicalize_version
+
try:
import warnings
except ImportError:
@@ -137,9 +139,9 @@ Common commands: (see '--help-commands' for more)
"""
# Default values for our command-line options
- self.verbose = 1
- self.dry_run = 0
- self.help = 0
+ self.verbose = True
+ self.dry_run = False
+ self.help = False
for attr in self.display_option_names:
setattr(self, attr, 0)
@@ -262,7 +264,7 @@ Common commands: (see '--help-commands' for more)
elif hasattr(self, key):
setattr(self, key, val)
else:
- msg = "Unknown distribution option: %s" % repr(key)
+ msg = f"Unknown distribution option: {key!r}"
warnings.warn(msg)
# no-user-cfg is handled before other command line args
@@ -311,9 +313,9 @@ Common commands: (see '--help-commands' for more)
for cmd_name in commands:
opt_dict = self.command_options.get(cmd_name)
if opt_dict is None:
- self.announce(indent + "no option dict for '%s' command" % cmd_name)
+ self.announce(indent + f"no option dict for '{cmd_name}' command")
else:
- self.announce(indent + "option dict for '%s' command:" % cmd_name)
+ self.announce(indent + f"option dict for '{cmd_name}' command:")
out = pformat(opt_dict)
for line in out.split('\n'):
self.announce(indent + " " + line)
@@ -339,7 +341,7 @@ Common commands: (see '--help-commands' for more)
files = [str(path) for path in self._gen_paths() if os.path.isfile(path)]
if DEBUG:
- self.announce("using config files: %s" % ', '.join(files))
+ self.announce("using config files: {}".format(', '.join(files)))
return files
@@ -395,7 +397,7 @@ Common commands: (see '--help-commands' for more)
parser = ConfigParser()
for filename in filenames:
if DEBUG:
- self.announce(" reading %s" % filename)
+                self.announce(f"  reading {filename}")
parser.read(filename, encoding='utf-8')
for section in parser.sections():
options = parser.options(section)
@@ -525,7 +527,7 @@ Common commands: (see '--help-commands' for more)
# Pull the current command from the head of the command line
command = args[0]
if not command_re.match(command):
- raise SystemExit("invalid command name '%s'" % command)
+ raise SystemExit(f"invalid command name '{command}'")
self.commands.append(command)
# Dig up the command class that implements this command, so we
@@ -540,7 +542,7 @@ Common commands: (see '--help-commands' for more)
# to be sure that the basic "command" interface is implemented.
if not issubclass(cmd_class, Command):
raise DistutilsClassError(
- "command class %s must subclass Command" % cmd_class
+ f"command class {cmd_class} must subclass Command"
)
# Also make sure that the command object provides a list of its
@@ -579,7 +581,7 @@ Common commands: (see '--help-commands' for more)
parser.set_negative_aliases(negative_opt)
(args, opts) = parser.getopt(args[1:])
if hasattr(opts, 'help') and opts.help:
- self._show_help(parser, display_options=0, commands=[cmd_class])
+ self._show_help(parser, display_options=False, commands=[cmd_class])
return
if hasattr(cmd_class, 'help_options') and isinstance(
@@ -622,7 +624,7 @@ Common commands: (see '--help-commands' for more)
setattr(self.metadata, attr, value)
def _show_help(
- self, parser, global_options=1, display_options=1, commands: Iterable = ()
+ self, parser, global_options=True, display_options=True, commands: Iterable = ()
):
"""Show help for the setup script command-line in the form of
several lists of command-line options. 'parser' should be a
@@ -652,8 +654,7 @@ Common commands: (see '--help-commands' for more)
if display_options:
parser.set_option_table(self.display_options)
parser.print_help(
- "Information display options (just display "
- + "information, ignore any commands)"
+ "Information display options (just display information, ignore any commands)"
)
print()
@@ -668,7 +669,7 @@ Common commands: (see '--help-commands' for more)
)
else:
parser.set_option_table(klass.user_options)
- parser.print_help("Options for '%s' command:" % klass.__name__)
+ parser.print_help(f"Options for '{klass.__name__}' command:")
print()
print(gen_usage(self.script_name))
@@ -694,12 +695,12 @@ Common commands: (see '--help-commands' for more)
# display that metadata in the order in which the user supplied the
# metadata options.
any_display_options = 0
- is_display_option = {}
+ is_display_option = set()
for option in self.display_options:
- is_display_option[option[0]] = 1
+ is_display_option.add(option[0])
for opt, val in option_order:
- if val and is_display_option.get(opt):
+ if val and opt in is_display_option:
opt = translate_longopt(opt)
value = getattr(self.metadata, "get_" + opt)()
if opt in ('keywords', 'platforms'):
@@ -740,13 +741,13 @@ Common commands: (see '--help-commands' for more)
import distutils.command
std_commands = distutils.command.__all__
- is_std = {}
+ is_std = set()
for cmd in std_commands:
- is_std[cmd] = 1
+ is_std.add(cmd)
extra_commands = []
for cmd in self.cmdclass.keys():
- if not is_std.get(cmd):
+ if cmd not in is_std:
extra_commands.append(cmd)
max_length = 0
@@ -771,13 +772,13 @@ Common commands: (see '--help-commands' for more)
import distutils.command
std_commands = distutils.command.__all__
- is_std = {}
+ is_std = set()
for cmd in std_commands:
- is_std[cmd] = 1
+ is_std.add(cmd)
extra_commands = []
for cmd in self.cmdclass.keys():
- if not is_std.get(cmd):
+ if cmd not in is_std:
extra_commands.append(cmd)
rv = []
@@ -842,9 +843,9 @@ Common commands: (see '--help-commands' for more)
self.cmdclass[command] = klass
return klass
- raise DistutilsModuleError("invalid command '%s'" % command)
+ raise DistutilsModuleError(f"invalid command '{command}'")
- def get_command_obj(self, command, create=1):
+ def get_command_obj(self, command, create=True):
"""Return the command object for 'command'. Normally this object
is cached on a previous call to 'get_command_obj()'; if no command
object for 'command' is in the cache, then we either create and
@@ -855,12 +856,12 @@ Common commands: (see '--help-commands' for more)
if DEBUG:
self.announce(
"Distribution.get_command_obj(): "
- "creating '%s' command object" % command
+ f"creating '{command}' command object"
)
klass = self.get_command_class(command)
cmd_obj = self.command_obj[command] = klass(self)
- self.have_run[command] = 0
+ self.have_run[command] = False
# Set any options that were supplied in config files
# or on the command line. (NB. support for error
@@ -887,7 +888,7 @@ Common commands: (see '--help-commands' for more)
option_dict = self.get_option_dict(command_name)
if DEBUG:
- self.announce(" setting options for '%s' command:" % command_name)
+ self.announce(f" setting options for '{command_name}' command:")
for option, (source, value) in option_dict.items():
if DEBUG:
self.announce(f" {option} = {value} (from {source})")
@@ -915,7 +916,7 @@ Common commands: (see '--help-commands' for more)
except ValueError as msg:
raise DistutilsOptionError(msg)
- def reinitialize_command(self, command, reinit_subcommands=0):
+ def reinitialize_command(self, command, reinit_subcommands=False):
"""Reinitializes a command to the state it was in when first
returned by 'get_command_obj()': ie., initialized but not yet
finalized. This provides the opportunity to sneak option
@@ -945,8 +946,8 @@ Common commands: (see '--help-commands' for more)
if not command.finalized:
return command
command.initialize_options()
- command.finalized = 0
- self.have_run[command_name] = 0
+ command.finalized = False
+ self.have_run[command_name] = False
self._set_command_options(command)
if reinit_subcommands:
@@ -986,7 +987,7 @@ Common commands: (see '--help-commands' for more)
cmd_obj = self.get_command_obj(command)
cmd_obj.ensure_finalized()
cmd_obj.run()
- self.have_run[command] = 1
+ self.have_run[command] = True
# -- Distribution query methods ------------------------------------
@@ -1149,9 +1150,9 @@ class DistributionMetadata:
version = '1.1'
# required fields
- file.write('Metadata-Version: %s\n' % version)
- file.write('Name: %s\n' % self.get_name())
- file.write('Version: %s\n' % self.get_version())
+ file.write(f'Metadata-Version: {version}\n')
+ file.write(f'Name: {self.get_name()}\n')
+ file.write(f'Version: {self.get_version()}\n')
def maybe_write(header, val):
if val:
@@ -1189,7 +1190,26 @@ class DistributionMetadata:
return self.version or "0.0.0"
def get_fullname(self):
- return f"{self.get_name()}-{self.get_version()}"
+ return self._fullname(self.get_name(), self.get_version())
+
+ @staticmethod
+ def _fullname(name: str, version: str) -> str:
+ """
+ >>> DistributionMetadata._fullname('setup.tools', '1.0-2')
+ 'setup_tools-1.0.post2'
+ >>> DistributionMetadata._fullname('setup-tools', '1.2post2')
+ 'setup_tools-1.2.post2'
+ >>> DistributionMetadata._fullname('setup-tools', '1.0-r2')
+ 'setup_tools-1.0.post2'
+ >>> DistributionMetadata._fullname('setup.tools', '1.0.post')
+ 'setup_tools-1.0.post0'
+ >>> DistributionMetadata._fullname('setup.tools', '1.0+ubuntu-1')
+ 'setup_tools-1.0+ubuntu.1'
+ """
+ return "{}-{}".format(
+ canonicalize_name(name).replace('-', '_'),
+ canonicalize_version(version, strip_trailing_zero=False),
+ )
def get_author(self):
return self.author
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/extension.py b/contrib/python/setuptools/py3/setuptools/_distutils/extension.py
index 94e71635d9..04e871bcd6 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/extension.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/extension.py
@@ -130,7 +130,7 @@ class Extension:
if len(kw) > 0:
options = [repr(option) for option in kw]
options = ', '.join(sorted(options))
- msg = "Unknown Extension options: %s" % options
+ msg = f"Unknown Extension options: {options}"
warnings.warn(msg)
def __repr__(self):
@@ -150,11 +150,11 @@ def read_setup_file(filename): # noqa: C901
# <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
file = TextFile(
filename,
- strip_comments=1,
- skip_blanks=1,
- join_lines=1,
- lstrip_ws=1,
- rstrip_ws=1,
+ strip_comments=True,
+ skip_blanks=True,
+ join_lines=True,
+ lstrip_ws=True,
+ rstrip_ws=True,
)
try:
extensions = []
@@ -167,7 +167,7 @@ def read_setup_file(filename): # noqa: C901
continue
if line[0] == line[-1] == "*":
- file.warn("'%s' lines not handled yet" % line)
+ file.warn(f"'{line}' lines not handled yet")
continue
line = expand_makefile_vars(line, vars)
@@ -233,7 +233,7 @@ def read_setup_file(filename): # noqa: C901
# and append it to sources. Hmmmm.
ext.extra_objects.append(word)
else:
- file.warn("unrecognized argument '%s'" % word)
+ file.warn(f"unrecognized argument '{word}'")
extensions.append(ext)
finally:
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py b/contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py
index e905aede4d..907cc2b73c 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py
@@ -21,7 +21,7 @@ from .errors import DistutilsArgError, DistutilsGetoptError
# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!)
# The similarities to NAME are again not a coincidence...
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
-longopt_re = re.compile(r'^%s$' % longopt_pat)
+longopt_re = re.compile(rf'^{longopt_pat}$')
# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile(f"^({longopt_pat})=!({longopt_pat})$")
@@ -95,7 +95,7 @@ class FancyGetopt:
def add_option(self, long_option, short_option=None, help_string=None):
if long_option in self.option_index:
raise DistutilsGetoptError(
- "option conflict: already an option '%s'" % long_option
+ f"option conflict: already an option '{long_option}'"
)
else:
option = (long_option, short_option, help_string)
@@ -118,11 +118,11 @@ class FancyGetopt:
for alias, opt in aliases.items():
if alias not in self.option_index:
raise DistutilsGetoptError(
- f"invalid {what} '{alias}': " f"option '{alias}' not defined"
+ f"invalid {what} '{alias}': option '{alias}' not defined"
)
if opt not in self.option_index:
raise DistutilsGetoptError(
- f"invalid {what} '{alias}': " f"aliased option '{opt}' not defined"
+ f"invalid {what} '{alias}': aliased option '{opt}' not defined"
)
def set_aliases(self, alias):
@@ -162,13 +162,13 @@ class FancyGetopt:
# Type- and value-check the option names
if not isinstance(long, str) or len(long) < 2:
raise DistutilsGetoptError(
- ("invalid long option '%s': must be a string of length >= 2") % long
+ f"invalid long option '{long}': must be a string of length >= 2"
)
if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
raise DistutilsGetoptError(
- "invalid short option '%s': "
- "must a single character or None" % short
+ f"invalid short option '{short}': "
+ "must a single character or None"
)
self.repeat[long] = repeat
@@ -178,7 +178,7 @@ class FancyGetopt:
if short:
short = short + ':'
long = long[0:-1]
- self.takes_arg[long] = 1
+ self.takes_arg[long] = True
else:
# Is option is a "negative alias" for some other option (eg.
# "quiet" == "!verbose")?
@@ -191,7 +191,7 @@ class FancyGetopt:
)
self.long_opts[-1] = long # XXX redundant?!
- self.takes_arg[long] = 0
+ self.takes_arg[long] = False
# If this is an alias option, make sure its "takes arg" flag is
# the same as the option it's aliased to.
@@ -210,8 +210,8 @@ class FancyGetopt:
# '='.
if not longopt_re.match(long):
raise DistutilsGetoptError(
- "invalid long option name '%s' "
- "(must be letters, numbers, hyphens only" % long
+ f"invalid long option name '{long}' "
+ "(must be letters, numbers, hyphens only"
)
self.attr_name[long] = self.get_attr_name(long)
@@ -268,7 +268,7 @@ class FancyGetopt:
attr = self.attr_name[opt]
# The only repeating option at the moment is 'verbose'.
- # It has a negative option -q quiet, which should set verbose = 0.
+ # It has a negative option -q quiet, which should set verbose = False.
if val and self.repeat.get(attr) is not None:
val = getattr(object, attr, 0) + 1
setattr(object, attr, val)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/file_util.py b/contrib/python/setuptools/py3/setuptools/_distutils/file_util.py
index 960def9cf9..b19a5dcfa4 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/file_util.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/file_util.py
@@ -63,12 +63,12 @@ def _copy_file_contents(src, dst, buffer_size=16 * 1024): # noqa: C901
def copy_file( # noqa: C901
src,
dst,
- preserve_mode=1,
- preserve_times=1,
- update=0,
+ preserve_mode=True,
+ preserve_times=True,
+ update=False,
link=None,
- verbose=1,
- dry_run=0,
+ verbose=True,
+ dry_run=False,
):
"""Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is
copied there with the same name; otherwise, it must be a filename. (If
@@ -106,7 +106,7 @@ def copy_file( # noqa: C901
if not os.path.isfile(src):
raise DistutilsFileError(
- "can't copy '%s': doesn't exist or not a regular file" % src
+ f"can't copy '{src}': doesn't exist or not a regular file"
)
if os.path.isdir(dst):
@@ -123,7 +123,7 @@ def copy_file( # noqa: C901
try:
action = _copy_action[link]
except KeyError:
- raise ValueError("invalid value '%s' for 'link' argument" % link)
+ raise ValueError(f"invalid value '{link}' for 'link' argument")
if verbose >= 1:
if os.path.basename(dst) == os.path.basename(src):
@@ -168,7 +168,7 @@ def copy_file( # noqa: C901
# XXX I suspect this is Unix-specific -- need porting help!
-def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901
+def move_file(src, dst, verbose=True, dry_run=False): # noqa: C901
"""Move a file 'src' to 'dst'. If 'dst' is a directory, the file will
be moved into it with the same name; otherwise, 'src' is just renamed
to 'dst'. Return the new full name of the file.
@@ -186,7 +186,7 @@ def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901
return dst
if not isfile(src):
- raise DistutilsFileError("can't move '%s': not a regular file" % src)
+ raise DistutilsFileError(f"can't move '{src}': not a regular file")
if isdir(dst):
dst = os.path.join(dst, basename(src))
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/filelist.py b/contrib/python/setuptools/py3/setuptools/_distutils/filelist.py
index 71ffb2abe7..44ae9e67ef 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/filelist.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/filelist.py
@@ -84,24 +84,24 @@ class FileList:
if action in ('include', 'exclude', 'global-include', 'global-exclude'):
if len(words) < 2:
raise DistutilsTemplateError(
- "'%s' expects <pattern1> <pattern2> ..." % action
+ f"'{action}' expects <pattern1> <pattern2> ..."
)
patterns = [convert_path(w) for w in words[1:]]
elif action in ('recursive-include', 'recursive-exclude'):
if len(words) < 3:
raise DistutilsTemplateError(
- "'%s' expects <dir> <pattern1> <pattern2> ..." % action
+ f"'{action}' expects <dir> <pattern1> <pattern2> ..."
)
dir = convert_path(words[1])
patterns = [convert_path(w) for w in words[2:]]
elif action in ('graft', 'prune'):
if len(words) != 2:
raise DistutilsTemplateError(
- "'%s' expects a single <dir_pattern>" % action
+ f"'{action}' expects a single <dir_pattern>"
)
dir_pattern = convert_path(words[1])
else:
- raise DistutilsTemplateError("unknown action '%s'" % action)
+ raise DistutilsTemplateError(f"unknown action '{action}'")
return (action, patterns, dir, dir_pattern)
@@ -119,13 +119,13 @@ class FileList:
if action == 'include':
self.debug_print("include " + ' '.join(patterns))
for pattern in patterns:
- if not self.include_pattern(pattern, anchor=1):
+ if not self.include_pattern(pattern, anchor=True):
log.warning("warning: no files found matching '%s'", pattern)
elif action == 'exclude':
self.debug_print("exclude " + ' '.join(patterns))
for pattern in patterns:
- if not self.exclude_pattern(pattern, anchor=1):
+ if not self.exclude_pattern(pattern, anchor=True):
log.warning(
(
"warning: no previously-included files "
@@ -137,7 +137,7 @@ class FileList:
elif action == 'global-include':
self.debug_print("global-include " + ' '.join(patterns))
for pattern in patterns:
- if not self.include_pattern(pattern, anchor=0):
+ if not self.include_pattern(pattern, anchor=False):
log.warning(
(
"warning: no files found matching '%s' "
@@ -149,7 +149,7 @@ class FileList:
elif action == 'global-exclude':
self.debug_print("global-exclude " + ' '.join(patterns))
for pattern in patterns:
- if not self.exclude_pattern(pattern, anchor=0):
+ if not self.exclude_pattern(pattern, anchor=False):
log.warning(
(
"warning: no previously-included files matching "
@@ -192,12 +192,12 @@ class FileList:
)
else:
raise DistutilsInternalError(
- "this cannot happen: invalid action '%s'" % action
+ f"this cannot happen: invalid action '{action}'"
)
# Filtering/selection methods
- def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+ def include_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
"""Select strings (presumably filenames) from 'self.files' that
match 'pattern', a Unix-style wildcard (glob) pattern. Patterns
are not quite the same as implemented by the 'fnmatch' module: '*'
@@ -225,7 +225,7 @@ class FileList:
# XXX docstring lying about what the special chars are?
files_found = False
pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
- self.debug_print("include_pattern: applying regex r'%s'" % pattern_re.pattern)
+ self.debug_print(f"include_pattern: applying regex r'{pattern_re.pattern}'")
# delayed loading of allfiles list
if self.allfiles is None:
@@ -238,7 +238,7 @@ class FileList:
files_found = True
return files_found
- def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
+ def exclude_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
"""Remove strings (presumably filenames) from 'files' that match
'pattern'. Other parameters are the same as for
'include_pattern()', above.
@@ -247,7 +247,7 @@ class FileList:
"""
files_found = False
pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
- self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern)
+ self.debug_print(f"exclude_pattern: applying regex r'{pattern_re.pattern}'")
for i in range(len(self.files) - 1, -1, -1):
if pattern_re.search(self.files[i]):
self.debug_print(" removing " + self.files[i])
@@ -327,12 +327,12 @@ def glob_to_re(pattern):
# we're using a regex to manipulate a regex, so we need
# to escape the backslash twice
sep = r'\\\\'
- escaped = r'\1[^%s]' % sep
+ escaped = rf'\1[^{sep}]'
pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
return pattern_re
-def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
+def translate_pattern(pattern, anchor=True, prefix=None, is_regex=False):
"""Translate a shell-like wildcard pattern to a compiled regular
expression. Return the compiled regex. If 'is_regex' true,
then 'pattern' is directly compiled to a regex (if it's a string)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/msvc9compiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/msvc9compiler.py
index 6a0105e484..f860a8d383 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/msvc9compiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/msvc9compiler.py
@@ -144,7 +144,7 @@ class MacroExpander:
self.load_macros(version)
def set_macro(self, macro, path, key):
- self.macros["$(%s)" % macro] = Reg.get_value(path, key)
+ self.macros[f"$({macro})"] = Reg.get_value(path, key)
def load_macros(self, version):
self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
@@ -243,23 +243,23 @@ def find_vcvarsall(version):
"""
vsbase = VS_BASE % version
try:
- productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, "productdir")
+ productdir = Reg.get_value(rf"{vsbase}\Setup\VC", "productdir")
except KeyError:
log.debug("Unable to find productdir in registry")
productdir = None
if not productdir or not os.path.isdir(productdir):
- toolskey = "VS%0.f0COMNTOOLS" % version
+ toolskey = f"VS{version:0.0f}0COMNTOOLS"
toolsdir = os.environ.get(toolskey, None)
if toolsdir and os.path.isdir(toolsdir):
productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
productdir = os.path.abspath(productdir)
if not os.path.isdir(productdir):
- log.debug("%s is not a valid directory" % productdir)
+ log.debug(f"{productdir} is not a valid directory")
return None
else:
- log.debug("Env var %s is not set or invalid" % toolskey)
+ log.debug(f"Env var {toolskey} is not set or invalid")
if not productdir:
log.debug("No productdir found")
return None
@@ -346,7 +346,7 @@ class MSVCCompiler(CCompiler):
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
- def __init__(self, verbose=0, dry_run=0, force=0):
+ def __init__(self, verbose=False, dry_run=False, force=False):
super().__init__(verbose, dry_run, force)
self.__version = VERSION
self.__root = r"Software\Microsoft\VisualStudio"
@@ -362,7 +362,7 @@ class MSVCCompiler(CCompiler):
assert not self.initialized, "don't init multiple times"
if self.__version < 8.0:
raise DistutilsPlatformError(
- "VC %0.1f is not supported by this module" % self.__version
+ f"VC {self.__version:0.1f} is not supported by this module"
)
if plat_name is None:
plat_name = get_platform()
@@ -405,9 +405,9 @@ class MSVCCompiler(CCompiler):
if len(self.__paths) == 0:
raise DistutilsPlatformError(
- "Python was built with %s, "
+ f"Python was built with {self.__product}, "
"and extensions need to be built with the same "
- "version of the compiler, but it isn't installed." % self.__product
+ "version of the compiler, but it isn't installed."
)
self.cc = self.find_exe("cl.exe")
@@ -460,7 +460,7 @@ class MSVCCompiler(CCompiler):
# -- Worker methods ------------------------------------------------
- def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+ def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
# Copied from ccompiler.py, extended to return .res as 'object'-file
# for .rc input file
if output_dir is None:
@@ -474,7 +474,7 @@ class MSVCCompiler(CCompiler):
# Better to raise an exception instead of silently continuing
# and later complain about sources and targets having
# different lengths
- raise CompileError("Don't know how to compile %s" % src_name)
+ raise CompileError(f"Don't know how to compile {src_name}")
if strip_dir:
base = os.path.basename(base)
if ext in self._rc_extensions:
@@ -491,7 +491,7 @@ class MSVCCompiler(CCompiler):
output_dir=None,
macros=None,
include_dirs=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
depends=None,
@@ -578,7 +578,7 @@ class MSVCCompiler(CCompiler):
return objects
def create_static_lib(
- self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+ self, objects, output_libname, output_dir=None, debug=False, target_lang=None
):
if not self.initialized:
self.initialize()
@@ -606,7 +606,7 @@ class MSVCCompiler(CCompiler):
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -783,7 +783,7 @@ class MSVCCompiler(CCompiler):
def library_option(self, lib):
return self.library_filename(lib)
- def find_library_file(self, dirs, lib, debug=0):
+ def find_library_file(self, dirs, lib, debug=False):
# Prefer a debugging library if found (and requested), but deal
# with it if we don't have one.
if debug:
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/msvccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/msvccompiler.py
index ac8b68c08c..2bf94e60c9 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/msvccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/msvccompiler.py
@@ -131,11 +131,11 @@ class MacroExpander:
for base in HKEYS:
d = read_values(base, path)
if d:
- self.macros["$(%s)" % macro] = d[key]
+ self.macros[f"$({macro})"] = d[key]
break
def load_macros(self, version):
- vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
+ vsbase = rf"Software\Microsoft\VisualStudio\{version:0.1f}"
self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
net = r"Software\Microsoft\.NETFramework"
@@ -253,7 +253,7 @@ class MSVCCompiler(CCompiler):
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
- def __init__(self, verbose=0, dry_run=0, force=0):
+ def __init__(self, verbose=False, dry_run=False, force=False):
super().__init__(verbose, dry_run, force)
self.__version = get_build_version()
self.__arch = get_build_architecture()
@@ -264,7 +264,7 @@ class MSVCCompiler(CCompiler):
self.__macros = MacroExpander(self.__version)
else:
self.__root = r"Software\Microsoft\Devstudio"
- self.__product = "Visual Studio version %s" % self.__version
+ self.__product = f"Visual Studio version {self.__version}"
else:
# Win64. Assume this was built with the platform SDK
self.__product = "Microsoft SDK compiler %s" % (self.__version + 6)
@@ -290,9 +290,9 @@ class MSVCCompiler(CCompiler):
if len(self.__paths) == 0:
raise DistutilsPlatformError(
- "Python was built with %s, "
+ f"Python was built with {self.__product}, "
"and extensions need to be built with the same "
- "version of the compiler, but it isn't installed." % self.__product
+ "version of the compiler, but it isn't installed."
)
self.cc = self.find_exe("cl.exe")
@@ -354,7 +354,7 @@ class MSVCCompiler(CCompiler):
# -- Worker methods ------------------------------------------------
- def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+ def object_filenames(self, source_filenames, strip_dir=False, output_dir=''):
# Copied from ccompiler.py, extended to return .res as 'object'-file
# for .rc input file
if output_dir is None:
@@ -368,7 +368,7 @@ class MSVCCompiler(CCompiler):
# Better to raise an exception instead of silently continuing
# and later complain about sources and targets having
# different lengths
- raise CompileError("Don't know how to compile %s" % src_name)
+ raise CompileError(f"Don't know how to compile {src_name}")
if strip_dir:
base = os.path.basename(base)
if ext in self._rc_extensions:
@@ -385,7 +385,7 @@ class MSVCCompiler(CCompiler):
output_dir=None,
macros=None,
include_dirs=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
depends=None,
@@ -472,7 +472,7 @@ class MSVCCompiler(CCompiler):
return objects
def create_static_lib(
- self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+ self, objects, output_libname, output_dir=None, debug=False, target_lang=None
):
if not self.initialized:
self.initialize()
@@ -500,7 +500,7 @@ class MSVCCompiler(CCompiler):
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -585,7 +585,7 @@ class MSVCCompiler(CCompiler):
def library_option(self, lib):
return self.library_filename(lib)
- def find_library_file(self, dirs, lib, debug=0):
+ def find_library_file(self, dirs, lib, debug=False):
# Prefer a debugging library if found (and requested), but deal
# with it if we don't have one.
if debug:
@@ -654,7 +654,7 @@ class MSVCCompiler(CCompiler):
# the GUI is run.
if self.__version == 6:
for base in HKEYS:
- if read_values(base, r"%s\6.0" % self.__root) is not None:
+ if read_values(base, rf"{self.__root}\6.0") is not None:
self.warn(
"It seems you have Visual Studio 6 installed, "
"but the expected registry settings are not present.\n"
@@ -684,6 +684,6 @@ if get_build_version() >= 8.0:
OldMSVCCompiler = MSVCCompiler
# get_build_architecture not really relevant now we support cross-compile
from distutils.msvc9compiler import (
- MacroExpander, # noqa: F811
+ MacroExpander,
MSVCCompiler,
)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/py38compat.py b/contrib/python/setuptools/py3/setuptools/_distutils/py38compat.py
deleted file mode 100644
index ab12119fa5..0000000000
--- a/contrib/python/setuptools/py3/setuptools/_distutils/py38compat.py
+++ /dev/null
@@ -1,8 +0,0 @@
-def aix_platform(osname, version, release):
- try:
- import _aix_support
-
- return _aix_support.aix_platform()
- except ImportError:
- pass
- return f"{osname}-{version}.{release}"
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/spawn.py b/contrib/python/setuptools/py3/setuptools/_distutils/spawn.py
index 046b5bbb82..50d30a2761 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/spawn.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/spawn.py
@@ -2,20 +2,47 @@
Provides the 'spawn()' function, a front-end to various platform-
specific functions for launching another program in a sub-process.
-Also provides the 'find_executable()' to search the path for a given
-executable name.
"""
+from __future__ import annotations
+
import os
+import platform
+import shutil
import subprocess
import sys
+import warnings
+
+from typing import Mapping
from ._log import log
from .debug import DEBUG
from .errors import DistutilsExecError
-def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): # noqa: C901
+def _debug(cmd):
+ """
+ Render a subprocess command differently depending on DEBUG.
+ """
+ return cmd if DEBUG else cmd[0]
+
+
+def _inject_macos_ver(env: Mapping[str:str] | None) -> Mapping[str:str] | None:
+ if platform.system() != 'Darwin':
+ return env
+
+ from .util import MACOSX_VERSION_VAR, get_macosx_target_ver
+
+ target_ver = get_macosx_target_ver()
+ update = {MACOSX_VERSION_VAR: target_ver} if target_ver else {}
+ return {**_resolve(env), **update}
+
+
+def _resolve(env: Mapping[str:str] | None) -> Mapping[str:str]:
+ return os.environ if env is None else env
+
+
+def spawn(cmd, search_path=True, verbose=False, dry_run=False, env=None):
"""Run another program, specified as a command list 'cmd', in a new process.
'cmd' is just the argument list for the new process, ie.
@@ -31,41 +58,25 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): # noqa: C901
Raise DistutilsExecError if running the program fails in any way; just
return on success.
"""
- # cmd is documented as a list, but just in case some code passes a tuple
- # in, protect our %-formatting code against horrible death
- cmd = list(cmd)
-
log.info(subprocess.list2cmdline(cmd))
if dry_run:
return
if search_path:
- executable = find_executable(cmd[0])
+ executable = shutil.which(cmd[0])
if executable is not None:
cmd[0] = executable
- env = env if env is not None else dict(os.environ)
-
- if sys.platform == 'darwin':
- from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver
-
- macosx_target_ver = get_macosx_target_ver()
- if macosx_target_ver:
- env[MACOSX_VERSION_VAR] = macosx_target_ver
-
try:
- proc = subprocess.Popen(cmd, env=env)
- proc.wait()
- exitcode = proc.returncode
+ subprocess.check_call(cmd, env=_inject_macos_ver(env))
except OSError as exc:
- if not DEBUG:
- cmd = cmd[0]
- raise DistutilsExecError(f"command {cmd!r} failed: {exc.args[-1]}") from exc
-
- if exitcode:
- if not DEBUG:
- cmd = cmd[0]
- raise DistutilsExecError(f"command {cmd!r} failed with exit code {exitcode}")
+ raise DistutilsExecError(
+ f"command {_debug(cmd)!r} failed: {exc.args[-1]}"
+ ) from exc
+ except subprocess.CalledProcessError as err:
+ raise DistutilsExecError(
+ f"command {_debug(cmd)!r} failed with exit code {err.returncode}"
+ ) from err
def find_executable(executable, path=None):
@@ -74,6 +85,9 @@ def find_executable(executable, path=None):
A string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']. Returns the complete filename or None if not found.
"""
+ warnings.warn(
+ 'Use shutil.which instead of find_executable', DeprecationWarning, stacklevel=2
+ )
_, ext = os.path.splitext(executable)
if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
@@ -83,14 +97,13 @@ def find_executable(executable, path=None):
if path is None:
path = os.environ.get('PATH', None)
+ # bpo-35755: Don't fall through if PATH is the empty string
if path is None:
try:
path = os.confstr("CS_PATH")
except (AttributeError, ValueError):
# os.confstr() or CS_PATH is not available
path = os.defpath
- # bpo-35755: Don't use os.defpath if the PATH environment variable is
- # set to an empty string
# PATH='' doesn't match, whereas PATH=':' looks in the current directory
if not path:
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/sysconfig.py b/contrib/python/setuptools/py3/setuptools/_distutils/sysconfig.py
index 1a38e9fa79..4ba0be5602 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/sysconfig.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/sysconfig.py
@@ -16,9 +16,10 @@ import re
import sys
import sysconfig
-from . import py39compat
from ._functools import pass_none
+from .compat import py39
from .errors import DistutilsPlatformError
+from .util import is_mingw
IS_PYPY = '__pypy__' in sys.builtin_module_names
@@ -108,7 +109,7 @@ def get_python_version():
return '%d.%d' % sys.version_info[:2]
-def get_python_inc(plat_specific=0, prefix=None):
+def get_python_inc(plat_specific=False, prefix=None):
"""Return the directory containing installed Python header files.
If 'plat_specific' is false (the default), this is the path to the
@@ -121,12 +122,14 @@ def get_python_inc(plat_specific=0, prefix=None):
"""
default_prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX
resolved_prefix = prefix if prefix is not None else default_prefix
+ # MinGW imitates posix like layout, but os.name != posix
+ os_name = "posix" if is_mingw() else os.name
try:
- getter = globals()[f'_get_python_inc_{os.name}']
+ getter = globals()[f'_get_python_inc_{os_name}']
except KeyError:
raise DistutilsPlatformError(
"I don't know where Python installs its C header files "
- "on platform '%s'" % os.name
+ f"on platform '{os.name}'"
)
return getter(resolved_prefix, prefix, plat_specific)
@@ -213,7 +216,7 @@ def _posix_lib(standard_lib, libpython, early_prefix, prefix):
return os.path.join(libpython, "site-packages")
-def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+def get_python_lib(plat_specific=False, standard_lib=False, prefix=None):
"""Return the directory containing the Python library (standard or
site additions).
@@ -244,7 +247,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
else:
prefix = plat_specific and EXEC_PREFIX or PREFIX
- if os.name == "posix":
+ if os.name == "posix" or is_mingw():
if plat_specific or standard_lib:
# Platform-specific modules (any module from a non-pure-Python
# module distribution) or standard Python library modules.
@@ -262,8 +265,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
return os.path.join(prefix, "Lib", "site-packages")
else:
raise DistutilsPlatformError(
- "I don't know where Python installs its library "
- "on platform '%s'" % os.name
+ f"I don't know where Python installs its library on platform '{os.name}'"
)
@@ -291,7 +293,7 @@ def customize_compiler(compiler): # noqa: C901
Mainly needed on Unix, so we can plug in the information that
varies across Unices and is stored in Python's Makefile.
"""
- if compiler.compiler_type == "unix":
+ if compiler.compiler_type in ["unix", "cygwin", "mingw32"]:
_customize_macos()
(
@@ -399,7 +401,11 @@ def parse_makefile(fn, g=None): # noqa: C901
from distutils.text_file import TextFile
fp = TextFile(
- fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape"
+ fn,
+ strip_comments=True,
+ skip_blanks=True,
+ join_lines=True,
+ errors="surrogateescape",
)
if g is None:
@@ -538,7 +544,7 @@ def get_config_vars(*args):
global _config_vars
if _config_vars is None:
_config_vars = sysconfig.get_config_vars().copy()
- py39compat.add_ext_suffix(_config_vars)
+ py39.add_ext_suffix(_config_vars)
return [_config_vars.get(name) for name in args] if args else _config_vars
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/text_file.py b/contrib/python/setuptools/py3/setuptools/_distutils/text_file.py
index 0f846e3c52..fec29c73b0 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/text_file.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/text_file.py
@@ -97,7 +97,7 @@ class TextFile:
# sanity check client option hash
for opt in options.keys():
if opt not in self.default_options:
- raise KeyError("invalid TextFile option '%s'" % opt)
+ raise KeyError(f"invalid TextFile option '{opt}'")
if file is None:
self.open(filename)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/unixccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/unixccompiler.py
index 0248bde87b..7e68596b26 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/unixccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/unixccompiler.py
@@ -22,11 +22,11 @@ import shlex
import sys
from . import sysconfig
-from .compat import consolidate_linker_args
from ._log import log
from ._macos_compat import compiler_fixup
from ._modified import newer
from .ccompiler import CCompiler, gen_lib_options, gen_preprocess_options
+from .compat import consolidate_linker_args
from .errors import CompileError, DistutilsExecError, LibError, LinkError
# XXX Things not currently handled:
@@ -144,6 +144,9 @@ class UnixCCompiler(CCompiler):
xcode_stub_lib_format = dylib_lib_format
if sys.platform == "cygwin":
exe_extension = ".exe"
+ shared_lib_extension = ".dll.a"
+ dylib_lib_extension = ".dll"
+ dylib_lib_format = "cyg%s%s"
def preprocess(
self,
@@ -190,7 +193,7 @@ class UnixCCompiler(CCompiler):
raise CompileError(msg)
def create_static_lib(
- self, objects, output_libname, output_dir=None, debug=0, target_lang=None
+ self, objects, output_libname, output_dir=None, debug=False, target_lang=None
):
objects, output_dir = self._fix_object_args(objects, output_dir)
@@ -223,7 +226,7 @@ class UnixCCompiler(CCompiler):
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
@@ -362,7 +365,7 @@ class UnixCCompiler(CCompiler):
return os.path.join(match.group(1), dir[1:]) if apply_root else dir
- def find_library_file(self, dirs, lib, debug=0):
+ def find_library_file(self, dirs, lib, debug=False):
r"""
Second-guess the linker with not much hard
data to go on: GCC seems to prefer the shared library, so
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/util.py b/contrib/python/setuptools/py3/setuptools/_distutils/util.py
index 9ee77721b3..9db89b0979 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/util.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/util.py
@@ -12,6 +12,7 @@ import string
import subprocess
import sys
import sysconfig
+import tempfile
from ._log import log
from ._modified import newer
@@ -34,7 +35,7 @@ def get_host_platform():
if os.name == "posix" and hasattr(os, 'uname'):
osname, host, release, version, machine = os.uname()
if osname[:3] == "aix":
- from .py38compat import aix_platform
+ from .compat.py38 import aix_platform
return aix_platform(osname, version, release)
@@ -129,9 +130,9 @@ def convert_path(pathname):
if not pathname:
return pathname
if pathname[0] == '/':
- raise ValueError("path '%s' cannot be absolute" % pathname)
+ raise ValueError(f"path '{pathname}' cannot be absolute")
if pathname[-1] == '/':
- raise ValueError("path '%s' cannot end with '/'" % pathname)
+ raise ValueError(f"path '{pathname}' cannot end with '/'")
paths = pathname.split('/')
while '.' in paths:
@@ -158,7 +159,7 @@ def change_root(new_root, pathname):
elif os.name == 'nt':
(drive, path) = os.path.splitdrive(pathname)
- if path[0] == '\\':
+ if path[0] == os.sep:
path = path[1:]
return os.path.join(new_root, path)
@@ -240,7 +241,7 @@ _wordchars_re = _squote_re = _dquote_re = None
def _init_regex():
global _wordchars_re, _squote_re, _dquote_re
- _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
+ _wordchars_re = re.compile(rf'[^\\\'\"{string.whitespace} ]*')
_squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
_dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
@@ -295,7 +296,7 @@ def split_quoted(s):
raise RuntimeError("this can't happen (bad char '%c')" % s[end])
if m is None:
- raise ValueError("bad string (mismatched %s quotes?)" % s[end])
+ raise ValueError(f"bad string (mismatched {s[end]} quotes?)")
(beg, end) = m.span()
s = s[:beg] + s[beg + 1 : end - 1] + s[end:]
@@ -311,7 +312,7 @@ def split_quoted(s):
# split_quoted ()
-def execute(func, args, msg=None, verbose=0, dry_run=0):
+def execute(func, args, msg=None, verbose=False, dry_run=False):
"""Perform some action that affects the outside world (eg. by
writing to the filesystem). Such actions are special because they
are disabled by the 'dry_run' flag. This method takes care of all
@@ -349,11 +350,11 @@ def strtobool(val):
def byte_compile( # noqa: C901
py_files,
optimize=0,
- force=0,
+ force=False,
prefix=None,
base_dir=None,
- verbose=1,
- dry_run=0,
+ verbose=True,
+ dry_run=False,
direct=None,
):
"""Byte-compile a collection of Python source files to .pyc
@@ -405,20 +406,10 @@ def byte_compile( # noqa: C901
# "Indirect" byte-compilation: write a temporary script and then
# run it with the appropriate flags.
if not direct:
- try:
- from tempfile import mkstemp
-
- (script_fd, script_name) = mkstemp(".py")
- except ImportError:
- from tempfile import mktemp
-
- (script_fd, script_name) = None, mktemp(".py")
+ (script_fd, script_name) = tempfile.mkstemp(".py")
log.info("writing byte-compilation script '%s'", script_name)
if not dry_run:
- if script_fd is not None:
- script = os.fdopen(script_fd, "w", encoding='utf-8')
- else: # pragma: no cover
- script = open(script_name, "w", encoding='utf-8')
+ script = os.fdopen(script_fd, "w", encoding='utf-8')
with script:
script.write(
@@ -443,8 +434,8 @@ files = [
f"""
byte_compile(files, optimize={optimize!r}, force={force!r},
prefix={prefix!r}, base_dir={base_dir!r},
- verbose={verbose!r}, dry_run=0,
- direct=1)
+ verbose={verbose!r}, dry_run=False,
+ direct=True)
"""
)
@@ -452,7 +443,7 @@ byte_compile(files, optimize={optimize!r}, force={force!r},
cmd.extend(subprocess._optim_args_from_interpreter_flags())
cmd.append(script_name)
spawn(cmd, dry_run=dry_run)
- execute(os.remove, (script_name,), "removing %s" % script_name, dry_run=dry_run)
+ execute(os.remove, (script_name,), f"removing {script_name}", dry_run=dry_run)
# "Direct" byte-compilation: use the py_compile module to compile
# right here, right now. Note that the script generated in indirect
@@ -508,3 +499,12 @@ def rfc822_escape(header):
suffix = indent if ends_in_newline else ""
return indent.join(lines) + suffix
+
+
+def is_mingw():
+ """Returns True if the current platform is mingw.
+
+ Python compiled with Mingw-w64 has sys.platform == 'win32' and
+ get_platform() starts with 'mingw'.
+ """
+ return sys.platform == 'win32' and get_platform().startswith('mingw')
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/version.py b/contrib/python/setuptools/py3/setuptools/_distutils/version.py
index 806d233ca5..942b56bf94 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/version.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/version.py
@@ -60,7 +60,7 @@ class Version:
)
def __repr__(self):
- return f"{self.__class__.__name__} ('{str(self)}')"
+ return f"{self.__class__.__name__} ('{self}')"
def __eq__(self, other):
c = self._cmp(other)
@@ -153,7 +153,7 @@ class StrictVersion(Version):
def parse(self, vstring):
match = self.version_re.match(vstring)
if not match:
- raise ValueError("invalid version number '%s'" % vstring)
+ raise ValueError(f"invalid version number '{vstring}'")
(major, minor, patch, prerelease, prerelease_num) = match.group(1, 2, 4, 5, 6)
@@ -330,7 +330,7 @@ class LooseVersion(Version):
return self.vstring
def __repr__(self):
- return "LooseVersion ('%s')" % str(self)
+ return f"LooseVersion ('{self}')"
def _cmp(self, other):
if isinstance(other, str):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/versionpredicate.py b/contrib/python/setuptools/py3/setuptools/_distutils/versionpredicate.py
index 31c420168c..fe31b0ed8e 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/versionpredicate.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/versionpredicate.py
@@ -20,7 +20,7 @@ def splitUp(pred):
"""
res = re_splitComparison.match(pred)
if not res:
- raise ValueError("bad package restriction syntax: %r" % pred)
+ raise ValueError(f"bad package restriction syntax: {pred!r}")
comp, verStr = res.groups()
with version.suppress_known_deprecation():
other = version.StrictVersion(verStr)
@@ -113,17 +113,17 @@ class VersionPredicate:
raise ValueError("empty package restriction")
match = re_validPackage.match(versionPredicateStr)
if not match:
- raise ValueError("bad package name in %r" % versionPredicateStr)
+ raise ValueError(f"bad package name in {versionPredicateStr!r}")
self.name, paren = match.groups()
paren = paren.strip()
if paren:
match = re_paren.match(paren)
if not match:
- raise ValueError("expected parenthesized list: %r" % paren)
+ raise ValueError(f"expected parenthesized list: {paren!r}")
str = match.groups()[0]
self.pred = [splitUp(aPred) for aPred in str.split(",")]
if not self.pred:
- raise ValueError("empty parenthesized list in %r" % versionPredicateStr)
+ raise ValueError(f"empty parenthesized list in {versionPredicateStr!r}")
else:
self.pred = []
@@ -167,7 +167,7 @@ def split_provision(value):
value = value.strip()
m = _provision_rx.match(value)
if not m:
- raise ValueError("illegal provides specification: %r" % value)
+ raise ValueError(f"illegal provides specification: {value!r}")
ver = m.group(2) or None
if ver:
with version.suppress_known_deprecation():
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/zosccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/zosccompiler.py
index c7a7ca61cf..af1e7fa5cc 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/zosccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/zosccompiler.py
@@ -135,7 +135,7 @@ class zOSCCompiler(UnixCCompiler):
return zos_compilers.get(zos_compiler_names[0], 'ibm-openxl')
- def __init__(self, verbose=0, dry_run=0, force=0):
+ def __init__(self, verbose=False, dry_run=False, force=False):
super().__init__(verbose, dry_run, force)
self.zos_compiler = self._get_zos_compiler_name()
sysconfig.customize_compiler(self)
@@ -172,7 +172,7 @@ class zOSCCompiler(UnixCCompiler):
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
- debug=0,
+ debug=False,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
diff --git a/contrib/python/setuptools/py3/setuptools/_importlib.py b/contrib/python/setuptools/py3/setuptools/_importlib.py
index bd2b01e2b5..8e52888d6f 100644
--- a/contrib/python/setuptools/py3/setuptools/_importlib.py
+++ b/contrib/python/setuptools/py3/setuptools/_importlib.py
@@ -42,7 +42,7 @@ if sys.version_info < (3, 10):
disable_importlib_metadata_finder(metadata)
else:
- import importlib.metadata as metadata # noqa: F401
+ import importlib.metadata as metadata
if sys.version_info < (3, 9):
diff --git a/contrib/python/setuptools/py3/setuptools/command/bdist_egg.py b/contrib/python/setuptools/py3/setuptools/command/bdist_egg.py
index 73476e0cec..559f7d6032 100644
--- a/contrib/python/setuptools/py3/setuptools/command/bdist_egg.py
+++ b/contrib/python/setuptools/py3/setuptools/command/bdist_egg.py
@@ -74,7 +74,7 @@ class bdist_egg(Command):
'keep-temp',
'k',
"keep the pseudo-installation tree around after "
- + "creating the distribution archive",
+ "creating the distribution archive",
),
('dist-dir=', 'd', "directory to put final built distributions in"),
('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
@@ -290,9 +290,11 @@ class bdist_egg(Command):
paths = {self.bdist_dir: ''}
for base, dirs, files in sorted_walk(self.bdist_dir):
- for filename in files:
- if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
- all_outputs.append(paths[base] + filename)
+ all_outputs.extend(
+ paths[base] + filename
+ for filename in files
+ if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS
+ )
for filename in dirs:
paths[os.path.join(base, filename)] = paths[base] + filename + '/'
diff --git a/contrib/python/setuptools/py3/setuptools/command/bdist_wheel.py b/contrib/python/setuptools/py3/setuptools/command/bdist_wheel.py
index a81187598a..d8cdd4e406 100644
--- a/contrib/python/setuptools/py3/setuptools/command/bdist_wheel.py
+++ b/contrib/python/setuptools/py3/setuptools/command/bdist_wheel.py
@@ -176,7 +176,7 @@ class bdist_wheel(Command):
"plat-name=",
"p",
"platform name to embed in generated filenames "
- f"(default: {get_platform(None)})",
+ f"[default: {get_platform(None)}]",
),
(
"keep-temp",
@@ -189,7 +189,7 @@ class bdist_wheel(Command):
(
"relative",
None,
- "build the archive using relative paths (default: false)",
+ "build the archive using relative paths [default: false]",
),
(
"owner=",
@@ -201,18 +201,18 @@ class bdist_wheel(Command):
"g",
"Group name used when creating a tar file [default: current group]",
),
- ("universal", None, "make a universal wheel (default: false)"),
+ ("universal", None, "make a universal wheel [default: false]"),
(
"compression=",
None,
- "zipfile compression (one of: {}) (default: 'deflated')".format(
+ "zipfile compression (one of: {}) [default: 'deflated']".format(
", ".join(supported_compressions)
),
),
(
"python-tag=",
None,
- f"Python implementation compatibility tag (default: '{python_tag()}')",
+ f"Python implementation compatibility tag [default: '{python_tag()}']",
),
(
"build-number=",
@@ -224,7 +224,7 @@ class bdist_wheel(Command):
(
"py-limited-api=",
None,
- "Python tag (cp32|cp33|cpNN) for abi3 wheel tag (default: false)",
+ "Python tag (cp32|cp33|cpNN) for abi3 wheel tag [default: false]",
),
]
diff --git a/contrib/python/setuptools/py3/setuptools/command/dist_info.py b/contrib/python/setuptools/py3/setuptools/command/dist_info.py
index 52c0721903..2adc1c46f3 100644
--- a/contrib/python/setuptools/py3/setuptools/command/dist_info.py
+++ b/contrib/python/setuptools/py3/setuptools/command/dist_info.py
@@ -28,7 +28,7 @@ class dist_info(Command):
'output-dir=',
'o',
"directory inside of which the .dist-info will be"
- "created (default: top of the source tree)",
+ "created [default: top of the source tree]",
),
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
('tag-build=', 'b', "Specify explicit tag to add to version number"),
diff --git a/contrib/python/setuptools/py3/setuptools/command/easy_install.py b/contrib/python/setuptools/py3/setuptools/command/easy_install.py
index 5ec5080131..e6ce3fcc05 100644
--- a/contrib/python/setuptools/py3/setuptools/command/easy_install.py
+++ b/contrib/python/setuptools/py3/setuptools/command/easy_install.py
@@ -23,7 +23,6 @@ from distutils.errors import (
)
from distutils import log, dir_util
from distutils.command.build_scripts import first_line_re
-from distutils.spawn import find_executable
from distutils.command import install
import sys
import os
@@ -102,9 +101,9 @@ def _to_bytes(s):
def isascii(s):
try:
s.encode('ascii')
- return True
except UnicodeError:
return False
+ return True
def _one_liner(text):
@@ -237,7 +236,7 @@ class easy_install(Command):
dist = get_distribution('setuptools')
tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
print(tmpl.format(**locals()))
- raise SystemExit()
+ raise SystemExit
def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME
self.version and self._render_version()
@@ -467,7 +466,7 @@ class easy_install(Command):
def warn_deprecated_options(self):
pass
- def check_site_dir(self): # noqa: C901 # is too complex (12) # FIXME
+ def check_site_dir(self): # is too complex (12) # FIXME
"""Verify that self.install_dir is .pth-capable dir, if needed"""
instdir = normalize_path(self.install_dir)
@@ -526,7 +525,7 @@ class easy_install(Command):
%s
"""
- ).lstrip() # noqa
+ ).lstrip()
__not_exists_id = textwrap.dedent(
"""
@@ -534,7 +533,7 @@ class easy_install(Command):
choose a different installation directory (using the -d or --install-dir
option).
"""
- ).lstrip() # noqa
+ ).lstrip()
__access_msg = textwrap.dedent(
"""
@@ -552,7 +551,7 @@ class easy_install(Command):
Please make the appropriate changes for your system and try again.
"""
- ).lstrip() # noqa
+ ).lstrip()
def cant_write_to_target(self):
msg = self.__cant_write_msg % (
@@ -939,7 +938,7 @@ class easy_install(Command):
return Distribution.from_filename(egg_path, metadata=metadata)
# FIXME: 'easy_install.install_egg' is too complex (11)
- def install_egg(self, egg_path, tmpdir): # noqa: C901
+ def install_egg(self, egg_path, tmpdir):
destination = os.path.join(
self.install_dir,
os.path.basename(egg_path),
@@ -1131,7 +1130,7 @@ class easy_install(Command):
pkg_resources.require("%(name)s==%(version)s") # this exact version
pkg_resources.require("%(name)s>=%(version)s") # this version or higher
"""
- ).lstrip() # noqa
+ ).lstrip()
__id_warning = textwrap.dedent(
"""
@@ -1139,7 +1138,7 @@ class easy_install(Command):
this to work. (e.g. by being the application's script directory, by being on
PYTHONPATH, or by being added to sys.path by your code.)
"""
- ) # noqa
+ )
def installation_report(self, req, dist, what="Installed"):
"""Helpful installation message for display to package users"""
@@ -1166,7 +1165,7 @@ class easy_install(Command):
See the setuptools documentation for the "develop" command for more info.
"""
- ).lstrip() # noqa
+ ).lstrip()
def report_editable(self, spec, setup_script):
dirname = os.path.dirname(setup_script)
@@ -1203,10 +1202,11 @@ class easy_install(Command):
self.run_setup(setup_script, setup_base, args)
all_eggs = Environment([dist_dir])
- eggs = []
- for key in all_eggs:
- for dist in all_eggs[key]:
- eggs.append(self.install_egg(dist.location, setup_base))
+ eggs = [
+ self.install_egg(dist.location, setup_base)
+ for key in all_eggs
+ for dist in all_eggs[key]
+ ]
if not eggs and not self.dry_run:
log.warn("No eggs found in %s (setup script problem?)", dist_dir)
return eggs
@@ -2275,7 +2275,7 @@ class WindowsScriptWriter(ScriptWriter):
to an executable on the system.
"""
clean_header = new_header[2:-1].strip('"')
- return sys.platform != 'win32' or find_executable(clean_header)
+ return sys.platform != 'win32' or shutil.which(clean_header)
class WindowsExecutableLauncherWriter(WindowsScriptWriter):
diff --git a/contrib/python/setuptools/py3/setuptools/command/egg_info.py b/contrib/python/setuptools/py3/setuptools/command/egg_info.py
index ccc2db8972..2f20303341 100644
--- a/contrib/python/setuptools/py3/setuptools/command/egg_info.py
+++ b/contrib/python/setuptools/py3/setuptools/command/egg_info.py
@@ -172,7 +172,7 @@ class egg_info(InfoCommon, Command):
'egg-base=',
'e',
"directory containing .egg-info directories"
- " (default: top of the source tree)",
+ " [default: top of the source tree]",
),
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
('tag-build=', 'b', "Specify explicit tag to add to version number"),
@@ -363,16 +363,16 @@ class FileList(_FileList):
}
log_map = {
'include': "warning: no files found matching '%s'",
- 'exclude': ("warning: no previously-included files found " "matching '%s'"),
+ 'exclude': ("warning: no previously-included files found matching '%s'"),
'global-include': (
- "warning: no files found matching '%s' " "anywhere in distribution"
+ "warning: no files found matching '%s' anywhere in distribution"
),
'global-exclude': (
"warning: no previously-included files matching "
"'%s' found anywhere in distribution"
),
'recursive-include': (
- "warning: no files found matching '%s' " "under directory '%s'"
+ "warning: no files found matching '%s' under directory '%s'"
),
'recursive-exclude': (
"warning: no previously-included files matching "
diff --git a/contrib/python/setuptools/py3/setuptools/command/register.py b/contrib/python/setuptools/py3/setuptools/command/register.py
index b8266b9a60..beee9782e7 100644
--- a/contrib/python/setuptools/py3/setuptools/command/register.py
+++ b/contrib/python/setuptools/py3/setuptools/command/register.py
@@ -10,7 +10,7 @@ class register(orig.register):
def run(self):
msg = (
"The register command has been removed, use twine to upload "
- + "instead (https://pypi.org/p/twine)"
+ "instead (https://pypi.org/p/twine)"
)
self.announce("ERROR: " + msg, log.ERROR)
diff --git a/contrib/python/setuptools/py3/setuptools/command/sdist.py b/contrib/python/setuptools/py3/setuptools/command/sdist.py
index d455f44c5e..a834ba4a78 100644
--- a/contrib/python/setuptools/py3/setuptools/command/sdist.py
+++ b/contrib/python/setuptools/py3/setuptools/command/sdist.py
@@ -29,7 +29,7 @@ class sdist(orig.sdist):
(
'dist-dir=',
'd',
- "directory to put the source distribution archive(s) in " "[default: dist]",
+ "directory to put the source distribution archive(s) in [default: dist]",
),
(
'owner=',
diff --git a/contrib/python/setuptools/py3/setuptools/command/upload.py b/contrib/python/setuptools/py3/setuptools/command/upload.py
index ec7f81e227..1cca47cea9 100644
--- a/contrib/python/setuptools/py3/setuptools/command/upload.py
+++ b/contrib/python/setuptools/py3/setuptools/command/upload.py
@@ -10,7 +10,7 @@ class upload(orig.upload):
def run(self):
msg = (
"The upload command has been removed, use twine to upload "
- + "instead (https://pypi.org/p/twine)"
+ "instead (https://pypi.org/p/twine)"
)
self.announce("ERROR: " + msg, log.ERROR)
diff --git a/contrib/python/setuptools/py3/setuptools/config/_apply_pyprojecttoml.py b/contrib/python/setuptools/py3/setuptools/config/_apply_pyprojecttoml.py
index 5a8700051e..f44271c5dd 100644
--- a/contrib/python/setuptools/py3/setuptools/config/_apply_pyprojecttoml.py
+++ b/contrib/python/setuptools/py3/setuptools/config/_apply_pyprojecttoml.py
@@ -31,8 +31,8 @@ from ..warnings import SetuptoolsWarning
if TYPE_CHECKING:
from distutils.dist import _OptionsList
- from setuptools._importlib import metadata # noqa
- from setuptools.dist import Distribution # noqa
+ from setuptools._importlib import metadata
+ from setuptools.dist import Distribution
EMPTY: Mapping = MappingProxyType({}) # Immutable dict-like
_ProjectReadmeValue = Union[str, Dict[str, str]]
diff --git a/contrib/python/setuptools/py3/setuptools/config/_validate_pyproject/formats.py b/contrib/python/setuptools/py3/setuptools/config/_validate_pyproject/formats.py
index 5a0599cbb5..aacf4092b0 100644
--- a/contrib/python/setuptools/py3/setuptools/config/_validate_pyproject/formats.py
+++ b/contrib/python/setuptools/py3/setuptools/config/_validate_pyproject/formats.py
@@ -91,9 +91,9 @@ try:
"""
try:
_req.Requirement(value)
- return True
except _req.InvalidRequirement:
return False
+ return True
except ImportError: # pragma: no cover
_logger.warning(
diff --git a/contrib/python/setuptools/py3/setuptools/config/expand.py b/contrib/python/setuptools/py3/setuptools/config/expand.py
index 6ea6cf6d0e..e5f5dc586e 100644
--- a/contrib/python/setuptools/py3/setuptools/config/expand.py
+++ b/contrib/python/setuptools/py3/setuptools/config/expand.py
@@ -47,7 +47,7 @@ from ..discovery import find_package_path
from ..warnings import SetuptoolsWarning
if TYPE_CHECKING:
- from setuptools.dist import Distribution # noqa
+ from setuptools.dist import Distribution
_K = TypeVar("_K")
_V = TypeVar("_V", covariant=True)
diff --git a/contrib/python/setuptools/py3/setuptools/config/pyprojecttoml.py b/contrib/python/setuptools/py3/setuptools/config/pyprojecttoml.py
index c8dae5f751..d41c956cbd 100644
--- a/contrib/python/setuptools/py3/setuptools/config/pyprojecttoml.py
+++ b/contrib/python/setuptools/py3/setuptools/config/pyprojecttoml.py
@@ -25,7 +25,7 @@ from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _MissingDynamic
from ._apply_pyprojecttoml import apply as _apply
if TYPE_CHECKING:
- from setuptools.dist import Distribution # noqa
+ from setuptools.dist import Distribution
from typing_extensions import Self
_logger = logging.getLogger(__name__)
diff --git a/contrib/python/setuptools/py3/setuptools/config/setupcfg.py b/contrib/python/setuptools/py3/setuptools/config/setupcfg.py
index 0a7a42eb09..80ebe3d9bd 100644
--- a/contrib/python/setuptools/py3/setuptools/config/setupcfg.py
+++ b/contrib/python/setuptools/py3/setuptools/config/setupcfg.py
@@ -39,9 +39,9 @@ from ..warnings import SetuptoolsDeprecationWarning
from . import expand
if TYPE_CHECKING:
- from distutils.dist import DistributionMetadata # noqa
+ from distutils.dist import DistributionMetadata
- from setuptools.dist import Distribution # noqa
+ from setuptools.dist import Distribution
SingleCommandOptions = Dict["str", Tuple["str", Any]]
"""Dict that associate the name of the options of a particular command to a
diff --git a/contrib/python/setuptools/py3/setuptools/depends.py b/contrib/python/setuptools/py3/setuptools/depends.py
index b6af51c410..2226b6784a 100644
--- a/contrib/python/setuptools/py3/setuptools/depends.py
+++ b/contrib/python/setuptools/py3/setuptools/depends.py
@@ -58,11 +58,11 @@ class Require:
if self.attribute is None:
try:
f, p, i = find_module(self.module, paths)
- if f:
- f.close()
- return default
except ImportError:
return None
+ if f:
+ f.close()
+ return default
v = get_module_constant(self.module, self.attribute, default, paths)
diff --git a/contrib/python/setuptools/py3/setuptools/discovery.py b/contrib/python/setuptools/py3/setuptools/discovery.py
index 880d414033..3179852c69 100644
--- a/contrib/python/setuptools/py3/setuptools/discovery.py
+++ b/contrib/python/setuptools/py3/setuptools/discovery.py
@@ -62,7 +62,7 @@ StrIter = Iterator[str]
chain_iter = itertools.chain.from_iterable
if TYPE_CHECKING:
- from setuptools import Distribution # noqa
+ from setuptools import Distribution
def _valid_name(path: StrPath) -> bool:
diff --git a/contrib/python/setuptools/py3/setuptools/dist.py b/contrib/python/setuptools/py3/setuptools/dist.py
index 43762960ba..32e8d43c64 100644
--- a/contrib/python/setuptools/py3/setuptools/dist.py
+++ b/contrib/python/setuptools/py3/setuptools/dist.py
@@ -158,9 +158,7 @@ def check_specifier(dist, attr, value):
try:
SpecifierSet(value)
except (InvalidSpecifier, AttributeError) as error:
- tmpl = (
- "{attr!r} must be a string " "containing valid version specifiers; {error}"
- )
+ tmpl = "{attr!r} must be a string containing valid version specifiers; {error}"
raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error
diff --git a/contrib/python/setuptools/py3/setuptools/extension.py b/contrib/python/setuptools/py3/setuptools/extension.py
index 8caad78d4b..25420f42de 100644
--- a/contrib/python/setuptools/py3/setuptools/extension.py
+++ b/contrib/python/setuptools/py3/setuptools/extension.py
@@ -16,10 +16,9 @@ def _have_cython():
try:
# from (cython_impl) import build_ext
__import__(cython_impl, fromlist=['build_ext']).build_ext
- return True
except Exception:
- pass
- return False
+ return False
+ return True
# for compatibility
diff --git a/contrib/python/setuptools/py3/setuptools/extern/__init__.py b/contrib/python/setuptools/py3/setuptools/extern/__init__.py
index 5ad7169e3b..f9b6eea70d 100644
--- a/contrib/python/setuptools/py3/setuptools/extern/__init__.py
+++ b/contrib/python/setuptools/py3/setuptools/extern/__init__.py
@@ -32,14 +32,14 @@ class VendorImporter:
"""
root, base, target = fullname.partition(self.root_name + '.')
for prefix in self.search_path:
+ extant = prefix + target
try:
- extant = prefix + target
__import__(extant)
- mod = sys.modules[extant]
- sys.modules[fullname] = mod
- return mod
except ImportError:
- pass
+ continue
+ mod = sys.modules[extant]
+ sys.modules[fullname] = mod
+ return mod
else:
raise ImportError(
"The '{target}' package is required; "
diff --git a/contrib/python/setuptools/py3/setuptools/msvc.py b/contrib/python/setuptools/py3/setuptools/msvc.py
index f86c480d18..a3d350fe50 100644
--- a/contrib/python/setuptools/py3/setuptools/msvc.py
+++ b/contrib/python/setuptools/py3/setuptools/msvc.py
@@ -842,7 +842,7 @@ class SystemInfo:
"""
return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))
- @property # noqa: C901
+ @property
def WindowsSdkDir(self): # noqa: C901 # is too complex (12) # FIXME
"""
Microsoft Windows SDK directory.
@@ -1087,6 +1087,7 @@ class SystemInfo:
return 'v3.5', 'v2.0.50727'
elif self.vs_ver == 8.0:
return 'v3.0', 'v2.0.50727'
+ return None
@staticmethod
def _use_last_dir_name(path, prefix=''):
@@ -1648,6 +1649,7 @@ class EnvironmentInfo:
path = join(prefix, arch_subdir, crt_dir, vcruntime)
if isfile(path):
return path
+ return None
def return_env(self, exists=True):
"""
diff --git a/contrib/python/setuptools/py3/setuptools/namespaces.py b/contrib/python/setuptools/py3/setuptools/namespaces.py
index 0185d55f94..2f2c1cfbe1 100644
--- a/contrib/python/setuptools/py3/setuptools/namespaces.py
+++ b/contrib/python/setuptools/py3/setuptools/namespaces.py
@@ -55,7 +55,7 @@ class Installer:
"importlib.machinery.PathFinder.find_spec(%(pkg)r, "
"[os.path.dirname(p)])))"
),
- ("m = m or " "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"),
+ ("m = m or sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"),
"mp = (m or []) and m.__dict__.setdefault('__path__',[])",
"(p not in mp) and mp.append(p)",
)
diff --git a/contrib/python/setuptools/py3/setuptools/package_index.py b/contrib/python/setuptools/py3/setuptools/package_index.py
index c91e419923..2c807f6b4e 100644
--- a/contrib/python/setuptools/py3/setuptools/package_index.py
+++ b/contrib/python/setuptools/py3/setuptools/package_index.py
@@ -856,7 +856,7 @@ class PackageIndex(Environment):
def _download_vcs(self, url, spec_filename):
vcs = self._resolve_vcs(url)
if not vcs:
- return
+ return None
if vcs == 'svn':
raise DistutilsError(
f"Invalid config, SVN download is not supported: {url}"
@@ -1136,9 +1136,7 @@ def local_open(url):
f += '/'
files.append('<a href="{name}">{name}</a>'.format(name=f))
else:
- tmpl = (
- "<html><head><title>{url}</title>" "</head><body>{files}</body></html>"
- )
+ tmpl = "<html><head><title>{url}</title></head><body>{files}</body></html>"
body = tmpl.format(url=url, files='\n'.join(files))
status, message = 200, "OK"
else:
diff --git a/contrib/python/setuptools/py3/ya.make b/contrib/python/setuptools/py3/ya.make
index a22c66f39e..f4b0026586 100644
--- a/contrib/python/setuptools/py3/ya.make
+++ b/contrib/python/setuptools/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(70.1.1)
+VERSION(70.2.0)
LICENSE(MIT)
@@ -80,6 +80,21 @@ PY_SRCS(
setuptools/_distutils/_macos_compat.py
setuptools/_distutils/_modified.py
setuptools/_distutils/_msvccompiler.py
+ setuptools/_distutils/_vendor/__init__.py
+ setuptools/_distutils/_vendor/packaging/__init__.py
+ setuptools/_distutils/_vendor/packaging/_elffile.py
+ setuptools/_distutils/_vendor/packaging/_manylinux.py
+ setuptools/_distutils/_vendor/packaging/_musllinux.py
+ setuptools/_distutils/_vendor/packaging/_parser.py
+ setuptools/_distutils/_vendor/packaging/_structures.py
+ setuptools/_distutils/_vendor/packaging/_tokenizer.py
+ setuptools/_distutils/_vendor/packaging/markers.py
+ setuptools/_distutils/_vendor/packaging/metadata.py
+ setuptools/_distutils/_vendor/packaging/requirements.py
+ setuptools/_distutils/_vendor/packaging/specifiers.py
+ setuptools/_distutils/_vendor/packaging/tags.py
+ setuptools/_distutils/_vendor/packaging/utils.py
+ setuptools/_distutils/_vendor/packaging/version.py
setuptools/_distutils/archive_util.py
setuptools/_distutils/bcppcompiler.py
setuptools/_distutils/ccompiler.py
@@ -108,6 +123,7 @@ PY_SRCS(
setuptools/_distutils/command/upload.py
setuptools/_distutils/compat/__init__.py
setuptools/_distutils/compat/py38.py
+ setuptools/_distutils/compat/py39.py
setuptools/_distutils/config.py
setuptools/_distutils/core.py
setuptools/_distutils/cygwinccompiler.py
@@ -123,8 +139,6 @@ PY_SRCS(
setuptools/_distutils/log.py
setuptools/_distutils/msvc9compiler.py
setuptools/_distutils/msvccompiler.py
- setuptools/_distutils/py38compat.py
- setuptools/_distutils/py39compat.py
setuptools/_distutils/spawn.py
setuptools/_distutils/sysconfig.py
setuptools/_distutils/text_file.py
@@ -274,6 +288,7 @@ RESOURCE_FILES(
pkg_resources/_vendor/more_itertools/py.typed
pkg_resources/_vendor/packaging/py.typed
pkg_resources/_vendor/platformdirs/py.typed
+ setuptools/_distutils/_vendor/packaging/py.typed
setuptools/_vendor/importlib_metadata/py.typed
setuptools/_vendor/importlib_resources/py.typed
setuptools/_vendor/jaraco/functools/py.typed