author     robot-piglet <robot-piglet@yandex-team.com>  2024-04-28 21:15:00 +0300
committer  robot-piglet <robot-piglet@yandex-team.com>  2024-04-28 21:24:04 +0300
commit     67bf49d08acf1277eff4c336021ac22d964bb4c4 (patch)
tree       172b1f64d80667020d9d75045e1cda647450285c /contrib
parent     f5c2f57f16eea2ec7ec33130b725dbe9a392fa26 (diff)
Intermediate changes
Diffstat (limited to 'contrib')
-rw-r--r--  contrib/python/setuptools/py3/.dist-info/METADATA | 7
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/backports/__init__.py | 0
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/backports/tarfile.py | 2900
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/context.py | 137
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/__init__.py (renamed from contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools.py) | 205
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/__init__.pyi | 128
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/py.typed | 0
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/__init__.py | 2
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/more.py | 400
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/more.pyi | 41
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/recipes.py | 230
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/recipes.pyi | 29
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/__init__.py | 4
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_manylinux.py | 74
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_musllinux.py | 19
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_parser.py | 13
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/metadata.py | 441
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/requirements.py | 45
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/specifiers.py | 63
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/tags.py | 63
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/utils.py | 39
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/version.py | 63
-rw-r--r--  contrib/python/setuptools/py3/pkg_resources/extern/__init__.py | 1
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_core_metadata.py | 9
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/__init__.py | 2
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/_collections.py | 17
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/_itertools.py | 52
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/_log.py | 1
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/_macos_compat.py | 2
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/_modified.py | 2
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/_msvccompiler.py | 16
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/archive_util.py | 6
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/bcppcompiler.py | 16
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/ccompiler.py | 62
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/cmd.py | 16
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/_framework_compat.py | 9
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/bdist.py | 28
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_dumb.py | 22
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_rpm.py | 115
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/build.py | 5
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/build_clib.py | 5
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/build_ext.py | 62
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/build_py.py | 18
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/build_scripts.py | 20
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/check.py | 18
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/clean.py | 3
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/config.py | 29
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/install.py | 60
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/install_data.py | 3
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/install_egg_info.py | 4
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/install_lib.py | 3
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/install_scripts.py | 3
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/py37compat.py | 31
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/register.py | 35
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/sdist.py | 40
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/command/upload.py | 32
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/compat/__init__.py | 15
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/compat/py38.py | 23
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/config.py | 24
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/core.py | 21
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/cygwinccompiler.py | 45
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/dir_util.py | 19
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/dist.py | 56
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/extension.py | 12
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py | 37
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/file_util.py | 45
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/filelist.py | 18
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/log.py | 1
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/msvc9compiler.py | 31
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/msvccompiler.py | 31
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/py38compat.py | 2
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/spawn.py | 14
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/sysconfig.py | 54
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/text_file.py | 4
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/unixccompiler.py | 36
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/util.py | 53
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/version.py | 52
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/versionpredicate.py | 8
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_distutils/zosccompiler.py | 229
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/backports/__init__.py | 0
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/backports/tarfile.py | 2900
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/jaraco/context.py | 137
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/__init__.py (renamed from contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools.py) | 205
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/__init__.pyi | 128
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/py.typed | 0
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/packaging/__init__.py | 4
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/packaging/_manylinux.py | 74
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/packaging/_musllinux.py | 19
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/packaging/_parser.py | 13
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/packaging/metadata.py | 441
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/packaging/requirements.py | 45
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/packaging/specifiers.py | 63
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/packaging/tags.py | 63
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/packaging/utils.py | 39
-rw-r--r--  contrib/python/setuptools/py3/setuptools/_vendor/packaging/version.py | 63
-rw-r--r--  contrib/python/setuptools/py3/setuptools/command/easy_install.py | 2
-rw-r--r--  contrib/python/setuptools/py3/setuptools/extern/__init__.py | 1
-rw-r--r--  contrib/python/setuptools/py3/setuptools/monkey.py | 1
-rw-r--r--  contrib/python/setuptools/py3/ya.make | 19
99 files changed, 9350 insertions, 1347 deletions
diff --git a/contrib/python/setuptools/py3/.dist-info/METADATA b/contrib/python/setuptools/py3/.dist-info/METADATA
index 3ccc19e703..010ae0a947 100644
--- a/contrib/python/setuptools/py3/.dist-info/METADATA
+++ b/contrib/python/setuptools/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: setuptools
-Version: 69.2.0
+Version: 69.5.1
Summary: Easily download, build, install, upgrade, and uninstall Python packages
Home-page: https://github.com/pypa/setuptools
Author: Python Packaging Authority
@@ -22,7 +22,6 @@ License-File: LICENSE
Provides-Extra: certs
Provides-Extra: docs
Requires-Dist: sphinx >=3.5 ; extra == 'docs'
-Requires-Dist: sphinx <7.2.5 ; extra == 'docs'
Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
Requires-Dist: furo ; extra == 'docs'
@@ -36,8 +35,9 @@ Requires-Dist: sphinxcontrib-towncrier ; extra == 'docs'
Requires-Dist: sphinx-notfound-page <2,>=1 ; extra == 'docs'
Provides-Extra: ssl
Provides-Extra: testing
-Requires-Dist: pytest >=6 ; extra == 'testing'
+Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
+Requires-Dist: pytest-mypy ; extra == 'testing'
Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
Requires-Dist: virtualenv >=13.0.0 ; extra == 'testing'
Requires-Dist: wheel ; extra == 'testing'
@@ -68,7 +68,6 @@ Requires-Dist: build[virtualenv] >=1.0.3 ; extra == 'testing-integration'
Requires-Dist: filelock >=3.4.0 ; extra == 'testing-integration'
Requires-Dist: packaging >=23.2 ; extra == 'testing-integration'
Requires-Dist: pytest-cov ; (platform_python_implementation != "PyPy") and extra == 'testing'
-Requires-Dist: pytest-mypy >=0.9.1 ; (platform_python_implementation != "PyPy") and extra == 'testing'
Requires-Dist: jaraco.develop >=7.21 ; (python_version >= "3.9" and sys_platform != "cygwin") and extra == 'testing'
Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'testing'
Requires-Dist: pytest-perf ; (sys_platform != "cygwin") and extra == 'testing'
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/backports/__init__.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/backports/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/backports/__init__.py
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/backports/tarfile.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/backports/tarfile.py
new file mode 100644
index 0000000000..a7a9a6e7b9
--- /dev/null
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/backports/tarfile.py
@@ -0,0 +1,2900 @@
+#!/usr/bin/env python3
+#-------------------------------------------------------------------
+# tarfile.py
+#-------------------------------------------------------------------
+# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+"""Read from and write to tar format archives.
+"""
+
+version = "0.9.0"
+__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
+__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
+
+#---------
+# Imports
+#---------
+from builtins import open as bltn_open
+import sys
+import os
+import io
+import shutil
+import stat
+import time
+import struct
+import copy
+import re
+import warnings
+
+try:
+ import pwd
+except ImportError:
+ pwd = None
+try:
+ import grp
+except ImportError:
+ grp = None
+
+# os.symlink on Windows prior to 6.0 raises NotImplementedError
+# OSError (winerror=1314) will be raised if the caller does not hold the
+# SeCreateSymbolicLinkPrivilege privilege
+symlink_exception = (AttributeError, NotImplementedError, OSError)
+
+# from tarfile import *
+__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
+ "CompressionError", "StreamError", "ExtractError", "HeaderError",
+ "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
+ "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
+ "tar_filter", "FilterError", "AbsoluteLinkError",
+ "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
+ "LinkOutsideDestinationError"]
+
+
+#---------------------------------------------------------
+# tar constants
+#---------------------------------------------------------
+NUL = b"\0" # the null character
+BLOCKSIZE = 512 # length of processing blocks
+RECORDSIZE = BLOCKSIZE * 20 # length of records
+GNU_MAGIC = b"ustar \0" # magic gnu tar string
+POSIX_MAGIC = b"ustar\x0000" # magic posix tar string
+
+LENGTH_NAME = 100 # maximum length of a filename
+LENGTH_LINK = 100 # maximum length of a linkname
+LENGTH_PREFIX = 155 # maximum length of the prefix field
+
+REGTYPE = b"0" # regular file
+AREGTYPE = b"\0" # regular file
+LNKTYPE = b"1" # link (inside tarfile)
+SYMTYPE = b"2" # symbolic link
+CHRTYPE = b"3" # character special device
+BLKTYPE = b"4" # block special device
+DIRTYPE = b"5" # directory
+FIFOTYPE = b"6" # fifo special device
+CONTTYPE = b"7" # contiguous file
+
+GNUTYPE_LONGNAME = b"L" # GNU tar longname
+GNUTYPE_LONGLINK = b"K" # GNU tar longlink
+GNUTYPE_SPARSE = b"S" # GNU tar sparse file
+
+XHDTYPE = b"x" # POSIX.1-2001 extended header
+XGLTYPE = b"g" # POSIX.1-2001 global header
+SOLARIS_XHDTYPE = b"X" # Solaris extended header
+
+USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
+GNU_FORMAT = 1 # GNU tar format
+PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
+DEFAULT_FORMAT = PAX_FORMAT
+
+#---------------------------------------------------------
+# tarfile constants
+#---------------------------------------------------------
+# File types that tarfile supports:
+SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
+ SYMTYPE, DIRTYPE, FIFOTYPE,
+ CONTTYPE, CHRTYPE, BLKTYPE,
+ GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+ GNUTYPE_SPARSE)
+
+# File types that will be treated as a regular file.
+REGULAR_TYPES = (REGTYPE, AREGTYPE,
+ CONTTYPE, GNUTYPE_SPARSE)
+
+# File types that are part of the GNU tar format.
+GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+ GNUTYPE_SPARSE)
+
+# Fields from a pax header that override a TarInfo attribute.
+PAX_FIELDS = ("path", "linkpath", "size", "mtime",
+ "uid", "gid", "uname", "gname")
+
+# Fields from a pax header that are affected by hdrcharset.
+PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
+
+# Fields in a pax header that are numbers; all other fields
+# are treated as strings.
+PAX_NUMBER_FIELDS = {
+ "atime": float,
+ "ctime": float,
+ "mtime": float,
+ "uid": int,
+ "gid": int,
+ "size": int
+}
+
+#---------------------------------------------------------
+# initialization
+#---------------------------------------------------------
+if os.name == "nt":
+ ENCODING = "utf-8"
+else:
+ ENCODING = sys.getfilesystemencoding()
+
+#---------------------------------------------------------
+# Some useful functions
+#---------------------------------------------------------
+
+def stn(s, length, encoding, errors):
+ """Convert a string to a null-terminated bytes object.
+ """
+ if s is None:
+ raise ValueError("metadata cannot contain None")
+ s = s.encode(encoding, errors)
+ return s[:length] + (length - len(s)) * NUL
+
+def nts(s, encoding, errors):
+ """Convert a null-terminated bytes object to a string.
+ """
+ p = s.find(b"\0")
+ if p != -1:
+ s = s[:p]
+ return s.decode(encoding, errors)
+
+def nti(s):
+ """Convert a number field to a python number.
+ """
+ # There are two possible encodings for a number field, see
+ # itn() below.
+ if s[0] in (0o200, 0o377):
+ n = 0
+ for i in range(len(s) - 1):
+ n <<= 8
+ n += s[i + 1]
+ if s[0] == 0o377:
+ n = -(256 ** (len(s) - 1) - n)
+ else:
+ try:
+ s = nts(s, "ascii", "strict")
+ n = int(s.strip() or "0", 8)
+ except ValueError:
+ raise InvalidHeaderError("invalid header")
+ return n
+
+def itn(n, digits=8, format=DEFAULT_FORMAT):
+ """Convert a python number to a number field.
+ """
+ # POSIX 1003.1-1988 requires numbers to be encoded as a string of
+ # octal digits followed by a null-byte, this allows values up to
+ # (8**(digits-1))-1. GNU tar allows storing numbers greater than
+ # that if necessary. A leading 0o200 or 0o377 byte indicates this
+ # particular encoding, the following digits-1 bytes are a big-endian
+ # base-256 representation. This allows values up to (256**(digits-1))-1.
+ # A 0o200 byte indicates a positive number, a 0o377 byte a negative
+ # number.
+ original_n = n
+ n = int(n)
+ if 0 <= n < 8 ** (digits - 1):
+ s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
+ elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
+ if n >= 0:
+ s = bytearray([0o200])
+ else:
+ s = bytearray([0o377])
+ n = 256 ** digits + n
+
+ for i in range(digits - 1):
+ s.insert(1, n & 0o377)
+ n >>= 8
+ else:
+ raise ValueError("overflow in number field")
+
+ return s
+
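+# Illustrative round-trips for the two encodings above (a sketch added for
+# clarity, not part of the vendored module):
+#
+#     >>> itn(0o777, 8, USTAR_FORMAT)        # octal digits plus NUL
+#     b'0000777\x00'
+#     >>> nti(b'0000777\x00')
+#     511
+#     >>> bytes(itn(-1, 8, GNU_FORMAT))      # base-256; 0o377 marks negative
+#     b'\xff\xff\xff\xff\xff\xff\xff\xff'
+#     >>> nti(b'\xff\xff\xff\xff\xff\xff\xff\xff')
+#     -1
+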
+def calc_chksums(buf):
+ """Calculate the checksum for a member's header by summing up all
+ characters except for the chksum field which is treated as if
+ it was filled with spaces. According to the GNU tar sources,
+ some tars (Sun and NeXT) calculate chksum with signed char,
+ which will be different if there are chars in the buffer with
+ the high bit set. So we calculate two checksums, unsigned and
+ signed.
+ """
+ unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
+ signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
+ return unsigned_chksum, signed_chksum
+
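+# Worked example (sketch): the leading 256 is the eight-byte chksum field
+# counted as ASCII spaces (8 * 0x20), which the "8x" in the struct format
+# skips over; an all-zero block therefore sums to exactly 256:
+#
+#     >>> calc_chksums(bytes(BLOCKSIZE))
+#     (256, 256)
+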
+def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
+ """Copy length bytes from fileobj src to fileobj dst.
+ If length is None, copy the entire content.
+ """
+ bufsize = bufsize or 16 * 1024
+ if length == 0:
+ return
+ if length is None:
+ shutil.copyfileobj(src, dst, bufsize)
+ return
+
+ blocks, remainder = divmod(length, bufsize)
+ for b in range(blocks):
+ buf = src.read(bufsize)
+ if len(buf) < bufsize:
+ raise exception("unexpected end of data")
+ dst.write(buf)
+
+ if remainder != 0:
+ buf = src.read(remainder)
+ if len(buf) < remainder:
+ raise exception("unexpected end of data")
+ dst.write(buf)
+ return
+
+def _safe_print(s):
+ encoding = getattr(sys.stdout, 'encoding', None)
+ if encoding is not None:
+ s = s.encode(encoding, 'backslashreplace').decode(encoding)
+ print(s, end=' ')
+
+
+class TarError(Exception):
+ """Base exception."""
+ pass
+class ExtractError(TarError):
+ """General exception for extract errors."""
+ pass
+class ReadError(TarError):
+ """Exception for unreadable tar archives."""
+ pass
+class CompressionError(TarError):
+ """Exception for unavailable compression methods."""
+ pass
+class StreamError(TarError):
+ """Exception for unsupported operations on stream-like TarFiles."""
+ pass
+class HeaderError(TarError):
+ """Base exception for header errors."""
+ pass
+class EmptyHeaderError(HeaderError):
+ """Exception for empty headers."""
+ pass
+class TruncatedHeaderError(HeaderError):
+ """Exception for truncated headers."""
+ pass
+class EOFHeaderError(HeaderError):
+ """Exception for end of file headers."""
+ pass
+class InvalidHeaderError(HeaderError):
+ """Exception for invalid headers."""
+ pass
+class SubsequentHeaderError(HeaderError):
+ """Exception for missing and invalid extended headers."""
+ pass
+
+#---------------------------
+# internal stream interface
+#---------------------------
+class _LowLevelFile:
+ """Low-level file object. Supports reading and writing.
+ It is used instead of a regular file object for streaming
+ access.
+ """
+
+ def __init__(self, name, mode):
+ mode = {
+ "r": os.O_RDONLY,
+ "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
+ }[mode]
+ if hasattr(os, "O_BINARY"):
+ mode |= os.O_BINARY
+ self.fd = os.open(name, mode, 0o666)
+
+ def close(self):
+ os.close(self.fd)
+
+ def read(self, size):
+ return os.read(self.fd, size)
+
+ def write(self, s):
+ os.write(self.fd, s)
+
+class _Stream:
+ """Class that serves as an adapter between TarFile and
+ a stream-like object. The stream-like object only
+ needs to have a read() or write() method that works with bytes,
+ and the method is accessed blockwise.
+ Use of gzip or bzip2 compression is possible.
+ A stream-like object could be for example: sys.stdin.buffer,
+ sys.stdout.buffer, a socket, a tape device etc.
+
+ _Stream is intended to be used only internally.
+ """
+
+ def __init__(self, name, mode, comptype, fileobj, bufsize,
+ compresslevel):
+ """Construct a _Stream object.
+ """
+ self._extfileobj = True
+ if fileobj is None:
+ fileobj = _LowLevelFile(name, mode)
+ self._extfileobj = False
+
+ if comptype == '*':
+ # Enable transparent compression detection for the
+ # stream interface
+ fileobj = _StreamProxy(fileobj)
+ comptype = fileobj.getcomptype()
+
+ self.name = name or ""
+ self.mode = mode
+ self.comptype = comptype
+ self.fileobj = fileobj
+ self.bufsize = bufsize
+ self.buf = b""
+ self.pos = 0
+ self.closed = False
+
+ try:
+ if comptype == "gz":
+ try:
+ import zlib
+ except ImportError:
+ raise CompressionError("zlib module is not available") from None
+ self.zlib = zlib
+ self.crc = zlib.crc32(b"")
+ if mode == "r":
+ self.exception = zlib.error
+ self._init_read_gz()
+ else:
+ self._init_write_gz(compresslevel)
+
+ elif comptype == "bz2":
+ try:
+ import bz2
+ except ImportError:
+ raise CompressionError("bz2 module is not available") from None
+ if mode == "r":
+ self.dbuf = b""
+ self.cmp = bz2.BZ2Decompressor()
+ self.exception = OSError
+ else:
+ self.cmp = bz2.BZ2Compressor(compresslevel)
+
+ elif comptype == "xz":
+ try:
+ import lzma
+ except ImportError:
+ raise CompressionError("lzma module is not available") from None
+ if mode == "r":
+ self.dbuf = b""
+ self.cmp = lzma.LZMADecompressor()
+ self.exception = lzma.LZMAError
+ else:
+ self.cmp = lzma.LZMACompressor()
+
+ elif comptype != "tar":
+ raise CompressionError("unknown compression type %r" % comptype)
+
+ except:
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+ raise
+
+ def __del__(self):
+ if hasattr(self, "closed") and not self.closed:
+ self.close()
+
+ def _init_write_gz(self, compresslevel):
+ """Initialize for writing with gzip compression.
+ """
+ self.cmp = self.zlib.compressobj(compresslevel,
+ self.zlib.DEFLATED,
+ -self.zlib.MAX_WBITS,
+ self.zlib.DEF_MEM_LEVEL,
+ 0)
+ timestamp = struct.pack("<L", int(time.time()))
+ self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+ if self.name.endswith(".gz"):
+ self.name = self.name[:-3]
+ # Honor "directory components removed" from RFC1952
+ self.name = os.path.basename(self.name)
+ # RFC1952 says we must use ISO-8859-1 for the FNAME field.
+ self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+ def write(self, s):
+ """Write string s to the stream.
+ """
+ if self.comptype == "gz":
+ self.crc = self.zlib.crc32(s, self.crc)
+ self.pos += len(s)
+ if self.comptype != "tar":
+ s = self.cmp.compress(s)
+ self.__write(s)
+
+ def __write(self, s):
+ """Write string s to the stream if a whole new block
+ is ready to be written.
+ """
+ self.buf += s
+ while len(self.buf) > self.bufsize:
+ self.fileobj.write(self.buf[:self.bufsize])
+ self.buf = self.buf[self.bufsize:]
+
+ def close(self):
+ """Close the _Stream object. No operation should be
+ done on it afterwards.
+ """
+ if self.closed:
+ return
+
+ self.closed = True
+ try:
+ if self.mode == "w" and self.comptype != "tar":
+ self.buf += self.cmp.flush()
+
+ if self.mode == "w" and self.buf:
+ self.fileobj.write(self.buf)
+ self.buf = b""
+ if self.comptype == "gz":
+ self.fileobj.write(struct.pack("<L", self.crc))
+ self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
+ finally:
+ if not self._extfileobj:
+ self.fileobj.close()
+
+ def _init_read_gz(self):
+ """Initialize for reading a gzip compressed fileobj.
+ """
+ self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
+ self.dbuf = b""
+
+ # taken from gzip.GzipFile with some alterations
+ if self.__read(2) != b"\037\213":
+ raise ReadError("not a gzip file")
+ if self.__read(1) != b"\010":
+ raise CompressionError("unsupported compression method")
+
+ flag = ord(self.__read(1))
+ self.__read(6)
+
+ if flag & 4:
+ xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
+ self.read(xlen)
+ if flag & 8:
+ while True:
+ s = self.__read(1)
+ if not s or s == NUL:
+ break
+ if flag & 16:
+ while True:
+ s = self.__read(1)
+ if not s or s == NUL:
+ break
+ if flag & 2:
+ self.__read(2)
+
+ def tell(self):
+ """Return the stream's file pointer position.
+ """
+ return self.pos
+
+ def seek(self, pos=0):
+ """Set the stream's file pointer to pos. Negative seeking
+ is forbidden.
+ """
+ if pos - self.pos >= 0:
+ blocks, remainder = divmod(pos - self.pos, self.bufsize)
+ for i in range(blocks):
+ self.read(self.bufsize)
+ self.read(remainder)
+ else:
+ raise StreamError("seeking backwards is not allowed")
+ return self.pos
+
+ def read(self, size):
+ """Return the next size number of bytes from the stream."""
+ assert size is not None
+ buf = self._read(size)
+ self.pos += len(buf)
+ return buf
+
+ def _read(self, size):
+ """Return size bytes from the stream.
+ """
+ if self.comptype == "tar":
+ return self.__read(size)
+
+ c = len(self.dbuf)
+ t = [self.dbuf]
+ while c < size:
+ # Skip underlying buffer to avoid unaligned double buffering.
+ if self.buf:
+ buf = self.buf
+ self.buf = b""
+ else:
+ buf = self.fileobj.read(self.bufsize)
+ if not buf:
+ break
+ try:
+ buf = self.cmp.decompress(buf)
+ except self.exception as e:
+ raise ReadError("invalid compressed data") from e
+ t.append(buf)
+ c += len(buf)
+ t = b"".join(t)
+ self.dbuf = t[size:]
+ return t[:size]
+
+ def __read(self, size):
+ """Return size bytes from stream. If internal buffer is empty,
+ read another block from the stream.
+ """
+ c = len(self.buf)
+ t = [self.buf]
+ while c < size:
+ buf = self.fileobj.read(self.bufsize)
+ if not buf:
+ break
+ t.append(buf)
+ c += len(buf)
+ t = b"".join(t)
+ self.buf = t[size:]
+ return t[:size]
+# class _Stream
+
+class _StreamProxy(object):
+ """Small proxy class that enables transparent compression
+ detection for the Stream interface (mode 'r|*').
+ """
+
+ def __init__(self, fileobj):
+ self.fileobj = fileobj
+ self.buf = self.fileobj.read(BLOCKSIZE)
+
+ def read(self, size):
+ self.read = self.fileobj.read
+ return self.buf
+
+ def getcomptype(self):
+ if self.buf.startswith(b"\x1f\x8b\x08"):
+ return "gz"
+ elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
+ return "bz2"
+ elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
+ return "xz"
+ else:
+ return "tar"
+
+ def close(self):
+ self.fileobj.close()
+# class StreamProxy
+
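+# Usage sketch for the transparent 'r|*' stream mode enabled by _StreamProxy
+# (an illustration; "archive.tar.gz" is a hypothetical path, and open() is
+# the module-level TarFile.open alias exported in __all__):
+#
+#     with bltn_open("archive.tar.gz", "rb") as f:
+#         with open(fileobj=f, mode="r|*") as tf:  # compression auto-detected
+#             for member in tf:
+#                 print(member.name)
+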
+#------------------------
+# Extraction file object
+#------------------------
+class _FileInFile(object):
+ """A thin wrapper around an existing file object that
+ provides a part of its data as an individual file
+ object.
+ """
+
+ def __init__(self, fileobj, offset, size, name, blockinfo=None):
+ self.fileobj = fileobj
+ self.offset = offset
+ self.size = size
+ self.position = 0
+ self.name = name
+ self.closed = False
+
+ if blockinfo is None:
+ blockinfo = [(0, size)]
+
+ # Construct a map with data and zero blocks.
+ self.map_index = 0
+ self.map = []
+ lastpos = 0
+ realpos = self.offset
+ for offset, size in blockinfo:
+ if offset > lastpos:
+ self.map.append((False, lastpos, offset, None))
+ self.map.append((True, offset, offset + size, realpos))
+ realpos += size
+ lastpos = offset + size
+ if lastpos < self.size:
+ self.map.append((False, lastpos, self.size, None))
+
+ def flush(self):
+ pass
+
+ def readable(self):
+ return True
+
+ def writable(self):
+ return False
+
+ def seekable(self):
+ return self.fileobj.seekable()
+
+ def tell(self):
+ """Return the current file position.
+ """
+ return self.position
+
+ def seek(self, position, whence=io.SEEK_SET):
+ """Seek to a position in the file.
+ """
+ if whence == io.SEEK_SET:
+ self.position = min(max(position, 0), self.size)
+ elif whence == io.SEEK_CUR:
+ if position < 0:
+ self.position = max(self.position + position, 0)
+ else:
+ self.position = min(self.position + position, self.size)
+ elif whence == io.SEEK_END:
+ self.position = max(min(self.size + position, self.size), 0)
+ else:
+ raise ValueError("Invalid argument")
+ return self.position
+
+ def read(self, size=None):
+ """Read data from the file.
+ """
+ if size is None:
+ size = self.size - self.position
+ else:
+ size = min(size, self.size - self.position)
+
+ buf = b""
+ while size > 0:
+ while True:
+ data, start, stop, offset = self.map[self.map_index]
+ if start <= self.position < stop:
+ break
+ else:
+ self.map_index += 1
+ if self.map_index == len(self.map):
+ self.map_index = 0
+ length = min(size, stop - self.position)
+ if data:
+ self.fileobj.seek(offset + (self.position - start))
+ b = self.fileobj.read(length)
+ if len(b) != length:
+ raise ReadError("unexpected end of data")
+ buf += b
+ else:
+ buf += NUL * length
+ size -= length
+ self.position += length
+ return buf
+
+ def readinto(self, b):
+ buf = self.read(len(b))
+ b[:len(buf)] = buf
+ return len(buf)
+
+ def close(self):
+ self.closed = True
+#class _FileInFile
+
+class ExFileObject(io.BufferedReader):
+
+ def __init__(self, tarfile, tarinfo):
+ fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
+ tarinfo.size, tarinfo.name, tarinfo.sparse)
+ super().__init__(fileobj)
+#class ExFileObject
+
+
+#-----------------------------
+# extraction filters (PEP 706)
+#-----------------------------
+
+class FilterError(TarError):
+ pass
+
+class AbsolutePathError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'member {tarinfo.name!r} has an absolute path')
+
+class OutsideDestinationError(FilterError):
+ def __init__(self, tarinfo, path):
+ self.tarinfo = tarinfo
+ self._path = path
+ super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
+ + 'which is outside the destination')
+
+class SpecialFileError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'{tarinfo.name!r} is a special file')
+
+class AbsoluteLinkError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'{tarinfo.name!r} is a link to an absolute path')
+
+class LinkOutsideDestinationError(FilterError):
+ def __init__(self, tarinfo, path):
+ self.tarinfo = tarinfo
+ self._path = path
+ super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
+ + 'which is outside the destination')
+
+def _get_filtered_attrs(member, dest_path, for_data=True):
+ new_attrs = {}
+ name = member.name
+ dest_path = os.path.realpath(dest_path)
+ # Strip leading / (tar's directory separator) from filenames.
+ # Include os.sep (target OS directory separator) as well.
+ if name.startswith(('/', os.sep)):
+ name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
+ if os.path.isabs(name):
+ # Path is absolute even after stripping.
+ # For example, 'C:/foo' on Windows.
+ raise AbsolutePathError(member)
+ # Ensure we stay in the destination
+ target_path = os.path.realpath(os.path.join(dest_path, name))
+ if os.path.commonpath([target_path, dest_path]) != dest_path:
+ raise OutsideDestinationError(member, target_path)
+ # Limit permissions (no high bits, and go-w)
+ mode = member.mode
+ if mode is not None:
+ # Strip high bits & group/other write bits
+ mode = mode & 0o755
+ if for_data:
+ # For data, handle permissions & file types
+ if member.isreg() or member.islnk():
+ if not mode & 0o100:
+ # Clear executable bits if not executable by user
+ mode &= ~0o111
+ # Ensure owner can read & write
+ mode |= 0o600
+ elif member.isdir() or member.issym():
+ # Ignore mode for directories & symlinks
+ mode = None
+ else:
+ # Reject special files
+ raise SpecialFileError(member)
+ if mode != member.mode:
+ new_attrs['mode'] = mode
+ if for_data:
+ # Ignore ownership for 'data'
+ if member.uid is not None:
+ new_attrs['uid'] = None
+ if member.gid is not None:
+ new_attrs['gid'] = None
+ if member.uname is not None:
+ new_attrs['uname'] = None
+ if member.gname is not None:
+ new_attrs['gname'] = None
+ # Check link destination for 'data'
+ if member.islnk() or member.issym():
+ if os.path.isabs(member.linkname):
+ raise AbsoluteLinkError(member)
+ if member.issym():
+ target_path = os.path.join(dest_path,
+ os.path.dirname(name),
+ member.linkname)
+ else:
+ target_path = os.path.join(dest_path,
+ member.linkname)
+ target_path = os.path.realpath(target_path)
+ if os.path.commonpath([target_path, dest_path]) != dest_path:
+ raise LinkOutsideDestinationError(member, target_path)
+ return new_attrs
+
+def fully_trusted_filter(member, dest_path):
+ return member
+
+def tar_filter(member, dest_path):
+ new_attrs = _get_filtered_attrs(member, dest_path, False)
+ if new_attrs:
+ return member.replace(**new_attrs, deep=False)
+ return member
+
+def data_filter(member, dest_path):
+ new_attrs = _get_filtered_attrs(member, dest_path, True)
+ if new_attrs:
+ return member.replace(**new_attrs, deep=False)
+ return member
+
+_NAMED_FILTERS = {
+ "fully_trusted": fully_trusted_filter,
+ "tar": tar_filter,
+ "data": data_filter,
+}
+
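+# Usage sketch (illustration only): extraction filters are passed to
+# TarFile.extract()/extractall(), either as one of the callables above or
+# by registered name; "archive.tar" and "dest" are hypothetical paths:
+#
+#     with open("archive.tar") as tf:          # module-level TarFile.open
+#         tf.extractall(path="dest", filter=data_filter)  # or filter="data"
+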
+#------------------
+# Exported Classes
+#------------------
+
+# Sentinel for replace() defaults, meaning "don't change the attribute"
+_KEEP = object()
+
+class TarInfo(object):
+ """Informational class which holds the details about an
+ archive member given by a tar header block.
+ TarInfo objects are returned by TarFile.getmember(),
+ TarFile.getmembers() and TarFile.gettarinfo() and are
+ usually created internally.
+ """
+
+ __slots__ = dict(
+ name = 'Name of the archive member.',
+ mode = 'Permission bits.',
+ uid = 'User ID of the user who originally stored this member.',
+ gid = 'Group ID of the user who originally stored this member.',
+ size = 'Size in bytes.',
+ mtime = 'Time of last modification.',
+ chksum = 'Header checksum.',
+ type = ('File type. type is usually one of these constants: '
+ 'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
+ 'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
+ linkname = ('Name of the link target, which is only present '
+ 'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
+ uname = 'User name.',
+ gname = 'Group name.',
+ devmajor = 'Device major number.',
+ devminor = 'Device minor number.',
+ offset = 'The tar header starts here.',
+ offset_data = "The file's data starts here.",
+ pax_headers = ('A dictionary containing key-value pairs of an '
+ 'associated pax extended header.'),
+ sparse = 'Sparse member information.',
+ tarfile = None,
+ _sparse_structs = None,
+ _link_target = None,
+ )
+
+ def __init__(self, name=""):
+ """Construct a TarInfo object. name is the optional name
+ of the member.
+ """
+ self.name = name # member name
+ self.mode = 0o644 # file permissions
+ self.uid = 0 # user id
+ self.gid = 0 # group id
+ self.size = 0 # file size
+ self.mtime = 0 # modification time
+ self.chksum = 0 # header checksum
+ self.type = REGTYPE # member type
+ self.linkname = "" # link name
+ self.uname = "" # user name
+ self.gname = "" # group name
+ self.devmajor = 0 # device major number
+ self.devminor = 0 # device minor number
+
+ self.offset = 0 # the tar header starts here
+ self.offset_data = 0 # the file's data starts here
+
+ self.sparse = None # sparse member information
+ self.pax_headers = {} # pax header information
+
+ @property
+ def path(self):
+ 'In pax headers, "name" is called "path".'
+ return self.name
+
+ @path.setter
+ def path(self, name):
+ self.name = name
+
+ @property
+ def linkpath(self):
+ 'In pax headers, "linkname" is called "linkpath".'
+ return self.linkname
+
+ @linkpath.setter
+ def linkpath(self, linkname):
+ self.linkname = linkname
+
+ def __repr__(self):
+ return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
+
+ def replace(self, *,
+ name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
+ uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
+ deep=True, _KEEP=_KEEP):
+ """Return a deep copy of self with the given attributes replaced.
+ """
+ if deep:
+ result = copy.deepcopy(self)
+ else:
+ result = copy.copy(self)
+ if name is not _KEEP:
+ result.name = name
+ if mtime is not _KEEP:
+ result.mtime = mtime
+ if mode is not _KEEP:
+ result.mode = mode
+ if linkname is not _KEEP:
+ result.linkname = linkname
+ if uid is not _KEEP:
+ result.uid = uid
+ if gid is not _KEEP:
+ result.gid = gid
+ if uname is not _KEEP:
+ result.uname = uname
+ if gname is not _KEEP:
+ result.gname = gname
+ return result
+
+ def get_info(self):
+ """Return the TarInfo's attributes as a dictionary.
+ """
+ if self.mode is None:
+ mode = None
+ else:
+ mode = self.mode & 0o7777
+ info = {
+ "name": self.name,
+ "mode": mode,
+ "uid": self.uid,
+ "gid": self.gid,
+ "size": self.size,
+ "mtime": self.mtime,
+ "chksum": self.chksum,
+ "type": self.type,
+ "linkname": self.linkname,
+ "uname": self.uname,
+ "gname": self.gname,
+ "devmajor": self.devmajor,
+ "devminor": self.devminor
+ }
+
+ if info["type"] == DIRTYPE and not info["name"].endswith("/"):
+ info["name"] += "/"
+
+ return info
+
+ def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
+ """Return a tar header as a string of 512 byte blocks.
+ """
+ info = self.get_info()
+ for name, value in info.items():
+ if value is None:
+ raise ValueError("%s may not be None" % name)
+
+ if format == USTAR_FORMAT:
+ return self.create_ustar_header(info, encoding, errors)
+ elif format == GNU_FORMAT:
+ return self.create_gnu_header(info, encoding, errors)
+ elif format == PAX_FORMAT:
+ return self.create_pax_header(info, encoding)
+ else:
+ raise ValueError("invalid format")
+
+ def create_ustar_header(self, info, encoding, errors):
+ """Return the object as a ustar header block.
+ """
+ info["magic"] = POSIX_MAGIC
+
+ if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+ raise ValueError("linkname is too long")
+
+ if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+ info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
+
+ return self._create_header(info, USTAR_FORMAT, encoding, errors)
+
+ def create_gnu_header(self, info, encoding, errors):
+ """Return the object as a GNU header block sequence.
+ """
+ info["magic"] = GNU_MAGIC
+
+ buf = b""
+ if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+ buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
+
+ if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+ buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
+
+ return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
+
+ def create_pax_header(self, info, encoding):
+ """Return the object as a ustar header block. If it cannot be
+ represented this way, prepend a pax extended header sequence
+ with supplement information.
+ """
+ info["magic"] = POSIX_MAGIC
+ pax_headers = self.pax_headers.copy()
+
+ # Test string fields for values that exceed the field length or cannot
+ # be represented in ASCII encoding.
+ for name, hname, length in (
+ ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
+ ("uname", "uname", 32), ("gname", "gname", 32)):
+
+ if hname in pax_headers:
+ # The pax header has priority.
+ continue
+
+ # Try to encode the string as ASCII.
+ try:
+ info[name].encode("ascii", "strict")
+ except UnicodeEncodeError:
+ pax_headers[hname] = info[name]
+ continue
+
+ if len(info[name]) > length:
+ pax_headers[hname] = info[name]
+
+ # Test number fields for values that exceed the field limit or values
+ # that need to be stored as float.
+ for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
+ needs_pax = False
+
+ val = info[name]
+ val_is_float = isinstance(val, float)
+ val_int = round(val) if val_is_float else val
+ if not 0 <= val_int < 8 ** (digits - 1):
+ # Avoid overflow.
+ info[name] = 0
+ needs_pax = True
+ elif val_is_float:
+ # Put rounded value in ustar header, and full
+ # precision value in pax header.
+ info[name] = val_int
+ needs_pax = True
+
+ # The existing pax header has priority.
+ if needs_pax and name not in pax_headers:
+ pax_headers[name] = str(val)
+
+ # Create a pax extended header if necessary.
+ if pax_headers:
+ buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
+ else:
+ buf = b""
+
+ return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
+
+ @classmethod
+ def create_pax_global_header(cls, pax_headers):
+ """Return the object as a pax global header block sequence.
+ """
+ return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
+
+ def _posix_split_name(self, name, encoding, errors):
+ """Split a name longer than 100 chars into a prefix
+ and a name part.
+ """
+ components = name.split("/")
+ for i in range(1, len(components)):
+ prefix = "/".join(components[:i])
+ name = "/".join(components[i:])
+ if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
+ len(name.encode(encoding, errors)) <= LENGTH_NAME:
+ break
+ else:
+ raise ValueError("name is too long")
+
+ return prefix, name
+
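+ # Worked example (sketch) for _posix_split_name(): a 182-character name
+ # made of three 60-character components settles on i == 2, i.e.
+ # prefix = the first two components (121 <= LENGTH_PREFIX == 155) and
+ # name = the last component (60 <= LENGTH_NAME == 100).
+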
+ @staticmethod
+ def _create_header(info, format, encoding, errors):
+ """Return a header block. info is a dictionary with file
+ information, format must be one of the *_FORMAT constants.
+ """
+ has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
+ if has_device_fields:
+ devmajor = itn(info.get("devmajor", 0), 8, format)
+ devminor = itn(info.get("devminor", 0), 8, format)
+ else:
+ devmajor = stn("", 8, encoding, errors)
+ devminor = stn("", 8, encoding, errors)
+
+ # None values in metadata should cause ValueError.
+ # itn()/stn() do this for all fields except type.
+ filetype = info.get("type", REGTYPE)
+ if filetype is None:
+ raise ValueError("TarInfo.type must not be None")
+
+ parts = [
+ stn(info.get("name", ""), 100, encoding, errors),
+ itn(info.get("mode", 0) & 0o7777, 8, format),
+ itn(info.get("uid", 0), 8, format),
+ itn(info.get("gid", 0), 8, format),
+ itn(info.get("size", 0), 12, format),
+ itn(info.get("mtime", 0), 12, format),
+ b" ", # checksum field
+ filetype,
+ stn(info.get("linkname", ""), 100, encoding, errors),
+ info.get("magic", POSIX_MAGIC),
+ stn(info.get("uname", ""), 32, encoding, errors),
+ stn(info.get("gname", ""), 32, encoding, errors),
+ devmajor,
+ devminor,
+ stn(info.get("prefix", ""), 155, encoding, errors)
+ ]
+
+ buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
+ chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
+ buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
+ return buf
+
+ @staticmethod
+ def _create_payload(payload):
+ """Return the string payload filled with zero bytes
+ up to the next 512 byte border.
+ """
+ blocks, remainder = divmod(len(payload), BLOCKSIZE)
+ if remainder > 0:
+ payload += (BLOCKSIZE - remainder) * NUL
+ return payload
+
+ @classmethod
+ def _create_gnu_long_header(cls, name, type, encoding, errors):
+ """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
+ for name.
+ """
+ name = name.encode(encoding, errors) + NUL
+
+ info = {}
+ info["name"] = "././@LongLink"
+ info["type"] = type
+ info["size"] = len(name)
+ info["magic"] = GNU_MAGIC
+
+ # create extended header + name blocks.
+ return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
+ cls._create_payload(name)
+
+ @classmethod
+ def _create_pax_generic_header(cls, pax_headers, type, encoding):
+ """Return a POSIX.1-2008 extended or global header sequence
+ that contains a list of keyword, value pairs. The values
+ must be strings.
+ """
+ # Check if one of the fields contains surrogate characters and thereby
+ # forces hdrcharset=BINARY, see _proc_pax() for more information.
+ binary = False
+ for keyword, value in pax_headers.items():
+ try:
+ value.encode("utf-8", "strict")
+ except UnicodeEncodeError:
+ binary = True
+ break
+
+ records = b""
+ if binary:
+ # Put the hdrcharset field at the beginning of the header.
+ records += b"21 hdrcharset=BINARY\n"
+
+ for keyword, value in pax_headers.items():
+ keyword = keyword.encode("utf-8")
+ if binary:
+ # Try to restore the original byte representation of `value'.
+ # Needless to say, the encoding must match the string.
+ value = value.encode(encoding, "surrogateescape")
+ else:
+ value = value.encode("utf-8")
+
+ l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
+ n = p = 0
+ while True:
+ n = l + len(str(p))
+ if n == p:
+ break
+ p = n
+ records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
+
+ # We use a hardcoded "././@PaxHeader" name like star does
+ # instead of the one that POSIX recommends.
+ info = {}
+ info["name"] = "././@PaxHeader"
+ info["type"] = type
+ info["size"] = len(records)
+ info["magic"] = POSIX_MAGIC
+
+ # Create pax header + record blocks.
+ return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
+ cls._create_payload(records)
+
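+ # Worked example (sketch) of the self-referential length field above:
+ # for keyword "path" and value "foo", l = 4 + 3 + 3 = 10 and the loop
+ # converges at p = 12, producing the 12-byte record b"12 path=foo\n".
+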
+ @classmethod
+ def frombuf(cls, buf, encoding, errors):
+ """Construct a TarInfo object from a 512 byte bytes object.
+ """
+ if len(buf) == 0:
+ raise EmptyHeaderError("empty header")
+ if len(buf) != BLOCKSIZE:
+ raise TruncatedHeaderError("truncated header")
+ if buf.count(NUL) == BLOCKSIZE:
+ raise EOFHeaderError("end of file header")
+
+ chksum = nti(buf[148:156])
+ if chksum not in calc_chksums(buf):
+ raise InvalidHeaderError("bad checksum")
+
+ obj = cls()
+ obj.name = nts(buf[0:100], encoding, errors)
+ obj.mode = nti(buf[100:108])
+ obj.uid = nti(buf[108:116])
+ obj.gid = nti(buf[116:124])
+ obj.size = nti(buf[124:136])
+ obj.mtime = nti(buf[136:148])
+ obj.chksum = chksum
+ obj.type = buf[156:157]
+ obj.linkname = nts(buf[157:257], encoding, errors)
+ obj.uname = nts(buf[265:297], encoding, errors)
+ obj.gname = nts(buf[297:329], encoding, errors)
+ obj.devmajor = nti(buf[329:337])
+ obj.devminor = nti(buf[337:345])
+ prefix = nts(buf[345:500], encoding, errors)
+
+ # Old V7 tar format represents a directory as a regular
+ # file with a trailing slash.
+ if obj.type == AREGTYPE and obj.name.endswith("/"):
+ obj.type = DIRTYPE
+
+ # The old GNU sparse format occupies some of the unused
+ # space in the buffer for up to 4 sparse structures.
+ # Save them for later processing in _proc_sparse().
+ if obj.type == GNUTYPE_SPARSE:
+ pos = 386
+ structs = []
+ for i in range(4):
+ try:
+ offset = nti(buf[pos:pos + 12])
+ numbytes = nti(buf[pos + 12:pos + 24])
+ except ValueError:
+ break
+ structs.append((offset, numbytes))
+ pos += 24
+ isextended = bool(buf[482])
+ origsize = nti(buf[483:495])
+ obj._sparse_structs = (structs, isextended, origsize)
+
+ # Remove redundant slashes from directories.
+ if obj.isdir():
+ obj.name = obj.name.rstrip("/")
+
+ # Reconstruct a ustar longname.
+ if prefix and obj.type not in GNU_TYPES:
+ obj.name = prefix + "/" + obj.name
+ return obj
+
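+ # Round-trip sketch (an illustration, not upstream code): a header built
+ # by tobuf() parses back with frombuf(), and is exactly one block long:
+ #
+ #     >>> ti = TarInfo("example.txt")
+ #     >>> buf = ti.tobuf(USTAR_FORMAT)
+ #     >>> len(buf) == BLOCKSIZE
+ #     True
+ #     >>> TarInfo.frombuf(buf, ENCODING, "surrogateescape").name
+ #     'example.txt'
+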
+ @classmethod
+ def fromtarfile(cls, tarfile):
+ """Return the next TarInfo object from TarFile object
+ tarfile.
+ """
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+ obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+ return obj._proc_member(tarfile)
+
+ #--------------------------------------------------------------------------
+ # The following are methods that are called depending on the type of a
+ # member. The entry point is _proc_member() which can be overridden in a
+ # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+ # implement the following
+ # operations:
+ # 1. Set self.offset_data to the position where the data blocks begin,
+ # if there is data that follows.
+ # 2. Set tarfile.offset to the position where the next member's header will
+ # begin.
+ # 3. Return self or another valid TarInfo object.
+ def _proc_member(self, tarfile):
+ """Choose the right processing method depending on
+ the type and call it.
+ """
+ if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+ return self._proc_gnulong(tarfile)
+ elif self.type == GNUTYPE_SPARSE:
+ return self._proc_sparse(tarfile)
+ elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+ return self._proc_pax(tarfile)
+ else:
+ return self._proc_builtin(tarfile)
+
+ def _proc_builtin(self, tarfile):
+ """Process a builtin type or an unknown type which
+ will be treated as a regular file.
+ """
+ self.offset_data = tarfile.fileobj.tell()
+ offset = self.offset_data
+ if self.isreg() or self.type not in SUPPORTED_TYPES:
+ # Skip the following data blocks.
+ offset += self._block(self.size)
+ tarfile.offset = offset
+
+ # Patch the TarInfo object with saved global
+ # header information.
+ self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
+
+ # Remove redundant slashes from directories. This is to be consistent
+ # with frombuf().
+ if self.isdir():
+ self.name = self.name.rstrip("/")
+
+ return self
+
+ def _proc_gnulong(self, tarfile):
+ """Process the blocks that hold a GNU longname
+ or longlink member.
+ """
+ buf = tarfile.fileobj.read(self._block(self.size))
+
+ # Fetch the next header and process it.
+ try:
+ next = self.fromtarfile(tarfile)
+ except HeaderError as e:
+ raise SubsequentHeaderError(str(e)) from None
+
+ # Patch the TarInfo object from the next header with
+ # the longname information.
+ next.offset = self.offset
+ if self.type == GNUTYPE_LONGNAME:
+ next.name = nts(buf, tarfile.encoding, tarfile.errors)
+ elif self.type == GNUTYPE_LONGLINK:
+ next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
+
+ # Remove redundant slashes from directories. This is to be consistent
+ # with frombuf().
+ if next.isdir():
+ next.name = next.name.removesuffix("/")
+
+ return next
+
+ def _proc_sparse(self, tarfile):
+ """Process a GNU sparse header plus extra headers.
+ """
+ # We already collected some sparse structures in frombuf().
+ structs, isextended, origsize = self._sparse_structs
+ del self._sparse_structs
+
+ # Collect sparse structures from extended header blocks.
+ while isextended:
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ pos = 0
+ for i in range(21):
+ try:
+ offset = nti(buf[pos:pos + 12])
+ numbytes = nti(buf[pos + 12:pos + 24])
+ except ValueError:
+ break
+ if offset and numbytes:
+ structs.append((offset, numbytes))
+ pos += 24
+ isextended = bool(buf[504])
+ self.sparse = structs
+
+ self.offset_data = tarfile.fileobj.tell()
+ tarfile.offset = self.offset_data + self._block(self.size)
+ self.size = origsize
+ return self
+
+ def _proc_pax(self, tarfile):
+ """Process an extended or global header as described in
+ POSIX.1-2008.
+ """
+ # Read the header information.
+ buf = tarfile.fileobj.read(self._block(self.size))
+
+ # A pax header stores supplemental information for either
+ # the following file (extended) or all following files
+ # (global).
+ if self.type == XGLTYPE:
+ pax_headers = tarfile.pax_headers
+ else:
+ pax_headers = tarfile.pax_headers.copy()
+
+ # Check if the pax header contains a hdrcharset field. This tells us
+ # the encoding of the path, linkpath, uname and gname fields. Normally,
+ # these fields are UTF-8 encoded, but since POSIX.1-2008, tar
+ # implementations are allowed to store them as raw binary strings if
+ # the translation to UTF-8 fails.
+ match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
+ if match is not None:
+ pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
+
+ # For the time being, we don't care about anything other than "BINARY".
+ # The only other value that is currently allowed by the standard is
+ # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
+ hdrcharset = pax_headers.get("hdrcharset")
+ if hdrcharset == "BINARY":
+ encoding = tarfile.encoding
+ else:
+ encoding = "utf-8"
+
+ # Parse pax header information. A record looks like that:
+ # "%d %s=%s\n" % (length, keyword, value). length is the size
+ # of the complete record including the length field itself and
+ # the newline. keyword and value are both UTF-8 encoded strings.
+ regex = re.compile(br"(\d+) ([^=]+)=")
+ pos = 0
+ while match := regex.match(buf, pos):
+ length, keyword = match.groups()
+ length = int(length)
+ if length == 0:
+ raise InvalidHeaderError("invalid header")
+ value = buf[match.end(2) + 1:match.start(1) + length - 1]
+
+ # Normally, we could just use "utf-8" as the encoding and "strict"
+ # as the error handler, but we better not take the risk. For
+ # example, GNU tar <= 1.23 is known to store filenames it cannot
+ # translate to UTF-8 as raw strings (unfortunately without a
+ # hdrcharset=BINARY header).
+ # We first try the strict standard encoding, and if that fails we
+ # fall back on the user's encoding and error handler.
+ keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
+ tarfile.errors)
+ if keyword in PAX_NAME_FIELDS:
+ value = self._decode_pax_field(value, encoding, tarfile.encoding,
+ tarfile.errors)
+ else:
+ value = self._decode_pax_field(value, "utf-8", "utf-8",
+ tarfile.errors)
+
+ pax_headers[keyword] = value
+ pos += length
+
+ # Fetch the next header.
+ try:
+ next = self.fromtarfile(tarfile)
+ except HeaderError as e:
+ raise SubsequentHeaderError(str(e)) from None
+
+ # Process GNU sparse information.
+ if "GNU.sparse.map" in pax_headers:
+ # GNU extended sparse format version 0.1.
+ self._proc_gnusparse_01(next, pax_headers)
+
+ elif "GNU.sparse.size" in pax_headers:
+ # GNU extended sparse format version 0.0.
+ self._proc_gnusparse_00(next, pax_headers, buf)
+
+ elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
+ # GNU extended sparse format version 1.0.
+ self._proc_gnusparse_10(next, pax_headers, tarfile)
+
+ if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
+ # Patch the TarInfo object with the extended header info.
+ next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
+ next.offset = self.offset
+
+ if "size" in pax_headers:
+ # If the extended header replaces the size field,
+ # we need to recalculate the offset where the next
+ # header starts.
+ offset = next.offset_data
+ if next.isreg() or next.type not in SUPPORTED_TYPES:
+ offset += next._block(next.size)
+ tarfile.offset = offset
+
+ return next
+
+ def _proc_gnusparse_00(self, next, pax_headers, buf):
+ """Process a GNU tar extended sparse header, version 0.0.
+ """
+ offsets = []
+ for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
+ offsets.append(int(match.group(1)))
+ numbytes = []
+ for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
+ numbytes.append(int(match.group(1)))
+ next.sparse = list(zip(offsets, numbytes))
+
+ def _proc_gnusparse_01(self, next, pax_headers):
+ """Process a GNU tar extended sparse header, version 0.1.
+ """
+ sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
+ next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
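+ # Worked example (sketch): a pax value of "0,512,10240,512" for
+ # "GNU.sparse.map" yields next.sparse = [(0, 512), (10240, 512)],
+ # i.e. (offset, numbytes) pairs describing the member's data blocks.
+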
+ def _proc_gnusparse_10(self, next, pax_headers, tarfile):
+ """Process a GNU tar extended sparse header, version 1.0.
+ """
+ fields = None
+ sparse = []
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ fields, buf = buf.split(b"\n", 1)
+ fields = int(fields)
+ while len(sparse) < fields * 2:
+ if b"\n" not in buf:
+ buf += tarfile.fileobj.read(BLOCKSIZE)
+ number, buf = buf.split(b"\n", 1)
+ sparse.append(int(number))
+ next.offset_data = tarfile.fileobj.tell()
+ next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+ def _apply_pax_info(self, pax_headers, encoding, errors):
+ """Replace fields with supplemental information from a previous
+ pax extended or global header.
+ """
+ for keyword, value in pax_headers.items():
+ if keyword == "GNU.sparse.name":
+ setattr(self, "path", value)
+ elif keyword == "GNU.sparse.size":
+ setattr(self, "size", int(value))
+ elif keyword == "GNU.sparse.realsize":
+ setattr(self, "size", int(value))
+ elif keyword in PAX_FIELDS:
+ if keyword in PAX_NUMBER_FIELDS:
+ try:
+ value = PAX_NUMBER_FIELDS[keyword](value)
+ except ValueError:
+ value = 0
+ if keyword == "path":
+ value = value.rstrip("/")
+ setattr(self, keyword, value)
+
+ self.pax_headers = pax_headers.copy()
+
+ def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
+ """Decode a single field from a pax record.
+ """
+ try:
+ return value.decode(encoding, "strict")
+ except UnicodeDecodeError:
+ return value.decode(fallback_encoding, fallback_errors)
+
+ def _block(self, count):
+        """Round up a byte count to the next multiple of BLOCKSIZE
+        and return it, e.g. _block(834) => 1024.
+ """
+ blocks, remainder = divmod(count, BLOCKSIZE)
+ if remainder:
+ blocks += 1
+ return blocks * BLOCKSIZE
+
+ def isreg(self):
+        'Return True if the TarInfo object is a regular file.'
+ return self.type in REGULAR_TYPES
+
+ def isfile(self):
+        'Return True if the TarInfo object is a regular file.'
+ return self.isreg()
+
+ def isdir(self):
+ 'Return True if it is a directory.'
+ return self.type == DIRTYPE
+
+ def issym(self):
+ 'Return True if it is a symbolic link.'
+ return self.type == SYMTYPE
+
+ def islnk(self):
+ 'Return True if it is a hard link.'
+ return self.type == LNKTYPE
+
+ def ischr(self):
+ 'Return True if it is a character device.'
+ return self.type == CHRTYPE
+
+ def isblk(self):
+ 'Return True if it is a block device.'
+ return self.type == BLKTYPE
+
+ def isfifo(self):
+ 'Return True if it is a FIFO.'
+ return self.type == FIFOTYPE
+
+ def issparse(self):
+ return self.sparse is not None
+
+ def isdev(self):
+ 'Return True if it is one of character device, block device or FIFO.'
+ return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
+# class TarInfo
+
+class TarFile(object):
+ """The TarFile Class provides an interface to tar archives.
+ """
+
+ debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
+
+ dereference = False # If true, add content of linked file to the
+ # tar file, else the link.
+
+ ignore_zeros = False # If true, skips empty or invalid blocks and
+ # continues processing.
+
+ errorlevel = 1 # If 0, fatal errors only appear in debug
+ # messages (if debug >= 0). If > 0, errors
+ # are passed to the caller as exceptions.
+
+ format = DEFAULT_FORMAT # The format to use when creating an archive.
+
+ encoding = ENCODING # Encoding for 8-bit character strings.
+
+ errors = None # Error handler for unicode conversion.
+
+ tarinfo = TarInfo # The default TarInfo class to use.
+
+ fileobject = ExFileObject # The file-object for extractfile().
+
+ extraction_filter = None # The default filter for extraction.
+
+ def __init__(self, name=None, mode="r", fileobj=None, format=None,
+ tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
+ errors="surrogateescape", pax_headers=None, debug=None,
+ errorlevel=None, copybufsize=None):
+ """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+ read from an existing archive, 'a' to append data to an existing
+ file or 'w' to create a new file overwriting an existing one. `mode'
+ defaults to 'r'.
+ If `fileobj' is given, it is used for reading or writing data. If it
+ can be determined, `mode' is overridden by `fileobj's mode.
+        `fileobj' is not closed when TarFile is closed.
+ """
+ modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
+ if mode not in modes:
+ raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+ self.mode = mode
+ self._mode = modes[mode]
+
+ if not fileobj:
+ if self.mode == "a" and not os.path.exists(name):
+ # Create nonexistent files in append mode.
+ self.mode = "w"
+ self._mode = "wb"
+ fileobj = bltn_open(name, self._mode)
+ self._extfileobj = False
+ else:
+ if (name is None and hasattr(fileobj, "name") and
+ isinstance(fileobj.name, (str, bytes))):
+ name = fileobj.name
+ if hasattr(fileobj, "mode"):
+ self._mode = fileobj.mode
+ self._extfileobj = True
+ self.name = os.path.abspath(name) if name else None
+ self.fileobj = fileobj
+
+ # Init attributes.
+ if format is not None:
+ self.format = format
+ if tarinfo is not None:
+ self.tarinfo = tarinfo
+ if dereference is not None:
+ self.dereference = dereference
+ if ignore_zeros is not None:
+ self.ignore_zeros = ignore_zeros
+ if encoding is not None:
+ self.encoding = encoding
+ self.errors = errors
+
+ if pax_headers is not None and self.format == PAX_FORMAT:
+ self.pax_headers = pax_headers
+ else:
+ self.pax_headers = {}
+
+ if debug is not None:
+ self.debug = debug
+ if errorlevel is not None:
+ self.errorlevel = errorlevel
+
+ # Init datastructures.
+ self.copybufsize = copybufsize
+ self.closed = False
+ self.members = [] # list of members as TarInfo objects
+ self._loaded = False # flag if all members have been read
+ self.offset = self.fileobj.tell()
+ # current position in the archive file
+ self.inodes = {} # dictionary caching the inodes of
+ # archive members already added
+
+ try:
+ if self.mode == "r":
+ self.firstmember = None
+ self.firstmember = self.next()
+
+ if self.mode == "a":
+ # Move to the end of the archive,
+ # before the first empty block.
+ while True:
+ self.fileobj.seek(self.offset)
+ try:
+ tarinfo = self.tarinfo.fromtarfile(self)
+ self.members.append(tarinfo)
+ except EOFHeaderError:
+ self.fileobj.seek(self.offset)
+ break
+ except HeaderError as e:
+ raise ReadError(str(e)) from None
+
+ if self.mode in ("a", "w", "x"):
+ self._loaded = True
+
+ if self.pax_headers:
+ buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
+ self.fileobj.write(buf)
+ self.offset += len(buf)
+ except:
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+ raise
+
+ #--------------------------------------------------------------------------
+ # Below are the classmethods which act as alternate constructors to the
+ # TarFile class. The open() method is the only one that is needed for
+ # public use; it is the "super"-constructor and is able to select an
+ # adequate "sub"-constructor for a particular compression using the mapping
+ # from OPEN_METH.
+ #
+ # This concept allows one to subclass TarFile without losing the comfort of
+ # the super-constructor. A sub-constructor is registered and made available
+ # by adding it to the mapping in OPEN_METH.
+
+ @classmethod
+ def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
+ r"""Open a tar archive for reading, writing or appending. Return
+ an appropriate TarFile class.
+
+ mode:
+ 'r' or 'r:\*' open for reading with transparent compression
+ 'r:' open for reading exclusively uncompressed
+ 'r:gz' open for reading with gzip compression
+ 'r:bz2' open for reading with bzip2 compression
+ 'r:xz' open for reading with lzma compression
+ 'a' or 'a:' open for appending, creating the file if necessary
+ 'w' or 'w:' open for writing without compression
+ 'w:gz' open for writing with gzip compression
+ 'w:bz2' open for writing with bzip2 compression
+ 'w:xz' open for writing with lzma compression
+
+ 'x' or 'x:' create a tarfile exclusively without compression, raise
+ an exception if the file is already created
+ 'x:gz' create a gzip compressed tarfile, raise an exception
+ if the file is already created
+ 'x:bz2' create a bzip2 compressed tarfile, raise an exception
+ if the file is already created
+ 'x:xz' create an lzma compressed tarfile, raise an exception
+ if the file is already created
+
+ 'r|\*' open a stream of tar blocks with transparent compression
+ 'r|' open an uncompressed stream of tar blocks for reading
+ 'r|gz' open a gzip compressed stream of tar blocks
+ 'r|bz2' open a bzip2 compressed stream of tar blocks
+ 'r|xz' open an lzma compressed stream of tar blocks
+ 'w|' open an uncompressed stream for writing
+ 'w|gz' open a gzip compressed stream for writing
+ 'w|bz2' open a bzip2 compressed stream for writing
+ 'w|xz' open an lzma compressed stream for writing
+ """
+
+ if not name and not fileobj:
+ raise ValueError("nothing to open")
+
+ if mode in ("r", "r:*"):
+ # Find out which *open() is appropriate for opening the file.
+ def not_compressed(comptype):
+ return cls.OPEN_METH[comptype] == 'taropen'
+ error_msgs = []
+ for comptype in sorted(cls.OPEN_METH, key=not_compressed):
+ func = getattr(cls, cls.OPEN_METH[comptype])
+ if fileobj is not None:
+ saved_pos = fileobj.tell()
+ try:
+ return func(name, "r", fileobj, **kwargs)
+ except (ReadError, CompressionError) as e:
+ error_msgs.append(f'- method {comptype}: {e!r}')
+ if fileobj is not None:
+ fileobj.seek(saved_pos)
+ continue
+ error_msgs_summary = '\n'.join(error_msgs)
+ raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}")
+
+ elif ":" in mode:
+ filemode, comptype = mode.split(":", 1)
+ filemode = filemode or "r"
+ comptype = comptype or "tar"
+
+ # Select the *open() function according to
+ # given compression.
+ if comptype in cls.OPEN_METH:
+ func = getattr(cls, cls.OPEN_METH[comptype])
+ else:
+ raise CompressionError("unknown compression type %r" % comptype)
+ return func(name, filemode, fileobj, **kwargs)
+
+ elif "|" in mode:
+ filemode, comptype = mode.split("|", 1)
+ filemode = filemode or "r"
+ comptype = comptype or "tar"
+
+ if filemode not in ("r", "w"):
+ raise ValueError("mode must be 'r' or 'w'")
+
+ compresslevel = kwargs.pop("compresslevel", 9)
+ stream = _Stream(name, filemode, comptype, fileobj, bufsize,
+ compresslevel)
+ try:
+ t = cls(name, filemode, stream, **kwargs)
+ except:
+ stream.close()
+ raise
+ t._extfileobj = False
+ return t
+
+ elif mode in ("a", "w", "x"):
+ return cls.taropen(name, mode, fileobj, **kwargs)
+
+ raise ValueError("undiscernible mode")
+
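+    # A minimal usage sketch (illustrative only; assumes an "example.tar.gz"
+    # exists on disk):
+    #
+    #     with TarFile.open("example.tar.gz", "r:gz") as tf:
+    #         for member in tf.getmembers():
+    #             print(member.name)
+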
+ @classmethod
+ def taropen(cls, name, mode="r", fileobj=None, **kwargs):
+ """Open uncompressed tar archive name for reading or writing.
+ """
+ if mode not in ("r", "a", "w", "x"):
+ raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+ return cls(name, mode, fileobj, **kwargs)
+
+ @classmethod
+ def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+ """Open gzip compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if mode not in ("r", "w", "x"):
+ raise ValueError("mode must be 'r', 'w' or 'x'")
+
+ try:
+ from gzip import GzipFile
+ except ImportError:
+ raise CompressionError("gzip module is not available") from None
+
+ try:
+ fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
+ except OSError as e:
+ if fileobj is not None and mode == 'r':
+ raise ReadError("not a gzip file") from e
+ raise
+
+ try:
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except OSError as e:
+ fileobj.close()
+ if mode == 'r':
+ raise ReadError("not a gzip file") from e
+ raise
+ except:
+ fileobj.close()
+ raise
+ t._extfileobj = False
+ return t
+
+ @classmethod
+ def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+ """Open bzip2 compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if mode not in ("r", "w", "x"):
+ raise ValueError("mode must be 'r', 'w' or 'x'")
+
+ try:
+ from bz2 import BZ2File
+ except ImportError:
+ raise CompressionError("bz2 module is not available") from None
+
+ fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
+
+ try:
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except (OSError, EOFError) as e:
+ fileobj.close()
+ if mode == 'r':
+ raise ReadError("not a bzip2 file") from e
+ raise
+ except:
+ fileobj.close()
+ raise
+ t._extfileobj = False
+ return t
+
+ @classmethod
+ def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
+ """Open lzma compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if mode not in ("r", "w", "x"):
+ raise ValueError("mode must be 'r', 'w' or 'x'")
+
+ try:
+ from lzma import LZMAFile, LZMAError
+ except ImportError:
+ raise CompressionError("lzma module is not available") from None
+
+ fileobj = LZMAFile(fileobj or name, mode, preset=preset)
+
+ try:
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except (LZMAError, EOFError) as e:
+ fileobj.close()
+ if mode == 'r':
+ raise ReadError("not an lzma file") from e
+ raise
+ except:
+ fileobj.close()
+ raise
+ t._extfileobj = False
+ return t
+
+ # All *open() methods are registered here.
+ OPEN_METH = {
+ "tar": "taropen", # uncompressed tar
+ "gz": "gzopen", # gzip compressed tar
+ "bz2": "bz2open", # bzip2 compressed tar
+ "xz": "xzopen" # lzma compressed tar
+ }
+
+ #--------------------------------------------------------------------------
+ # The public methods which TarFile provides:
+
+ def close(self):
+ """Close the TarFile. In write-mode, two finishing zero blocks are
+ appended to the archive.
+ """
+ if self.closed:
+ return
+
+ self.closed = True
+ try:
+ if self.mode in ("a", "w", "x"):
+ self.fileobj.write(NUL * (BLOCKSIZE * 2))
+ self.offset += (BLOCKSIZE * 2)
+ # fill up the end with zero-blocks
+ # (like option -b20 for tar does)
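+                # e.g. an offset of 12288 ends 2048 bytes into a record,
+                # so 10240 - 2048 = 8192 NULs pad it to the 20480 boundary.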
+ blocks, remainder = divmod(self.offset, RECORDSIZE)
+ if remainder > 0:
+ self.fileobj.write(NUL * (RECORDSIZE - remainder))
+ finally:
+ if not self._extfileobj:
+ self.fileobj.close()
+
+ def getmember(self, name):
+        """Return a TarInfo object for member ``name``. If ``name`` cannot be
+        found in the archive, KeyError is raised. If a member occurs more
+ than once in the archive, its last occurrence is assumed to be the
+ most up-to-date version.
+ """
+ tarinfo = self._getmember(name.rstrip('/'))
+ if tarinfo is None:
+ raise KeyError("filename %r not found" % name)
+ return tarinfo
+
+ def getmembers(self):
+ """Return the members of the archive as a list of TarInfo objects. The
+ list has the same order as the members in the archive.
+ """
+ self._check()
+ if not self._loaded: # if we want to obtain a list of
+ self._load() # all members, we first have to
+ # scan the whole archive.
+ return self.members
+
+ def getnames(self):
+ """Return the members of the archive as a list of their names. It has
+ the same order as the list returned by getmembers().
+ """
+ return [tarinfo.name for tarinfo in self.getmembers()]
+
+ def gettarinfo(self, name=None, arcname=None, fileobj=None):
+ """Create a TarInfo object from the result of os.stat or equivalent
+ on an existing file. The file is either named by ``name``, or
+ specified as a file object ``fileobj`` with a file descriptor. If
+ given, ``arcname`` specifies an alternative name for the file in the
+ archive, otherwise, the name is taken from the 'name' attribute of
+ 'fileobj', or the 'name' argument. The name should be a text
+ string.
+ """
+ self._check("awx")
+
+ # When fileobj is given, replace name by
+ # fileobj's real name.
+ if fileobj is not None:
+ name = fileobj.name
+
+        # Build the name of the member in the archive.
+        # Backslashes are converted to forward slashes;
+        # absolute paths are turned into relative paths.
+ if arcname is None:
+ arcname = name
+ drv, arcname = os.path.splitdrive(arcname)
+ arcname = arcname.replace(os.sep, "/")
+ arcname = arcname.lstrip("/")
+
+ # Now, fill the TarInfo object with
+ # information specific for the file.
+ tarinfo = self.tarinfo()
+ tarinfo.tarfile = self # Not needed
+
+        # Use os.stat or os.lstat, depending on whether symlinks should be resolved.
+ if fileobj is None:
+ if not self.dereference:
+ statres = os.lstat(name)
+ else:
+ statres = os.stat(name)
+ else:
+ statres = os.fstat(fileobj.fileno())
+ linkname = ""
+
+ stmd = statres.st_mode
+ if stat.S_ISREG(stmd):
+ inode = (statres.st_ino, statres.st_dev)
+ if not self.dereference and statres.st_nlink > 1 and \
+ inode in self.inodes and arcname != self.inodes[inode]:
+ # Is it a hardlink to an already
+ # archived file?
+ type = LNKTYPE
+ linkname = self.inodes[inode]
+ else:
+                # The inode is added only if it's valid.
+ # For win32 it is always 0.
+ type = REGTYPE
+ if inode[0]:
+ self.inodes[inode] = arcname
+ elif stat.S_ISDIR(stmd):
+ type = DIRTYPE
+ elif stat.S_ISFIFO(stmd):
+ type = FIFOTYPE
+ elif stat.S_ISLNK(stmd):
+ type = SYMTYPE
+ linkname = os.readlink(name)
+ elif stat.S_ISCHR(stmd):
+ type = CHRTYPE
+ elif stat.S_ISBLK(stmd):
+ type = BLKTYPE
+ else:
+ return None
+
+ # Fill the TarInfo object with all
+ # information we can get.
+ tarinfo.name = arcname
+ tarinfo.mode = stmd
+ tarinfo.uid = statres.st_uid
+ tarinfo.gid = statres.st_gid
+ if type == REGTYPE:
+ tarinfo.size = statres.st_size
+ else:
+ tarinfo.size = 0
+ tarinfo.mtime = statres.st_mtime
+ tarinfo.type = type
+ tarinfo.linkname = linkname
+ if pwd:
+ try:
+ tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+ except KeyError:
+ pass
+ if grp:
+ try:
+ tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+ except KeyError:
+ pass
+
+ if type in (CHRTYPE, BLKTYPE):
+ if hasattr(os, "major") and hasattr(os, "minor"):
+ tarinfo.devmajor = os.major(statres.st_rdev)
+ tarinfo.devminor = os.minor(statres.st_rdev)
+ return tarinfo
+
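+    # Sketch of the stat-based workflow from application code (illustrative;
+    # assumes "data.bin" exists; open here is the builtin):
+    #
+    #     info = tf.gettarinfo("data.bin", arcname="payload/data.bin")
+    #     with open("data.bin", "rb") as f:
+    #         tf.addfile(info, f)
+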
+ def list(self, verbose=True, *, members=None):
+ """Print a table of contents to sys.stdout. If ``verbose`` is False, only
+ the names of the members are printed. If it is True, an `ls -l'-like
+ output is produced. ``members`` is optional and must be a subset of the
+ list returned by getmembers().
+ """
+ self._check()
+
+ if members is None:
+ members = self
+ for tarinfo in members:
+ if verbose:
+ if tarinfo.mode is None:
+ _safe_print("??????????")
+ else:
+ _safe_print(stat.filemode(tarinfo.mode))
+ _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
+ tarinfo.gname or tarinfo.gid))
+ if tarinfo.ischr() or tarinfo.isblk():
+ _safe_print("%10s" %
+ ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
+ else:
+ _safe_print("%10d" % tarinfo.size)
+ if tarinfo.mtime is None:
+ _safe_print("????-??-?? ??:??:??")
+ else:
+ _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
+ % time.localtime(tarinfo.mtime)[:6])
+
+ _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
+
+ if verbose:
+ if tarinfo.issym():
+ _safe_print("-> " + tarinfo.linkname)
+ if tarinfo.islnk():
+ _safe_print("link to " + tarinfo.linkname)
+ print()
+
+ def add(self, name, arcname=None, recursive=True, *, filter=None):
+ """Add the file ``name`` to the archive. ``name`` may be any type of file
+ (directory, fifo, symbolic link, etc.). If given, ``arcname``
+ specifies an alternative name for the file in the archive.
+ Directories are added recursively by default. This can be avoided by
+ setting ``recursive`` to False. ``filter`` is a function
+ that expects a TarInfo object argument and returns the changed
+ TarInfo object, if it returns None the TarInfo object will be
+ excluded from the archive.
+ """
+ self._check("awx")
+
+ if arcname is None:
+ arcname = name
+
+ # Skip if somebody tries to archive the archive...
+ if self.name is not None and os.path.abspath(name) == self.name:
+ self._dbg(2, "tarfile: Skipped %r" % name)
+ return
+
+ self._dbg(1, name)
+
+ # Create a TarInfo object from the file.
+ tarinfo = self.gettarinfo(name, arcname)
+
+ if tarinfo is None:
+ self._dbg(1, "tarfile: Unsupported type %r" % name)
+ return
+
+ # Change or exclude the TarInfo object.
+ if filter is not None:
+ tarinfo = filter(tarinfo)
+ if tarinfo is None:
+ self._dbg(2, "tarfile: Excluded %r" % name)
+ return
+
+ # Append the tar header and data to the archive.
+ if tarinfo.isreg():
+ with bltn_open(name, "rb") as f:
+ self.addfile(tarinfo, f)
+
+ elif tarinfo.isdir():
+ self.addfile(tarinfo)
+ if recursive:
+ for f in sorted(os.listdir(name)):
+ self.add(os.path.join(name, f), os.path.join(arcname, f),
+ recursive, filter=filter)
+
+ else:
+ self.addfile(tarinfo)
+
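+    # Hypothetical ``filter`` hook (illustrative): drop editor backups and
+    # anonymize ownership before members are written.
+    #
+    #     def scrub(tarinfo):
+    #         if tarinfo.name.endswith("~"):
+    #             return None              # excluded from the archive
+    #         tarinfo.uid = tarinfo.gid = 0
+    #         tarinfo.uname = tarinfo.gname = "root"
+    #         return tarinfo
+    #
+    #     tf.add("src", filter=scrub)
+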
+ def addfile(self, tarinfo, fileobj=None):
+ """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
+ given, it should be a binary file, and tarinfo.size bytes are read
+ from it and added to the archive. You can create TarInfo objects
+ directly, or by using gettarinfo().
+ """
+ self._check("awx")
+
+ tarinfo = copy.copy(tarinfo)
+
+ buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
+ self.fileobj.write(buf)
+ self.offset += len(buf)
+        bufsize = self.copybufsize
+ # If there's data to follow, append it.
+ if fileobj is not None:
+ copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
+ blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
+ if remainder > 0:
+ self.fileobj.write(NUL * (BLOCKSIZE - remainder))
+ blocks += 1
+ self.offset += blocks * BLOCKSIZE
+
+ self.members.append(tarinfo)
+
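+    # Adding a member from in-memory data (illustrative sketch):
+    #
+    #     import io
+    #     data = b"hello world\n"
+    #     info = TarInfo(name="greeting.txt")
+    #     info.size = len(data)
+    #     tf.addfile(info, io.BytesIO(data))
+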
+ def _get_filter_function(self, filter):
+ if filter is None:
+ filter = self.extraction_filter
+ if filter is None:
+ warnings.warn(
+ 'Python 3.14 will, by default, filter extracted tar '
+ + 'archives and reject files or modify their metadata. '
+ + 'Use the filter argument to control this behavior.',
+ DeprecationWarning)
+ return fully_trusted_filter
+ if isinstance(filter, str):
+ raise TypeError(
+ 'String names are not supported for '
+ + 'TarFile.extraction_filter. Use a function such as '
+ + 'tarfile.data_filter directly.')
+ return filter
+ if callable(filter):
+ return filter
+ try:
+ return _NAMED_FILTERS[filter]
+ except KeyError:
+ raise ValueError(f"filter {filter!r} not found") from None
+
+ def extractall(self, path=".", members=None, *, numeric_owner=False,
+ filter=None):
+ """Extract all members from the archive to the current working
+ directory and set owner, modification time and permissions on
+ directories afterwards. `path' specifies a different directory
+ to extract to. `members' is optional and must be a subset of the
+ list returned by getmembers(). If `numeric_owner` is True, only
+ the numbers for user/group names are used and not the names.
+
+ The `filter` function will be called on each member just
+ before extraction.
+ It can return a changed TarInfo or None to skip the member.
+ String names of common filters are accepted.
+ """
+ directories = []
+
+ filter_function = self._get_filter_function(filter)
+ if members is None:
+ members = self
+
+ for member in members:
+ tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ if tarinfo is None:
+ continue
+ if tarinfo.isdir():
+ # For directories, delay setting attributes until later,
+ # since permissions can interfere with extraction and
+ # extracting contents can reset mtime.
+ directories.append(tarinfo)
+ self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
+ numeric_owner=numeric_owner)
+
+ # Reverse sort directories.
+ directories.sort(key=lambda a: a.name, reverse=True)
+
+ # Set correct owner, mtime and filemode on directories.
+ for tarinfo in directories:
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
+ self.utime(tarinfo, dirpath)
+ self.chmod(tarinfo, dirpath)
+ except ExtractError as e:
+ self._handle_nonfatal_error(e)
+
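+    # Typical safe extraction (illustrative). "data" names the built-in
+    # data_filter, which refuses members that would end up outside the
+    # destination directory, among other safety checks:
+    #
+    #     with TarFile.open("example.tar") as tf:
+    #         tf.extractall(path="out", filter="data")
+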
+ def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
+ filter=None):
+ """Extract a member from the archive to the current working directory,
+ using its full name. Its file information is extracted as accurately
+ as possible. `member' may be a filename or a TarInfo object. You can
+ specify a different directory using `path'. File attributes (owner,
+ mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
+ is True, only the numbers for user/group names are used and not
+ the names.
+
+ The `filter` function will be called before extraction.
+ It can return a changed TarInfo or None to skip the member.
+ String names of common filters are accepted.
+ """
+ filter_function = self._get_filter_function(filter)
+ tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ if tarinfo is not None:
+ self._extract_one(tarinfo, path, set_attrs, numeric_owner)
+
+ def _get_extract_tarinfo(self, member, filter_function, path):
+ """Get filtered TarInfo (or None) from member, which might be a str"""
+ if isinstance(member, str):
+ tarinfo = self.getmember(member)
+ else:
+ tarinfo = member
+
+ unfiltered = tarinfo
+ try:
+ tarinfo = filter_function(tarinfo, path)
+ except (OSError, FilterError) as e:
+ self._handle_fatal_error(e)
+ except ExtractError as e:
+ self._handle_nonfatal_error(e)
+ if tarinfo is None:
+ self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
+ return None
+ # Prepare the link target for makelink().
+ if tarinfo.islnk():
+ tarinfo = copy.copy(tarinfo)
+ tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+ return tarinfo
+
+ def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
+ """Extract from filtered tarinfo to disk"""
+ self._check("r")
+
+ try:
+ self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
+ set_attrs=set_attrs,
+ numeric_owner=numeric_owner)
+ except OSError as e:
+ self._handle_fatal_error(e)
+ except ExtractError as e:
+ self._handle_nonfatal_error(e)
+
+ def _handle_nonfatal_error(self, e):
+ """Handle non-fatal error (ExtractError) according to errorlevel"""
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+ def _handle_fatal_error(self, e):
+ """Handle "fatal" error according to self.errorlevel"""
+ if self.errorlevel > 0:
+ raise
+ elif isinstance(e, OSError):
+ if e.filename is None:
+ self._dbg(1, "tarfile: %s" % e.strerror)
+ else:
+ self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+ else:
+ self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
+
+ def extractfile(self, member):
+ """Extract a member from the archive as a file object. ``member`` may be
+ a filename or a TarInfo object. If ``member`` is a regular file or
+ a link, an io.BufferedReader object is returned. For all other
+ existing members, None is returned. If ``member`` does not appear
+ in the archive, KeyError is raised.
+ """
+ self._check("r")
+
+ if isinstance(member, str):
+ tarinfo = self.getmember(member)
+ else:
+ tarinfo = member
+
+ if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
+ # Members with unknown types are treated as regular files.
+ return self.fileobject(self, tarinfo)
+
+ elif tarinfo.islnk() or tarinfo.issym():
+ if isinstance(self.fileobj, _Stream):
+ # A small but ugly workaround for the case that someone tries
+ # to extract a (sym)link as a file-object from a non-seekable
+ # stream of tar blocks.
+ raise StreamError("cannot extract (sym)link as file object")
+ else:
+ # A (sym)link's file object is its target's file object.
+ return self.extractfile(self._find_link_target(tarinfo))
+ else:
+ # If there's no data associated with the member (directory, chrdev,
+ # blkdev, etc.), return None instead of a file object.
+ return None
+
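+    # Reading a member without writing it to disk (illustrative):
+    #
+    #     f = tf.extractfile("docs/README")
+    #     if f is not None:    # None for directories, devices and FIFOs
+    #         text = f.read().decode("utf-8")
+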
+ def _extract_member(self, tarinfo, targetpath, set_attrs=True,
+ numeric_owner=False):
+ """Extract the TarInfo object tarinfo to a physical
+ file called targetpath.
+ """
+ # Fetch the TarInfo object for the given name
+ # and build the destination pathname, replacing
+        # forward slashes with platform-specific separators.
+ targetpath = targetpath.rstrip("/")
+ targetpath = targetpath.replace("/", os.sep)
+
+ # Create all upper directories.
+ upperdirs = os.path.dirname(targetpath)
+ if upperdirs and not os.path.exists(upperdirs):
+ # Create directories that are not part of the archive with
+ # default permissions.
+ os.makedirs(upperdirs)
+
+ if tarinfo.islnk() or tarinfo.issym():
+ self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
+ else:
+ self._dbg(1, tarinfo.name)
+
+ if tarinfo.isreg():
+ self.makefile(tarinfo, targetpath)
+ elif tarinfo.isdir():
+ self.makedir(tarinfo, targetpath)
+ elif tarinfo.isfifo():
+ self.makefifo(tarinfo, targetpath)
+ elif tarinfo.ischr() or tarinfo.isblk():
+ self.makedev(tarinfo, targetpath)
+ elif tarinfo.islnk() or tarinfo.issym():
+ self.makelink(tarinfo, targetpath)
+ elif tarinfo.type not in SUPPORTED_TYPES:
+ self.makeunknown(tarinfo, targetpath)
+ else:
+ self.makefile(tarinfo, targetpath)
+
+ if set_attrs:
+ self.chown(tarinfo, targetpath, numeric_owner)
+ if not tarinfo.issym():
+ self.chmod(tarinfo, targetpath)
+ self.utime(tarinfo, targetpath)
+
+ #--------------------------------------------------------------------------
+ # Below are the different file methods. They are called via
+ # _extract_member() when extract() is called. They can be replaced in a
+ # subclass to implement other functionality.
+
+ def makedir(self, tarinfo, targetpath):
+ """Make a directory called targetpath.
+ """
+ try:
+ if tarinfo.mode is None:
+ # Use the system's default mode
+ os.mkdir(targetpath)
+ else:
+ # Use a safe mode for the directory, the real mode is set
+ # later in _extract_member().
+ os.mkdir(targetpath, 0o700)
+ except FileExistsError:
+ if not os.path.isdir(targetpath):
+ raise
+
+ def makefile(self, tarinfo, targetpath):
+ """Make a file called targetpath.
+ """
+ source = self.fileobj
+ source.seek(tarinfo.offset_data)
+ bufsize = self.copybufsize
+ with bltn_open(targetpath, "wb") as target:
+ if tarinfo.sparse is not None:
+ for offset, size in tarinfo.sparse:
+ target.seek(offset)
+ copyfileobj(source, target, size, ReadError, bufsize)
+ target.seek(tarinfo.size)
+ target.truncate()
+ else:
+ copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
+
+ def makeunknown(self, tarinfo, targetpath):
+ """Make a file from a TarInfo object with an unknown type
+ at targetpath.
+ """
+ self.makefile(tarinfo, targetpath)
+ self._dbg(1, "tarfile: Unknown file type %r, " \
+ "extracted as regular file." % tarinfo.type)
+
+ def makefifo(self, tarinfo, targetpath):
+ """Make a fifo called targetpath.
+ """
+ if hasattr(os, "mkfifo"):
+ os.mkfifo(targetpath)
+ else:
+ raise ExtractError("fifo not supported by system")
+
+ def makedev(self, tarinfo, targetpath):
+ """Make a character or block device called targetpath.
+ """
+ if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
+ raise ExtractError("special devices not supported by system")
+
+ mode = tarinfo.mode
+ if mode is None:
+ # Use mknod's default
+ mode = 0o600
+ if tarinfo.isblk():
+ mode |= stat.S_IFBLK
+ else:
+ mode |= stat.S_IFCHR
+
+ os.mknod(targetpath, mode,
+ os.makedev(tarinfo.devmajor, tarinfo.devminor))
+
+ def makelink(self, tarinfo, targetpath):
+ """Make a (symbolic) link called targetpath. If it cannot be created
+ (platform limitation), we try to make a copy of the referenced file
+ instead of a link.
+ """
+ try:
+ # For systems that support symbolic and hard links.
+ if tarinfo.issym():
+ if os.path.lexists(targetpath):
+ # Avoid FileExistsError on following os.symlink.
+ os.unlink(targetpath)
+ os.symlink(tarinfo.linkname, targetpath)
+ else:
+ if os.path.exists(tarinfo._link_target):
+ os.link(tarinfo._link_target, targetpath)
+ else:
+ self._extract_member(self._find_link_target(tarinfo),
+ targetpath)
+ except symlink_exception:
+ try:
+ self._extract_member(self._find_link_target(tarinfo),
+ targetpath)
+ except KeyError:
+ raise ExtractError("unable to resolve link inside archive") from None
+
+ def chown(self, tarinfo, targetpath, numeric_owner):
+ """Set owner of targetpath according to tarinfo. If numeric_owner
+ is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
+ is False, fall back to .gid/.uid when the search based on name
+ fails.
+ """
+ if hasattr(os, "geteuid") and os.geteuid() == 0:
+ # We have to be root to do so.
+ g = tarinfo.gid
+ u = tarinfo.uid
+ if not numeric_owner:
+ try:
+ if grp and tarinfo.gname:
+ g = grp.getgrnam(tarinfo.gname)[2]
+ except KeyError:
+ pass
+ try:
+ if pwd and tarinfo.uname:
+ u = pwd.getpwnam(tarinfo.uname)[2]
+ except KeyError:
+ pass
+ if g is None:
+ g = -1
+ if u is None:
+ u = -1
+ try:
+ if tarinfo.issym() and hasattr(os, "lchown"):
+ os.lchown(targetpath, u, g)
+ else:
+ os.chown(targetpath, u, g)
+ except OSError as e:
+ raise ExtractError("could not change owner") from e
+
+ def chmod(self, tarinfo, targetpath):
+ """Set file permissions of targetpath according to tarinfo.
+ """
+ if tarinfo.mode is None:
+ return
+ try:
+ os.chmod(targetpath, tarinfo.mode)
+ except OSError as e:
+ raise ExtractError("could not change mode") from e
+
+ def utime(self, tarinfo, targetpath):
+ """Set modification time of targetpath according to tarinfo.
+ """
+ mtime = tarinfo.mtime
+ if mtime is None:
+ return
+ if not hasattr(os, 'utime'):
+ return
+ try:
+ os.utime(targetpath, (mtime, mtime))
+ except OSError as e:
+ raise ExtractError("could not change modification time") from e
+
+ #--------------------------------------------------------------------------
+ def next(self):
+ """Return the next member of the archive as a TarInfo object, when
+        TarFile is opened for reading. Return None if there are no more
+        members available.
+ """
+ self._check("ra")
+ if self.firstmember is not None:
+ m = self.firstmember
+ self.firstmember = None
+ return m
+
+ # Advance the file pointer.
+ if self.offset != self.fileobj.tell():
+ if self.offset == 0:
+ return None
+ self.fileobj.seek(self.offset - 1)
+ if not self.fileobj.read(1):
+ raise ReadError("unexpected end of data")
+
+ # Read the next block.
+ tarinfo = None
+ while True:
+ try:
+ tarinfo = self.tarinfo.fromtarfile(self)
+ except EOFHeaderError as e:
+ if self.ignore_zeros:
+ self._dbg(2, "0x%X: %s" % (self.offset, e))
+ self.offset += BLOCKSIZE
+ continue
+ except InvalidHeaderError as e:
+ if self.ignore_zeros:
+ self._dbg(2, "0x%X: %s" % (self.offset, e))
+ self.offset += BLOCKSIZE
+ continue
+ elif self.offset == 0:
+ raise ReadError(str(e)) from None
+ except EmptyHeaderError:
+ if self.offset == 0:
+ raise ReadError("empty file") from None
+ except TruncatedHeaderError as e:
+ if self.offset == 0:
+ raise ReadError(str(e)) from None
+ except SubsequentHeaderError as e:
+ raise ReadError(str(e)) from None
+ except Exception as e:
+ try:
+ import zlib
+ if isinstance(e, zlib.error):
+ raise ReadError(f'zlib error: {e}') from None
+ else:
+ raise e
+ except ImportError:
+ raise e
+ break
+
+ if tarinfo is not None:
+ self.members.append(tarinfo)
+ else:
+ self._loaded = True
+
+ return tarinfo
+
+ #--------------------------------------------------------------------------
+ # Little helper methods:
+
+ def _getmember(self, name, tarinfo=None, normalize=False):
+ """Find an archive member by name from bottom to top.
+ If tarinfo is given, it is used as the starting point.
+ """
+ # Ensure that all members have been loaded.
+ members = self.getmembers()
+
+ # Limit the member search list up to tarinfo.
+ skipping = False
+ if tarinfo is not None:
+ try:
+ index = members.index(tarinfo)
+ except ValueError:
+ # The given starting point might be a (modified) copy.
+ # We'll later skip members until we find an equivalent.
+ skipping = True
+ else:
+ # Happy fast path
+ members = members[:index]
+
+ if normalize:
+ name = os.path.normpath(name)
+
+ for member in reversed(members):
+ if skipping:
+ if tarinfo.offset == member.offset:
+ skipping = False
+ continue
+ if normalize:
+ member_name = os.path.normpath(member.name)
+ else:
+ member_name = member.name
+
+ if name == member_name:
+ return member
+
+ if skipping:
+ # Starting point was not found
+ raise ValueError(tarinfo)
+
+ def _load(self):
+ """Read through the entire archive file and look for readable
+ members.
+ """
+ while self.next() is not None:
+ pass
+ self._loaded = True
+
+ def _check(self, mode=None):
+ """Check if TarFile is still open, and if the operation's mode
+ corresponds to TarFile's mode.
+ """
+ if self.closed:
+ raise OSError("%s is closed" % self.__class__.__name__)
+ if mode is not None and self.mode not in mode:
+ raise OSError("bad operation for mode %r" % self.mode)
+
+ def _find_link_target(self, tarinfo):
+ """Find the target member of a symlink or hardlink member in the
+ archive.
+ """
+ if tarinfo.issym():
+ # Always search the entire archive.
+ linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
+ limit = None
+ else:
+ # Search the archive before the link, because a hard link is
+ # just a reference to an already archived file.
+ linkname = tarinfo.linkname
+ limit = tarinfo
+
+ member = self._getmember(linkname, tarinfo=limit, normalize=True)
+ if member is None:
+ raise KeyError("linkname %r not found" % linkname)
+ return member
+
+ def __iter__(self):
+ """Provide an iterator object.
+ """
+ if self._loaded:
+ yield from self.members
+ return
+
+ # Yield items using TarFile's next() method.
+ # When all members have been read, set TarFile as _loaded.
+ index = 0
+ # Fix for SF #1100429: Under rare circumstances it can
+ # happen that getmembers() is called during iteration,
+ # which will have already exhausted the next() method.
+ if self.firstmember is not None:
+ tarinfo = self.next()
+ index += 1
+ yield tarinfo
+
+ while True:
+ if index < len(self.members):
+ tarinfo = self.members[index]
+ elif not self._loaded:
+ tarinfo = self.next()
+ if not tarinfo:
+ self._loaded = True
+ return
+ else:
+ return
+ index += 1
+ yield tarinfo
+
+ def _dbg(self, level, msg):
+ """Write debugging output to sys.stderr.
+ """
+ if level <= self.debug:
+ print(msg, file=sys.stderr)
+
+ def __enter__(self):
+ self._check()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ if type is None:
+ self.close()
+ else:
+ # An exception occurred. We must not call close() because
+ # it would try to write end-of-archive blocks and padding.
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+
+#--------------------
+# exported functions
+#--------------------
+
+def is_tarfile(name):
+ """Return True if name points to a tar archive that we
+ are able to handle, else return False.
+
+ 'name' should be a string, file, or file-like object.
+ """
+ try:
+ if hasattr(name, "read"):
+ pos = name.tell()
+ t = open(fileobj=name)
+ name.seek(pos)
+ else:
+ t = open(name)
+ t.close()
+ return True
+ except TarError:
+ return False
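+
+# Illustrative check before opening (is_tarfile also accepts a binary
+# file-like object and restores its position afterwards):
+#
+#     if is_tarfile("archive.tgz"):
+#         with open("archive.tgz") as tf:    # open is bound to TarFile.open below
+#             tf.list()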
+
+open = TarFile.open
+
+
+def main():
+ import argparse
+
+    description = 'A simple command-line interface for the tarfile module.'
+ parser = argparse.ArgumentParser(description=description)
+ parser.add_argument('-v', '--verbose', action='store_true', default=False,
+ help='Verbose output')
+ parser.add_argument('--filter', metavar='<filtername>',
+ choices=_NAMED_FILTERS,
+ help='Filter for extraction')
+
+ group = parser.add_mutually_exclusive_group(required=True)
+ group.add_argument('-l', '--list', metavar='<tarfile>',
+ help='Show listing of a tarfile')
+ group.add_argument('-e', '--extract', nargs='+',
+ metavar=('<tarfile>', '<output_dir>'),
+ help='Extract tarfile into target dir')
+ group.add_argument('-c', '--create', nargs='+',
+ metavar=('<name>', '<file>'),
+ help='Create tarfile from sources')
+ group.add_argument('-t', '--test', metavar='<tarfile>',
+ help='Test if a tarfile is valid')
+
+ args = parser.parse_args()
+
+ if args.filter and args.extract is None:
+ parser.exit(1, '--filter is only valid for extraction\n')
+
+ if args.test is not None:
+ src = args.test
+ if is_tarfile(src):
+ with open(src, 'r') as tar:
+ tar.getmembers()
+ print(tar.getmembers(), file=sys.stderr)
+ if args.verbose:
+ print('{!r} is a tar archive.'.format(src))
+ else:
+ parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+ elif args.list is not None:
+ src = args.list
+ if is_tarfile(src):
+ with TarFile.open(src, 'r:*') as tf:
+ tf.list(verbose=args.verbose)
+ else:
+ parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+ elif args.extract is not None:
+ if len(args.extract) == 1:
+ src = args.extract[0]
+ curdir = os.curdir
+ elif len(args.extract) == 2:
+ src, curdir = args.extract
+ else:
+ parser.exit(1, parser.format_help())
+
+ if is_tarfile(src):
+ with TarFile.open(src, 'r:*') as tf:
+ tf.extractall(path=curdir, filter=args.filter)
+ if args.verbose:
+ if curdir == '.':
+ msg = '{!r} file is extracted.'.format(src)
+ else:
+ msg = ('{!r} file is extracted '
+ 'into {!r} directory.').format(src, curdir)
+ print(msg)
+ else:
+ parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+ elif args.create is not None:
+ tar_name = args.create.pop(0)
+ _, ext = os.path.splitext(tar_name)
+ compressions = {
+ # gz
+ '.gz': 'gz',
+ '.tgz': 'gz',
+ # xz
+ '.xz': 'xz',
+ '.txz': 'xz',
+ # bz2
+ '.bz2': 'bz2',
+ '.tbz': 'bz2',
+ '.tbz2': 'bz2',
+ '.tb2': 'bz2',
+ }
+ tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
+ tar_files = args.create
+
+ with TarFile.open(tar_name, tar_mode) as tf:
+ for file_name in tar_files:
+ tf.add(file_name)
+
+ if args.verbose:
+ print('{!r} file created.'.format(tar_name))
+
+if __name__ == '__main__':
+ main()
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/context.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/context.py
index b0d1ef37cb..c42f6135d5 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/context.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/context.py
@@ -1,15 +1,26 @@
-import os
-import subprocess
+from __future__ import annotations
+
import contextlib
import functools
-import tempfile
-import shutil
import operator
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import urllib.request
import warnings
+from typing import Iterator
+
+
+if sys.version_info < (3, 12):
+ from pkg_resources.extern.backports import tarfile
+else:
+ import tarfile
@contextlib.contextmanager
-def pushd(dir):
+def pushd(dir: str | os.PathLike) -> Iterator[str | os.PathLike]:
"""
>>> tmp_path = getfixture('tmp_path')
>>> with pushd(tmp_path):
@@ -26,33 +37,88 @@ def pushd(dir):
@contextlib.contextmanager
-def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
+def tarball(
+ url, target_dir: str | os.PathLike | None = None
+) -> Iterator[str | os.PathLike]:
"""
- Get a tarball, extract it, change to that directory, yield, then
- clean up.
- `runner` is the function to invoke commands.
- `pushd` is a context manager for changing the directory.
+ Get a tarball, extract it, yield, then clean up.
+
+ >>> import urllib.request
+ >>> url = getfixture('tarfile_served')
+ >>> target = getfixture('tmp_path') / 'out'
+ >>> tb = tarball(url, target_dir=target)
+ >>> import pathlib
+ >>> with tb as extracted:
+ ... contents = pathlib.Path(extracted, 'contents.txt').read_text(encoding='utf-8')
+ >>> assert not os.path.exists(extracted)
"""
if target_dir is None:
target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
- if runner is None:
- runner = functools.partial(subprocess.check_call, shell=True)
- else:
- warnings.warn("runner parameter is deprecated", DeprecationWarning)
# In the tar command, use --strip-components=1 to strip the first path and
# then
# use -C to cause the files to be extracted to {target_dir}. This ensures
# that we always know where the files were extracted.
- runner('mkdir {target_dir}'.format(**vars()))
+ os.mkdir(target_dir)
try:
- getter = 'wget {url} -O -'
- extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
- cmd = ' | '.join((getter, extract))
- runner(cmd.format(compression=infer_compression(url), **vars()))
- with pushd(target_dir):
- yield target_dir
+ req = urllib.request.urlopen(url)
+ with tarfile.open(fileobj=req, mode='r|*') as tf:
+ tf.extractall(path=target_dir, filter=strip_first_component)
+ yield target_dir
finally:
- runner('rm -Rf {target_dir}'.format(**vars()))
+ shutil.rmtree(target_dir)
+
+
+def strip_first_component(
+ member: tarfile.TarInfo,
+ path,
+) -> tarfile.TarInfo:
+ _, member.name = member.name.split('/', 1)
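+    # e.g. 'project-1.0/src/mod.py' -> 'src/mod.py'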
+ return member
+
+
+def _compose(*cmgrs):
+ """
+ Compose any number of dependent context managers into a single one.
+
+ The last, innermost context manager may take arbitrary arguments, but
+ each successive context manager should accept the result from the
+ previous as a single parameter.
+
+ Like :func:`jaraco.functools.compose`, behavior works from right to
+ left, so the context manager should be indicated from outermost to
+ innermost.
+
+ Example, to create a context manager to change to a temporary
+ directory:
+
+ >>> temp_dir_as_cwd = _compose(pushd, temp_dir)
+ >>> with temp_dir_as_cwd() as dir:
+ ... assert os.path.samefile(os.getcwd(), dir)
+ """
+
+ def compose_two(inner, outer):
+ def composed(*args, **kwargs):
+ with inner(*args, **kwargs) as saved, outer(saved) as res:
+ yield res
+
+ return contextlib.contextmanager(composed)
+
+ return functools.reduce(compose_two, reversed(cmgrs))
+
+
+tarball_cwd = _compose(pushd, tarball)
+
+
+@contextlib.contextmanager
+def tarball_context(*args, **kwargs):
+ warnings.warn(
+ "tarball_context is deprecated. Use tarball or tarball_cwd instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ pushd_ctx = kwargs.pop('pushd', pushd)
+ with tarball(*args, **kwargs) as tball, pushd_ctx(tball) as dir:
+ yield dir
def infer_compression(url):
@@ -68,6 +134,11 @@ def infer_compression(url):
>>> infer_compression('file.xz')
'J'
"""
+ warnings.warn(
+ "infer_compression is deprecated with no replacement",
+ DeprecationWarning,
+ stacklevel=2,
+ )
# cheat and just assume it's the last two characters
compression_indicator = url[-2:]
mapping = dict(gz='z', bz='j', xz='J')
@@ -84,7 +155,7 @@ def temp_dir(remover=shutil.rmtree):
>>> import pathlib
>>> with temp_dir() as the_dir:
... assert os.path.isdir(the_dir)
- ... _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents')
+ ... _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents', encoding='utf-8')
>>> assert not os.path.exists(the_dir)
"""
temp_dir = tempfile.mkdtemp()
@@ -113,15 +184,23 @@ def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
yield repo_dir
-@contextlib.contextmanager
def null():
"""
A null context suitable to stand in for a meaningful context.
>>> with null() as value:
... assert value is None
+
+ This context is most useful when dealing with two or more code
+ branches but only some need a context. Wrap the others in a null
+ context to provide symmetry across all options.
"""
- yield
+ warnings.warn(
+ "null is deprecated. Use contextlib.nullcontext",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return contextlib.nullcontext()
class ExceptionTrap:
@@ -267,13 +346,7 @@ class on_interrupt(contextlib.ContextDecorator):
... on_interrupt('ignore')(do_interrupt)()
"""
- def __init__(
- self,
- action='error',
- # py3.7 compat
- # /,
- code=1,
- ):
+ def __init__(self, action='error', /, code=1):
self.action = action
self.code = code
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/__init__.py
index 67aeadc353..f523099c72 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/__init__.py
@@ -1,18 +1,14 @@
+import collections.abc
import functools
-import time
import inspect
-import collections
-import types
import itertools
+import operator
+import time
+import types
import warnings
import pkg_resources.extern.more_itertools
-from typing import Callable, TypeVar
-
-
-CallableT = TypeVar("CallableT", bound=Callable[..., object])
-
def compose(*funcs):
"""
@@ -38,24 +34,6 @@ def compose(*funcs):
return functools.reduce(compose_two, funcs)
-def method_caller(method_name, *args, **kwargs):
- """
- Return a function that will call a named method on the
- target object with optional positional and keyword
- arguments.
-
- >>> lower = method_caller('lower')
- >>> lower('MyString')
- 'mystring'
- """
-
- def call_method(target):
- func = getattr(target, method_name)
- return func(*args, **kwargs)
-
- return call_method
-
-
def once(func):
"""
Decorate func so it's only ever called the first time.
@@ -98,12 +76,7 @@ def once(func):
return wrapper
-def method_cache(
- method: CallableT,
- cache_wrapper: Callable[
- [CallableT], CallableT
- ] = functools.lru_cache(), # type: ignore[assignment]
-) -> CallableT:
+def method_cache(method, cache_wrapper=functools.lru_cache()):
"""
Wrap lru_cache to support storing the cache data in the object instances.
@@ -171,21 +144,17 @@ def method_cache(
for another implementation and additional justification.
"""
- def wrapper(self: object, *args: object, **kwargs: object) -> object:
+ def wrapper(self, *args, **kwargs):
# it's the first call, replace the method with a cached, bound method
- bound_method: CallableT = types.MethodType( # type: ignore[assignment]
- method, self
- )
+ bound_method = types.MethodType(method, self)
cached_method = cache_wrapper(bound_method)
setattr(self, method.__name__, cached_method)
return cached_method(*args, **kwargs)
# Support cache clear even before cache has been created.
- wrapper.cache_clear = lambda: None # type: ignore[attr-defined]
+ wrapper.cache_clear = lambda: None
- return ( # type: ignore[return-value]
- _special_method_cache(method, cache_wrapper) or wrapper
- )
+ return _special_method_cache(method, cache_wrapper) or wrapper
def _special_method_cache(method, cache_wrapper):
@@ -201,12 +170,13 @@ def _special_method_cache(method, cache_wrapper):
"""
name = method.__name__
special_names = '__getattr__', '__getitem__'
+
if name not in special_names:
- return
+ return None
wrapper_name = '__cached' + name
- def proxy(self, *args, **kwargs):
+ def proxy(self, /, *args, **kwargs):
if wrapper_name not in vars(self):
bound = types.MethodType(method, self)
cache = cache_wrapper(bound)
@@ -243,7 +213,7 @@ def result_invoke(action):
r"""
Decorate a function with an action function that is
invoked on the results returned from the decorated
- function (for its side-effect), then return the original
+ function (for its side effect), then return the original
result.
>>> @result_invoke(print)
@@ -267,7 +237,7 @@ def result_invoke(action):
return wrap
-def invoke(f, *args, **kwargs):
+def invoke(f, /, *args, **kwargs):
"""
Call a function for its side effect after initialization.
@@ -302,25 +272,15 @@ def invoke(f, *args, **kwargs):
Use functools.partial to pass parameters to the initial call
>>> @functools.partial(invoke, name='bingo')
- ... def func(name): print("called with", name)
+ ... def func(name): print('called with', name)
called with bingo
"""
f(*args, **kwargs)
return f
-def call_aside(*args, **kwargs):
- """
- Deprecated name for invoke.
- """
- warnings.warn("call_aside is deprecated, use invoke", DeprecationWarning)
- return invoke(*args, **kwargs)
-
-
class Throttler:
- """
- Rate-limit a function (or other callable)
- """
+ """Rate-limit a function (or other callable)."""
def __init__(self, func, max_rate=float('Inf')):
if isinstance(func, Throttler):
@@ -337,20 +297,20 @@ class Throttler:
return self.func(*args, **kwargs)
def _wait(self):
- "ensure at least 1/max_rate seconds from last call"
+ """Ensure at least 1/max_rate seconds from last call."""
elapsed = time.time() - self.last_called
must_wait = 1 / self.max_rate - elapsed
time.sleep(max(0, must_wait))
self.last_called = time.time()
- def __get__(self, obj, type=None):
+ def __get__(self, obj, owner=None):
return first_invoke(self._wait, functools.partial(self.func, obj))
def first_invoke(func1, func2):
"""
Return a function that when invoked will invoke func1 without
- any parameters (for its side-effect) and then invoke func2
+ any parameters (for its side effect) and then invoke func2
with whatever parameters were passed, returning its result.
"""
@@ -361,6 +321,17 @@ def first_invoke(func1, func2):
return wrapper
+method_caller = first_invoke(
+ lambda: warnings.warn(
+ '`jaraco.functools.method_caller` is deprecated, '
+ 'use `operator.methodcaller` instead',
+ DeprecationWarning,
+ stacklevel=3,
+ ),
+ operator.methodcaller,
+)
+
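+# Calling e.g. method_caller('lower') now emits a DeprecationWarning and
+# returns operator.methodcaller('lower'), so method_caller('lower')('ABC')
+# still evaluates to 'abc'.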
+
def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
"""
Given a callable func, trap the indicated exceptions
@@ -369,7 +340,7 @@ def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
to propagate.
"""
attempts = itertools.count() if retries == float('inf') else range(retries)
- for attempt in attempts:
+ for _ in attempts:
try:
return func()
except trap:
@@ -406,7 +377,7 @@ def retry(*r_args, **r_kwargs):
def print_yielded(func):
"""
- Convert a generator into a function that prints all yielded elements
+ Convert a generator into a function that prints all yielded elements.
>>> @print_yielded
... def x():
@@ -422,7 +393,7 @@ def print_yielded(func):
def pass_none(func):
"""
- Wrap func so it's not called if its first param is None
+ Wrap func so it's not called if its first param is None.
>>> print_text = pass_none(print)
>>> print_text('text')
@@ -431,9 +402,10 @@ def pass_none(func):
"""
@functools.wraps(func)
- def wrapper(param, *args, **kwargs):
+ def wrapper(param, /, *args, **kwargs):
if param is not None:
return func(param, *args, **kwargs)
+ return None
return wrapper
@@ -507,7 +479,7 @@ def save_method_args(method):
args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')
@functools.wraps(method)
- def wrapper(self, *args, **kwargs):
+ def wrapper(self, /, *args, **kwargs):
attr_name = '_saved_' + method.__name__
attr = args_and_kwargs(args, kwargs)
setattr(self, attr_name, attr)
@@ -554,3 +526,108 @@ def except_(*exceptions, replace=None, use=None):
return wrapper
return decorate
+
+
+def identity(x):
+ """
+ Return the argument.
+
+ >>> o = object()
+ >>> identity(o) is o
+ True
+ """
+ return x
+
+
+def bypass_when(check, *, _op=identity):
+ """
+ Decorate a function to return its parameter when ``check``.
+
+ >>> bypassed = [] # False
+
+ >>> @bypass_when(bypassed)
+ ... def double(x):
+ ... return x * 2
+ >>> double(2)
+ 4
+ >>> bypassed[:] = [object()] # True
+ >>> double(2)
+ 2
+ """
+
+ def decorate(func):
+ @functools.wraps(func)
+ def wrapper(param, /):
+ return param if _op(check) else func(param)
+
+ return wrapper
+
+ return decorate
+
+
+def bypass_unless(check):
+ """
+ Decorate a function to return its parameter unless ``check``.
+
+ >>> enabled = [object()] # True
+
+ >>> @bypass_unless(enabled)
+ ... def double(x):
+ ... return x * 2
+ >>> double(2)
+ 4
+ >>> del enabled[:] # False
+ >>> double(2)
+ 2
+ """
+ return bypass_when(check, _op=operator.not_)
+
+
+@functools.singledispatch
+def _splat_inner(args, func):
+ """Splat args to func."""
+ return func(*args)
+
+
+@_splat_inner.register
+def _(args: collections.abc.Mapping, func):
+ """Splat kargs to func as kwargs."""
+ return func(**args)
+
+
+def splat(func):
+ """
+ Wrap func to expect its parameters to be passed positionally in a tuple.
+
+ Has a similar effect to that of ``itertools.starmap`` over
+ simple ``map``.
+
+ >>> pairs = [(-1, 1), (0, 2)]
+ >>> pkg_resources.extern.more_itertools.consume(itertools.starmap(print, pairs))
+ -1 1
+ 0 2
+ >>> pkg_resources.extern.more_itertools.consume(map(splat(print), pairs))
+ -1 1
+ 0 2
+
+ The approach generalizes to other iterators that don't have a "star"
+ equivalent, such as a "starfilter".
+
+ >>> list(filter(splat(operator.add), pairs))
+ [(0, 2)]
+
+ Splat also accepts a mapping argument.
+
+ >>> def is_nice(msg, code):
+ ... return "smile" in msg or code == 0
+ >>> msgs = [
+ ... dict(msg='smile!', code=20),
+ ... dict(msg='error :(', code=1),
+ ... dict(msg='unknown', code=0),
+ ... ]
+ >>> for msg in filter(splat(is_nice), msgs):
+ ... print(msg)
+ {'msg': 'smile!', 'code': 20}
+ {'msg': 'unknown', 'code': 0}
+ """
+ return functools.wraps(func)(functools.partial(_splat_inner, func=func))
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/__init__.pyi b/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/__init__.pyi
new file mode 100644
index 0000000000..c2b9ab1757
--- /dev/null
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/__init__.pyi
@@ -0,0 +1,128 @@
+from collections.abc import Callable, Hashable, Iterator
+from functools import partial
+from operator import methodcaller
+import sys
+from typing import (
+ Any,
+ Generic,
+ Protocol,
+ TypeVar,
+ overload,
+)
+
+if sys.version_info >= (3, 10):
+ from typing import Concatenate, ParamSpec
+else:
+ from typing_extensions import Concatenate, ParamSpec
+
+_P = ParamSpec('_P')
+_R = TypeVar('_R')
+_T = TypeVar('_T')
+_R1 = TypeVar('_R1')
+_R2 = TypeVar('_R2')
+_V = TypeVar('_V')
+_S = TypeVar('_S')
+_R_co = TypeVar('_R_co', covariant=True)
+
+class _OnceCallable(Protocol[_P, _R]):
+ saved_result: _R
+ reset: Callable[[], None]
+ def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
+
+class _ProxyMethodCacheWrapper(Protocol[_R_co]):
+ cache_clear: Callable[[], None]
+ def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
+
+class _MethodCacheWrapper(Protocol[_R_co]):
+ def cache_clear(self) -> None: ...
+ def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
+
+# `compose()` overloads below will cover most use cases.
+
+@overload
+def compose(
+ __func1: Callable[[_R], _T],
+ __func2: Callable[_P, _R],
+ /,
+) -> Callable[_P, _T]: ...
+@overload
+def compose(
+ __func1: Callable[[_R], _T],
+ __func2: Callable[[_R1], _R],
+ __func3: Callable[_P, _R1],
+ /,
+) -> Callable[_P, _T]: ...
+@overload
+def compose(
+ __func1: Callable[[_R], _T],
+ __func2: Callable[[_R2], _R],
+ __func3: Callable[[_R1], _R2],
+ __func4: Callable[_P, _R1],
+ /,
+) -> Callable[_P, _T]: ...
+def once(func: Callable[_P, _R]) -> _OnceCallable[_P, _R]: ...
+def method_cache(
+ method: Callable[..., _R],
+ cache_wrapper: Callable[[Callable[..., _R]], _MethodCacheWrapper[_R]] = ...,
+) -> _MethodCacheWrapper[_R] | _ProxyMethodCacheWrapper[_R]: ...
+def apply(
+ transform: Callable[[_R], _T]
+) -> Callable[[Callable[_P, _R]], Callable[_P, _T]]: ...
+def result_invoke(
+ action: Callable[[_R], Any]
+) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]: ...
+def invoke(
+ f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
+) -> Callable[_P, _R]: ...
+def call_aside(
+ f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
+) -> Callable[_P, _R]: ...
+
+class Throttler(Generic[_R]):
+ last_called: float
+ func: Callable[..., _R]
+ max_rate: float
+ def __init__(
+ self, func: Callable[..., _R] | Throttler[_R], max_rate: float = ...
+ ) -> None: ...
+ def reset(self) -> None: ...
+ def __call__(self, *args: Any, **kwargs: Any) -> _R: ...
+ def __get__(self, obj: Any, owner: type[Any] | None = ...) -> Callable[..., _R]: ...
+
+def first_invoke(
+ func1: Callable[..., Any], func2: Callable[_P, _R]
+) -> Callable[_P, _R]: ...
+
+method_caller: Callable[..., methodcaller]
+
+def retry_call(
+ func: Callable[..., _R],
+ cleanup: Callable[..., None] = ...,
+ retries: int | float = ...,
+ trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
+) -> _R: ...
+def retry(
+ cleanup: Callable[..., None] = ...,
+ retries: int | float = ...,
+ trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
+) -> Callable[[Callable[..., _R]], Callable[..., _R]]: ...
+def print_yielded(func: Callable[_P, Iterator[Any]]) -> Callable[_P, None]: ...
+def pass_none(
+ func: Callable[Concatenate[_T, _P], _R]
+) -> Callable[Concatenate[_T, _P], _R]: ...
+def assign_params(
+ func: Callable[..., _R], namespace: dict[str, Any]
+) -> partial[_R]: ...
+def save_method_args(
+ method: Callable[Concatenate[_S, _P], _R]
+) -> Callable[Concatenate[_S, _P], _R]: ...
+def except_(
+ *exceptions: type[BaseException], replace: Any = ..., use: Any = ...
+) -> Callable[[Callable[_P, Any]], Callable[_P, Any]]: ...
+def identity(x: _T) -> _T: ...
+def bypass_when(
+ check: _V, *, _op: Callable[[_V], Any] = ...
+) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
+def bypass_unless(
+ check: Any,
+) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/py.typed b/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/jaraco/functools/py.typed
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/__init__.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/__init__.py
index 66443971df..aff94a9abd 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/__init__.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/__init__.py
@@ -3,4 +3,4 @@
from .more import * # noqa
from .recipes import * # noqa
-__version__ = '9.1.0'
+__version__ = '10.2.0'
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/more.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/more.py
index e0e2d3de92..d0957681f5 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/more.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/more.py
@@ -2,7 +2,7 @@ import warnings
from collections import Counter, defaultdict, deque, abc
from collections.abc import Sequence
-from functools import partial, reduce, wraps
+from functools import cached_property, partial, reduce, wraps
from heapq import heapify, heapreplace, heappop
from itertools import (
chain,
@@ -17,8 +17,9 @@ from itertools import (
takewhile,
tee,
zip_longest,
+ product,
)
-from math import exp, factorial, floor, log
+from math import exp, factorial, floor, log, perm, comb
from queue import Empty, Queue
from random import random, randrange, uniform
from operator import itemgetter, mul, sub, gt, lt, ge, le
@@ -36,6 +37,7 @@ from .recipes import (
take,
unique_everseen,
all_equal,
+ batched,
)
__all__ = [
@@ -53,6 +55,7 @@ __all__ = [
'circular_shifts',
'collapse',
'combination_index',
+ 'combination_with_replacement_index',
'consecutive_groups',
'constrained_batches',
'consumer',
@@ -65,8 +68,10 @@ __all__ = [
'divide',
'duplicates_everseen',
'duplicates_justseen',
+ 'classify_unique',
'exactly_n',
'filter_except',
+ 'filter_map',
'first',
'gray_product',
'groupby_transform',
@@ -80,6 +85,7 @@ __all__ = [
'is_sorted',
'islice_extended',
'iterate',
+ 'iter_suppress',
'last',
'locate',
'longest_common_prefix',
@@ -93,10 +99,13 @@ __all__ = [
'nth_or_last',
'nth_permutation',
'nth_product',
+ 'nth_combination_with_replacement',
'numeric_range',
'one',
'only',
+ 'outer_product',
'padded',
+ 'partial_product',
'partitions',
'peekable',
'permutation_index',
@@ -125,6 +134,7 @@ __all__ = [
'strictly_n',
'substrings',
'substrings_indexes',
+ 'takewhile_inclusive',
'time_limited',
'unique_in_window',
'unique_to_each',
@@ -191,15 +201,14 @@ def first(iterable, default=_marker):
``next(iter(iterable), default)``.
"""
- try:
- return next(iter(iterable))
- except StopIteration as e:
- if default is _marker:
- raise ValueError(
- 'first() was called on an empty iterable, and no '
- 'default value was provided.'
- ) from e
- return default
+ for item in iterable:
+ return item
+ if default is _marker:
+ raise ValueError(
+ 'first() was called on an empty iterable, and no '
+ 'default value was provided.'
+ )
+ return default
def last(iterable, default=_marker):
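
A minimal behavior sketch for the loop-based rewrite of first() (standalone `more_itertools` assumed; the vendored copy is reached via `pkg_resources.extern`):

    from more_itertools import first

    assert first(iter([1, 2, 3])) == 1
    assert first([], default='missing') == 'missing'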
@@ -472,7 +481,10 @@ def iterate(func, start):
"""
while True:
yield start
- start = func(start)
+ try:
+ start = func(start)
+ except StopIteration:
+ break
def with_iter(context_manager):
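
With this change a step function can signal "no successor" by raising StopIteration; a sketch (standalone `more_itertools` assumed):

    from more_itertools import iterate, take

    def halve(n):
        if n == 0:
            raise StopIteration   # no successor: iterate() now stops cleanly
        return n // 2

    assert take(10, iterate(halve, 8)) == [8, 4, 2, 1, 0]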
@@ -572,6 +584,9 @@ def strictly_n(iterable, n, too_short=None, too_long=None):
>>> list(strictly_n(iterable, n))
['a', 'b', 'c', 'd']
+ Note that the returned iterable must be consumed in order for the check to
+ be made.
+
By default, *too_short* and *too_long* are functions that raise
``ValueError``.
@@ -909,7 +924,7 @@ def substrings_indexes(seq, reverse=False):
class bucket:
- """Wrap *iterable* and return an object that buckets it iterable into
+ """Wrap *iterable* and return an object that buckets the iterable into
child iterables based on a *key* function.
>>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3']
@@ -2069,7 +2084,6 @@ class numeric_range(abc.Sequence, abc.Hashable):
if self._step == self._zero:
raise ValueError('numeric_range() arg 3 must not be zero')
self._growing = self._step > self._zero
- self._init_len()
def __bool__(self):
if self._growing:
@@ -2145,7 +2159,8 @@ class numeric_range(abc.Sequence, abc.Hashable):
def __len__(self):
return self._len
- def _init_len(self):
+ @cached_property
+ def _len(self):
if self._growing:
start = self._start
stop = self._stop
@@ -2156,10 +2171,10 @@ class numeric_range(abc.Sequence, abc.Hashable):
step = -self._step
distance = stop - start
if distance <= self._zero:
- self._len = 0
+ return 0
else: # distance > 0 and step > 0: regular euclidean division
q, r = divmod(distance, step)
- self._len = int(q) + int(r != self._zero)
+ return int(q) + int(r != self._zero)
def __reduce__(self):
return numeric_range, (self._start, self._stop, self._step)
@@ -2699,6 +2714,9 @@ class seekable:
>>> it.seek(10)
>>> next(it)
'10'
+ >>> it.relative_seek(-2) # Seeking relative to the current position
+ >>> next(it)
+ '9'
>>> it.seek(20) # Seeking past the end of the source isn't a problem
>>> list(it)
[]
@@ -2812,6 +2830,10 @@ class seekable:
if remainder > 0:
consume(self, remainder)
+ def relative_seek(self, count):
+ index = len(self._cache)
+ self.seek(max(index + count, 0))
+
class run_length:
"""
@@ -3205,6 +3227,8 @@ class time_limited:
stops if the time elapsed is greater than *limit_seconds*. If your time
limit is 1 second, but it takes 2 seconds to generate the first item from
the iterable, the function will run for 2 seconds and not yield anything.
+ As a special case, when *limit_seconds* is zero, the iterator never
+ returns anything.
"""
@@ -3220,6 +3244,9 @@ class time_limited:
return self
def __next__(self):
+ if self.limit_seconds == 0:
+ self.timed_out = True
+ raise StopIteration
item = next(self._iterable)
if monotonic() - self._start_time > self.limit_seconds:
self.timed_out = True
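
Sketch of the zero-limit special case (standalone `more_itertools` assumed):

    from more_itertools import time_limited

    limited = time_limited(0, iter(range(5)))
    assert list(limited) == []    # nothing is yielded
    assert limited.timed_out      # and the flag is set immediately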
@@ -3339,7 +3366,7 @@ def iequals(*iterables):
>>> iequals("abc", "acb")
False
- Not to be confused with :func:`all_equals`, which checks whether all
+ Not to be confused with :func:`all_equal`, which checks whether all
elements of iterable are equal to each other.
"""
@@ -3835,7 +3862,7 @@ def nth_permutation(iterable, r, index):
elif not 0 <= r < n:
raise ValueError
else:
- c = factorial(n) // factorial(n - r)
+ c = perm(n, r)
if index < 0:
index += c
@@ -3858,6 +3885,52 @@ def nth_permutation(iterable, r, index):
return tuple(map(pool.pop, result))
+def nth_combination_with_replacement(iterable, r, index):
+ """Equivalent to
+ ``list(combinations_with_replacement(iterable, r))[index]``.
+
+
+ The subsequences with repetition of *iterable* that are of length *r* can
+ be ordered lexicographically. :func:`nth_combination_with_replacement`
+ computes the subsequence at sort position *index* directly, without
+ computing the previous subsequences with replacement.
+
+ >>> nth_combination_with_replacement(range(5), 3, 5)
+ (0, 1, 1)
+
+ ``ValueError`` will be raised if *r* is negative or greater than the length
+ of *iterable*.
+ ``IndexError`` will be raised if the given *index* is invalid.
+ """
+ pool = tuple(iterable)
+ n = len(pool)
+ if (r < 0) or (r > n):
+ raise ValueError
+
+ c = comb(n + r - 1, r)
+
+ if index < 0:
+ index += c
+
+ if (index < 0) or (index >= c):
+ raise IndexError
+
+ result = []
+ i = 0
+ while r:
+ r -= 1
+ while n >= 0:
+ num_combs = comb(n + r - 1, r)
+ if index < num_combs:
+ break
+ n -= 1
+ i += 1
+ index -= num_combs
+ result.append(pool[i])
+
+ return tuple(result)
+
+
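
A brute-force cross-check of the direct computation (standalone `more_itertools` assumed):

    from itertools import combinations_with_replacement
    from more_itertools import nth_combination_with_replacement

    pool, r = range(5), 3
    expected = list(combinations_with_replacement(pool, r))
    assert all(
        nth_combination_with_replacement(pool, r, i) == expected[i]
        for i in range(len(expected))
    )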
def value_chain(*args):
"""Yield all arguments passed to the function in the same order in which
they were passed. If an argument itself is iterable then iterate over its
@@ -3949,9 +4022,66 @@ def combination_index(element, iterable):
for i, j in enumerate(reversed(indexes), start=1):
j = n - j
if i <= j:
- index += factorial(j) // (factorial(i) * factorial(j - i))
+ index += comb(j, i)

- return factorial(n + 1) // (factorial(k + 1) * factorial(n - k)) - index
+ return comb(n + 1, k + 1) - index
+
+
+def combination_with_replacement_index(element, iterable):
+ """Equivalent to
+ ``list(combinations_with_replacement(iterable, r)).index(element)``
+
+ The subsequences with repetition of *iterable* that are of length *r* can
+ be ordered lexicographically. :func:`combination_with_replacement_index`
+ computes the index of the first *element*, without computing the previous
+ combinations with replacement.
+
+ >>> combination_with_replacement_index('adf', 'abcdefg')
+ 20
+
+ ``ValueError`` will be raised if the given *element* isn't one of the
+ combinations with replacement of *iterable*.
+ """
+ element = tuple(element)
+ l = len(element)
+ element = enumerate(element)
+
+ k, y = next(element, (None, None))
+ if k is None:
+ return 0
+
+ indexes = []
+ pool = tuple(iterable)
+ for n, x in enumerate(pool):
+ while x == y:
+ indexes.append(n)
+ tmp, y = next(element, (None, None))
+ if tmp is None:
+ break
+ else:
+ k = tmp
+ if y is None:
+ break
+ else:
+ raise ValueError(
+ 'element is not a combination with replacement of iterable'
+ )
+
+ n = len(pool)
+ occupations = [0] * n
+ for p in indexes:
+ occupations[p] += 1
+
+ index = 0
+ cumulative_sum = 0
+ for k in range(1, n):
+ cumulative_sum += occupations[k - 1]
+ j = l + n - 1 - k - cumulative_sum
+ i = n - k
+ if i <= j:
+ index += comb(j, i)
+ return index
def permutation_index(element, iterable):
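Round-trip sketch mirroring the docstring example (standalone `more_itertools` assumed):

    from itertools import combinations_with_replacement
    from more_itertools import combination_with_replacement_index

    combs = list(combinations_with_replacement('abcdefg', 3))
    assert combination_with_replacement_index('adf', 'abcdefg') == combs.index(('a', 'd', 'f'))
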
@@ -4056,26 +4186,20 @@ def _chunked_even_finite(iterable, N, n):
num_full = N - partial_size * num_lists
num_partial = num_lists - num_full
- buffer = []
- iterator = iter(iterable)
-
# Yield num_full lists of full_size
- for x in iterator:
- buffer.append(x)
- if len(buffer) == full_size:
- yield buffer
- buffer = []
- num_full -= 1
- if num_full <= 0:
- break
+ partial_start_idx = num_full * full_size
+ if full_size > 0:
+ for i in range(0, partial_start_idx, full_size):
+ yield list(islice(iterable, i, i + full_size))
# Yield num_partial lists of partial_size
- for x in iterator:
- buffer.append(x)
- if len(buffer) == partial_size:
- yield buffer
- buffer = []
- num_partial -= 1
+ if partial_size > 0:
+ for i in range(
+ partial_start_idx,
+ partial_start_idx + (num_partial * partial_size),
+ partial_size,
+ ):
+ yield list(islice(iterable, i, i + partial_size))
def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
@@ -4114,30 +4238,23 @@ def zip_broadcast(*objects, scalar_types=(str, bytes), strict=False):
if not size:
return
+ new_item = [None] * size
iterables, iterable_positions = [], []
- scalars, scalar_positions = [], []
for i, obj in enumerate(objects):
if is_scalar(obj):
- scalars.append(obj)
- scalar_positions.append(i)
+ new_item[i] = obj
else:
iterables.append(iter(obj))
iterable_positions.append(i)
- if len(scalars) == size:
+ if not iterables:
yield tuple(objects)
return
zipper = _zip_equal if strict else zip
for item in zipper(*iterables):
- new_item = [None] * size
-
- for i, elem in zip(iterable_positions, item):
- new_item[i] = elem
-
- for i, elem in zip(scalar_positions, scalars):
- new_item[i] = elem
-
+ for i, new_item[i] in zip(iterable_positions, item):
+ pass
yield tuple(new_item)
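
The `for i, new_item[i] in zip(...): pass` loop is an unpacking trick that scatters each row's values into their fixed slots; the plain-Python equivalent:

    positions, values = [0, 2], ['a', 'b']
    new_item = [None, 'scalar', None]   # scalars were placed once, up front
    for i, v in zip(positions, values):
        new_item[i] = v
    assert new_item == ['a', 'scalar', 'b']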
@@ -4162,22 +4279,23 @@ def unique_in_window(iterable, n, key=None):
raise ValueError('n must be greater than 0')
window = deque(maxlen=n)
- uniques = set()
+ counts = defaultdict(int)
use_key = key is not None
for item in iterable:
- k = key(item) if use_key else item
- if k in uniques:
- continue
-
- if len(uniques) == n:
- uniques.discard(window[0])
+ if len(window) == n:
+ to_discard = window[0]
+ if counts[to_discard] == 1:
+ del counts[to_discard]
+ else:
+ counts[to_discard] -= 1
- uniques.add(k)
+ k = key(item) if use_key else item
+ if k not in counts:
+ yield item
+ counts[k] += 1
window.append(k)
- yield item
-
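
With the counts mapping an item can be yielded again once every copy has aged out of the window; a sketch (standalone `more_itertools` assumed):

    from more_itertools import unique_in_window

    assert list(unique_in_window([0, 1, 2, 3, 0], 3)) == [0, 1, 2, 3, 0]
    assert list(unique_in_window([0, 1, 0, 2, 0], 3)) == [0, 1, 2]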
def duplicates_everseen(iterable, key=None):
"""Yield duplicate elements after their first appearance.
@@ -4187,7 +4305,7 @@ def duplicates_everseen(iterable, key=None):
>>> list(duplicates_everseen('AaaBbbCccAaa', str.lower))
['a', 'a', 'b', 'b', 'c', 'c', 'A', 'a', 'a']
- This function is analagous to :func:`unique_everseen` and is subject to
+ This function is analogous to :func:`unique_everseen` and is subject to
the same performance considerations.
"""
@@ -4217,15 +4335,52 @@ def duplicates_justseen(iterable, key=None):
>>> list(duplicates_justseen('AaaBbbCccAaa', str.lower))
['a', 'a', 'b', 'b', 'c', 'c', 'a', 'a']
- This function is analagous to :func:`unique_justseen`.
+ This function is analogous to :func:`unique_justseen`.
"""
- return flatten(
- map(
- lambda group_tuple: islice_extended(group_tuple[1])[1:],
- groupby(iterable, key),
- )
- )
+ return flatten(g for _, g in groupby(iterable, key) for _ in g)
+
+
+def classify_unique(iterable, key=None):
+ """Classify each element in terms of its uniqueness.
+
+ For each element in the input iterable, return a 3-tuple consisting of:
+
+ 1. The element itself
+ 2. ``False`` if the element is equal to the one preceding it in the input,
+ ``True`` otherwise (i.e. the equivalent of :func:`unique_justseen`)
+ 3. ``False`` if this element has been seen anywhere in the input before,
+ ``True`` otherwise (i.e. the equivalent of :func:`unique_everseen`)
+
+ >>> list(classify_unique('otto')) # doctest: +NORMALIZE_WHITESPACE
+ [('o', True, True),
+ ('t', True, True),
+ ('t', False, False),
+ ('o', True, False)]
+
+ This function is analogous to :func:`unique_everseen` and is subject to
+ the same performance considerations.
+
+ """
+ seen_set = set()
+ seen_list = []
+ use_key = key is not None
+ previous = None
+
+ for i, element in enumerate(iterable):
+ k = key(element) if use_key else element
+ is_unique_justseen = not i or previous != k
+ previous = k
+ is_unique_everseen = False
+ try:
+ if k not in seen_set:
+ seen_set.add(k)
+ is_unique_everseen = True
+ except TypeError:
+ if k not in seen_list:
+ seen_list.append(k)
+ is_unique_everseen = True
+ yield element, is_unique_justseen, is_unique_everseen
def minmax(iterable_or_value, *others, key=None, default=_marker):
@@ -4389,3 +4544,112 @@ def gray_product(*iterables):
o[j] = -o[j]
f[j] = f[j + 1]
f[j + 1] = j + 1
+
+
+def partial_product(*iterables):
+ """Yields tuples containing one item from each iterator, with subsequent
+ tuples changing a single item at a time by advancing each iterator until it
+ is exhausted. This sequence guarantees every value in each iterable is
+ output at least once without generating all possible combinations.
+
+ This may be useful, for example, when testing an expensive function.
+
+ >>> list(partial_product('AB', 'C', 'DEF'))
+ [('A', 'C', 'D'), ('B', 'C', 'D'), ('B', 'C', 'E'), ('B', 'C', 'F')]
+ """
+
+ iterators = list(map(iter, iterables))
+
+ try:
+ prod = [next(it) for it in iterators]
+ except StopIteration:
+ return
+ yield tuple(prod)
+
+ for i, it in enumerate(iterators):
+ for prod[i] in it:
+ yield tuple(prod)
+
+
+def takewhile_inclusive(predicate, iterable):
+ """A variant of :func:`takewhile` that yields one additional element.
+
+ >>> list(takewhile_inclusive(lambda x: x < 5, [1, 4, 6, 4, 1]))
+ [1, 4, 6]
+
+ :func:`takewhile` would return ``[1, 4]``.
+ """
+ for x in iterable:
+ yield x
+ if not predicate(x):
+ break
+
+
+def outer_product(func, xs, ys, *args, **kwargs):
+ """A generalized outer product that applies a binary function to all
+ pairs of items. Returns a 2D matrix with ``len(xs)`` rows and ``len(ys)``
+ columns.
+ Also accepts ``*args`` and ``**kwargs`` that are passed to ``func``.
+
+ Multiplication table:
+
+ >>> list(outer_product(mul, range(1, 4), range(1, 6)))
+ [(1, 2, 3, 4, 5), (2, 4, 6, 8, 10), (3, 6, 9, 12, 15)]
+
+ Cross tabulation:
+
+ >>> xs = ['A', 'B', 'A', 'A', 'B', 'B', 'A', 'A', 'B', 'B']
+ >>> ys = ['X', 'X', 'X', 'Y', 'Z', 'Z', 'Y', 'Y', 'Z', 'Z']
+ >>> rows = list(zip(xs, ys))
+ >>> count_rows = lambda x, y: rows.count((x, y))
+ >>> list(outer_product(count_rows, sorted(set(xs)), sorted(set(ys))))
+ [(2, 3, 0), (1, 0, 4)]
+
+ Usage with ``*args`` and ``**kwargs``:
+
+ >>> animals = ['cat', 'wolf', 'mouse']
+ >>> list(outer_product(min, animals, animals, key=len))
+ [('cat', 'cat', 'cat'), ('cat', 'wolf', 'wolf'), ('cat', 'wolf', 'mouse')]
+ """
+ ys = tuple(ys)
+ return batched(
+ starmap(lambda x, y: func(x, y, *args, **kwargs), product(xs, ys)),
+ n=len(ys),
+ )
+
+
+def iter_suppress(iterable, *exceptions):
+ """Yield each of the items from *iterable*. If the iteration raises one of
+ the specified *exceptions*, that exception will be suppressed and iteration
+ will stop.
+
+ >>> from itertools import chain
+ >>> def breaks_at_five(x):
+ ... while True:
+ ... if x >= 5:
+ ... raise RuntimeError
+ ... yield x
+ ... x += 1
+ >>> it_1 = iter_suppress(breaks_at_five(1), RuntimeError)
+ >>> it_2 = iter_suppress(breaks_at_five(2), RuntimeError)
+ >>> list(chain(it_1, it_2))
+ [1, 2, 3, 4, 2, 3, 4]
+ """
+ try:
+ yield from iterable
+ except exceptions:
+ return
+
+
+def filter_map(func, iterable):
+ """Apply *func* to every element of *iterable*, yielding only those which
+ are not ``None``.
+
+ >>> elems = ['1', 'a', '2', 'b', '3']
+ >>> list(filter_map(lambda s: int(s) if s.isnumeric() else None, elems))
+ [1, 2, 3]
+ """
+ for x in iterable:
+ y = func(x)
+ if y is not None:
+ yield y
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/more.pyi b/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/more.pyi
index 75c5232c1a..9a5fc911a3 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/more.pyi
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/more.pyi
@@ -29,7 +29,7 @@ _U = TypeVar('_U')
_V = TypeVar('_V')
_W = TypeVar('_W')
_T_co = TypeVar('_T_co', covariant=True)
-_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[object]])
+_GenFn = TypeVar('_GenFn', bound=Callable[..., Iterator[Any]])
_Raisable = BaseException | Type[BaseException]
@type_check_only
@@ -74,7 +74,7 @@ class peekable(Generic[_T], Iterator[_T]):
def __getitem__(self, index: slice) -> list[_T]: ...
def consumer(func: _GenFn) -> _GenFn: ...
-def ilen(iterable: Iterable[object]) -> int: ...
+def ilen(iterable: Iterable[_T]) -> int: ...
def iterate(func: Callable[[_T], _T], start: _T) -> Iterator[_T]: ...
def with_iter(
context_manager: ContextManager[Iterable[_T]],
@@ -116,7 +116,7 @@ class bucket(Generic[_T, _U], Container[_U]):
self,
iterable: Iterable[_T],
key: Callable[[_T], _U],
- validator: Callable[[object], object] | None = ...,
+ validator: Callable[[_U], object] | None = ...,
) -> None: ...
def __contains__(self, value: object) -> bool: ...
def __iter__(self) -> Iterator[_U]: ...
@@ -383,7 +383,7 @@ def mark_ends(
iterable: Iterable[_T],
) -> Iterable[tuple[bool, bool, _T]]: ...
def locate(
- iterable: Iterable[object],
+ iterable: Iterable[_T],
pred: Callable[..., Any] = ...,
window_size: int | None = ...,
) -> Iterator[int]: ...
@@ -440,6 +440,7 @@ class seekable(Generic[_T], Iterator[_T]):
def peek(self, default: _U) -> _T | _U: ...
def elements(self) -> SequenceView[_T]: ...
def seek(self, index: int) -> None: ...
+ def relative_seek(self, count: int) -> None: ...
class run_length:
@staticmethod
@@ -578,6 +579,9 @@ def all_unique(
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
) -> bool: ...
def nth_product(index: int, *args: Iterable[_T]) -> tuple[_T, ...]: ...
+def nth_combination_with_replacement(
+ iterable: Iterable[_T], r: int, index: int
+) -> tuple[_T, ...]: ...
def nth_permutation(
iterable: Iterable[_T], r: int, index: int
) -> tuple[_T, ...]: ...
@@ -586,6 +590,9 @@ def product_index(element: Iterable[_T], *args: Iterable[_T]) -> int: ...
def combination_index(
element: Iterable[_T], iterable: Iterable[_T]
) -> int: ...
+def combination_with_replacement_index(
+ element: Iterable[_T], iterable: Iterable[_T]
+) -> int: ...
def permutation_index(
element: Iterable[_T], iterable: Iterable[_T]
) -> int: ...
@@ -611,6 +618,9 @@ def duplicates_everseen(
def duplicates_justseen(
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
) -> Iterator[_T]: ...
+def classify_unique(
+ iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
+) -> Iterator[tuple[_T, bool, bool]]: ...
class _SupportsLessThan(Protocol):
def __lt__(self, __other: Any) -> bool: ...
@@ -655,12 +665,31 @@ def minmax(
def longest_common_prefix(
iterables: Iterable[Iterable[_T]],
) -> Iterator[_T]: ...
-def iequals(*iterables: Iterable[object]) -> bool: ...
+def iequals(*iterables: Iterable[Any]) -> bool: ...
def constrained_batches(
- iterable: Iterable[object],
+ iterable: Iterable[_T],
max_size: int,
max_count: int | None = ...,
get_len: Callable[[_T], object] = ...,
strict: bool = ...,
) -> Iterator[tuple[_T]]: ...
def gray_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
+def partial_product(*iterables: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
+def takewhile_inclusive(
+ predicate: Callable[[_T], bool], iterable: Iterable[_T]
+) -> Iterator[_T]: ...
+def outer_product(
+ func: Callable[[_T, _U], _V],
+ xs: Iterable[_T],
+ ys: Iterable[_U],
+ *args: Any,
+ **kwargs: Any,
+) -> Iterator[tuple[_V, ...]]: ...
+def iter_suppress(
+ iterable: Iterable[_T],
+ *exceptions: Type[BaseException],
+) -> Iterator[_T]: ...
+def filter_map(
+ func: Callable[[_T], _V | None],
+ iterable: Iterable[_T],
+) -> Iterator[_V]: ...
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/recipes.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/recipes.py
index 3facc2e3a6..145e3cb5bd 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/recipes.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/recipes.py
@@ -9,11 +9,10 @@ Some backward-compatible usability improvements have been made.
"""
import math
import operator
-import warnings
from collections import deque
from collections.abc import Sized
-from functools import reduce
+from functools import partial, reduce
from itertools import (
chain,
combinations,
@@ -52,10 +51,13 @@ __all__ = [
'pad_none',
'pairwise',
'partition',
+ 'polynomial_eval',
'polynomial_from_roots',
+ 'polynomial_derivative',
'powerset',
'prepend',
'quantify',
+ 'reshape',
'random_combination_with_replacement',
'random_combination',
'random_permutation',
@@ -65,9 +67,11 @@ __all__ = [
'sieve',
'sliding_window',
'subslices',
+ 'sum_of_squares',
'tabulate',
'tail',
'take',
+ 'totient',
'transpose',
'triplewise',
'unique_everseen',
@@ -77,6 +81,18 @@ __all__ = [
_marker = object()
+# zip with strict is available for Python 3.10+
+try:
+ zip(strict=True)
+except TypeError:
+ _zip_strict = zip
+else:
+ _zip_strict = partial(zip, strict=True)
+
+# math.sumprod is available for Python 3.12+
+_sumprod = getattr(math, 'sumprod', lambda x, y: dotproduct(x, y))
+
+
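
The same probe-and-fall-back pattern, sketched standalone with a functionally equivalent fallback:

    import math
    import operator

    _sumprod = getattr(math, 'sumprod', lambda x, y: sum(map(operator.mul, x, y)))
    assert _sumprod([1, 2, 3], [4, 5, 6]) == 32   # 4 + 10 + 18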
def take(n, iterable):
"""Return first *n* items of the iterable as a list.
@@ -293,7 +309,7 @@ def _pairwise(iterable):
"""
a, b = tee(iterable)
next(b, None)
- yield from zip(a, b)
+ return zip(a, b)
try:
@@ -303,7 +319,7 @@ except ImportError:
else:
def pairwise(iterable):
- yield from itertools_pairwise(iterable)
+ return itertools_pairwise(iterable)
pairwise.__doc__ = _pairwise.__doc__
@@ -334,13 +350,9 @@ def _zip_equal(*iterables):
for i, it in enumerate(iterables[1:], 1):
size = len(it)
if size != first_size:
- break
- else:
- # If we didn't break out, we can use the built-in zip.
- return zip(*iterables)
-
- # If we did break out, there was a mismatch.
- raise UnequalIterablesError(details=(first_size, i, size))
+ raise UnequalIterablesError(details=(first_size, i, size))
+ # All sizes are equal, we can use the built-in zip.
+ return zip(*iterables)
# If any one of the iterables didn't have a length, start reading
# them until one runs out.
except TypeError:
@@ -433,12 +445,9 @@ def partition(pred, iterable):
if pred is None:
pred = bool
- evaluations = ((pred(x), x) for x in iterable)
- t1, t2 = tee(evaluations)
- return (
- (x for (cond, x) in t1 if not cond),
- (x for (cond, x) in t2 if cond),
- )
+ t1, t2, p = tee(iterable, 3)
+ p1, p2 = tee(map(pred, p))
+ return (compress(t1, map(operator.not_, p1)), compress(t2, p2))
def powerset(iterable):
@@ -486,7 +495,7 @@ def unique_everseen(iterable, key=None):
>>> list(unique_everseen(iterable, key=tuple)) # Faster
[[1, 2], [2, 3]]
- Similary, you may want to convert unhashable ``set`` objects with
+ Similarly, you may want to convert unhashable ``set`` objects with
``key=frozenset``. For ``dict`` objects,
``key=lambda x: frozenset(x.items())`` can be used.
@@ -518,6 +527,9 @@ def unique_justseen(iterable, key=None):
['A', 'B', 'C', 'A', 'D']
"""
+ if key is None:
+ return map(operator.itemgetter(0), groupby(iterable))
+
return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
@@ -712,12 +724,14 @@ def convolve(signal, kernel):
is immediately consumed and stored.
"""
+ # This implementation intentionally doesn't match the one in the itertools
+ # documentation.
kernel = tuple(kernel)[::-1]
n = len(kernel)
window = deque([0], maxlen=n) * n
for x in chain(signal, repeat(0, n - 1)):
window.append(x)
- yield sum(map(operator.mul, kernel, window))
+ yield _sumprod(kernel, window)
def before_and_after(predicate, it):
@@ -778,9 +792,7 @@ def sliding_window(iterable, n):
For a variant with more features, see :func:`windowed`.
"""
it = iter(iterable)
- window = deque(islice(it, n), maxlen=n)
- if len(window) == n:
- yield tuple(window)
+ window = deque(islice(it, n - 1), maxlen=n)
for x in it:
window.append(x)
yield tuple(window)
@@ -807,39 +819,38 @@ def polynomial_from_roots(roots):
>>> polynomial_from_roots(roots) # x^3 - 4 * x^2 - 17 * x + 60
[1, -4, -17, 60]
"""
- # Use math.prod for Python 3.8+,
- prod = getattr(math, 'prod', lambda x: reduce(operator.mul, x, 1))
- roots = list(map(operator.neg, roots))
- return [
- sum(map(prod, combinations(roots, k))) for k in range(len(roots) + 1)
- ]
+ factors = zip(repeat(1), map(operator.neg, roots))
+ return list(reduce(convolve, factors, [1]))
-def iter_index(iterable, value, start=0):
+def iter_index(iterable, value, start=0, stop=None):
"""Yield the index of each place in *iterable* that *value* occurs,
- beginning with index *start*.
+ beginning with index *start* and ending before index *stop*.
See :func:`locate` for a more general means of finding the indexes
associated with particular values.
>>> list(iter_index('AABCADEAF', 'A'))
[0, 1, 4, 7]
+ >>> list(iter_index('AABCADEAF', 'A', 1)) # start index is inclusive
+ [1, 4, 7]
+ >>> list(iter_index('AABCADEAF', 'A', 1, 7)) # stop index is not inclusive
+ [1, 4]
"""
- try:
- seq_index = iterable.index
- except AttributeError:
+ seq_index = getattr(iterable, 'index', None)
+ if seq_index is None:
# Slow path for general iterables
- it = islice(iterable, start, None)
+ it = islice(iterable, start, stop)
for i, element in enumerate(it, start):
if element is value or element == value:
yield i
else:
# Fast path for sequences
+ stop = len(iterable) if stop is None else stop
i = start - 1
try:
while True:
- i = seq_index(value, i + 1)
- yield i
+ yield (i := seq_index(value, i + 1, stop))
except ValueError:
pass
@@ -850,81 +861,152 @@ def sieve(n):
>>> list(sieve(30))
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
"""
- isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x)))
+ if n > 2:
+ yield 2
+ start = 3
data = bytearray((0, 1)) * (n // 2)
- data[:3] = 0, 0, 0
- limit = isqrt(n) + 1
- for p in compress(range(limit), data):
+ limit = math.isqrt(n) + 1
+ for p in iter_index(data, 1, start, limit):
+ yield from iter_index(data, 1, start, p * p)
data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p)))
- data[2] = 1
- return iter_index(data, 1) if n > 2 else iter([])
+ start = p * p
+ yield from iter_index(data, 1, start)
-def batched(iterable, n):
- """Batch data into lists of length *n*. The last batch may be shorter.
+def _batched(iterable, n, *, strict=False):
+ """Batch data into tuples of length *n*. If the number of items in
+ *iterable* is not divisible by *n*:
+ * The last batch will be shorter if *strict* is ``False``.
+ * :exc:`ValueError` will be raised if *strict* is ``True``.
>>> list(batched('ABCDEFG', 3))
- [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]
+ [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
- This recipe is from the ``itertools`` docs. This library also provides
- :func:`chunked`, which has a different implementation.
+ On Python 3.13 and above, this is an alias for :func:`itertools.batched`.
"""
- if hexversion >= 0x30C00A0: # Python 3.12.0a0
- warnings.warn(
- (
- 'batched will be removed in a future version of '
- 'more-itertools. Use the standard library '
- 'itertools.batched function instead'
- ),
- DeprecationWarning,
- )
-
+ if n < 1:
+ raise ValueError('n must be at least one')
it = iter(iterable)
- while True:
- batch = list(islice(it, n))
- if not batch:
- break
+ while batch := tuple(islice(it, n)):
+ if strict and len(batch) != n:
+ raise ValueError('batched(): incomplete batch')
yield batch
+if hexversion >= 0x30D00A2:
+ from itertools import batched as itertools_batched
+
+ def batched(iterable, n, *, strict=False):
+ return itertools_batched(iterable, n, strict=strict)
+
+else:
+ batched = _batched
+
+ batched.__doc__ = _batched.__doc__
+
+
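
Strict-mode sketch, identical on both the shim and the 3.13 alias paths (standalone `more_itertools` assumed):

    from more_itertools import batched

    assert list(batched('ABCDEF', 3, strict=True)) == [('A', 'B', 'C'), ('D', 'E', 'F')]
    try:
        list(batched('ABCDEFG', 3, strict=True))
    except ValueError:
        pass   # incomplete final batch is rejected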
def transpose(it):
- """Swap the rows and columns of the input.
+ """Swap the rows and columns of the input matrix.
>>> list(transpose([(1, 2, 3), (11, 22, 33)]))
[(1, 11), (2, 22), (3, 33)]
The caller should ensure that the dimensions of the input are compatible.
+ If the input is empty, no output will be produced.
+ """
+ return _zip_strict(*it)
+
+
+def reshape(matrix, cols):
+ """Reshape the 2-D input *matrix* to have a column count given by *cols*.
+
+ >>> matrix = [(0, 1), (2, 3), (4, 5)]
+ >>> cols = 3
+ >>> list(reshape(matrix, cols))
+ [(0, 1, 2), (3, 4, 5)]
"""
- # TODO: when 3.9 goes end-of-life, add stric=True to this.
- return zip(*it)
+ return batched(chain.from_iterable(matrix), cols)
def matmul(m1, m2):
"""Multiply two matrices.
+
>>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)]))
- [[49, 80], [41, 60]]
+ [(49, 80), (41, 60)]
The caller should ensure that the dimensions of the input matrices are
compatible with each other.
"""
n = len(m2[0])
- return batched(starmap(dotproduct, product(m1, transpose(m2))), n)
+ return batched(starmap(_sumprod, product(m1, transpose(m2))), n)
def factor(n):
"""Yield the prime factors of n.
+
>>> list(factor(360))
[2, 2, 2, 3, 3, 5]
"""
- isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x)))
- for prime in sieve(isqrt(n) + 1):
- while True:
- quotient, remainder = divmod(n, prime)
- if remainder:
- break
+ for prime in sieve(math.isqrt(n) + 1):
+ while not n % prime:
yield prime
- n = quotient
+ n //= prime
if n == 1:
return
- if n >= 2:
+ if n > 1:
yield n
+
+
+def polynomial_eval(coefficients, x):
+ """Evaluate a polynomial at a specific value.
+
+ Example: evaluating x^3 - 4 * x^2 - 17 * x + 60 at x = 2.5:
+
+ >>> coefficients = [1, -4, -17, 60]
+ >>> x = 2.5
+ >>> polynomial_eval(coefficients, x)
+ 8.125
+ """
+ n = len(coefficients)
+ if n == 0:
+ return x * 0 # coerce zero to the type of x
+ powers = map(pow, repeat(x), reversed(range(n)))
+ return _sumprod(coefficients, powers)
+
+
+def sum_of_squares(it):
+ """Return the sum of the squares of the input values.
+
+ >>> sum_of_squares([10, 20, 30])
+ 1400
+ """
+ return _sumprod(*tee(it))
+
+
+def polynomial_derivative(coefficients):
+ """Compute the first derivative of a polynomial.
+
+ Example: evaluating the derivative of x^3 - 4 * x^2 - 17 * x + 60
+
+ >>> coefficients = [1, -4, -17, 60]
+ >>> derivative_coefficients = polynomial_derivative(coefficients)
+ >>> derivative_coefficients
+ [3, -8, -17]
+ """
+ n = len(coefficients)
+ powers = reversed(range(1, n))
+ return list(map(operator.mul, coefficients, powers))
+
+
+def totient(n):
+ """Return the count of natural numbers up to *n* that are coprime with *n*.
+
+ >>> totient(9)
+ 6
+ >>> totient(12)
+ 4
+ """
+ for p in unique_justseen(factor(n)):
+ n = n // p * (p - 1)
+
+ return n
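
Worked example of the integer-only Euler product above: factor(12) yields [2, 2, 3] and unique_justseen keeps [2, 3], so

    n = 12
    for p in (2, 3):
        n = n // p * (p - 1)   # 12 -> 6 -> 4
    assert n == 4              # totient(12) == 4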
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/recipes.pyi b/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/recipes.pyi
index 0267ed569e..ed4c19db49 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/recipes.pyi
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/more_itertools/recipes.pyi
@@ -14,6 +14,8 @@ from typing import (
# Type and type variable definitions
_T = TypeVar('_T')
+_T1 = TypeVar('_T1')
+_T2 = TypeVar('_T2')
_U = TypeVar('_U')
def take(n: int, iterable: Iterable[_T]) -> list[_T]: ...
@@ -21,19 +23,19 @@ def tabulate(
function: Callable[[int], _T], start: int = ...
) -> Iterator[_T]: ...
def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
-def consume(iterator: Iterable[object], n: int | None = ...) -> None: ...
+def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ...
@overload
def nth(iterable: Iterable[_T], n: int) -> _T | None: ...
@overload
def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
-def all_equal(iterable: Iterable[object]) -> bool: ...
+def all_equal(iterable: Iterable[_T]) -> bool: ...
def quantify(
iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
) -> int: ...
def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
-def dotproduct(vec1: Iterable[object], vec2: Iterable[object]) -> object: ...
+def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ...
def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
def repeatfunc(
func: Callable[..., _U], times: int | None = ..., *args: Any
@@ -101,19 +103,26 @@ def sliding_window(
iterable: Iterable[_T], n: int
) -> Iterator[tuple[_T, ...]]: ...
def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ...
-def polynomial_from_roots(roots: Sequence[int]) -> list[int]: ...
+def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ...
def iter_index(
- iterable: Iterable[object],
+ iterable: Iterable[_T],
value: Any,
start: int | None = ...,
+ stop: int | None = ...,
) -> Iterator[int]: ...
def sieve(n: int) -> Iterator[int]: ...
def batched(
- iterable: Iterable[_T],
- n: int,
-) -> Iterator[list[_T]]: ...
+ iterable: Iterable[_T], n: int, *, strict: bool = False
+) -> Iterator[tuple[_T]]: ...
def transpose(
it: Iterable[Iterable[_T]],
-) -> tuple[Iterator[_T], ...]: ...
-def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[list[_T]]: ...
+) -> Iterator[tuple[_T, ...]]: ...
+def reshape(
+ matrix: Iterable[Iterable[_T]], cols: int
+) -> Iterator[tuple[_T, ...]]: ...
+def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ...
def factor(n: int) -> Iterator[int]: ...
+def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ...
+def sum_of_squares(it: Iterable[_T]) -> _T: ...
+def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ...
+def totient(n: int) -> int: ...
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/__init__.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/__init__.py
index 13cadc7f04..e7c0aa12ca 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/__init__.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/__init__.py
@@ -6,10 +6,10 @@ __title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
-__version__ = "23.1"
+__version__ = "24.0"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "BSD-2-Clause or Apache-2.0"
-__copyright__ = "2014-2019 %s" % __author__
+__copyright__ = "2014 %s" % __author__
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_manylinux.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_manylinux.py
index 449c655be6..ad62505f3f 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_manylinux.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_manylinux.py
@@ -5,7 +5,7 @@ import os
import re
import sys
import warnings
-from typing import Dict, Generator, Iterator, NamedTuple, Optional, Tuple
+from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
from ._elffile import EIClass, EIData, ELFFile, EMachine
@@ -50,12 +50,21 @@ def _is_linux_i686(executable: str) -> bool:
)
-def _have_compatible_abi(executable: str, arch: str) -> bool:
- if arch == "armv7l":
+def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
+ if "armv7l" in archs:
return _is_linux_armhf(executable)
- if arch == "i686":
+ if "i686" in archs:
return _is_linux_i686(executable)
- return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+ allowed_archs = {
+ "x86_64",
+ "aarch64",
+ "ppc64",
+ "ppc64le",
+ "s390x",
+ "loongarch64",
+ "riscv64",
+ }
+ return any(arch in allowed_archs for arch in archs)
# If glibc ever changes its major version, we need to know what the last
@@ -81,7 +90,7 @@ def _glibc_version_string_confstr() -> Optional[str]:
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
try:
# Should be a string like "glibc 2.17".
- version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
+ version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
assert version_string is not None
_, version = version_string.rsplit()
except (AssertionError, AttributeError, OSError, ValueError):
@@ -167,13 +176,13 @@ def _get_glibc_version() -> Tuple[int, int]:
# From PEP 513, PEP 600
-def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
sys_glibc = _get_glibc_version()
if sys_glibc < version:
return False
# Check for presence of _manylinux module.
try:
- import _manylinux # noqa
+ import _manylinux
except ImportError:
return True
if hasattr(_manylinux, "manylinux_compatible"):
@@ -203,12 +212,22 @@ _LEGACY_MANYLINUX_MAP = {
}
-def platform_tags(linux: str, arch: str) -> Iterator[str]:
- if not _have_compatible_abi(sys.executable, arch):
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+ """Generate manylinux tags compatible to the current platform.
+
+ :param archs: Sequence of compatible architectures.
+ The first one shall be the closest to the actual architecture and be the part of
+ platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+ The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+ be manylinux-compatible.
+
+ :returns: An iterator of compatible manylinux tags.
+ """
+ if not _have_compatible_abi(sys.executable, archs):
return
# Oldest glibc to be supported regardless of architecture is (2, 17).
too_old_glibc2 = _GLibCVersion(2, 16)
- if arch in {"x86_64", "i686"}:
+ if set(archs) & {"x86_64", "i686"}:
# On x86/i686 also oldest glibc to be supported is (2, 5).
too_old_glibc2 = _GLibCVersion(2, 4)
current_glibc = _GLibCVersion(*_get_glibc_version())
@@ -222,19 +241,20 @@ def platform_tags(linux: str, arch: str) -> Iterator[str]:
for glibc_major in range(current_glibc.major - 1, 1, -1):
glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
- for glibc_max in glibc_max_list:
- if glibc_max.major == too_old_glibc2.major:
- min_minor = too_old_glibc2.minor
- else:
- # For other glibc major versions oldest supported is (x, 0).
- min_minor = -1
- for glibc_minor in range(glibc_max.minor, min_minor, -1):
- glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
- tag = "manylinux_{}_{}".format(*glibc_version)
- if _is_compatible(tag, arch, glibc_version):
- yield linux.replace("linux", tag)
- # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
- if glibc_version in _LEGACY_MANYLINUX_MAP:
- legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
- if _is_compatible(legacy_tag, arch, glibc_version):
- yield linux.replace("linux", legacy_tag)
+ for arch in archs:
+ for glibc_max in glibc_max_list:
+ if glibc_max.major == too_old_glibc2.major:
+ min_minor = too_old_glibc2.minor
+ else:
+ # For other glibc major versions oldest supported is (x, 0).
+ min_minor = -1
+ for glibc_minor in range(glibc_max.minor, min_minor, -1):
+ glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+ tag = "manylinux_{}_{}".format(*glibc_version)
+ if _is_compatible(arch, glibc_version):
+ yield f"{tag}_{arch}"
+ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+ if glibc_version in _LEGACY_MANYLINUX_MAP:
+ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+ if _is_compatible(arch, glibc_version):
+ yield f"{legacy_tag}_{arch}"
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_musllinux.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_musllinux.py
index 706ba600a9..86419df9d7 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_musllinux.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_musllinux.py
@@ -8,7 +8,7 @@ import functools
import re
import subprocess
import sys
-from typing import Iterator, NamedTuple, Optional
+from typing import Iterator, NamedTuple, Optional, Sequence
from ._elffile import ELFFile
@@ -47,24 +47,27 @@ def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
return None
if ld is None or "musl" not in ld:
return None
- proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
+ proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
return _parse_musl_version(proc.stderr)
-def platform_tags(arch: str) -> Iterator[str]:
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
"""Generate musllinux tags compatible to the current platform.
- :param arch: Should be the part of platform tag after the ``linux_``
- prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
- prerequisite for the current platform to be musllinux-compatible.
+ :param archs: Sequence of compatible architectures.
+ The first one shall be the closest to the actual architecture and be the part of
+ platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+ The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+ be musllinux-compatible.
:returns: An iterator of compatible musllinux tags.
"""
sys_musl = _get_musl_version(sys.executable)
if sys_musl is None: # Python not dynamically linked against musl.
return
- for minor in range(sys_musl.minor, -1, -1):
- yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+ for arch in archs:
+ for minor in range(sys_musl.minor, -1, -1):
+ yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
if __name__ == "__main__": # pragma: no cover
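
The analogous sketch for musllinux, which yields nothing unless the interpreter is musl-linked:

    from packaging._musllinux import platform_tags

    print(list(platform_tags(["x86_64"])))   # e.g. [] on a glibc system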
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_parser.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_parser.py
index 5a18b758fe..684df75457 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_parser.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_parser.py
@@ -252,7 +252,13 @@ def _parse_version_many(tokenizer: Tokenizer) -> str:
# Recursive descent parser for marker expression
# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
- return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES))
+ return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
+
+
+def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
+ retval = _parse_marker(tokenizer)
+ tokenizer.expect("END", expected="end of marker expression")
+ return retval
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
@@ -318,10 +324,7 @@ def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
def process_env_var(env_var: str) -> Variable:
- if (
- env_var == "platform_python_implementation"
- or env_var == "python_implementation"
- ):
+ if env_var in ("platform_python_implementation", "python_implementation"):
return Variable("platform_python_implementation")
else:
return Variable(env_var)
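
Effect sketch for the END check: trailing tokens after a complete marker now fail to parse instead of being silently dropped (public `packaging.markers` API assumed):

    from packaging.markers import InvalidMarker, Marker

    try:
        Marker('python_version >= "3.8" python_version')   # trailing junk
    except InvalidMarker:
        print('rejected at end of marker expression')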
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/metadata.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/metadata.py
index e76a60c395..fb27493079 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/metadata.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/metadata.py
@@ -5,23 +5,77 @@ import email.parser
import email.policy
import sys
import typing
-from typing import Dict, List, Optional, Tuple, Union, cast
-
-if sys.version_info >= (3, 8): # pragma: no cover
- from typing import TypedDict
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generic,
+ List,
+ Optional,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+from . import requirements, specifiers, utils, version as version_module
+
+T = typing.TypeVar("T")
+if sys.version_info[:2] >= (3, 8): # pragma: no cover
+ from typing import Literal, TypedDict
else: # pragma: no cover
if typing.TYPE_CHECKING:
- from typing_extensions import TypedDict
+ from typing_extensions import Literal, TypedDict
else:
try:
- from typing_extensions import TypedDict
+ from typing_extensions import Literal, TypedDict
except ImportError:
+ class Literal:
+ def __init_subclass__(*_args, **_kwargs):
+ pass
+
class TypedDict:
def __init_subclass__(*_args, **_kwargs):
pass
+try:
+ ExceptionGroup
+except NameError: # pragma: no cover
+
+ class ExceptionGroup(Exception): # noqa: N818
+ """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
+
+ If :external:exc:`ExceptionGroup` is already defined by Python itself,
+ that version is used instead.
+ """
+
+ message: str
+ exceptions: List[Exception]
+
+ def __init__(self, message: str, exceptions: List[Exception]) -> None:
+ self.message = message
+ self.exceptions = exceptions
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
+
+else: # pragma: no cover
+ ExceptionGroup = ExceptionGroup
+
+
+class InvalidMetadata(ValueError):
+ """A metadata field contains invalid data."""
+
+ field: str
+ """The name of the field that contains invalid data."""
+
+ def __init__(self, field: str, message: str) -> None:
+ self.field = field
+ super().__init__(message)
+
+
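
Minimal sketch of the new exception type, which carries the offending field name alongside the message (assumes `packaging >= 24.0` exposes it from `packaging.metadata`):

    from packaging.metadata import InvalidMetadata

    err = InvalidMetadata("requires-python", "'>= x' is invalid for 'Requires-Python'")
    assert err.field == "requires-python"
    assert isinstance(err, ValueError)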
# The RawMetadata class attempts to make as few assumptions about the underlying
# serialization formats as possible. The idea is that as long as a serialization
# format offers some very basic primitives in *some* way then we can support
@@ -33,7 +87,8 @@ class RawMetadata(TypedDict, total=False):
provided). The key is lower-case and underscores are used instead of dashes
compared to the equivalent core metadata field. Any core metadata field that
can be specified multiple times or can hold multiple values in a single
- field have a key with a plural name.
+ field has a key with a plural name. See :class:`Metadata` whose attributes
+ match the keys of this dictionary.
Core metadata fields that can be specified multiple times are stored as a
list or dict depending on which is appropriate for the field. Any fields
@@ -77,7 +132,7 @@ class RawMetadata(TypedDict, total=False):
# but got stuck without ever being able to build consensus on
# it and ultimately ended up withdrawn.
#
- # However, a number of tools had started emiting METADATA with
+ # However, a number of tools had started emitting METADATA with
# `2.0` Metadata-Version, so for historical reasons, this version
# was skipped.
@@ -110,7 +165,7 @@ _STRING_FIELDS = {
"version",
}
-_LIST_STRING_FIELDS = {
+_LIST_FIELDS = {
"classifiers",
"dynamic",
"obsoletes",
@@ -125,6 +180,10 @@ _LIST_STRING_FIELDS = {
"supported_platforms",
}
+_DICT_FIELDS = {
+ "project_urls",
+}
+
def _parse_keywords(data: str) -> List[str]:
"""Split a string of comma-separate keyboards into a list of keywords."""
@@ -230,10 +289,11 @@ _EMAIL_TO_RAW_MAPPING = {
"supported-platform": "supported_platforms",
"version": "version",
}
+_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
- """Parse a distribution's metadata.
+ """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
This function returns a two-item tuple of dicts. The first dict is of
recognized fields from the core metadata specification. Fields that can be
@@ -267,7 +327,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
# We use get_all() here, even for fields that aren't multiple use,
# because otherwise someone could have e.g. two Name fields, and we
# would just silently ignore it rather than doing something about it.
- headers = parsed.get_all(name)
+ headers = parsed.get_all(name) or []
# The way the email module works when parsing bytes is that it
# unconditionally decodes the bytes as ascii using the surrogateescape
@@ -349,7 +409,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
# If this is one of our list of string fields, then we can just assign
# the value, since email *only* has strings, and our get_all() call
# above ensures that this is a list.
- elif raw_name in _LIST_STRING_FIELDS:
+ elif raw_name in _LIST_FIELDS:
raw[raw_name] = value
# Special Case: Keywords
# The keywords field is implemented in the metadata spec as a str,
@@ -406,3 +466,360 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
# way this function is implemented, our `TypedDict` can only have valid key
# names.
return cast(RawMetadata, raw), unparsed
+
+
+_NOT_FOUND = object()
+
+
+# Keep the two values in sync.
+_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+
+_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
+
+
+class _Validator(Generic[T]):
+ """Validate a metadata field.
+
+ All _process_*() methods correspond to a core metadata field. The method is
+ called with the field's raw value. If the raw value is valid it is returned
+ in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
+ If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
+ as appropriate).
+ """
+
+ name: str
+ raw_name: str
+ added: _MetadataVersion
+
+ def __init__(
+ self,
+ *,
+ added: _MetadataVersion = "1.0",
+ ) -> None:
+ self.added = added
+
+ def __set_name__(self, _owner: "Metadata", name: str) -> None:
+ self.name = name
+ self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
+
+ def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
+ # With Python 3.8, the caching can be replaced with functools.cached_property().
+ # No need to check the cache as attribute lookup will resolve into the
+ # instance's __dict__ before __get__ is called.
+ cache = instance.__dict__
+ value = instance._raw.get(self.name)
+
+ # To make the _process_* methods easier, we'll check if the value is None
+ # and if this field is NOT a required attribute, and if both of those
+ # things are true, we'll skip the converter. This will mean that the
+ # converters never have to deal with the None union.
+ if self.name in _REQUIRED_ATTRS or value is not None:
+ try:
+ converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
+ except AttributeError:
+ pass
+ else:
+ value = converter(value)
+
+ cache[self.name] = value
+ try:
+ del instance._raw[self.name] # type: ignore[misc]
+ except KeyError:
+ pass
+
+ return cast(T, value)
+
+ def _invalid_metadata(
+ self, msg: str, cause: Optional[Exception] = None
+ ) -> InvalidMetadata:
+ exc = InvalidMetadata(
+ self.raw_name, msg.format_map({"field": repr(self.raw_name)})
+ )
+ exc.__cause__ = cause
+ return exc
+
+ def _process_metadata_version(self, value: str) -> _MetadataVersion:
+ # Implicitly makes Metadata-Version required.
+ if value not in _VALID_METADATA_VERSIONS:
+ raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
+ return cast(_MetadataVersion, value)
+
+ def _process_name(self, value: str) -> str:
+ if not value:
+ raise self._invalid_metadata("{field} is a required field")
+ # Validate the name as a side-effect.
+ try:
+ utils.canonicalize_name(value, validate=True)
+ except utils.InvalidName as exc:
+ raise self._invalid_metadata(
+ f"{value!r} is invalid for {{field}}", cause=exc
+ )
+ else:
+ return value
+
+ def _process_version(self, value: str) -> version_module.Version:
+ if not value:
+ raise self._invalid_metadata("{field} is a required field")
+ try:
+ return version_module.parse(value)
+ except version_module.InvalidVersion as exc:
+ raise self._invalid_metadata(
+ f"{value!r} is invalid for {{field}}", cause=exc
+ )
+
+ def _process_summary(self, value: str) -> str:
+ """Check the field contains no newlines."""
+ if "\n" in value:
+ raise self._invalid_metadata("{field} must be a single line")
+ return value
+
+ def _process_description_content_type(self, value: str) -> str:
+ content_types = {"text/plain", "text/x-rst", "text/markdown"}
+ message = email.message.EmailMessage()
+ message["content-type"] = value
+
+ content_type, parameters = (
+ # Defaults to `text/plain` if parsing failed.
+ message.get_content_type().lower(),
+ message["content-type"].params,
+ )
+        # Reject the value if the content type is unknown, or if it defaulted to
+        # `text/plain` because the raw value was not parseable.
+ if content_type not in content_types or content_type not in value.lower():
+ raise self._invalid_metadata(
+ f"{{field}} must be one of {list(content_types)}, not {value!r}"
+ )
+
+ charset = parameters.get("charset", "UTF-8")
+ if charset != "UTF-8":
+ raise self._invalid_metadata(
+ f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
+ )
+
+ markdown_variants = {"GFM", "CommonMark"}
+ variant = parameters.get("variant", "GFM") # Use an acceptable default.
+ if content_type == "text/markdown" and variant not in markdown_variants:
+ raise self._invalid_metadata(
+ f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
+ f"not {variant!r}",
+ )
+ return value
+
+ def _process_dynamic(self, value: List[str]) -> List[str]:
+ for dynamic_field in map(str.lower, value):
+            if dynamic_field in {"name", "version", "metadata-version"}:
+                raise self._invalid_metadata(
+                    f"{dynamic_field!r} is not allowed as a dynamic field"
+                )
+            elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
+                raise self._invalid_metadata(
+                    f"{dynamic_field!r} is not a valid dynamic field"
+                )
+ return list(map(str.lower, value))
+
+ def _process_provides_extra(
+ self,
+ value: List[str],
+ ) -> List[utils.NormalizedName]:
+ normalized_names = []
+ try:
+ for name in value:
+ normalized_names.append(utils.canonicalize_name(name, validate=True))
+ except utils.InvalidName as exc:
+ raise self._invalid_metadata(
+ f"{name!r} is invalid for {{field}}", cause=exc
+ )
+ else:
+ return normalized_names
+
+ def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
+ try:
+ return specifiers.SpecifierSet(value)
+ except specifiers.InvalidSpecifier as exc:
+ raise self._invalid_metadata(
+ f"{value!r} is invalid for {{field}}", cause=exc
+ )
+
+ def _process_requires_dist(
+ self,
+ value: List[str],
+ ) -> List[requirements.Requirement]:
+ reqs = []
+ try:
+ for req in value:
+ reqs.append(requirements.Requirement(req))
+ except requirements.InvalidRequirement as exc:
+ raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
+ else:
+ return reqs
+
+
+class Metadata:
+ """Representation of distribution metadata.
+
+ Compared to :class:`RawMetadata`, this class provides objects representing
+ metadata fields instead of only using built-in types. Any invalid metadata
+ will cause :exc:`InvalidMetadata` to be raised (with a
+ :py:attr:`~BaseException.__cause__` attribute as appropriate).
+ """
+
+ _raw: RawMetadata
+
+ @classmethod
+ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
+ """Create an instance from :class:`RawMetadata`.
+
+ If *validate* is true, all metadata will be validated. All exceptions
+ related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+ """
+ ins = cls()
+ ins._raw = data.copy() # Mutations occur due to caching enriched values.
+
+ if validate:
+ exceptions: List[Exception] = []
+ try:
+ metadata_version = ins.metadata_version
+ metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
+ except InvalidMetadata as metadata_version_exc:
+ exceptions.append(metadata_version_exc)
+ metadata_version = None
+
+            # Check all fields that are present, plus the required fields (so
+            # their absence can be reported).
+ fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
+ # Remove fields that have already been checked.
+ fields_to_check -= {"metadata_version"}
+
+ for key in fields_to_check:
+ try:
+ if metadata_version:
+                        # Can't use getattr() as that triggers the descriptor
+                        # protocol, which would fail since there is no instance
+                        # to look the value up on.
+ try:
+ field_metadata_version = cls.__dict__[key].added
+ except KeyError:
+ exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
+ exceptions.append(exc)
+ continue
+ field_age = _VALID_METADATA_VERSIONS.index(
+ field_metadata_version
+ )
+ if field_age > metadata_age:
+ field = _RAW_TO_EMAIL_MAPPING[key]
+                            exc = InvalidMetadata(
+                                field,
+                                f"{field} introduced in metadata version "
+                                f"{field_metadata_version}, not {metadata_version}",
+                            )
+ exceptions.append(exc)
+ continue
+ getattr(ins, key)
+ except InvalidMetadata as exc:
+ exceptions.append(exc)
+
+ if exceptions:
+ raise ExceptionGroup("invalid metadata", exceptions)
+
+ return ins
+
+ @classmethod
+ def from_email(
+ cls, data: Union[bytes, str], *, validate: bool = True
+ ) -> "Metadata":
+ """Parse metadata from email headers.
+
+ If *validate* is true, the metadata will be validated. All exceptions
+ related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+ """
+ raw, unparsed = parse_email(data)
+
+ if validate:
+            exceptions: List[Exception] = []
+ for unparsed_key in unparsed:
+ if unparsed_key in _EMAIL_TO_RAW_MAPPING:
+ message = f"{unparsed_key!r} has invalid data"
+ else:
+ message = f"unrecognized field: {unparsed_key!r}"
+ exceptions.append(InvalidMetadata(unparsed_key, message))
+
+ if exceptions:
+ raise ExceptionGroup("unparsed", exceptions)
+
+ try:
+ return cls.from_raw(raw, validate=validate)
+ except ExceptionGroup as exc_group:
+ raise ExceptionGroup(
+ "invalid or unparsed metadata", exc_group.exceptions
+ ) from None
+
+ metadata_version: _Validator[_MetadataVersion] = _Validator()
+ """:external:ref:`core-metadata-metadata-version`
+ (required; validated to be a valid metadata version)"""
+ name: _Validator[str] = _Validator()
+ """:external:ref:`core-metadata-name`
+ (required; validated using :func:`~packaging.utils.canonicalize_name` and its
+ *validate* parameter)"""
+ version: _Validator[version_module.Version] = _Validator()
+ """:external:ref:`core-metadata-version` (required)"""
+ dynamic: _Validator[Optional[List[str]]] = _Validator(
+ added="2.2",
+ )
+ """:external:ref:`core-metadata-dynamic`
+ (validated against core metadata field names and lowercased)"""
+ platforms: _Validator[Optional[List[str]]] = _Validator()
+ """:external:ref:`core-metadata-platform`"""
+ supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+ """:external:ref:`core-metadata-supported-platform`"""
+ summary: _Validator[Optional[str]] = _Validator()
+ """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
+ description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body
+ """:external:ref:`core-metadata-description`"""
+ description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
+ """:external:ref:`core-metadata-description-content-type` (validated)"""
+ keywords: _Validator[Optional[List[str]]] = _Validator()
+ """:external:ref:`core-metadata-keywords`"""
+ home_page: _Validator[Optional[str]] = _Validator()
+ """:external:ref:`core-metadata-home-page`"""
+ download_url: _Validator[Optional[str]] = _Validator(added="1.1")
+ """:external:ref:`core-metadata-download-url`"""
+ author: _Validator[Optional[str]] = _Validator()
+ """:external:ref:`core-metadata-author`"""
+ author_email: _Validator[Optional[str]] = _Validator()
+ """:external:ref:`core-metadata-author-email`"""
+ maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-maintainer`"""
+ maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-maintainer-email`"""
+ license: _Validator[Optional[str]] = _Validator()
+ """:external:ref:`core-metadata-license`"""
+ classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+ """:external:ref:`core-metadata-classifier`"""
+ requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
+ added="1.2"
+ )
+ """:external:ref:`core-metadata-requires-dist`"""
+ requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
+ added="1.2"
+ )
+ """:external:ref:`core-metadata-requires-python`"""
+ # Because `Requires-External` allows for non-PEP 440 version specifiers, we
+ # don't do any processing on the values.
+ requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-requires-external`"""
+ project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-project-url`"""
+ # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
+ # regardless of metadata version.
+ provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
+ added="2.1",
+ )
+ """:external:ref:`core-metadata-provides-extra`"""
+ provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-provides-dist`"""
+ obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-obsoletes-dist`"""
+ requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+ """``Requires`` (deprecated)"""
+ provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+ """``Provides`` (deprecated)"""
+ obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+ """``Obsoletes`` (deprecated)"""
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/requirements.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/requirements.py
index f34bfa85c8..bdc43a7e98 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/requirements.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/requirements.py
@@ -2,13 +2,13 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-import urllib.parse
-from typing import Any, List, Optional, Set
+from typing import Any, Iterator, Optional, Set
from ._parser import parse_requirement as _parse_requirement
from ._tokenizer import ParserSyntaxError
from .markers import Marker, _normalize_extra_values
from .specifiers import SpecifierSet
+from .utils import canonicalize_name
class InvalidRequirement(ValueError):
@@ -37,57 +37,52 @@ class Requirement:
raise InvalidRequirement(str(e)) from e
self.name: str = parsed.name
- if parsed.url:
- parsed_url = urllib.parse.urlparse(parsed.url)
- if parsed_url.scheme == "file":
- if urllib.parse.urlunparse(parsed_url) != parsed.url:
- raise InvalidRequirement("Invalid URL given")
- elif not (parsed_url.scheme and parsed_url.netloc) or (
- not parsed_url.scheme and not parsed_url.netloc
- ):
- raise InvalidRequirement(f"Invalid URL: {parsed.url}")
- self.url: Optional[str] = parsed.url
- else:
- self.url = None
- self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
+ self.url: Optional[str] = parsed.url or None
+ self.extras: Set[str] = set(parsed.extras or [])
self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
self.marker: Optional[Marker] = None
if parsed.marker is not None:
self.marker = Marker.__new__(Marker)
self.marker._markers = _normalize_extra_values(parsed.marker)
- def __str__(self) -> str:
- parts: List[str] = [self.name]
+ def _iter_parts(self, name: str) -> Iterator[str]:
+ yield name
if self.extras:
formatted_extras = ",".join(sorted(self.extras))
- parts.append(f"[{formatted_extras}]")
+ yield f"[{formatted_extras}]"
if self.specifier:
- parts.append(str(self.specifier))
+ yield str(self.specifier)
if self.url:
- parts.append(f"@ {self.url}")
+ yield f"@ {self.url}"
if self.marker:
- parts.append(" ")
+ yield " "
if self.marker:
- parts.append(f"; {self.marker}")
+ yield f"; {self.marker}"
- return "".join(parts)
+ def __str__(self) -> str:
+ return "".join(self._iter_parts(self.name))
def __repr__(self) -> str:
return f"<Requirement('{self}')>"
def __hash__(self) -> int:
- return hash((self.__class__.__name__, str(self)))
+ return hash(
+ (
+ self.__class__.__name__,
+ *self._iter_parts(canonicalize_name(self.name)),
+ )
+ )
def __eq__(self, other: Any) -> bool:
if not isinstance(other, Requirement):
return NotImplemented
return (
- self.name == other.name
+ canonicalize_name(self.name) == canonicalize_name(other.name)
and self.extras == other.extras
and self.specifier == other.specifier
and self.url == other.url
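A sketch of the behavioral change above: `__eq__` and `__hash__` now compare
canonicalized names, so differently spelled project names compare equal, while
`str()` keeps the original spelling (assuming `packaging.requirements` is
importable):

    from packaging.requirements import Requirement

    a = Requirement("Foo.Bar==1.0")
    b = Requirement("foo-bar==1.0")
    assert a == b and hash(a) == hash(b)  # names compared via canonicalize_name()
    assert str(a) == "Foo.Bar==1.0"       # original spelling is preserved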
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/specifiers.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/specifiers.py
index ba8fe37b7f..2d015bab59 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/specifiers.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/specifiers.py
@@ -11,17 +11,7 @@
import abc
import itertools
import re
-from typing import (
- Callable,
- Iterable,
- Iterator,
- List,
- Optional,
- Set,
- Tuple,
- TypeVar,
- Union,
-)
+from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
from .utils import canonicalize_version
from .version import Version
@@ -383,7 +373,7 @@ class Specifier(BaseSpecifier):
# We want everything but the last item in the version, but we want to
# ignore suffix segments.
- prefix = ".".join(
+ prefix = _version_join(
list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
)
@@ -404,13 +394,13 @@ class Specifier(BaseSpecifier):
)
# Get the normalized version string ignoring the trailing .*
normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
- # Split the spec out by dots, and pretend that there is an implicit
- # dot in between a release segment and a pre-release segment.
+ # Split the spec out by bangs and dots, and pretend that there is
+ # an implicit dot in between a release segment and a pre-release segment.
split_spec = _version_split(normalized_spec)
- # Split the prospective version out by dots, and pretend that there
- # is an implicit dot in between a release segment and a pre-release
- # segment.
+ # Split the prospective version out by bangs and dots, and pretend
+ # that there is an implicit dot in between a release segment and
+ # a pre-release segment.
split_prospective = _version_split(normalized_prospective)
# 0-pad the prospective version before shortening it to get the correct
@@ -644,8 +634,19 @@ _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
def _version_split(version: str) -> List[str]:
+ """Split version into components.
+
+ The split components are intended for version comparison. The logic does
+ not attempt to retain the original version string, so joining the
+ components back with :func:`_version_join` may not produce the original
+ version string.
+ """
result: List[str] = []
- for item in version.split("."):
+
+ epoch, _, rest = version.rpartition("!")
+ result.append(epoch or "0")
+
+ for item in rest.split("."):
match = _prefix_regex.search(item)
if match:
result.extend(match.groups())
@@ -654,6 +655,17 @@ def _version_split(version: str) -> List[str]:
return result
+def _version_join(components: List[str]) -> str:
+ """Join split version components into a version string.
+
+    This function assumes the input came from :func:`_version_split`, where the
+    first component is always the epoch, and the remaining components are the
+    release and suffix segments.
+ """
+ epoch, *rest = components
+ return f"{epoch}!{'.'.join(rest)}"
+
+
def _is_not_suffix(segment: str) -> bool:
return not any(
segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
@@ -675,7 +687,10 @@ def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
- return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+ return (
+ list(itertools.chain.from_iterable(left_split)),
+ list(itertools.chain.from_iterable(right_split)),
+ )
class SpecifierSet(BaseSpecifier):
@@ -707,14 +722,8 @@ class SpecifierSet(BaseSpecifier):
# strip each item to remove leading/trailing whitespace.
split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
- # Parsed each individual specifier, attempting first to make it a
- # Specifier.
- parsed: Set[Specifier] = set()
- for specifier in split_specifiers:
- parsed.add(Specifier(specifier))
-
- # Turn our parsed specifiers into a frozen set and save them for later.
- self._specs = frozenset(parsed)
+ # Make each individual specifier a Specifier and save in a frozen set for later.
+ self._specs = frozenset(map(Specifier, split_specifiers))
# Store our prereleases value so we can use it later to determine if
# we accept prereleases or not.
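A sketch of how the epoch-aware helpers above behave (`_version_split` and
`_version_join` are private; the values are illustrative):

    _version_split("2.0.0rc1")           # ['0', '2', '0', '0', 'rc1'] (implicit epoch)
    _version_split("1!2.0.0")            # ['1', '2', '0', '0']
    _version_join(['1', '2', '0', '0'])  # '1!2.0.0'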
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/tags.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/tags.py
index 76d243414d..89f1926137 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/tags.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/tags.py
@@ -4,6 +4,8 @@
import logging
import platform
+import re
+import struct
import subprocess
import sys
import sysconfig
@@ -37,7 +39,7 @@ INTERPRETER_SHORT_NAMES: Dict[str, str] = {
}
-_32_BIT_INTERPRETER = sys.maxsize <= 2**32
+_32_BIT_INTERPRETER = struct.calcsize("P") == 4
class Tag:
@@ -123,20 +125,37 @@ def _normalize_string(string: str) -> str:
return string.replace(".", "_").replace("-", "_").replace(" ", "_")
-def _abi3_applies(python_version: PythonVersion) -> bool:
+def _is_threaded_cpython(abis: List[str]) -> bool:
+ """
+ Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
+
+ The threaded builds are indicated by a "t" in the abiflags.
+ """
+ if len(abis) == 0:
+ return False
+ # expect e.g., cp313
+ m = re.match(r"cp\d+(.*)", abis[0])
+ if not m:
+ return False
+ abiflags = m.group(1)
+ return "t" in abiflags
+
+
+def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
"""
Determine if the Python version supports abi3.
- PEP 384 was first implemented in Python 3.2.
+ PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
+ builds do not support abi3.
"""
- return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+ return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
py_version = tuple(py_version) # To allow for version comparison.
abis = []
version = _version_nodot(py_version[:2])
- debug = pymalloc = ucs4 = ""
+ threading = debug = pymalloc = ucs4 = ""
with_debug = _get_config_var("Py_DEBUG", warn)
has_refcount = hasattr(sys, "gettotalrefcount")
# Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
@@ -145,6 +164,8 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
has_ext = "_d.pyd" in EXTENSION_SUFFIXES
if with_debug or (with_debug is None and (has_refcount or has_ext)):
debug = "d"
+ if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
+ threading = "t"
if py_version < (3, 8):
with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
if with_pymalloc or with_pymalloc is None:
@@ -158,13 +179,8 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
elif debug:
# Debug builds can also load "normal" extension modules.
# We can also assume no UCS-4 or pymalloc requirement.
- abis.append(f"cp{version}")
- abis.insert(
- 0,
- "cp{version}{debug}{pymalloc}{ucs4}".format(
- version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
- ),
- )
+ abis.append(f"cp{version}{threading}")
+ abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
return abis
@@ -212,11 +228,14 @@ def cpython_tags(
for abi in abis:
for platform_ in platforms:
yield Tag(interpreter, abi, platform_)
- if _abi3_applies(python_version):
+
+ threading = _is_threaded_cpython(abis)
+ use_abi3 = _abi3_applies(python_version, threading)
+ if use_abi3:
yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
- if _abi3_applies(python_version):
+ if use_abi3:
for minor_version in range(python_version[1] - 1, 1, -1):
for platform_ in platforms:
interpreter = "cp{version}".format(
@@ -406,7 +425,7 @@ def mac_platforms(
check=True,
env={"SYSTEM_VERSION_COMPAT": "0"},
stdout=subprocess.PIPE,
- universal_newlines=True,
+ text=True,
).stdout
version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
else:
@@ -469,15 +488,21 @@ def mac_platforms(
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
linux = _normalize_string(sysconfig.get_platform())
+ if not linux.startswith("linux_"):
+        # We should never get here; just yield the sysconfig value and return.
+ yield linux
+ return
if is_32bit:
if linux == "linux_x86_64":
linux = "linux_i686"
elif linux == "linux_aarch64":
- linux = "linux_armv7l"
+ linux = "linux_armv8l"
_, arch = linux.split("_", 1)
- yield from _manylinux.platform_tags(linux, arch)
- yield from _musllinux.platform_tags(arch)
- yield linux
+ archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
+ yield from _manylinux.platform_tags(archs)
+ yield from _musllinux.platform_tags(archs)
+ for arch in archs:
+ yield f"linux_{arch}"
def _generic_platforms() -> Iterator[str]:
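A sketch of the free-threaded-build detection added above (private helpers;
values illustrative):

    _is_threaded_cpython(["cp313t"])        # True: "t" abiflag, abi3 tags skipped
    _is_threaded_cpython(["cp313"])         # False
    _abi3_applies((3, 13), threading=True)  # False: --disable-gil builds lack abi3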
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/utils.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/utils.py
index 33c613b749..c2c2f75aa8 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/utils.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/utils.py
@@ -12,6 +12,12 @@ BuildTag = Union[Tuple[()], Tuple[int, str]]
NormalizedName = NewType("NormalizedName", str)
+class InvalidName(ValueError):
+ """
+ An invalid distribution name; users should refer to the packaging user guide.
+ """
+
+
class InvalidWheelFilename(ValueError):
"""
An invalid wheel filename was found, users should refer to PEP 427.
@@ -24,17 +30,28 @@ class InvalidSdistFilename(ValueError):
"""
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+ r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
_canonicalize_regex = re.compile(r"[-_.]+")
+_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
-def canonicalize_name(name: str) -> NormalizedName:
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+ if validate and not _validate_regex.match(name):
+ raise InvalidName(f"name is invalid: {name!r}")
# This is taken from PEP 503.
value = _canonicalize_regex.sub("-", name).lower()
return cast(NormalizedName, value)
+def is_normalized_name(name: str) -> bool:
+ return _normalized_regex.match(name) is not None
+
+
def canonicalize_version(
version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
@@ -100,11 +117,18 @@ def parse_wheel_filename(
parts = filename.split("-", dashes - 2)
name_part = parts[0]
- # See PEP 427 for the rules on escaping the project name
+ # See PEP 427 for the rules on escaping the project name.
if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
raise InvalidWheelFilename(f"Invalid project name: {filename}")
name = canonicalize_name(name_part)
- version = Version(parts[1])
+
+ try:
+ version = Version(parts[1])
+ except InvalidVersion as e:
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (invalid version): {filename}"
+ ) from e
+
if dashes == 5:
build_part = parts[2]
build_match = _build_tag_regex.match(build_part)
@@ -137,5 +161,12 @@ def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
name = canonicalize_name(name_part)
- version = Version(version_part)
+
+ try:
+ version = Version(version_part)
+ except InvalidVersion as e:
+ raise InvalidSdistFilename(
+ f"Invalid sdist filename (invalid version): {filename}"
+ ) from e
+
return (name, version)
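A sketch of the new name-validation helpers above (assuming `packaging.utils`
is importable):

    from packaging.utils import InvalidName, canonicalize_name, is_normalized_name

    canonicalize_name("Foo.Bar", validate=True)  # 'foo-bar'
    is_normalized_name("foo-bar")                # True
    is_normalized_name("Foo.Bar")                # False
    canonicalize_name("-bad-", validate=True)    # raises InvalidName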
diff --git a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/version.py b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/version.py
index b30e8cbf84..5faab9bd0d 100644
--- a/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/version.py
+++ b/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/version.py
@@ -7,37 +7,39 @@
from packaging.version import parse, Version
"""
-import collections
import itertools
import re
-from typing import Any, Callable, Optional, SupportsInt, Tuple, Union
+from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
-InfiniteTypes = Union[InfinityType, NegativeInfinityType]
-PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
-SubLocalType = Union[InfiniteTypes, int, str]
-LocalType = Union[
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
NegativeInfinityType,
- Tuple[
- Union[
- SubLocalType,
- Tuple[SubLocalType, str],
- Tuple[NegativeInfinityType, SubLocalType],
- ],
- ...,
- ],
+ Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
]
CmpKey = Tuple[
- int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+ int,
+ Tuple[int, ...],
+ CmpPrePostDevType,
+ CmpPrePostDevType,
+ CmpPrePostDevType,
+ CmpLocalType,
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
-_Version = collections.namedtuple(
- "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
-)
+
+class _Version(NamedTuple):
+ epoch: int
+ release: Tuple[int, ...]
+ dev: Optional[Tuple[str, int]]
+ pre: Optional[Tuple[str, int]]
+ post: Optional[Tuple[str, int]]
+ local: Optional[LocalType]
def parse(version: str) -> "Version":
@@ -117,7 +119,7 @@ _VERSION_PATTERN = r"""
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
- (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+ (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
@@ -269,8 +271,7 @@ class Version(_BaseVersion):
>>> Version("1!2.0.0").epoch
1
"""
- _epoch: int = self._version.epoch
- return _epoch
+ return self._version.epoch
@property
def release(self) -> Tuple[int, ...]:
@@ -286,8 +287,7 @@ class Version(_BaseVersion):
Includes trailing zeroes but not the epoch or any pre-release / development /
post-release suffixes.
"""
- _release: Tuple[int, ...] = self._version.release
- return _release
+ return self._version.release
@property
def pre(self) -> Optional[Tuple[str, int]]:
@@ -302,8 +302,7 @@ class Version(_BaseVersion):
>>> Version("1.2.3rc1").pre
('rc', 1)
"""
- _pre: Optional[Tuple[str, int]] = self._version.pre
- return _pre
+ return self._version.pre
@property
def post(self) -> Optional[int]:
@@ -451,7 +450,7 @@ class Version(_BaseVersion):
def _parse_letter_version(
- letter: str, number: Union[str, bytes, SupportsInt]
+ letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
) -> Optional[Tuple[str, int]]:
if letter:
@@ -489,7 +488,7 @@ def _parse_letter_version(
_local_version_separators = re.compile(r"[\._-]")
-def _parse_local_version(local: str) -> Optional[LocalType]:
+def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
@@ -507,7 +506,7 @@ def _cmpkey(
pre: Optional[Tuple[str, int]],
post: Optional[Tuple[str, int]],
dev: Optional[Tuple[str, int]],
- local: Optional[Tuple[SubLocalType]],
+ local: Optional[LocalType],
) -> CmpKey:
# When we compare a release version, we want to compare it with all of the
@@ -524,7 +523,7 @@ def _cmpkey(
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
- _pre: PrePostDevType = NegativeInfinity
+ _pre: CmpPrePostDevType = NegativeInfinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
@@ -534,21 +533,21 @@ def _cmpkey(
# Versions without a post segment should sort before those with one.
if post is None:
- _post: PrePostDevType = NegativeInfinity
+ _post: CmpPrePostDevType = NegativeInfinity
else:
_post = post
# Versions without a development segment should sort after those with one.
if dev is None:
- _dev: PrePostDevType = Infinity
+ _dev: CmpPrePostDevType = Infinity
else:
_dev = dev
if local is None:
# Versions without a local segment should sort before those with one.
- _local: LocalType = NegativeInfinity
+ _local: CmpLocalType = NegativeInfinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
diff --git a/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py b/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py
index 948bcc6094..df96f7f26d 100644
--- a/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py
+++ b/contrib/python/setuptools/py3/pkg_resources/extern/__init__.py
@@ -76,5 +76,6 @@ names = (
'jaraco',
'importlib_resources',
'more_itertools',
+ 'backports',
)
VendorImporter(__name__, names).install()
diff --git a/contrib/python/setuptools/py3/setuptools/_core_metadata.py b/contrib/python/setuptools/py3/setuptools/_core_metadata.py
index 5dd97c7719..9b4f38ded2 100644
--- a/contrib/python/setuptools/py3/setuptools/_core_metadata.py
+++ b/contrib/python/setuptools/py3/setuptools/_core_metadata.py
@@ -17,6 +17,7 @@ from distutils.util import rfc822_escape
from . import _normalization, _reqs
from .extern.packaging.markers import Marker
from .extern.packaging.requirements import Requirement
+from .extern.packaging.utils import canonicalize_name
from .extern.packaging.version import Version
from .warnings import SetuptoolsDeprecationWarning
@@ -257,3 +258,11 @@ def _write_provides_extra(file, processed_extras, safe, unsafe):
else:
processed_extras[safe] = unsafe
file.write(f"Provides-Extra: {safe}\n")
+
+
+# from pypa/distutils#244; needed only until that logic is always available
+def get_fullname(self):
+ return "{}-{}".format(
+ canonicalize_name(self.get_name()).replace('-', '_'),
+ self.get_version(),
+ )
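A sketch of the base name get_fullname() produces (the import path assumes the
standalone `packaging` distribution; setuptools uses its vendored copy):

    from packaging.utils import canonicalize_name

    name, version = "My.Package", "1.0"
    fullname = f"{canonicalize_name(name).replace('-', '_')}-{version}"
    assert fullname == "my_package-1.0"  # PEP 625-style sdist base name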
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/__init__.py b/contrib/python/setuptools/py3/setuptools/_distutils/__init__.py
index 1a188c35cb..e374d5c560 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/__init__.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/__init__.py
@@ -1,5 +1,5 @@
-import sys
import importlib
+import sys
__version__, _, _ = sys.version.partition(' ')
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_collections.py b/contrib/python/setuptools/py3/setuptools/_distutils/_collections.py
index 5ad21cc7c9..d11a83467c 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/_collections.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_collections.py
@@ -1,7 +1,11 @@
+from __future__ import annotations
+
import collections
import functools
import itertools
import operator
+from collections.abc import Mapping
+from typing import Any
# from jaraco.collections 3.5.1
@@ -58,7 +62,7 @@ class DictStack(list, collections.abc.Mapping):
return len(list(iter(self)))
-# from jaraco.collections 3.7
+# from jaraco.collections 5.0.1
class RangeMap(dict):
"""
A dictionary-like object that uses the keys as bounds for a range.
@@ -70,7 +74,7 @@ class RangeMap(dict):
One may supply keyword parameters to be passed to the sort function used
to sort keys (i.e. key, reverse) as sort_params.
- Let's create a map that maps 1-3 -> 'a', 4-6 -> 'b'
+ Create a map that maps 1-3 -> 'a', 4-6 -> 'b'
>>> r = RangeMap({3: 'a', 6: 'b'}) # boy, that was easy
>>> r[1], r[2], r[3], r[4], r[5], r[6]
@@ -82,7 +86,7 @@ class RangeMap(dict):
>>> r[4.5]
'b'
- But you'll notice that the way rangemap is defined, it must be open-ended
+ Notice that the way rangemap is defined, it must be open-ended
on one side.
>>> r[0]
@@ -140,7 +144,12 @@ class RangeMap(dict):
"""
- def __init__(self, source, sort_params={}, key_match_comparator=operator.le):
+ def __init__(
+ self,
+ source,
+ sort_params: Mapping[str, Any] = {},
+ key_match_comparator=operator.le,
+ ):
dict.__init__(self, source)
self.sort_params = sort_params
self.match = key_match_comparator
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_itertools.py b/contrib/python/setuptools/py3/setuptools/_distutils/_itertools.py
new file mode 100644
index 0000000000..85b2951186
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_itertools.py
@@ -0,0 +1,52 @@
+# from more_itertools 10.2
+def always_iterable(obj, base_type=(str, bytes)):
+ """If *obj* is iterable, return an iterator over its items::
+
+ >>> obj = (1, 2, 3)
+ >>> list(always_iterable(obj))
+ [1, 2, 3]
+
+ If *obj* is not iterable, return a one-item iterable containing *obj*::
+
+ >>> obj = 1
+ >>> list(always_iterable(obj))
+ [1]
+
+ If *obj* is ``None``, return an empty iterable:
+
+ >>> obj = None
+ >>> list(always_iterable(None))
+ []
+
+ By default, binary and text strings are not considered iterable::
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj))
+ ['foo']
+
+ If *base_type* is set, objects for which ``isinstance(obj, base_type)``
+ returns ``True`` won't be considered iterable.
+
+ >>> obj = {'a': 1}
+ >>> list(always_iterable(obj)) # Iterate over the dict's keys
+ ['a']
+ >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit
+ [{'a': 1}]
+
+ Set *base_type* to ``None`` to avoid any special handling and treat objects
+ Python considers iterable as iterable:
+
+ >>> obj = 'foo'
+ >>> list(always_iterable(obj, base_type=None))
+ ['f', 'o', 'o']
+ """
+ if obj is None:
+ return iter(())
+
+ if (base_type is not None) and isinstance(obj, base_type):
+ return iter((obj,))
+
+ try:
+ return iter(obj)
+ except TypeError:
+ return iter((obj,))
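This helper lets callers later in this diff (e.g. gen_lib_options in
ccompiler.py) drop their str-vs-list branching; a short sketch of the two
cases it normalizes:

    list(always_iterable("-R/usr/lib"))            # ['-R/usr/lib']
    list(always_iterable(["-Xlinker", "-rpath"]))  # ['-Xlinker', '-rpath']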
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_log.py b/contrib/python/setuptools/py3/setuptools/_distutils/_log.py
index 4a2ae0acb8..0148f157ff 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/_log.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_log.py
@@ -1,4 +1,3 @@
import logging
-
log = logging.getLogger()
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_macos_compat.py b/contrib/python/setuptools/py3/setuptools/_distutils/_macos_compat.py
index 17769e9154..76ecb96abe 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/_macos_compat.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_macos_compat.py
@@ -1,5 +1,5 @@
-import sys
import importlib
+import sys
def bypass_compiler_fixup(cmd, args):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_modified.py b/contrib/python/setuptools/py3/setuptools/_distutils/_modified.py
index fbb95a8f27..78485dc25e 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/_modified.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_modified.py
@@ -3,9 +3,9 @@
import functools
import os.path
+from ._functools import splat
from .errors import DistutilsFileError
from .py39compat import zip_strict
-from ._functools import splat
def _newer(source, target):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/_msvccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/_msvccompiler.py
index 4f081c7e92..a2159fef83 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/_msvccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/_msvccompiler.py
@@ -13,28 +13,28 @@ for older versions in distutils.msvc9compiler and distutils.msvccompiler.
# ported to VS 2005 and VS 2008 by Christian Heimes
# ported to VS 2015 by Steve Dower
+import contextlib
import os
import subprocess
-import contextlib
-import warnings
import unittest.mock as mock
+import warnings
with contextlib.suppress(ImportError):
import winreg
+from itertools import count
+
+from ._log import log
+from .ccompiler import CCompiler, gen_lib_options
from .errors import (
+ CompileError,
DistutilsExecError,
DistutilsPlatformError,
- CompileError,
LibError,
LinkError,
)
-from .ccompiler import CCompiler, gen_lib_options
-from ._log import log
from .util import get_platform
-from itertools import count
-
def _find_vc2015():
try:
@@ -253,7 +253,7 @@ class MSVCCompiler(CCompiler):
vc_env = _get_vc_env(plat_spec)
if not vc_env:
raise DistutilsPlatformError(
- "Unable to find a compatible " "Visual Studio installation."
+ "Unable to find a compatible Visual Studio installation."
)
self._configure(vc_env)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/archive_util.py b/contrib/python/setuptools/py3/setuptools/_distutils/archive_util.py
index 7f9e1e00cc..052f6e4646 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/archive_util.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/archive_util.py
@@ -4,8 +4,8 @@ Utility functions for creating archive files (tarballs, zip files,
that sort of thing)."""
import os
-from warnings import warn
import sys
+from warnings import warn
try:
import zipfile
@@ -13,10 +13,10 @@ except ImportError:
zipfile = None
+from ._log import log
+from .dir_util import mkpath
from .errors import DistutilsExecError
from .spawn import spawn
-from .dir_util import mkpath
-from ._log import log
try:
from pwd import getpwnam
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/bcppcompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/bcppcompiler.py
index 3c2ba15410..c1341e43cb 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/bcppcompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/bcppcompiler.py
@@ -11,22 +11,20 @@ for the Borland C++ compiler.
# someone should sit down and factor out the common code as
# WindowsCCompiler! --GPW
-
import os
import warnings
+from ._log import log
+from ._modified import newer
+from .ccompiler import CCompiler, gen_preprocess_options
from .errors import (
- DistutilsExecError,
CompileError,
+ DistutilsExecError,
LibError,
LinkError,
UnknownFileError,
)
-from .ccompiler import CCompiler, gen_preprocess_options
from .file_util import write_file
-from ._modified import newer
-from ._log import log
-
warnings.warn(
"bcppcompiler is deprecated and slated to be removed "
@@ -239,7 +237,7 @@ class BCPPCompiler(CCompiler):
def_file = os.path.join(temp_dir, '%s.def' % modname)
contents = ['EXPORTS']
for sym in export_symbols or []:
- contents.append(' {}=_{}'.format(sym, sym))
+ contents.append(f' {sym}=_{sym}')
self.execute(write_file, (def_file, contents), "writing %s" % def_file)
# Borland C++ has problems with '/' in paths
@@ -349,9 +347,7 @@ class BCPPCompiler(CCompiler):
# use normcase to make sure '.rc' is really '.rc' and not '.RC'
(base, ext) = os.path.splitext(os.path.normcase(src_name))
if ext not in (self.src_extensions + ['.rc', '.res']):
- raise UnknownFileError(
- "unknown file type '{}' (from '{}')".format(ext, src_name)
- )
+ raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')")
if strip_dir:
base = os.path.basename(base)
if ext == '.res':
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/ccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/ccompiler.py
index c1c7d5476e..8876d73098 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/ccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/ccompiler.py
@@ -3,24 +3,25 @@
Contains CCompiler, an abstract base class that defines the interface
for the Distutils compiler abstraction model."""
-import sys
import os
import re
+import sys
import warnings
+from ._itertools import always_iterable
+from ._log import log
+from ._modified import newer_group
+from .dir_util import mkpath
from .errors import (
CompileError,
+ DistutilsModuleError,
+ DistutilsPlatformError,
LinkError,
UnknownFileError,
- DistutilsPlatformError,
- DistutilsModuleError,
)
-from .spawn import spawn
from .file_util import move_file
-from .dir_util import mkpath
-from ._modified import newer_group
-from .util import split_quoted, execute
-from ._log import log
+from .spawn import spawn
+from .util import execute, split_quoted
class CCompiler:
@@ -168,8 +169,7 @@ class CCompiler:
for key in kwargs:
if key not in self.executables:
raise ValueError(
- "unknown executable '%s' for class %s"
- % (key, self.__class__.__name__)
+ f"unknown executable '{key}' for class {self.__class__.__name__}"
)
self.set_executable(key, kwargs[key])
@@ -382,7 +382,7 @@ class CCompiler:
raise TypeError("'output_dir' must be a string or None")
if macros is None:
- macros = self.macros
+ macros = list(self.macros)
elif isinstance(macros, list):
macros = macros + (self.macros or [])
else:
@@ -441,14 +441,14 @@ class CCompiler:
fixed versions of all arguments.
"""
if libraries is None:
- libraries = self.libraries
+ libraries = list(self.libraries)
elif isinstance(libraries, (list, tuple)):
libraries = list(libraries) + (self.libraries or [])
else:
raise TypeError("'libraries' (if supplied) must be a list of strings")
if library_dirs is None:
- library_dirs = self.library_dirs
+ library_dirs = list(self.library_dirs)
elif isinstance(library_dirs, (list, tuple)):
library_dirs = list(library_dirs) + (self.library_dirs or [])
else:
@@ -458,14 +458,14 @@ class CCompiler:
library_dirs += self.__class__.library_dirs
if runtime_library_dirs is None:
- runtime_library_dirs = self.runtime_library_dirs
+ runtime_library_dirs = list(self.runtime_library_dirs)
elif isinstance(runtime_library_dirs, (list, tuple)):
runtime_library_dirs = list(runtime_library_dirs) + (
self.runtime_library_dirs or []
)
else:
raise TypeError(
- "'runtime_library_dirs' (if supplied) " "must be a list of strings"
+ "'runtime_library_dirs' (if supplied) must be a list of strings"
)
return (libraries, library_dirs, runtime_library_dirs)
@@ -857,8 +857,7 @@ class CCompiler:
if library_dirs is None:
library_dirs = []
fd, fname = tempfile.mkstemp(".c", funcname, text=True)
- f = os.fdopen(fd, "w")
- try:
+ with os.fdopen(fd, "w", encoding='utf-8') as f:
for incl in includes:
f.write("""#include "%s"\n""" % incl)
if not includes:
@@ -887,8 +886,7 @@ int main (int argc, char **argv) {
"""
% funcname
)
- finally:
- f.close()
+
try:
objects = self.compile([fname], include_dirs=include_dirs)
except CompileError:
@@ -972,9 +970,7 @@ int main (int argc, char **argv) {
try:
new_ext = self.out_extensions[ext]
except LookupError:
- raise UnknownFileError(
- "unknown file type '{}' (from '{}')".format(ext, src_name)
- )
+ raise UnknownFileError(f"unknown file type '{ext}' (from '{src_name}')")
if strip_dir:
base = os.path.basename(base)
return os.path.join(output_dir, base + new_ext)
@@ -1004,7 +1000,11 @@ int main (int argc, char **argv) {
return os.path.join(output_dir, basename + (self.exe_extension or ''))
def library_filename(
- self, libname, lib_type='static', strip_dir=0, output_dir='' # or 'shared'
+ self,
+ libname,
+ lib_type='static',
+ strip_dir=0,
+ output_dir='', # or 'shared'
):
assert output_dir is not None
expected = '"static", "shared", "dylib", "xcode_stub"'
@@ -1056,6 +1056,7 @@ _default_compilers = (
# on a cygwin built python we can use gcc like an ordinary UNIXish
# compiler
('cygwin.*', 'unix'),
+ ('zos', 'zos'),
# OS name mappings
('posix', 'unix'),
('nt', 'msvc'),
@@ -1103,6 +1104,7 @@ compiler_class = {
"Mingw32 port of GNU C Compiler for Win32",
),
'bcpp': ('bcppcompiler', 'BCPPCompiler', "Borland C++ Compiler"),
+ 'zos': ('zosccompiler', 'zOSCCompiler', 'IBM XL C/C++ Compilers'),
}
@@ -1159,8 +1161,8 @@ def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
)
except KeyError:
raise DistutilsModuleError(
- "can't compile C/C++ code: unable to find class '%s' "
- "in module '%s'" % (class_name, module_name)
+ f"can't compile C/C++ code: unable to find class '{class_name}' "
+ f"in module '{module_name}'"
)
# XXX The None is necessary to preserve backwards compatibility
@@ -1207,7 +1209,7 @@ def gen_preprocess_options(macros, include_dirs):
# XXX *don't* need to be clever about quoting the
# macro value here, because we're going to avoid the
# shell at all costs when we spawn the command!
- pp_opts.append("-D%s=%s" % macro)
+ pp_opts.append("-D{}={}".format(*macro))
for dir in include_dirs:
pp_opts.append("-I%s" % dir)
@@ -1227,11 +1229,7 @@ def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
lib_opts.append(compiler.library_dir_option(dir))
for dir in runtime_library_dirs:
- opt = compiler.runtime_library_dir_option(dir)
- if isinstance(opt, list):
- lib_opts = lib_opts + opt
- else:
- lib_opts.append(opt)
+ lib_opts.extend(always_iterable(compiler.runtime_library_dir_option(dir)))
# XXX it's important that we *not* remove redundant library mentions!
# sometimes you really do have to say "-lfoo -lbar -lfoo" in order to
@@ -1247,7 +1245,7 @@ def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries):
lib_opts.append(lib_file)
else:
compiler.warn(
- "no library file corresponding to " "'%s' found (skipping)" % lib
+ "no library file corresponding to '%s' found (skipping)" % lib
)
else:
lib_opts.append(compiler.library_option(lib))
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/cmd.py b/contrib/python/setuptools/py3/setuptools/_distutils/cmd.py
index 8fdcbc0ea2..02dbf165f5 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/cmd.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/cmd.py
@@ -4,14 +4,14 @@ Provides the Command class, the base class for the command classes
in the distutils.command package.
"""
-import sys
+import logging
import os
import re
-import logging
+import sys
-from .errors import DistutilsOptionError
-from . import util, dir_util, file_util, archive_util, _modified
+from . import _modified, archive_util, dir_util, file_util, util
from ._log import log
+from .errors import DistutilsOptionError
class Command:
@@ -165,7 +165,7 @@ class Command:
if option[-1] == "=":
option = option[:-1]
value = getattr(self, option)
- self.announce(indent + "{} = {}".format(option, value), level=logging.INFO)
+ self.announce(indent + f"{option} = {value}", level=logging.INFO)
def run(self):
"""A command's raison d'etre: carry out the action it exists to
@@ -213,9 +213,7 @@ class Command:
setattr(self, option, default)
return default
elif not isinstance(val, str):
- raise DistutilsOptionError(
- "'{}' must be a {} (got `{}`)".format(option, what, val)
- )
+ raise DistutilsOptionError(f"'{option}' must be a {what} (got `{val}`)")
return val
def ensure_string(self, option, default=None):
@@ -242,7 +240,7 @@ class Command:
ok = False
if not ok:
raise DistutilsOptionError(
- "'{}' must be a list of strings (got {!r})".format(option, val)
+ f"'{option}' must be a list of strings (got {val!r})"
)
def _ensure_tested_string(self, option, tester, what, error_fmt, default=None):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/_framework_compat.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/_framework_compat.py
index cffa27cb08..00d34bc7d8 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/_framework_compat.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/_framework_compat.py
@@ -2,15 +2,14 @@
Backward compatibility for homebrew builds on macOS.
"""
-
-import sys
-import os
import functools
+import os
import subprocess
+import sys
import sysconfig
-@functools.lru_cache()
+@functools.lru_cache
def enabled():
"""
Only enabled for Python 3.9 framework homebrew builds
@@ -38,7 +37,7 @@ schemes = dict(
)
-@functools.lru_cache()
+@functools.lru_cache
def vars():
if not enabled():
return {}
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist.py
index 6329039ce4..ade98445ba 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist.py
@@ -7,7 +7,7 @@ import os
import warnings
from ..core import Command
-from ..errors import DistutilsPlatformError, DistutilsOptionError
+from ..errors import DistutilsOptionError, DistutilsPlatformError
from ..util import get_platform
@@ -47,18 +47,18 @@ class bdist(Command):
(
'dist-dir=',
'd',
- "directory to put final built distributions in " "[default: dist]",
+ "directory to put final built distributions in [default: dist]",
),
('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
(
'owner=',
'u',
- "Owner name used when creating a tar file" " [default: current user]",
+ "Owner name used when creating a tar file [default: current user]",
),
(
'group=',
'g',
- "Group name used when creating a tar file" " [default: current group]",
+ "Group name used when creating a tar file [default: current group]",
),
]
@@ -76,17 +76,15 @@ class bdist(Command):
default_format = {'posix': 'gztar', 'nt': 'zip'}
# Define commands in preferred order for the --help-formats option
- format_commands = ListCompat(
- {
- 'rpm': ('bdist_rpm', "RPM distribution"),
- 'gztar': ('bdist_dumb', "gzip'ed tar file"),
- 'bztar': ('bdist_dumb', "bzip2'ed tar file"),
- 'xztar': ('bdist_dumb', "xz'ed tar file"),
- 'ztar': ('bdist_dumb', "compressed tar file"),
- 'tar': ('bdist_dumb', "tar file"),
- 'zip': ('bdist_dumb', "ZIP file"),
- }
- )
+ format_commands = ListCompat({
+ 'rpm': ('bdist_rpm', "RPM distribution"),
+ 'gztar': ('bdist_dumb', "gzip'ed tar file"),
+ 'bztar': ('bdist_dumb', "bzip2'ed tar file"),
+ 'xztar': ('bdist_dumb', "xz'ed tar file"),
+ 'ztar': ('bdist_dumb', "compressed tar file"),
+ 'tar': ('bdist_dumb', "tar file"),
+ 'zip': ('bdist_dumb', "ZIP file"),
+ })
# for compatibility until consumers only reference format_commands
format_command = format_commands
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_dumb.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_dumb.py
index 01dd79079b..06502d201e 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_dumb.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_dumb.py
@@ -5,12 +5,13 @@ distribution -- i.e., just an archive to be unpacked under $prefix or
$exec_prefix)."""
import os
+from distutils._log import log
+
from ..core import Command
-from ..util import get_platform
-from ..dir_util import remove_tree, ensure_relative
+from ..dir_util import ensure_relative, remove_tree
from ..errors import DistutilsPlatformError
from ..sysconfig import get_python_version
-from distutils._log import log
+from ..util import get_platform
class bdist_dumb(Command):
@@ -27,7 +28,7 @@ class bdist_dumb(Command):
(
'format=',
'f',
- "archive format to create (tar, gztar, bztar, xztar, " "ztar, zip)",
+ "archive format to create (tar, gztar, bztar, xztar, ztar, zip)",
),
(
'keep-temp',
@@ -40,17 +41,17 @@ class bdist_dumb(Command):
(
'relative',
None,
- "build the archive using relative paths " "(default: false)",
+ "build the archive using relative paths (default: false)",
),
(
'owner=',
'u',
- "Owner name used when creating a tar file" " [default: current user]",
+ "Owner name used when creating a tar file [default: current user]",
),
(
'group=',
'g',
- "Group name used when creating a tar file" " [default: current group]",
+ "Group name used when creating a tar file [default: current group]",
),
]
@@ -104,9 +105,7 @@ class bdist_dumb(Command):
# And make an archive relative to the root of the
# pseudo-installation tree.
- archive_basename = "{}.{}".format(
- self.distribution.get_fullname(), self.plat_name
- )
+ archive_basename = f"{self.distribution.get_fullname()}.{self.plat_name}"
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
@@ -117,8 +116,7 @@ class bdist_dumb(Command):
):
raise DistutilsPlatformError(
"can't make a dumb built distribution where "
- "base and platbase are different (%s, %s)"
- % (repr(install.install_base), repr(install.install_platbase))
+ f"base and platbase are different ({repr(install.install_base)}, {repr(install.install_platbase)})"
)
else:
archive_root = os.path.join(
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_rpm.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_rpm.py
index 3ed608b479..649968a5eb 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_rpm.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/bdist_rpm.py
@@ -3,21 +3,21 @@
Implements the Distutils 'bdist_rpm' command (create RPM source and binary
distributions)."""
+import os
import subprocess
import sys
-import os
+from distutils._log import log
from ..core import Command
from ..debug import DEBUG
-from ..file_util import write_file
from ..errors import (
+ DistutilsExecError,
+ DistutilsFileError,
DistutilsOptionError,
DistutilsPlatformError,
- DistutilsFileError,
- DistutilsExecError,
)
+from ..file_util import write_file
from ..sysconfig import get_python_version
-from distutils._log import log
class bdist_rpm(Command):
@@ -34,7 +34,7 @@ class bdist_rpm(Command):
(
'dist-dir=',
'd',
- "directory to put final RPM files in " "(and .spec files if --spec-only)",
+ "directory to put final RPM files in (and .spec files if --spec-only)",
),
(
'python=',
@@ -75,7 +75,7 @@ class bdist_rpm(Command):
(
'packager=',
None,
- "RPM packager (eg. \"Jane Doe <jane@example.net>\") " "[default: vendor]",
+ "RPM packager (eg. \"Jane Doe <jane@example.net>\") [default: vendor]",
),
('doc-files=', None, "list of documentation files (space or comma-separated)"),
('changelog=', None, "RPM changelog"),
@@ -214,7 +214,7 @@ class bdist_rpm(Command):
if os.name != 'posix':
raise DistutilsPlatformError(
- "don't know how to create RPM " "distributions on platform %s" % os.name
+ "don't know how to create RPM distributions on platform %s" % os.name
)
if self.binary_only and self.source_only:
raise DistutilsOptionError(
@@ -232,8 +232,7 @@ class bdist_rpm(Command):
self.ensure_string('group', "Development/Libraries")
self.ensure_string(
'vendor',
- "%s <%s>"
- % (self.distribution.get_contact(), self.distribution.get_contact_email()),
+ f"{self.distribution.get_contact()} <{self.distribution.get_contact_email()}>",
)
self.ensure_string('packager')
self.ensure_string_list('doc_files')
@@ -352,11 +351,7 @@ class bdist_rpm(Command):
nvr_string = "%{name}-%{version}-%{release}"
src_rpm = nvr_string + ".src.rpm"
non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
- q_cmd = r"rpm -q --qf '{} {}\n' --specfile '{}'".format(
- src_rpm,
- non_src_rpm,
- spec_path,
- )
+ q_cmd = rf"rpm -q --qf '{src_rpm} {non_src_rpm}\n' --specfile '{spec_path}'"
out = os.popen(q_cmd)
try:
@@ -401,9 +396,11 @@ class bdist_rpm(Command):
if os.path.exists(rpm):
self.move_file(rpm, self.dist_dir)
filename = os.path.join(self.dist_dir, os.path.basename(rpm))
- self.distribution.dist_files.append(
- ('bdist_rpm', pyversion, filename)
- )
+ self.distribution.dist_files.append((
+ 'bdist_rpm',
+ pyversion,
+ filename,
+ ))
def _dist_path(self, path):
return os.path.join(self.dist_dir, os.path.basename(path))
@@ -428,14 +425,14 @@ class bdist_rpm(Command):
# Generate a potential replacement value for __os_install_post (whilst
# normalizing the whitespace to simplify the test for whether the
# invocation of brp-python-bytecompile passes in __python):
- vendor_hook = '\n'.join(
- [' %s \\' % line.strip() for line in vendor_hook.splitlines()]
- )
+ vendor_hook = '\n'.join([
+ ' %s \\' % line.strip() for line in vendor_hook.splitlines()
+ ])
problem = "brp-python-bytecompile \\\n"
fixed = "brp-python-bytecompile %{__python} \\\n"
fixed_hook = vendor_hook.replace(problem, fixed)
if fixed_hook != vendor_hook:
- spec_file.append('# Workaround for http://bugs.python.org/issue14443')
+ spec_file.append('# Workaround for https://bugs.python.org/issue14443')
spec_file.append('%define __os_install_post ' + fixed_hook + '\n')
# put locale summaries into spec file
@@ -445,13 +442,11 @@ class bdist_rpm(Command):
# spec_file.append('Summary(%s): %s' % (locale,
# self.summaries[locale]))
- spec_file.extend(
- [
- 'Name: %{name}',
- 'Version: %{version}',
- 'Release: %{release}',
- ]
- )
+ spec_file.extend([
+ 'Name: %{name}',
+ 'Version: %{version}',
+ 'Release: %{release}',
+ ])
# XXX yuck! this filename is available from the "sdist" command,
# but only after it has run: and we create the spec file before
@@ -461,14 +456,12 @@ class bdist_rpm(Command):
else:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
- spec_file.extend(
- [
- 'License: ' + (self.distribution.get_license() or "UNKNOWN"),
- 'Group: ' + self.group,
- 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
- 'Prefix: %{_prefix}',
- ]
- )
+ spec_file.extend([
+ 'License: ' + (self.distribution.get_license() or "UNKNOWN"),
+ 'Group: ' + self.group,
+ 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
+ 'Prefix: %{_prefix}',
+ ])
if not self.force_arch:
# noarch if no extension modules
@@ -489,7 +482,7 @@ class bdist_rpm(Command):
if isinstance(val, list):
spec_file.append('{}: {}'.format(field, ' '.join(val)))
elif val is not None:
- spec_file.append('{}: {}'.format(field, val))
+ spec_file.append(f'{field}: {val}')
if self.distribution.get_url():
spec_file.append('Url: ' + self.distribution.get_url())
@@ -506,13 +499,11 @@ class bdist_rpm(Command):
if self.no_autoreq:
spec_file.append('AutoReq: 0')
- spec_file.extend(
- [
- '',
- '%description',
- self.distribution.get_long_description() or "",
- ]
- )
+ spec_file.extend([
+ '',
+ '%description',
+ self.distribution.get_long_description() or "",
+ ])
# put locale descriptions into spec file
# XXX again, suppressed because config file syntax doesn't
@@ -526,7 +517,7 @@ class bdist_rpm(Command):
# rpm scripts
# figure out default build script
- def_setup_call = "{} {}".format(self.python, os.path.basename(sys.argv[0]))
+ def_setup_call = f"{self.python} {os.path.basename(sys.argv[0])}"
def_build = "%s build" % def_setup_call
if self.use_rpm_opt_flags:
def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
@@ -558,12 +549,10 @@ class bdist_rpm(Command):
# use 'default' as contents of script
val = getattr(self, attr)
if val or default:
- spec_file.extend(
- [
- '',
- '%' + rpm_opt,
- ]
- )
+ spec_file.extend([
+ '',
+ '%' + rpm_opt,
+ ])
if val:
with open(val) as f:
spec_file.extend(f.read().split('\n'))
@@ -571,24 +560,20 @@ class bdist_rpm(Command):
spec_file.append(default)
# files section
- spec_file.extend(
- [
- '',
- '%files -f INSTALLED_FILES',
- '%defattr(-,root,root)',
- ]
- )
+ spec_file.extend([
+ '',
+ '%files -f INSTALLED_FILES',
+ '%defattr(-,root,root)',
+ ])
if self.doc_files:
spec_file.append('%doc ' + ' '.join(self.doc_files))
if self.changelog:
- spec_file.extend(
- [
- '',
- '%changelog',
- ]
- )
+ spec_file.extend([
+ '',
+ '%changelog',
+ ])
spec_file.extend(self.changelog)
return spec_file
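
The rf-string above must keep the `\n` literal for rpm's --qf format, which the raw prefix guarantees. A minimal sketch verifying the refactor against the old str.format() output (spec_path is a hypothetical value):

    nvr = "%{name}-%{version}-%{release}"
    src_rpm = nvr + ".src.rpm"
    non_src_rpm = "%{arch}/" + nvr + ".%{arch}.rpm"
    spec_path = "dist/demo.spec"  # hypothetical example path
    # raw f-string: '\n' stays as backslash-n, exactly what rpm's --qf expects
    q_cmd = rf"rpm -q --qf '{src_rpm} {non_src_rpm}\n' --specfile '{spec_path}'"
    assert q_cmd == (
        "rpm -q --qf '%{name}-%{version}-%{release}.src.rpm "
        "%{arch}/%{name}-%{version}-%{release}.%{arch}.rpm\\n' "
        "--specfile 'dist/demo.spec'"
    )
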
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/build.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/build.py
index cc9b367ef9..d18ed503e3 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/build.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/build.py
@@ -2,8 +2,9 @@
Implements the Distutils 'build' command."""
-import sys
import os
+import sys
+
from ..core import Command
from ..errors import DistutilsOptionError
from ..util import get_platform
@@ -78,7 +79,7 @@ class build(Command):
"using './configure --help' on your platform)"
)
- plat_specifier = ".{}-{}".format(self.plat_name, sys.implementation.cache_tag)
+ plat_specifier = f".{self.plat_name}-{sys.implementation.cache_tag}"
# Make it so Python 2.x and Python 2.x with --with-pydebug don't
# share the same build directories. Doing so confuses the build
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_clib.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_clib.py
index b3f679b67d..360575d0cb 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_clib.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_clib.py
@@ -15,10 +15,11 @@ module."""
# cut 'n paste. Sigh.
import os
+from distutils._log import log
+
from ..core import Command
from ..errors import DistutilsSetupError
from ..sysconfig import customize_compiler
-from distutils._log import log
def show_compilers():
@@ -154,7 +155,7 @@ class build_clib(Command):
return None
lib_names = []
- for lib_name, build_info in self.libraries:
+ for lib_name, _build_info in self.libraries:
lib_names.append(lib_name)
return lib_names
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_ext.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_ext.py
index b48f462626..06d949aff1 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_ext.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_ext.py
@@ -8,24 +8,22 @@ import contextlib
import os
import re
import sys
+from distutils._log import log
+from site import USER_BASE
+
+from .._modified import newer_group
from ..core import Command
from ..errors import (
- DistutilsOptionError,
- DistutilsSetupError,
CCompilerError,
- DistutilsError,
CompileError,
+ DistutilsError,
+ DistutilsOptionError,
DistutilsPlatformError,
+ DistutilsSetupError,
)
-from ..sysconfig import customize_compiler, get_python_version
-from ..sysconfig import get_config_h_filename
-from .._modified import newer_group
from ..extension import Extension
+from ..sysconfig import customize_compiler, get_config_h_filename, get_python_version
from ..util import get_platform
-from distutils._log import log
-from . import py37compat
-
-from site import USER_BASE
# An extension name is just a dot-separated list of Python NAMEs (ie.
# the same as a fully-qualified module name).
@@ -130,6 +128,31 @@ class build_ext(Command):
self.user = None
self.parallel = None
+ @staticmethod
+ def _python_lib_dir(sysconfig):
+ """
+ Resolve Python's library directory for building extensions
+ that rely on a shared Python library.
+
+ See python/cpython#44264 and python/cpython#48686
+ """
+ if not sysconfig.get_config_var('Py_ENABLE_SHARED'):
+ return
+
+ if sysconfig.python_build:
+ yield '.'
+ return
+
+ if sys.platform == 'zos':
+ # On z/OS, a user is not required to install Python to
+ # a predetermined path, but can use Python portably
+ installed_dir = sysconfig.get_config_var('base')
+ lib_dir = sysconfig.get_config_var('platlibdir')
+ yield os.path.join(installed_dir, lib_dir)
+ else:
+ # building third party extensions
+ yield sysconfig.get_config_var('LIBDIR')
+
def finalize_options(self): # noqa: C901
from distutils import sysconfig
@@ -231,16 +254,7 @@ class build_ext(Command):
# building python standard extensions
self.library_dirs.append('.')
- # For building extensions with a shared Python library,
- # Python's library directory must be appended to library_dirs
- # See Issues: #1600860, #4366
- if sysconfig.get_config_var('Py_ENABLE_SHARED'):
- if not sysconfig.python_build:
- # building third party extensions
- self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
- else:
- # building python standard extensions
- self.library_dirs.append('.')
+ self.library_dirs.extend(self._python_lib_dir(sysconfig))
# The argument parsing will result in self.define being a string, but
# it has to be a list of 2-tuples. All the preprocessor symbols
@@ -412,9 +426,7 @@ class build_ext(Command):
# Medium-easy stuff: same syntax/semantics, different names.
ext.runtime_library_dirs = build_info.get('rpath')
if 'def_file' in build_info:
- log.warning(
- "'def_file' element of build info dict " "no longer supported"
- )
+ log.warning("'def_file' element of build info dict no longer supported")
# Non-trivial stuff: 'macros' split into 'define_macros'
# and 'undef_macros'.
@@ -499,7 +511,7 @@ class build_ext(Command):
except (CCompilerError, DistutilsError, CompileError) as e:
if not ext.optional:
raise
- self.warn('building extension "{}" failed: {}'.format(ext.name, e))
+ self.warn(f'building extension "{ext.name}" failed: {e}')
def build_extension(self, ext):
sources = ext.sources
@@ -785,4 +797,4 @@ class build_ext(Command):
ldversion = get_config_var('LDVERSION')
return ext.libraries + ['python' + ldversion]
- return ext.libraries + py37compat.pythonlib()
+ return ext.libraries
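
The new `_python_lib_dir` staticmethod is a generator, which is why the call site can simply `extend` library_dirs with it: it yields nothing for static builds, '.' for in-tree builds, and LIBDIR (or the z/OS install prefix) otherwise. A minimal sketch of that contract, using a SimpleNamespace stub in place of the distutils.sysconfig module (the stub values are assumptions for illustration, and the import assumes this patched build_ext is the one on the path):

    from types import SimpleNamespace
    from distutils.command.build_ext import build_ext

    # Stub exposing the two attributes _python_lib_dir reads from the
    # distutils.sysconfig module: get_config_var() and python_build.
    stub = SimpleNamespace(
        python_build=False,
        get_config_var={'Py_ENABLE_SHARED': 1, 'LIBDIR': '/usr/lib'}.get,
    )

    library_dirs = ['.']
    library_dirs.extend(build_ext._python_lib_dir(stub))
    assert library_dirs == ['.', '/usr/lib']  # holds off z/OS with this stub

    # With Py_ENABLE_SHARED unset, the generator yields nothing at all:
    static = SimpleNamespace(python_build=False, get_config_var={}.get)
    assert list(build_ext._python_lib_dir(static)) == []
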
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_py.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_py.py
index d9df95922f..56e6fa2e66 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_py.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_py.py
@@ -2,15 +2,15 @@
Implements the Distutils 'build_py' command."""
-import os
+import glob
import importlib.util
+import os
import sys
-import glob
+from distutils._log import log
from ..core import Command
-from ..errors import DistutilsOptionError, DistutilsFileError
+from ..errors import DistutilsFileError, DistutilsOptionError
from ..util import convert_path
-from distutils._log import log
class build_py(Command):
@@ -129,14 +129,14 @@ class build_py(Command):
os.path.join(glob.escape(src_dir), convert_path(pattern))
)
# Files that match more than one pattern are only added once
- files.extend(
- [fn for fn in filelist if fn not in files and os.path.isfile(fn)]
- )
+ files.extend([
+ fn for fn in filelist if fn not in files and os.path.isfile(fn)
+ ])
return files
def build_package_data(self):
"""Copy data files into build directory"""
- for package, src_dir, build_dir, filenames in self.data_files:
+ for _package, src_dir, build_dir, filenames in self.data_files:
for filename in filenames:
target = os.path.join(build_dir, filename)
self.mkpath(os.path.dirname(target))
@@ -309,7 +309,7 @@ class build_py(Command):
def get_outputs(self, include_bytecode=1):
modules = self.find_all_modules()
outputs = []
- for package, module, module_file in modules:
+ for package, module, _module_file in modules:
package = package.split('.')
filename = self.get_module_outfile(self.build_lib, package, module)
outputs.append(filename)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_scripts.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_scripts.py
index 1a4d67f492..5f3902a027 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/build_scripts.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/build_scripts.py
@@ -4,13 +4,14 @@ Implements the Distutils 'build_scripts' command."""
import os
import re
-from stat import ST_MODE
+import tokenize
from distutils import sysconfig
-from ..core import Command
+from distutils._log import log
+from stat import ST_MODE
+
from .._modified import newer
+from ..core import Command
from ..util import convert_path
-from distutils._log import log
-import tokenize
shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
"""
@@ -109,8 +110,7 @@ class build_scripts(Command):
else:
executable = os.path.join(
sysconfig.get_config_var("BINDIR"),
- "python%s%s"
- % (
+ "python{}{}".format(
sysconfig.get_config_var("VERSION"),
sysconfig.get_config_var("EXE"),
),
@@ -156,9 +156,7 @@ class build_scripts(Command):
try:
shebang.encode('utf-8')
except UnicodeEncodeError:
- raise ValueError(
- "The shebang ({!r}) is not encodable " "to utf-8".format(shebang)
- )
+ raise ValueError(f"The shebang ({shebang!r}) is not encodable to utf-8")
# If the script is encoded to a custom encoding (use a
# #coding:xxx cookie), the shebang has to be encodable to
@@ -167,6 +165,6 @@ class build_scripts(Command):
shebang.encode(encoding)
except UnicodeEncodeError:
raise ValueError(
- "The shebang ({!r}) is not encodable "
- "to the script encoding ({})".format(shebang, encoding)
+ f"The shebang ({shebang!r}) is not encodable "
+ f"to the script encoding ({encoding})"
)
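
The two encodability checks above reduce to plain str.encode probes; a minimal sketch of the rule in isolation (the shebang values are made up):

    # A shebang must encode to UTF-8, and also to the script's declared
    # encoding when a '#coding:' cookie is present.
    '#!/usr/bin/python3\n'.encode('utf-8')  # plain ASCII: accepted

    try:
        '#!/usr/bin/pythön\n'.encode('ascii')
    except UnicodeEncodeError:
        pass  # would be rejected for a script declared as ascii
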
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/check.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/check.py
index 575e49fb4b..28599e109c 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/check.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/check.py
@@ -2,16 +2,17 @@
Implements the Distutils 'check' command.
"""
+
import contextlib
from ..core import Command
from ..errors import DistutilsSetupError
with contextlib.suppress(ImportError):
- import docutils.utils
- import docutils.parsers.rst
import docutils.frontend
import docutils.nodes
+ import docutils.parsers.rst
+ import docutils.utils
class SilentReporter(docutils.utils.Reporter):
def __init__(
@@ -32,7 +33,7 @@ with contextlib.suppress(ImportError):
def system_message(self, level, message, *children, **kwargs):
self.messages.append((level, message, children, kwargs))
return docutils.nodes.system_message(
- message, level=level, type=self.levels[level], *children, **kwargs
+ message, *children, level=level, type=self.levels[level], **kwargs
)
@@ -115,7 +116,7 @@ class check(Command):
if line is None:
warning = warning[1]
else:
- warning = '{} (line {})'.format(warning[1], line)
+ warning = f'{warning[1]} (line {line})'
self.warn(warning)
def _check_rst_data(self, data):
@@ -144,8 +145,11 @@ class check(Command):
try:
parser.parse(data, document)
except AttributeError as e:
- reporter.messages.append(
- (-1, 'Could not finish the parsing: %s.' % e, '', {})
- )
+ reporter.messages.append((
+ -1,
+ 'Could not finish the parsing: %s.' % e,
+ '',
+ {},
+ ))
return reporter.messages
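
The reordered system_message call avoids the hard-to-read f(x, key=1, *rest) spelling; both forms bind identically, as a small sketch shows:

    def f(message, *children, **kwargs):
        return (message, children, kwargs)

    # Iterable unpacking before keywords is the conventional order, but
    # the resulting bindings are the same either way.
    assert f('m', level=2, *['a', 'b']) == f('m', *['a', 'b'], level=2)
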
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/clean.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/clean.py
index 9413f7cfcb..4167a83fb3 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/clean.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/clean.py
@@ -5,9 +5,10 @@ Implements the Distutils 'clean' command."""
# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18
import os
+from distutils._log import log
+
from ..core import Command
from ..dir_util import remove_tree
-from distutils._log import log
class clean(Command):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/config.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/config.py
index 494d97d16f..d4b2b0a362 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/config.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/config.py
@@ -9,13 +9,17 @@ configure-like tasks: "try to compile this C code", or "figure out where
this header file lives".
"""
+from __future__ import annotations
+
import os
+import pathlib
import re
+from collections.abc import Sequence
+from distutils._log import log
from ..core import Command
from ..errors import DistutilsExecError
from ..sysconfig import customize_compiler
-from distutils._log import log
LANG_EXT = {"c": ".c", "c++": ".cxx"}
@@ -102,7 +106,7 @@ class config(Command):
def _gen_temp_sourcefile(self, body, headers, lang):
filename = "_configtest" + LANG_EXT[lang]
- with open(filename, "w") as file:
+ with open(filename, "w", encoding='utf-8') as file:
if headers:
for header in headers:
file.write("#include <%s>\n" % header)
@@ -199,15 +203,8 @@ class config(Command):
if isinstance(pattern, str):
pattern = re.compile(pattern)
- with open(out) as file:
- match = False
- while True:
- line = file.readline()
- if line == '':
- break
- if pattern.search(line):
- match = True
- break
+ with open(out, encoding='utf-8') as file:
+ match = any(pattern.search(line) for line in file)
self._clean()
return match
@@ -331,7 +328,7 @@ class config(Command):
library_dirs=None,
headers=None,
include_dirs=None,
- other_libraries=[],
+ other_libraries: Sequence[str] = [],
):
"""Determine if 'library' is available to be linked against,
without actually checking that any particular symbols are provided
@@ -346,7 +343,7 @@ class config(Command):
"int main (void) { }",
headers,
include_dirs,
- [library] + other_libraries,
+ [library] + list(other_libraries),
library_dirs,
)
@@ -369,8 +366,4 @@ def dump_file(filename, head=None):
log.info('%s', filename)
else:
log.info(head)
- file = open(filename)
- try:
- log.info(file.read())
- finally:
- file.close()
+ log.info(pathlib.Path(filename).read_text(encoding='utf-8'))
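
The loop-to-any() rewrite in search_cpp above is behavior-preserving and still short-circuits on the first matching line; a trivial sketch:

    import re

    pattern = re.compile(r'sys/time\.h')
    lines = ['#include <stdio.h>\n', '#include <sys/time.h>\n']
    assert any(pattern.search(line) for line in lines)
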
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/install.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/install.py
index a7ac4e6077..8e920be4de 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/install.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/install.py
@@ -2,25 +2,22 @@
Implements the Distutils 'install' command."""
-import sys
-import os
import contextlib
-import sysconfig
import itertools
-
+import os
+import sys
+import sysconfig
from distutils._log import log
+from site import USER_BASE, USER_SITE
+
+from .. import _collections
from ..core import Command
from ..debug import DEBUG
-from ..sysconfig import get_config_vars
-from ..file_util import write_file
-from ..util import convert_path, subst_vars, change_root
-from ..util import get_platform
from ..errors import DistutilsOptionError, DistutilsPlatformError
+from ..file_util import write_file
+from ..sysconfig import get_config_vars
+from ..util import change_root, convert_path, get_platform, subst_vars
from . import _framework_compat as fw
-from .. import _collections
-
-from site import USER_BASE
-from site import USER_SITE
HAS_USER_SITE = True
@@ -245,9 +242,11 @@ class install(Command):
boolean_options = ['compile', 'force', 'skip-build']
if HAS_USER_SITE:
- user_options.append(
- ('user', None, "install in user site-package '%s'" % USER_SITE)
- )
+ user_options.append((
+ 'user',
+ None,
+ "install in user site-package '%s'" % USER_SITE,
+ ))
boolean_options.append('user')
negative_opt = {'no-compile': 'compile'}
@@ -432,9 +431,12 @@ class install(Command):
local_vars['userbase'] = self.install_userbase
local_vars['usersite'] = self.install_usersite
- self.config_vars = _collections.DictStack(
- [fw.vars(), compat_vars, sysconfig.get_config_vars(), local_vars]
- )
+ self.config_vars = _collections.DictStack([
+ fw.vars(),
+ compat_vars,
+ sysconfig.get_config_vars(),
+ local_vars,
+ ])
self.expand_basedirs()
@@ -620,16 +622,14 @@ class install(Command):
def expand_dirs(self):
"""Calls `os.path.expanduser` on install dirs."""
- self._expand_attrs(
- [
- 'install_purelib',
- 'install_platlib',
- 'install_lib',
- 'install_headers',
- 'install_scripts',
- 'install_data',
- ]
- )
+ self._expand_attrs([
+ 'install_purelib',
+ 'install_platlib',
+ 'install_lib',
+ 'install_headers',
+ 'install_scripts',
+ 'install_data',
+ ])
def convert_paths(self, *names):
"""Call `convert_path` over `names`."""
@@ -683,7 +683,7 @@ class install(Command):
if not self.user:
return
home = convert_path(os.path.expanduser("~"))
- for name, path in self.config_vars.items():
+ for _name, path in self.config_vars.items():
if str(path).startswith(home) and not os.path.isdir(path):
self.debug_print("os.makedirs('%s', 0o700)" % path)
os.makedirs(path, 0o700)
@@ -701,7 +701,7 @@ class install(Command):
# internally, and not to sys.path, so we don't check the platform
# matches what we are running.
if self.warn_dir and build_plat != get_platform():
- raise DistutilsPlatformError("Can't install when " "cross-compiling")
+ raise DistutilsPlatformError("Can't install when cross-compiling")
# Run all sub-commands (at least those that need to be run)
for cmd_name in self.get_sub_commands():
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_data.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_data.py
index 7ba35eef82..b63a1af25e 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_data.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_data.py
@@ -6,6 +6,7 @@ platform-independent data files."""
# contributed by Bastian Kleineidam
import os
+
from ..core import Command
from ..util import change_root, convert_path
@@ -51,7 +52,7 @@ class install_data(Command):
if self.warn_dir:
self.warn(
"setup script did not provide a directory for "
- "'%s' -- installing right in '%s'" % (f, self.install_dir)
+ f"'{f}' -- installing right in '{self.install_dir}'"
)
(out, _) = self.copy_file(f, self.install_dir)
self.outfiles.append(out)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_egg_info.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_egg_info.py
index f3e8f3447d..4fbb3440ab 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_egg_info.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_egg_info.py
@@ -6,12 +6,12 @@ a package's PKG-INFO metadata.
"""
import os
-import sys
import re
+import sys
-from ..cmd import Command
from .. import dir_util
from .._log import log
+from ..cmd import Command
class install_egg_info(Command):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_lib.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_lib.py
index be4c243321..b1f346f018 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_lib.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_lib.py
@@ -3,14 +3,13 @@
Implements the Distutils 'install_lib' command
(install all Python modules)."""
-import os
import importlib.util
+import os
import sys
from ..core import Command
from ..errors import DistutilsOptionError
-
# Extension for Python source files.
PYTHON_SOURCE_EXTENSION = ".py"
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_scripts.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_scripts.py
index 20f07aaa27..e66b13a16d 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/install_scripts.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/install_scripts.py
@@ -6,10 +6,11 @@ Python scripts."""
# contributed by Bastian Kleineidam
import os
-from ..core import Command
from distutils._log import log
from stat import ST_MODE
+from ..core import Command
+
class install_scripts(Command):
description = "install scripts (Python or otherwise)"
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/py37compat.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/py37compat.py
deleted file mode 100644
index aa0c0a7fcd..0000000000
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/py37compat.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import sys
-
-
-def _pythonlib_compat():
- """
- On Python 3.7 and earlier, distutils would include the Python
- library. See pypa/distutils#9.
- """
- from distutils import sysconfig
-
- if not sysconfig.get_config_var('Py_ENABLED_SHARED'):
- return
-
- yield 'python{}.{}{}'.format(
- sys.hexversion >> 24,
- (sys.hexversion >> 16) & 0xFF,
- sysconfig.get_config_var('ABIFLAGS'),
- )
-
-
-def compose(f1, f2):
- return lambda *args, **kwargs: f1(f2(*args, **kwargs))
-
-
-pythonlib = (
- compose(list, _pythonlib_compat)
- if sys.version_info < (3, 8)
- and sys.platform != 'darwin'
- and sys.platform[:3] != 'aix'
- else list
-)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/register.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/register.py
index c19aabb91f..ee6c54daba 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/register.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/register.py
@@ -10,10 +10,11 @@ import io
import logging
import urllib.parse
import urllib.request
+from distutils._log import log
from warnings import warn
+from .._itertools import always_iterable
from ..core import PyPIRCCommand
-from distutils._log import log
class register(PyPIRCCommand):
@@ -77,7 +78,7 @@ class register(PyPIRCCommand):
check.run()
def _set_config(self):
- '''Reads the configuration file and set attributes.'''
+ """Reads the configuration file and set attributes."""
config = self._read_pypirc()
if config != {}:
self.username = config['username']
@@ -93,19 +94,19 @@ class register(PyPIRCCommand):
self.has_config = False
def classifiers(self):
- '''Fetch the list of classifiers from the server.'''
+ """Fetch the list of classifiers from the server."""
url = self.repository + '?:action=list_classifiers'
response = urllib.request.urlopen(url)
log.info(self._read_pypi_response(response))
def verify_metadata(self):
- '''Send the metadata to the package index server to be checked.'''
+ """Send the metadata to the package index server to be checked."""
# send the info to the server and report the result
(code, result) = self.post_to_server(self.build_post_data('verify'))
log.info('Server response (%s): %s', code, result)
def send_metadata(self): # noqa: C901
- '''Send the metadata to the package index server.
+ """Send the metadata to the package index server.
Well, do the following:
1. figure who the user is, and then
@@ -131,7 +132,7 @@ class register(PyPIRCCommand):
2. register as a new user, or
3. set the password to a random string and email the user.
- '''
+ """
# see if we can short-cut and get the username/password from the
# config
if self.has_config:
@@ -146,13 +147,13 @@ class register(PyPIRCCommand):
choices = '1 2 3 4'.split()
while choice not in choices:
self.announce(
- '''\
+ """\
We need to know who you are, so please choose either:
1. use your existing login,
2. register as a new user,
3. have the server generate a new password for you (and email it to you), or
4. quit
-Your selection [default 1]: ''',
+Your selection [default 1]: """,
logging.INFO,
)
choice = input()
@@ -174,7 +175,7 @@ Your selection [default 1]: ''',
auth.add_password(self.realm, host, username, password)
# send the info to the server and report the result
code, result = self.post_to_server(self.build_post_data('submit'), auth)
- self.announce('Server response ({}): {}'.format(code, result), logging.INFO)
+ self.announce(f'Server response ({code}): {result}', logging.INFO)
# possibly save the login
if code == 200:
@@ -262,7 +263,7 @@ Your selection [default 1]: ''',
return data
def post_to_server(self, data, auth=None): # noqa: C901
- '''Post a query to the server, and return a string response.'''
+ """Post a query to the server, and return a string response."""
if 'name' in data:
self.announce(
'Registering {} to {}'.format(data['name'], self.repository),
@@ -273,12 +274,8 @@ Your selection [default 1]: ''',
sep_boundary = '\n--' + boundary
end_boundary = sep_boundary + '--'
body = io.StringIO()
- for key, value in data.items():
- # handle multiple entries for the same name
- if type(value) not in (type([]), type(())):
- value = [value]
- for value in value:
- value = str(value)
+ for key, values in data.items():
+ for value in map(str, make_iterable(values)):
body.write(sep_boundary)
body.write('\nContent-Disposition: form-data; name="%s"' % key)
body.write("\n\n")
@@ -318,3 +315,9 @@ Your selection [default 1]: ''',
msg = '\n'.join(('-' * 75, data, '-' * 75))
self.announce(msg, logging.INFO)
return result
+
+
+def make_iterable(values):
+ if values is None:
+ return [None]
+ return always_iterable(values)
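
The new make_iterable helper delegates to always_iterable; a minimal sketch of its contract, using the public more_itertools package as a stand-in for the vendored _itertools copy:

    from more_itertools import always_iterable

    def make_iterable(values):
        if values is None:
            return [None]
        return always_iterable(values)

    assert list(make_iterable('classifier')) == ['classifier']  # strings stay whole
    assert list(make_iterable(['a', 'b'])) == ['a', 'b']         # lists pass through
    assert list(make_iterable(None)) == [None]                   # None survives as a value
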
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/sdist.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/sdist.py
index ac489726ca..387d27c90b 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/sdist.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/sdist.py
@@ -4,26 +4,25 @@ Implements the Distutils 'sdist' command (create a source distribution)."""
import os
import sys
+from distutils import archive_util, dir_util, file_util
+from distutils._log import log
from glob import glob
+from itertools import filterfalse
from warnings import warn
from ..core import Command
-from distutils import dir_util
-from distutils import file_util
-from distutils import archive_util
-from ..text_file import TextFile
+from ..errors import DistutilsOptionError, DistutilsTemplateError
from ..filelist import FileList
-from distutils._log import log
+from ..text_file import TextFile
from ..util import convert_path
-from ..errors import DistutilsOptionError, DistutilsTemplateError
def show_formats():
"""Print all possible values for the 'formats' option (used by
the "--help-formats" command-line option).
"""
- from ..fancy_getopt import FancyGetopt
from ..archive_util import ARCHIVE_FORMATS
+ from ..fancy_getopt import FancyGetopt
formats = []
for format in ARCHIVE_FORMATS.keys():
@@ -62,7 +61,7 @@ class sdist(Command):
(
'manifest-only',
'o',
- "just regenerate the manifest and then stop " "(implies --force-manifest)",
+ "just regenerate the manifest and then stop (implies --force-manifest)",
),
(
'force-manifest',
@@ -79,7 +78,7 @@ class sdist(Command):
(
'dist-dir=',
'd',
- "directory to put the source distribution archive(s) in " "[default: dist]",
+ "directory to put the source distribution archive(s) in [default: dist]",
),
(
'metadata-check',
@@ -309,7 +308,7 @@ class sdist(Command):
# getting package_data files
# (computed in build_py.data_files by build_py.finalize_options)
- for pkg, src_dir, build_dir, filenames in build_py.data_files:
+ for _pkg, src_dir, _build_dir, filenames in build_py.data_files:
for filename in filenames:
self.filelist.append(os.path.join(src_dir, filename))
@@ -429,11 +428,8 @@ class sdist(Command):
if not os.path.isfile(self.manifest):
return False
- fp = open(self.manifest)
- try:
- first_line = fp.readline()
- finally:
- fp.close()
+ with open(self.manifest, encoding='utf-8') as fp:
+ first_line = next(fp)
return first_line != '# file GENERATED by distutils, do NOT edit\n'
def read_manifest(self):
@@ -442,13 +438,11 @@ class sdist(Command):
distribution.
"""
log.info("reading manifest file '%s'", self.manifest)
- with open(self.manifest) as manifest:
- for line in manifest:
+ with open(self.manifest, encoding='utf-8') as lines:
+ self.filelist.extend(
# ignore comments and blank lines
- line = line.strip()
- if line.startswith('#') or not line:
- continue
- self.filelist.append(line)
+ filter(None, filterfalse(is_comment, map(str.strip, lines)))
+ )
def make_release_tree(self, base_dir, files):
"""Create the directory tree that will become the source
@@ -528,3 +522,7 @@ class sdist(Command):
was run, or None if the command hasn't run yet.
"""
return self.archive_files
+
+
+def is_comment(line):
+ return line.startswith('#')
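
read_manifest is now a small filter pipeline: strip whitespace, drop comments, drop blanks. A sketch of the same transformation over an in-memory manifest:

    from itertools import filterfalse

    def is_comment(line):
        return line.startswith('#')

    lines = ['# file GENERATED by distutils\n', 'README\n', '\n', 'src/mod.py\n']
    # strip whitespace, drop comment lines, then drop the empties
    files = list(filter(None, filterfalse(is_comment, map(str.strip, lines))))
    assert files == ['README', 'src/mod.py']
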
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/command/upload.py b/contrib/python/setuptools/py3/setuptools/_distutils/command/upload.py
index caf15f04a6..cf541f8a82 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/command/upload.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/command/upload.py
@@ -5,18 +5,19 @@ Implements the Distutils 'upload' subcommand (upload package to a package
index).
"""
-import os
-import io
import hashlib
+import io
import logging
+import os
from base64 import standard_b64encode
-from urllib.request import urlopen, Request, HTTPError
from urllib.parse import urlparse
-from ..errors import DistutilsError, DistutilsOptionError
+from urllib.request import HTTPError, Request, urlopen
+
+from .._itertools import always_iterable
from ..core import PyPIRCCommand
+from ..errors import DistutilsError, DistutilsOptionError
from ..spawn import spawn
-
# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
# https://bugs.python.org/issue40698
_FILE_CONTENT_DIGESTS = {
@@ -151,12 +152,9 @@ class upload(PyPIRCCommand):
sep_boundary = b'\r\n--' + boundary.encode('ascii')
end_boundary = sep_boundary + b'--\r\n'
body = io.BytesIO()
- for key, value in data.items():
+ for key, values in data.items():
title = '\r\nContent-Disposition: form-data; name="%s"' % key
- # handle multiple entries for the same name
- if not isinstance(value, list):
- value = [value]
- for value in value:
+ for value in make_iterable(values):
if type(value) is tuple:
title += '; filename="%s"' % value[0]
value = value[1]
@@ -169,7 +167,7 @@ class upload(PyPIRCCommand):
body.write(end_boundary)
body = body.getvalue()
- msg = "Submitting {} to {}".format(filename, self.repository)
+ msg = f"Submitting {filename} to {self.repository}"
self.announce(msg, logging.INFO)
# build the Request
@@ -193,14 +191,18 @@ class upload(PyPIRCCommand):
raise
if status == 200:
- self.announce(
- 'Server response ({}): {}'.format(status, reason), logging.INFO
- )
+ self.announce(f'Server response ({status}): {reason}', logging.INFO)
if self.show_response:
text = self._read_pypi_response(result)
msg = '\n'.join(('-' * 75, text, '-' * 75))
self.announce(msg, logging.INFO)
else:
- msg = 'Upload failed ({}): {}'.format(status, reason)
+ msg = f'Upload failed ({status}): {reason}'
self.announce(msg, logging.ERROR)
raise DistutilsError(msg)
+
+
+def make_iterable(values):
+ if values is None:
+ return [None]
+ return always_iterable(values, base_type=(bytes, str, tuple))
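
Unlike the register variant, this make_iterable passes base_type=(bytes, str, tuple) so a (filename, content) pair is treated as one multipart value rather than iterated. A sketch, again using the public more_itertools package as a stand-in for the vendored copy:

    from more_itertools import always_iterable

    def make_iterable(values):
        if values is None:
            return [None]
        return always_iterable(values, base_type=(bytes, str, tuple))

    pair = ('demo-1.0.tar.gz', b'...')  # hypothetical upload payload
    assert list(make_iterable(pair)) == [pair]  # tuple kept whole
    assert list(make_iterable(['a', 'b'])) == ['a', 'b']
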
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/compat/__init__.py b/contrib/python/setuptools/py3/setuptools/_distutils/compat/__init__.py
new file mode 100644
index 0000000000..b1ee3fe8b0
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/compat/__init__.py
@@ -0,0 +1,15 @@
+from __future__ import annotations
+
+from .py38 import removeprefix
+
+
+def consolidate_linker_args(args: list[str]) -> str:
+ """
+ Ensure the return value is a string for backward compatibility.
+
+ Retain until at least 2024-04-30. See pypa/distutils#246
+ """
+
+ if not all(arg.startswith('-Wl,') for arg in args):
+ return args
+ return '-Wl,' + ','.join(removeprefix(arg, '-Wl,') for arg in args)
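
A sketch of what the new helper returns for typical linker arguments (a local copy is used so the snippet runs without the patched module on the path):

    def consolidate_linker_args(args):
        # local copy of the helper above, for illustration only
        if not all(arg.startswith('-Wl,') for arg in args):
            return args
        return '-Wl,' + ','.join(arg[len('-Wl,'):] for arg in args)

    # homogeneous '-Wl,' args collapse into one string...
    assert consolidate_linker_args(['-Wl,-rpath', '-Wl,/usr/lib']) == '-Wl,-rpath,/usr/lib'
    # ...anything else is returned unchanged for backward compatibility
    assert consolidate_linker_args(['-L/opt/lib']) == ['-L/opt/lib']
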
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/compat/py38.py b/contrib/python/setuptools/py3/setuptools/_distutils/compat/py38.py
new file mode 100644
index 0000000000..0af3814017
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/compat/py38.py
@@ -0,0 +1,23 @@
+import sys
+
+if sys.version_info < (3, 9):
+
+ def removesuffix(self, suffix):
+ # suffix='' should not call self[:-0].
+ if suffix and self.endswith(suffix):
+ return self[: -len(suffix)]
+ else:
+ return self[:]
+
+ def removeprefix(self, prefix):
+ if self.startswith(prefix):
+ return self[len(prefix) :]
+ else:
+ return self[:]
+else:
+
+ def removesuffix(self, suffix):
+ return self.removesuffix(suffix)
+
+ def removeprefix(self, prefix):
+ return self.removeprefix(prefix)
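
The shims mirror str.removeprefix/str.removesuffix from Python 3.9; a sketch of the edge cases the comment calls out (local copies, so it runs on any version):

    def removeprefix(self, prefix):
        # local copy of the pre-3.9 shim above
        return self[len(prefix):] if self.startswith(prefix) else self[:]

    def removesuffix(self, suffix):
        # suffix='' must not slice with [:-0], which would return ''
        return self[:-len(suffix)] if suffix and self.endswith(suffix) else self[:]

    assert removesuffix('archive.tar.gz', '.gz') == 'archive.tar'
    assert removesuffix('archive.tar.gz', '') == 'archive.tar.gz'
    assert removeprefix('-Wl,-rpath', '-Wl,') == '-rpath'
    assert removeprefix('-rpath', '-Wl,') == '-rpath'  # no match: unchanged
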
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/config.py b/contrib/python/setuptools/py3/setuptools/_distutils/config.py
index 9a4044adaf..83f96a9eec 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/config.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/config.py
@@ -3,6 +3,8 @@
Provides the PyPIRCCommand class, the base class for the command classes
that uses .pypirc in the distutils.command package.
"""
+
+import email.message
import os
from configparser import RawConfigParser
@@ -41,7 +43,8 @@ class PyPIRCCommand(Command):
def _store_pypirc(self, username, password):
"""Creates a default .pypirc file."""
rc = self._get_rc_file()
- with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f:
+ raw = os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600)
+ with os.fdopen(raw, 'w', encoding='utf-8') as f:
f.write(DEFAULT_PYPIRC % (username, password))
def _read_pypirc(self): # noqa: C901
@@ -52,7 +55,7 @@ class PyPIRCCommand(Command):
repository = self.repository or self.DEFAULT_REPOSITORY
config = RawConfigParser()
- config.read(rc)
+ config.read(rc, encoding='utf-8')
sections = config.sections()
if 'distutils' in sections:
# let's get the list of servers
@@ -119,11 +122,8 @@ class PyPIRCCommand(Command):
def _read_pypi_response(self, response):
"""Read and decode a PyPI HTTP response."""
- import cgi
-
content_type = response.getheader('content-type', 'text/plain')
- encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii')
- return response.read().decode(encoding)
+ return response.read().decode(_extract_encoding(content_type))
def initialize_options(self):
"""Initialize options."""
@@ -137,3 +137,15 @@ class PyPIRCCommand(Command):
self.repository = self.DEFAULT_REPOSITORY
if self.realm is None:
self.realm = self.DEFAULT_REALM
+
+
+def _extract_encoding(content_type):
+ """
+ >>> _extract_encoding('text/plain')
+ 'ascii'
+ >>> _extract_encoding('text/html; charset="utf8"')
+ 'utf8'
+ """
+ msg = email.message.EmailMessage()
+ msg['content-type'] = content_type
+ return msg['content-type'].params.get('charset', 'ascii')
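
The two-step open in _store_pypirc exists so the 0o600 mode is applied at creation time rather than after the file already exists; a minimal sketch of the pattern with a throwaway path:

    import os
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), 'pypirc')  # throwaway path for illustration
    raw = os.open(path, os.O_CREAT | os.O_WRONLY, 0o600)  # mode fixed at creation (umask permitting)
    with os.fdopen(raw, 'w', encoding='utf-8') as f:
        f.write('[distutils]\nindex-servers = pypi\n')
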
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/core.py b/contrib/python/setuptools/py3/setuptools/_distutils/core.py
index 05d2971994..309ce696fa 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/core.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/core.py
@@ -10,21 +10,20 @@ import os
import sys
import tokenize
+from .cmd import Command
+from .config import PyPIRCCommand
from .debug import DEBUG
+
+# Mainly import these so setup scripts can "from distutils.core import" them.
+from .dist import Distribution
from .errors import (
- DistutilsSetupError,
- DistutilsError,
CCompilerError,
DistutilsArgError,
+ DistutilsError,
+ DistutilsSetupError,
)
-
-# Mainly import these so setup scripts can "from distutils.core import" them.
-from .dist import Distribution
-from .cmd import Command
-from .config import PyPIRCCommand
from .extension import Extension
-
__all__ = ['Distribution', 'Command', 'PyPIRCCommand', 'Extension', 'setup']
# This is a barebones help message generated displayed when the user
@@ -203,10 +202,10 @@ def run_commands(dist):
raise SystemExit("interrupted")
except OSError as exc:
if DEBUG:
- sys.stderr.write("error: {}\n".format(exc))
+ sys.stderr.write(f"error: {exc}\n")
raise
else:
- raise SystemExit("error: {}".format(exc))
+ raise SystemExit(f"error: {exc}")
except (DistutilsError, CCompilerError) as msg:
if DEBUG:
@@ -249,7 +248,7 @@ def run_setup(script_name, script_args=None, stop_after="run"):
used to drive the Distutils.
"""
if stop_after not in ('init', 'config', 'commandline', 'run'):
- raise ValueError("invalid value for 'stop_after': {!r}".format(stop_after))
+ raise ValueError(f"invalid value for 'stop_after': {stop_after!r}")
global _setup_stop_after, _setup_distribution
_setup_stop_after = stop_after
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/cygwinccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/cygwinccompiler.py
index 47efa377c5..539f09d8f3 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/cygwinccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/cygwinccompiler.py
@@ -6,25 +6,25 @@ the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
cygwin in no-cygwin mode).
"""
+import copy
import os
+import pathlib
import re
-import sys
-import copy
import shlex
+import sys
import warnings
from subprocess import check_output
-from .unixccompiler import UnixCCompiler
-from .file_util import write_file
+from ._collections import RangeMap
from .errors import (
- DistutilsExecError,
- DistutilsPlatformError,
CCompilerError,
CompileError,
+ DistutilsExecError,
+ DistutilsPlatformError,
)
+from .file_util import write_file
+from .unixccompiler import UnixCCompiler
from .version import LooseVersion, suppress_known_deprecation
-from ._collections import RangeMap
-
_msvcr_lookup = RangeMap.left(
{
@@ -87,9 +87,7 @@ class CygwinCCompiler(UnixCCompiler):
super().__init__(verbose, dry_run, force)
status, details = check_config_h()
- self.debug_print(
- "Python's GCC status: {} (details: {})".format(status, details)
- )
+ self.debug_print(f"Python's GCC status: {status} (details: {details})")
if status is not CONFIG_H_OK:
self.warn(
"Python's pyconfig.h doesn't seem to support your compiler. "
@@ -108,7 +106,7 @@ class CygwinCCompiler(UnixCCompiler):
compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc,
compiler_cxx='%s -mcygwin -O -Wall' % self.cxx,
linker_exe='%s -mcygwin' % self.cc,
- linker_so=('{} -mcygwin {}'.format(self.linker_dll, shared_option)),
+ linker_so=(f'{self.linker_dll} -mcygwin {shared_option}'),
)
# Include the appropriate MSVC runtime library if Python was built
@@ -280,7 +278,7 @@ class Mingw32CCompiler(CygwinCCompiler):
compiler_so='%s -mdll -O -Wall' % self.cc,
compiler_cxx='%s -O -Wall' % self.cxx,
linker_exe='%s' % self.cc,
- linker_so='{} {}'.format(self.linker_dll, shared_option),
+ linker_so=f'{self.linker_dll} {shared_option}',
)
def runtime_library_dir_option(self, dir):
@@ -331,20 +329,21 @@ def check_config_h():
# let's see if __GNUC__ is mentioned in python.h
fn = sysconfig.get_config_h_filename()
try:
- config_h = open(fn)
- try:
- if "__GNUC__" in config_h.read():
- return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn
- else:
- return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn
- finally:
- config_h.close()
+ config_h = pathlib.Path(fn).read_text(encoding='utf-8')
+ substring = '__GNUC__'
+ if substring in config_h:
+ code = CONFIG_H_OK
+ mention_inflected = 'mentions'
+ else:
+ code = CONFIG_H_NOTOK
+ mention_inflected = 'does not mention'
+ return code, f"{fn!r} {mention_inflected} {substring!r}"
except OSError as exc:
- return (CONFIG_H_UNCERTAIN, "couldn't read '{}': {}".format(fn, exc.strerror))
+ return (CONFIG_H_UNCERTAIN, f"couldn't read '{fn}': {exc.strerror}")
def is_cygwincc(cc):
- '''Try to determine if the compiler that would be used is from cygwin.'''
+ """Try to determine if the compiler that would be used is from cygwin."""
out_string = check_output(shlex.split(cc) + ['-dumpmachine'])
return out_string.strip().endswith(b'cygwin')
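
check_config_h now folds both outcomes into a single return; a sketch of the message it builds (the path and file contents are made-up stand-ins):

    fn = '/usr/include/python3.12/pyconfig.h'  # hypothetical path
    config_h = '#define __GNUC__ 1'            # stand-in for the file contents
    substring = '__GNUC__'
    mention_inflected = 'mentions' if substring in config_h else 'does not mention'
    assert (
        f"{fn!r} {mention_inflected} {substring!r}"
        == "'/usr/include/python3.12/pyconfig.h' mentions '__GNUC__'"
    )
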
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/dir_util.py b/contrib/python/setuptools/py3/setuptools/_distutils/dir_util.py
index 23dc3392a2..370c6ffd49 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/dir_util.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/dir_util.py
@@ -2,10 +2,11 @@
Utility functions for manipulating directories and directory trees."""
-import os
import errno
-from .errors import DistutilsInternalError, DistutilsFileError
+import os
+
from ._log import log
+from .errors import DistutilsFileError, DistutilsInternalError
# cache for by mkpath() -- in addition to cheapening redundant calls,
# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
@@ -33,9 +34,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901
# Detect a common bug -- name is None
if not isinstance(name, str):
- raise DistutilsInternalError(
- "mkpath: 'name' must be a string (got {!r})".format(name)
- )
+ raise DistutilsInternalError(f"mkpath: 'name' must be a string (got {name!r})")
# XXX what's the better way to handle verbosity? print as we create
# each directory in the path (the current behaviour), or only announce
@@ -76,7 +75,7 @@ def mkpath(name, mode=0o777, verbose=1, dry_run=0): # noqa: C901
except OSError as exc:
if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
raise DistutilsFileError(
- "could not create '{}': {}".format(head, exc.args[-1])
+ f"could not create '{head}': {exc.args[-1]}"
)
created_dirs.append(head)
@@ -95,9 +94,7 @@ def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
'dry_run' flags are as for 'mkpath()'.
"""
# First get the list of directories to create
- need_dir = set()
- for file in files:
- need_dir.add(os.path.join(base_dir, os.path.dirname(file)))
+ need_dir = set(os.path.join(base_dir, os.path.dirname(file)) for file in files)
# Now create them
for dir in sorted(need_dir):
@@ -143,9 +140,7 @@ def copy_tree( # noqa: C901
if dry_run:
names = []
else:
- raise DistutilsFileError(
- "error listing files in '{}': {}".format(src, e.strerror)
- )
+ raise DistutilsFileError(f"error listing files in '{src}': {e.strerror}")
if not dry_run:
mkpath(dst, verbose=verbose)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/dist.py b/contrib/python/setuptools/py3/setuptools/_distutils/dist.py
index 7c0f0e5b78..668ce7eb0a 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/dist.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/dist.py
@@ -4,12 +4,13 @@ Provides the Distribution class, which represents the module distribution
being built/installed/distributed.
"""
-import sys
-import os
-import re
-import pathlib
import contextlib
import logging
+import os
+import pathlib
+import re
+import sys
+from collections.abc import Iterable
from email import message_from_file
try:
@@ -17,16 +18,16 @@ try:
except ImportError:
warnings = None
+from ._log import log
+from .debug import DEBUG
from .errors import (
- DistutilsOptionError,
- DistutilsModuleError,
DistutilsArgError,
DistutilsClassError,
+ DistutilsModuleError,
+ DistutilsOptionError,
)
from .fancy_getopt import FancyGetopt, translate_longopt
-from .util import check_environ, strtobool, rfc822_escape
-from ._log import log
-from .debug import DEBUG
+from .util import check_environ, rfc822_escape, strtobool
# Regex to define acceptable Distutils command names. This is not *quite*
# the same as a Python NAME -- I don't allow leading underscores. The fact
@@ -395,7 +396,7 @@ Common commands: (see '--help-commands' for more)
for filename in filenames:
if DEBUG:
self.announce(" reading %s" % filename)
- parser.read(filename)
+ parser.read(filename, encoding='utf-8')
for section in parser.sections():
options = parser.options(section)
opt_dict = self.get_option_dict(section)
@@ -414,7 +415,7 @@ Common commands: (see '--help-commands' for more)
# to set Distribution options.
if 'global' in self.command_options:
- for opt, (src, val) in self.command_options['global'].items():
+ for opt, (_src, val) in self.command_options['global'].items():
alias = self.negative_opt.get(opt)
try:
if alias:
@@ -585,16 +586,15 @@ Common commands: (see '--help-commands' for more)
cmd_class.help_options, list
):
help_option_found = 0
- for help_option, short, desc, func in cmd_class.help_options:
+ for help_option, _short, _desc, func in cmd_class.help_options:
if hasattr(opts, parser.get_attr_name(help_option)):
help_option_found = 1
if callable(func):
func()
else:
raise DistutilsClassError(
- "invalid help function %r for help option '%s': "
+ f"invalid help function {func!r} for help option '{help_option}': "
"must be a callable object (function, etc.)"
- % (func, help_option)
)
if help_option_found:
@@ -621,7 +621,9 @@ Common commands: (see '--help-commands' for more)
value = [elm.strip() for elm in value.split(',')]
setattr(self.metadata, attr, value)
- def _show_help(self, parser, global_options=1, display_options=1, commands=[]):
+ def _show_help(
+ self, parser, global_options=1, display_options=1, commands: Iterable = ()
+ ):
"""Show help for the setup script command-line in the form of
several lists of command-line options. 'parser' should be a
FancyGetopt instance; do not expect it to be returned in the
@@ -635,8 +637,8 @@ Common commands: (see '--help-commands' for more)
in 'commands'.
"""
# late import because of mutual dependence between these modules
- from distutils.core import gen_usage
from distutils.cmd import Command
+ from distutils.core import gen_usage
if global_options:
if display_options:
@@ -645,7 +647,7 @@ Common commands: (see '--help-commands' for more)
options = self.global_options
parser.set_option_table(options)
parser.print_help(self.common_usage + "\nGlobal options:")
- print('')
+ print()
if display_options:
parser.set_option_table(self.display_options)
@@ -653,7 +655,7 @@ Common commands: (see '--help-commands' for more)
"Information display options (just display "
+ "information, ignore any commands)"
)
- print('')
+ print()
for command in self.commands:
if isinstance(command, type) and issubclass(command, Command):
@@ -667,7 +669,7 @@ Common commands: (see '--help-commands' for more)
else:
parser.set_option_table(klass.user_options)
parser.print_help("Options for '%s' command:" % klass.__name__)
- print('')
+ print()
print(gen_usage(self.script_name))
@@ -684,7 +686,7 @@ Common commands: (see '--help-commands' for more)
# we ignore "foo bar").
if self.help_commands:
self.print_commands()
- print('')
+ print()
print(gen_usage(self.script_name))
return 1
@@ -821,7 +823,7 @@ Common commands: (see '--help-commands' for more)
return klass
for pkgname in self.get_command_packages():
- module_name = "{}.{}".format(pkgname, command)
+ module_name = f"{pkgname}.{command}"
klass_name = command
try:
@@ -834,8 +836,7 @@ Common commands: (see '--help-commands' for more)
klass = getattr(module, klass_name)
except AttributeError:
raise DistutilsModuleError(
- "invalid command '%s' (no class '%s' in module '%s')"
- % (command, klass_name, module_name)
+ f"invalid command '{command}' (no class '{klass_name}' in module '{module_name}')"
)
self.cmdclass[command] = klass
@@ -889,7 +890,7 @@ Common commands: (see '--help-commands' for more)
self.announce(" setting options for '%s' command:" % command_name)
for option, (source, value) in option_dict.items():
if DEBUG:
- self.announce(" {} = {} (from {})".format(option, value, source))
+ self.announce(f" {option} = {value} (from {source})")
try:
bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
except AttributeError:
@@ -909,8 +910,7 @@ Common commands: (see '--help-commands' for more)
setattr(command_obj, option, value)
else:
raise DistutilsOptionError(
- "error in %s: command '%s' has no such option '%s'"
- % (source, command_name, option)
+ f"error in {source}: command '{command_name}' has no such option '{option}'"
)
except ValueError as msg:
raise DistutilsOptionError(msg)
@@ -1178,7 +1178,7 @@ class DistributionMetadata:
def _write_list(self, file, name, values):
values = values or []
for value in values:
- file.write('{}: {}\n'.format(name, value))
+ file.write(f'{name}: {value}\n')
# -- Metadata query methods ----------------------------------------
@@ -1189,7 +1189,7 @@ class DistributionMetadata:
return self.version or "0.0.0"
def get_fullname(self):
- return "{}-{}".format(self.get_name(), self.get_version())
+ return f"{self.get_name()}-{self.get_version()}"
def get_author(self):
return self.author
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/extension.py b/contrib/python/setuptools/py3/setuptools/_distutils/extension.py
index 6b8575de29..94e71635d9 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/extension.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/extension.py
@@ -102,7 +102,7 @@ class Extension:
depends=None,
language=None,
optional=None,
- **kw # To catch unknown keywords
+ **kw, # To catch unknown keywords
):
if not isinstance(name, str):
raise AssertionError("'name' must be a string")
@@ -134,18 +134,12 @@ class Extension:
warnings.warn(msg)
def __repr__(self):
- return '<{}.{}({!r}) at {:#x}>'.format(
- self.__class__.__module__,
- self.__class__.__qualname__,
- self.name,
- id(self),
- )
+ return f'<{self.__class__.__module__}.{self.__class__.__qualname__}({self.name!r}) at {id(self):#x}>'
def read_setup_file(filename): # noqa: C901
"""Reads a Setup file and returns Extension instances."""
- from distutils.sysconfig import parse_makefile, expand_makefile_vars, _variable_rx
-
+ from distutils.sysconfig import _variable_rx, expand_makefile_vars, parse_makefile
from distutils.text_file import TextFile
from distutils.util import split_quoted
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py b/contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py
index 3b887dc5a4..e905aede4d 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py
@@ -8,11 +8,13 @@ additional features:
* options set attributes of a passed-in object
"""
-import sys
-import string
-import re
import getopt
-from .errors import DistutilsGetoptError, DistutilsArgError
+import re
+import string
+import sys
+from typing import Any, Sequence
+
+from .errors import DistutilsArgError, DistutilsGetoptError
# Much like command_re in distutils.core, this is close to but not quite
# the same as a Python NAME -- except, in the spirit of most GNU
@@ -22,7 +24,7 @@ longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile(r'^%s$' % longopt_pat)
# For recognizing "negative alias" options, eg. "quiet=!verbose"
-neg_alias_re = re.compile("^({})=!({})$".format(longopt_pat, longopt_pat))
+neg_alias_re = re.compile(f"^({longopt_pat})=!({longopt_pat})$")
# This is used to translate long options to legitimate Python identifiers
# (for use as attributes of some object).
@@ -116,13 +118,11 @@ class FancyGetopt:
for alias, opt in aliases.items():
if alias not in self.option_index:
raise DistutilsGetoptError(
- ("invalid %s '%s': " "option '%s' not defined")
- % (what, alias, alias)
+ f"invalid {what} '{alias}': " f"option '{alias}' not defined"
)
if opt not in self.option_index:
raise DistutilsGetoptError(
- ("invalid %s '%s': " "aliased option '%s' not defined")
- % (what, alias, opt)
+ f"invalid {what} '{alias}': " f"aliased option '{opt}' not defined"
)
def set_aliases(self, alias):
@@ -157,13 +157,12 @@ class FancyGetopt:
else:
# the option table is part of the code, so simply
# assert that it is correct
- raise ValueError("invalid option tuple: {!r}".format(option))
+ raise ValueError(f"invalid option tuple: {option!r}")
# Type- and value-check the option names
if not isinstance(long, str) or len(long) < 2:
raise DistutilsGetoptError(
- ("invalid long option '%s': " "must be a string of length >= 2")
- % long
+ ("invalid long option '%s': must be a string of length >= 2") % long
)
if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
@@ -187,8 +186,8 @@ class FancyGetopt:
if alias_to is not None:
if self.takes_arg[alias_to]:
raise DistutilsGetoptError(
- "invalid negative alias '%s': "
- "aliased option '%s' takes a value" % (long, alias_to)
+ f"invalid negative alias '{long}': "
+ f"aliased option '{alias_to}' takes a value"
)
self.long_opts[-1] = long # XXX redundant?!
@@ -200,9 +199,9 @@ class FancyGetopt:
if alias_to is not None:
if self.takes_arg[long] != self.takes_arg[alias_to]:
raise DistutilsGetoptError(
- "invalid alias '%s': inconsistent with "
- "aliased option '%s' (one of them takes a value, "
- "the other doesn't" % (long, alias_to)
+ f"invalid alias '{long}': inconsistent with "
+ f"aliased option '{alias_to}' (one of them takes a value, "
+ "the other doesn't"
)
# Now enforce some bondage on the long option name, so we can
@@ -359,7 +358,7 @@ class FancyGetopt:
# Case 2: we have a short option, so we have to include it
# just after the long option
else:
- opt_names = "{} (-{})".format(long, short)
+ opt_names = f"{long} (-{short})"
if text:
lines.append(" --%-*s %s" % (max_opt, opt_names, text[0]))
else:
@@ -450,7 +449,7 @@ class OptionDummy:
"""Dummy class just used as a place to hold command-line option
values as instance attributes."""
- def __init__(self, options=[]):
+ def __init__(self, options: Sequence[Any] = []):
"""Create a new OptionDummy instance. The attributes listed in
'options' will be initialized to None."""
for opt in options:
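
The f-string rewrite of neg_alias_re keeps the same "negative alias" grammar, e.g. quiet=!verbose; a minimal sketch:

    import re

    longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
    neg_alias_re = re.compile(f"^({longopt_pat})=!({longopt_pat})$")

    assert neg_alias_re.match('quiet=!verbose').groups() == ('quiet', 'verbose')
    assert neg_alias_re.match('not an alias') is None
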
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/file_util.py b/contrib/python/setuptools/py3/setuptools/_distutils/file_util.py
index 3f3e21b567..960def9cf9 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/file_util.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/file_util.py
@@ -4,8 +4,9 @@ Utility functions for operating on single files.
"""
import os
-from .errors import DistutilsFileError
+
from ._log import log
+from .errors import DistutilsFileError
# for generating verbose output in 'copy_file()'
_copy_action = {None: 'copying', 'hard': 'hard linking', 'sym': 'symbolically linking'}
@@ -26,30 +27,24 @@ def _copy_file_contents(src, dst, buffer_size=16 * 1024): # noqa: C901
try:
fsrc = open(src, 'rb')
except OSError as e:
- raise DistutilsFileError("could not open '{}': {}".format(src, e.strerror))
+ raise DistutilsFileError(f"could not open '{src}': {e.strerror}")
if os.path.exists(dst):
try:
os.unlink(dst)
except OSError as e:
- raise DistutilsFileError(
- "could not delete '{}': {}".format(dst, e.strerror)
- )
+ raise DistutilsFileError(f"could not delete '{dst}': {e.strerror}")
try:
fdst = open(dst, 'wb')
except OSError as e:
- raise DistutilsFileError(
- "could not create '{}': {}".format(dst, e.strerror)
- )
+ raise DistutilsFileError(f"could not create '{dst}': {e.strerror}")
while True:
try:
buf = fsrc.read(buffer_size)
except OSError as e:
- raise DistutilsFileError(
- "could not read from '{}': {}".format(src, e.strerror)
- )
+ raise DistutilsFileError(f"could not read from '{src}': {e.strerror}")
if not buf:
break
@@ -57,9 +52,7 @@ def _copy_file_contents(src, dst, buffer_size=16 * 1024): # noqa: C901
try:
fdst.write(buf)
except OSError as e:
- raise DistutilsFileError(
- "could not write to '{}': {}".format(dst, e.strerror)
- )
+ raise DistutilsFileError(f"could not write to '{dst}': {e.strerror}")
finally:
if fdst:
fdst.close()
@@ -109,7 +102,7 @@ def copy_file( # noqa: C901
# (not update) and (src newer than dst).
from distutils._modified import newer
- from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE
+ from stat import S_IMODE, ST_ATIME, ST_MODE, ST_MTIME
if not os.path.isfile(src):
raise DistutilsFileError(
@@ -183,8 +176,8 @@ def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901
Handles cross-device moves on Unix using 'copy_file()'. What about
other systems???
"""
- from os.path import exists, isfile, isdir, basename, dirname
import errno
+ from os.path import basename, dirname, exists, isdir, isfile
if verbose >= 1:
log.info("moving %s -> %s", src, dst)
@@ -199,12 +192,12 @@ def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901
dst = os.path.join(dst, basename(src))
elif exists(dst):
raise DistutilsFileError(
- "can't move '{}': destination '{}' already exists".format(src, dst)
+ f"can't move '{src}': destination '{dst}' already exists"
)
if not isdir(dirname(dst)):
raise DistutilsFileError(
- "can't move '{}': destination '{}' not a valid path".format(src, dst)
+ f"can't move '{src}': destination '{dst}' not a valid path"
)
copy_it = False
@@ -215,9 +208,7 @@ def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901
if num == errno.EXDEV:
copy_it = True
else:
- raise DistutilsFileError(
- "couldn't move '{}' to '{}': {}".format(src, dst, msg)
- )
+ raise DistutilsFileError(f"couldn't move '{src}' to '{dst}': {msg}")
if copy_it:
copy_file(src, dst, verbose=verbose)
@@ -230,8 +221,8 @@ def move_file(src, dst, verbose=1, dry_run=0): # noqa: C901
except OSError:
pass
raise DistutilsFileError(
- "couldn't move '%s' to '%s' by copy/delete: "
- "delete '%s' failed: %s" % (src, dst, src, msg)
+ f"couldn't move '{src}' to '{dst}' by copy/delete: "
+ f"delete '{src}' failed: {msg}"
)
return dst
@@ -240,9 +231,5 @@ def write_file(filename, contents):
"""Create a file with the specified name and write 'contents' (a
sequence of strings without line terminators) to it.
"""
- f = open(filename, "w")
- try:
- for line in contents:
- f.write(line + "\n")
- finally:
- f.close()
+ with open(filename, 'w', encoding='utf-8') as f:
+ f.writelines(line + '\n' for line in contents)
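
The rewritten helper is a compact illustration of the context-manager idiom: the file is closed even if a write fails partway through, and the generator expression streams lines without building one joined string. A minimal standalone sketch (the temp path is hypothetical):

    import os
    import tempfile

    def write_file(filename, contents):
        # Same shape as the rewritten helper above: the with-block closes
        # the file even if writelines() raises mid-iteration.
        with open(filename, 'w', encoding='utf-8') as f:
            f.writelines(line + '\n' for line in contents)

    path = os.path.join(tempfile.mkdtemp(), 'demo.txt')  # hypothetical location
    write_file(path, ['first', 'second'])
    with open(path, encoding='utf-8') as f:
        assert f.read() == 'first\nsecond\n'
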
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/filelist.py b/contrib/python/setuptools/py3/setuptools/_distutils/filelist.py
index 6dadf923d7..71ffb2abe7 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/filelist.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/filelist.py
@@ -4,14 +4,14 @@ Provides the FileList class, used for poking about the filesystem
and building lists of files.
"""
-import os
-import re
import fnmatch
import functools
+import os
+import re
-from .util import convert_path
-from .errors import DistutilsTemplateError, DistutilsInternalError
from ._log import log
+from .errors import DistutilsInternalError, DistutilsTemplateError
+from .util import convert_path
class FileList:
@@ -162,9 +162,7 @@ class FileList:
self.debug_print("recursive-include {} {}".format(dir, ' '.join(patterns)))
for pattern in patterns:
if not self.include_pattern(pattern, prefix=dir):
- msg = (
- "warning: no files found matching '%s' " "under directory '%s'"
- )
+ msg = "warning: no files found matching '%s' under directory '%s'"
log.warning(msg, pattern, dir)
elif action == 'recursive-exclude':
@@ -189,7 +187,7 @@ class FileList:
self.debug_print("prune " + dir_pattern)
if not self.exclude_pattern(None, prefix=dir_pattern):
log.warning(
- ("no previously-included directories found " "matching '%s'"),
+ ("no previously-included directories found matching '%s'"),
dir_pattern,
)
else:
@@ -363,9 +361,9 @@ def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0):
if os.sep == '\\':
sep = r'\\'
pattern_re = pattern_re[len(start) : len(pattern_re) - len(end)]
- pattern_re = r'{}\A{}{}.*{}{}'.format(start, prefix_re, sep, pattern_re, end)
+ pattern_re = rf'{start}\A{prefix_re}{sep}.*{pattern_re}{end}'
else: # no prefix -- respect anchor flag
if anchor:
- pattern_re = r'{}\A{}'.format(start, pattern_re[len(start) :])
+ pattern_re = rf'{start}\A{pattern_re[len(start) :]}'
return re.compile(pattern_re)
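
The switch from str.format to rf-strings is behavior-preserving: raw f-strings interpolate while leaving backslashes alone. A quick check with made-up fragments standing in for the glob_to_re() output used above:

    import re

    # Hypothetical fragments playing the roles of the variables in the hunk:
    start, end = r'(?s:', r')\Z'
    prefix_re, sep, pattern_re = r'src', r'/', r'[^/]*\.py'

    combined = rf'{start}\A{prefix_re}{sep}.*{pattern_re}{end}'
    assert combined == r'(?s:\Asrc/.*[^/]*\.py)\Z'

    rx = re.compile(combined)
    assert rx.match('src/pkg/mod.py')
    assert not rx.match('docs/readme.txt')
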
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/log.py b/contrib/python/setuptools/py3/setuptools/_distutils/log.py
index 239f315850..8abb09cfa2 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/log.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/log.py
@@ -9,7 +9,6 @@ import warnings
from ._log import log as _global_log
-
DEBUG = logging.DEBUG
INFO = logging.INFO
WARN = logging.WARN
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/msvc9compiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/msvc9compiler.py
index f9f9f2d844..6a0105e484 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/msvc9compiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/msvc9compiler.py
@@ -13,24 +13,23 @@ for older versions of VS in distutils.msvccompiler.
# ported to VS2005 and VS 2008 by Christian Heimes
import os
+import re
import subprocess
import sys
-import re
import warnings
+import winreg
+from ._log import log
+from .ccompiler import CCompiler, gen_lib_options
from .errors import (
+ CompileError,
DistutilsExecError,
DistutilsPlatformError,
- CompileError,
LibError,
LinkError,
)
-from .ccompiler import CCompiler, gen_lib_options
-from ._log import log
from .util import get_platform
-import winreg
-
warnings.warn(
"msvc9compiler is deprecated and slated to be removed "
"in the future. Please discontinue use or file an issue "
@@ -175,7 +174,7 @@ you can try compiling with MingW32, by passing "-c mingw32" to setup.py."""
except RegError:
continue
key = RegEnumKey(h, 0)
- d = Reg.get_value(base, r"{}\{}".format(p, key))
+ d = Reg.get_value(base, rf"{p}\{key}")
self.macros["$(FrameworkVersion)"] = d["version"]
def sub(self, s):
@@ -281,7 +280,7 @@ def query_vcvarsall(version, arch="x86"):
raise DistutilsPlatformError("Unable to find vcvarsall.bat")
log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
popen = subprocess.Popen(
- '"{}" {} & set'.format(vcvarsall, arch),
+ f'"{vcvarsall}" {arch} & set',
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
@@ -370,9 +369,7 @@ class MSVCCompiler(CCompiler):
# sanity check for platforms to prevent obscure errors later.
ok_plats = 'win32', 'win-amd64'
if plat_name not in ok_plats:
- raise DistutilsPlatformError(
- "--plat-name must be one of {}".format(ok_plats)
- )
+ raise DistutilsPlatformError(f"--plat-name must be one of {ok_plats}")
if (
"DISTUTILS_USE_SDK" in os.environ
@@ -564,9 +561,7 @@ class MSVCCompiler(CCompiler):
continue
else:
# how to handle this file?
- raise CompileError(
- "Don't know how to compile {} to {}".format(src, obj)
- )
+ raise CompileError(f"Don't know how to compile {src} to {obj}")
output_opt = "/Fo" + obj
try:
@@ -687,7 +682,7 @@ class MSVCCompiler(CCompiler):
mfinfo = self.manifest_get_embed_info(target_desc, ld_args)
if mfinfo is not None:
mffilename, mfid = mfinfo
- out_arg = '-outputresource:{};{}'.format(output_filename, mfid)
+ out_arg = f'-outputresource:{output_filename};{mfid}'
try:
self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg])
except DistutilsExecError as msg:
@@ -698,8 +693,8 @@ class MSVCCompiler(CCompiler):
def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
# If we need a manifest at all, an embedded manifest is recommended.
# See MSDN article titled
- # "How to: Embed a Manifest Inside a C/C++ Application"
- # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
+ # "Understanding manifest generation for C/C++ programs"
+ # (currently at https://learn.microsoft.com/en-us/cpp/build/understanding-manifest-generation-for-c-cpp-programs)
# Ask the linker to generate the manifest in the temp dir, so
# we can check it, and possibly embed it, later.
temp_manifest = os.path.join(
@@ -710,7 +705,7 @@ class MSVCCompiler(CCompiler):
def manifest_get_embed_info(self, target_desc, ld_args):
# If a manifest should be embedded, return a tuple of
# (manifest_filename, resource_id). Returns None if no manifest
- # should be embedded. See http://bugs.python.org/issue7833 for why
+ # should be embedded. See https://bugs.python.org/issue7833 for why
    # we want to avoid any manifest for extension modules if we can.
for arg in ld_args:
if arg.startswith("/MANIFESTFILE:"):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/msvccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/msvccompiler.py
index c3823e257e..ac8b68c08c 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/msvccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/msvccompiler.py
@@ -8,18 +8,19 @@ for the Microsoft Visual Studio.
# hacked by Robin Becker and Thomas Heller to do a better job of
# finding DevStudio (through the registry)
-import sys
import os
+import sys
import warnings
+
+from ._log import log
+from .ccompiler import CCompiler, gen_lib_options
from .errors import (
+ CompileError,
DistutilsExecError,
DistutilsPlatformError,
- CompileError,
LibError,
LinkError,
)
-from .ccompiler import CCompiler, gen_lib_options
-from ._log import log
_can_read_reg = False
try:
@@ -159,7 +160,7 @@ you can try compiling with MingW32, by passing "-c mingw32" to setup.py."""
except RegError:
continue
key = RegEnumKey(h, 0)
- d = read_values(base, r"{}\{}".format(p, key))
+ d = read_values(base, rf"{p}\{key}")
self.macros["$(FrameworkVersion)"] = d["version"]
def sub(self, s):
@@ -454,9 +455,7 @@ class MSVCCompiler(CCompiler):
continue
else:
# how to handle this file?
- raise CompileError(
- "Don't know how to compile {} to {}".format(src, obj)
- )
+ raise CompileError(f"Don't know how to compile {src} to {obj}")
output_opt = "/Fo" + obj
try:
@@ -637,14 +636,11 @@ class MSVCCompiler(CCompiler):
path = path + " dirs"
if self.__version >= 7:
- key = r"{}\{:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories".format(
- self.__root,
- self.__version,
- )
+ key = rf"{self.__root}\{self.__version:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
else:
key = (
- r"%s\6.0\Build System\Components\Platforms"
- r"\Win32 (%s)\Directories" % (self.__root, platform)
+ rf"{self.__root}\6.0\Build System\Components\Platforms"
+ rf"\Win32 ({platform})\Directories"
)
for base in HKEYS:
@@ -686,7 +682,8 @@ class MSVCCompiler(CCompiler):
if get_build_version() >= 8.0:
log.debug("Importing new compiler from distutils.msvc9compiler")
OldMSVCCompiler = MSVCCompiler
- from distutils.msvc9compiler import MSVCCompiler
-
# get_build_architecture not really relevant now we support cross-compile
- from distutils.msvc9compiler import MacroExpander # noqa: F811
+ from distutils.msvc9compiler import (
+ MacroExpander, # noqa: F811
+ MSVCCompiler,
+ )
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/py38compat.py b/contrib/python/setuptools/py3/setuptools/_distutils/py38compat.py
index 59224e71e5..ab12119fa5 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/py38compat.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/py38compat.py
@@ -5,4 +5,4 @@ def aix_platform(osname, version, release):
return _aix_support.aix_platform()
except ImportError:
pass
- return "{}-{}.{}".format(osname, version, release)
+ return f"{osname}-{version}.{release}"
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/spawn.py b/contrib/python/setuptools/py3/setuptools/_distutils/spawn.py
index afefe525ef..046b5bbb82 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/spawn.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/spawn.py
@@ -6,13 +6,13 @@ Also provides the 'find_executable()' to search the path for a given
executable name.
"""
-import sys
import os
import subprocess
+import sys
-from .errors import DistutilsExecError
-from .debug import DEBUG
from ._log import log
+from .debug import DEBUG
+from .errors import DistutilsExecError
def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): # noqa: C901
@@ -60,16 +60,12 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None): # noqa: C901
except OSError as exc:
if not DEBUG:
cmd = cmd[0]
- raise DistutilsExecError(
- "command {!r} failed: {}".format(cmd, exc.args[-1])
- ) from exc
+ raise DistutilsExecError(f"command {cmd!r} failed: {exc.args[-1]}") from exc
if exitcode:
if not DEBUG:
cmd = cmd[0]
- raise DistutilsExecError(
- "command {!r} failed with exit code {}".format(cmd, exitcode)
- )
+ raise DistutilsExecError(f"command {cmd!r} failed with exit code {exitcode}")
def find_executable(executable, path=None):
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/sysconfig.py b/contrib/python/setuptools/py3/setuptools/_distutils/sysconfig.py
index a40a7231b3..1a38e9fa79 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/sysconfig.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/sysconfig.py
@@ -9,15 +9,16 @@ Written by: Fred L. Drake, Jr.
Email: <fdrake@acm.org>
"""
+import functools
import os
+import pathlib
import re
import sys
import sysconfig
-import pathlib
-from .errors import DistutilsPlatformError
from . import py39compat
from ._functools import pass_none
+from .errors import DistutilsPlatformError
IS_PYPY = '__pypy__' in sys.builtin_module_names
@@ -195,12 +196,11 @@ def _get_python_inc_posix_prefix(prefix):
def _get_python_inc_nt(prefix, spec_prefix, plat_specific):
if python_build:
- # Include both the include and PC dir to ensure we can find
- # pyconfig.h
+ # Include both include dirs to ensure we can find pyconfig.h
return (
os.path.join(prefix, "include")
+ os.path.pathsep
- + os.path.join(prefix, "PC")
+ + os.path.dirname(sysconfig.get_config_h_filename())
)
return os.path.join(prefix, "include")
@@ -267,6 +267,24 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
)
+@functools.lru_cache
+def _customize_macos():
+ """
+ Perform first-time customization of compiler-related
+ config vars on macOS. Use after a compiler is known
+ to be needed. This customization exists primarily to support Pythons
+ from binary installers. The kind and paths to build tools on
+ the user system may vary significantly from the system
+ that Python itself was built on. Also the user OS
+ version and build tools may not support the same set
+ of CPU architectures for universal builds.
+ """
+
+ sys.platform == "darwin" and __import__('_osx_support').customize_compiler(
+ get_config_vars()
+ )
+
+
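
Replacing the manual CUSTOMIZED_OSX_COMPILER flag with @functools.lru_cache on a zero-argument function is a compact run-once idiom (the bare decorator form requires Python 3.8+). A minimal sketch:

    import functools

    @functools.lru_cache
    def run_once():
        # The body executes on the first call only; later calls return the
        # cached result (None here) without re-entering the function.
        print('customizing compiler vars...')

    run_once()  # prints
    run_once()  # silent: served from the cache
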
def customize_compiler(compiler): # noqa: C901
"""Do any platform-specific customization of a CCompiler instance.
@@ -274,22 +292,7 @@ def customize_compiler(compiler): # noqa: C901
varies across Unices and is stored in Python's Makefile.
"""
if compiler.compiler_type == "unix":
- if sys.platform == "darwin":
- # Perform first-time customization of compiler-related
- # config vars on OS X now that we know we need a compiler.
- # This is primarily to support Pythons from binary
- # installers. The kind and paths to build tools on
- # the user system may vary significantly from the system
- # that Python itself was built on. Also the user OS
- # version and build tools may not support the same set
- # of CPU architectures for universal builds.
- global _config_vars
- # Use get_config_var() to ensure _config_vars is initialized.
- if not get_config_var('CUSTOMIZED_OSX_COMPILER'):
- import _osx_support
-
- _osx_support.customize_compiler(_config_vars)
- _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
+ _customize_macos()
(
cc,
@@ -361,14 +364,7 @@ def customize_compiler(compiler): # noqa: C901
def get_config_h_filename():
"""Return full pathname of installed pyconfig.h file."""
- if python_build:
- if os.name == "nt":
- inc_dir = os.path.join(_sys_home or project_base, "PC")
- else:
- inc_dir = _sys_home or project_base
- return os.path.join(inc_dir, 'pyconfig.h')
- else:
- return sysconfig.get_config_h_filename()
+ return sysconfig.get_config_h_filename()
def get_makefile_filename():
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/text_file.py b/contrib/python/setuptools/py3/setuptools/_distutils/text_file.py
index 36f947e51c..0f846e3c52 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/text_file.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/text_file.py
@@ -115,7 +115,7 @@ class TextFile:
"""Open a new file named 'filename'. This overrides both the
'filename' and 'file' arguments to the constructor."""
self.filename = filename
- self.file = open(self.filename, errors=self.errors)
+ self.file = open(self.filename, errors=self.errors, encoding='utf-8')
self.current_line = 0
def close(self):
@@ -220,7 +220,7 @@ class TextFile:
if self.join_lines and buildup_line:
# oops: end of file
if line is None:
- self.warn("continuation line immediately precedes " "end-of-file")
+ self.warn("continuation line immediately precedes end-of-file")
return buildup_line
if self.collapse_join:
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/unixccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/unixccompiler.py
index bd8db9ac3f..0248bde87b 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/unixccompiler.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/unixccompiler.py
@@ -13,18 +13,21 @@ the "typical" Unix-style command-line C compiler:
* link shared library handled by 'cc -shared'
"""
+from __future__ import annotations
+
+import itertools
import os
-import sys
import re
import shlex
-import itertools
+import sys
from . import sysconfig
-from ._modified import newer
-from .ccompiler import CCompiler, gen_preprocess_options, gen_lib_options
-from .errors import DistutilsExecError, CompileError, LibError, LinkError
+from .compat import consolidate_linker_args
from ._log import log
from ._macos_compat import compiler_fixup
+from ._modified import newer
+from .ccompiler import CCompiler, gen_lib_options, gen_preprocess_options
+from .errors import CompileError, DistutilsExecError, LibError, LinkError
# XXX Things not currently handled:
# * optimization/debug/warning flags; we just use whatever's in Python's
@@ -281,10 +284,9 @@ class UnixCCompiler(CCompiler):
compiler = os.path.basename(shlex.split(cc_var)[0])
return "gcc" in compiler or "g++" in compiler
- def runtime_library_dir_option(self, dir):
+ def runtime_library_dir_option(self, dir: str) -> str | list[str]:
# XXX Hackish, at the very least. See Python bug #445902:
- # http://sourceforge.net/tracker/index.php
- # ?func=detail&aid=445902&group_id=5470&atid=105470
+ # https://bugs.python.org/issue445902
# Linkers on different platforms need different options to
# specify that directories need to be added to the list of
# directories searched for dependencies when a dynamic library
@@ -311,13 +313,14 @@ class UnixCCompiler(CCompiler):
"-L" + dir,
]
- # For all compilers, `-Wl` is the presumed way to
- # pass a compiler option to the linker and `-R` is
- # the way to pass an RPATH.
+ # For all compilers, `-Wl` is the presumed way to pass a
+ # compiler option to the linker
if sysconfig.get_config_var("GNULD") == "yes":
- # GNU ld needs an extra option to get a RUNPATH
- # instead of just an RPATH.
- return "-Wl,--enable-new-dtags,-R" + dir
+ return consolidate_linker_args([
+ # Force RUNPATH instead of RPATH
+ "-Wl,--enable-new-dtags",
+ "-Wl,-rpath," + dir,
+ ])
else:
return "-Wl,-R" + dir
@@ -389,10 +392,7 @@ class UnixCCompiler(CCompiler):
roots = map(self._library_root, dirs)
- searched = (
- os.path.join(root, lib_name)
- for root, lib_name in itertools.product(roots, lib_names)
- )
+ searched = itertools.starmap(os.path.join, itertools.product(roots, lib_names))
found = filter(os.path.exists, searched)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/util.py b/contrib/python/setuptools/py3/setuptools/_distutils/util.py
index 7ae914f7ee..9ee77721b3 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/util.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/util.py
@@ -4,6 +4,7 @@ Miscellaneous utility functions -- anything that doesn't fit into
one of the other *util.py modules.
"""
+import functools
import importlib.util
import os
import re
@@ -11,12 +12,11 @@ import string
import subprocess
import sys
import sysconfig
-import functools
-from .errors import DistutilsPlatformError, DistutilsByteCompileError
+from ._log import log
from ._modified import newer
+from .errors import DistutilsByteCompileError, DistutilsPlatformError
from .spawn import spawn
-from ._log import log
def get_host_platform():
@@ -30,13 +30,6 @@ def get_host_platform():
# even with older Python versions when distutils was split out.
# Now it delegates to stdlib sysconfig, but maintains compatibility.
- if sys.version_info < (3, 8):
- if os.name == 'nt':
- if '(arm)' in sys.version.lower():
- return 'win-arm32'
- if '(arm64)' in sys.version.lower():
- return 'win-arm64'
-
if sys.version_info < (3, 9):
if os.name == "posix" and hasattr(os, 'uname'):
osname, host, release, version, machine = os.uname()
@@ -109,8 +102,8 @@ def get_macosx_target_ver():
):
my_msg = (
'$' + MACOSX_VERSION_VAR + ' mismatch: '
- 'now "%s" but "%s" during configure; '
- 'must use 10.3 or later' % (env_ver, syscfg_ver)
+ f'now "{env_ver}" but "{syscfg_ver}" during configure; '
+ 'must use 10.3 or later'
)
raise DistutilsPlatformError(my_msg)
return env_ver
@@ -172,7 +165,7 @@ def change_root(new_root, pathname):
raise DistutilsPlatformError(f"nothing known about platform '{os.name}'")
-@functools.lru_cache()
+@functools.lru_cache
def check_environ():
"""Ensure that 'os.environ' has all the environment variables we
guarantee that users can use in config files, command-line options,
@@ -328,7 +321,7 @@ def execute(func, args, msg=None, verbose=0, dry_run=0):
print.
"""
if msg is None:
- msg = "{}{!r}".format(func.__name__, args)
+ msg = f"{func.__name__}{args!r}"
if msg[-2:] == ',)': # correct for singleton tuple
msg = msg[0:-2] + ')'
@@ -350,7 +343,7 @@ def strtobool(val):
elif val in ('n', 'no', 'f', 'false', 'off', '0'):
return 0
else:
- raise ValueError("invalid truth value {!r}".format(val))
+ raise ValueError(f"invalid truth value {val!r}")
def byte_compile( # noqa: C901
@@ -423,9 +416,9 @@ def byte_compile( # noqa: C901
log.info("writing byte-compilation script '%s'", script_name)
if not dry_run:
if script_fd is not None:
- script = os.fdopen(script_fd, "w")
- else:
- script = open(script_name, "w")
+ script = os.fdopen(script_fd, "w", encoding='utf-8')
+ else: # pragma: no cover
+ script = open(script_name, "w", encoding='utf-8')
with script:
script.write(
@@ -447,13 +440,12 @@ files = [
script.write(",\n".join(map(repr, py_files)) + "]\n")
script.write(
- """
-byte_compile(files, optimize=%r, force=%r,
- prefix=%r, base_dir=%r,
- verbose=%r, dry_run=0,
+ f"""
+byte_compile(files, optimize={optimize!r}, force={force!r},
+ prefix={prefix!r}, base_dir={base_dir!r},
+ verbose={verbose!r}, dry_run=0,
direct=1)
"""
- % (optimize, force, prefix, base_dir, verbose)
)
cmd = [sys.executable]
@@ -487,8 +479,7 @@ byte_compile(files, optimize=%r, force=%r,
if prefix:
if file[: len(prefix)] != prefix:
raise ValueError(
- "invalid prefix: filename %r doesn't start with %r"
- % (file, prefix)
+ f"invalid prefix: filename {file!r} doesn't start with {prefix!r}"
)
dfile = dfile[len(prefix) :]
if base_dir:
@@ -508,6 +499,12 @@ def rfc822_escape(header):
"""Return a version of the string escaped for inclusion in an
    RFC-822 header, by ensuring there are 8 spaces after each newline.
"""
- lines = header.split('\n')
- sep = '\n' + 8 * ' '
- return sep.join(lines)
+ indent = 8 * " "
+ lines = header.splitlines(keepends=True)
+
+ # Emulate the behaviour of `str.split`
+ # (the terminal line break in `splitlines` does not result in an extra line):
+ ends_in_newline = lines and lines[-1].splitlines()[0] != lines[-1]
+ suffix = indent if ends_in_newline else ""
+
+ return indent.join(lines) + suffix
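
The splitlines(keepends=True) version has to keep the old str.split semantics, where a trailing newline still produces one final indented (empty) continuation line. A doctest-style check of the function exactly as rewritten above:

    def rfc822_escape(header):
        indent = 8 * " "
        lines = header.splitlines(keepends=True)
        ends_in_newline = lines and lines[-1].splitlines()[0] != lines[-1]
        suffix = indent if ends_in_newline else ""
        return indent.join(lines) + suffix

    assert rfc822_escape('a\nb') == 'a\n        b'
    # Trailing newline: the old '\n'.join(header.split('\n')) also ended
    # with eight spaces here, so the emulation matches.
    assert rfc822_escape('a\n') == 'a\n        '
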
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/version.py b/contrib/python/setuptools/py3/setuptools/_distutils/version.py
index 74c40d7bfd..806d233ca5 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/version.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/version.py
@@ -26,9 +26,9 @@ Every version number class implements the following interface:
of the same class, thus must follow the same rules)
"""
+import contextlib
import re
import warnings
-import contextlib
@contextlib.contextmanager
@@ -60,7 +60,7 @@ class Version:
)
def __repr__(self):
- return "{} ('{}')".format(self.__class__.__name__, str(self))
+ return f"{self.__class__.__name__} ('{str(self)}')"
def __eq__(self, other):
c = self._cmp(other)
@@ -111,7 +111,6 @@ class Version:
class StrictVersion(Version):
-
"""Version numbering for anal retentives and software idealists.
Implements the standard interface for version number classes as
described above. A version number consists of two or three
@@ -179,42 +178,36 @@ class StrictVersion(Version):
return vstring
- def _cmp(self, other): # noqa: C901
+ def _cmp(self, other):
if isinstance(other, str):
with suppress_known_deprecation():
other = StrictVersion(other)
elif not isinstance(other, StrictVersion):
return NotImplemented
- if self.version != other.version:
- # numeric versions don't match
- # prerelease stuff doesn't matter
- if self.version < other.version:
- return -1
- else:
- return 1
-
- # have to compare prerelease
- # case 1: neither has prerelease; they're equal
- # case 2: self has prerelease, other doesn't; other is greater
- # case 3: self doesn't have prerelease, other does: self is greater
- # case 4: both have prerelease: must compare them!
-
- if not self.prerelease and not other.prerelease:
- return 0
- elif self.prerelease and not other.prerelease:
+ if self.version == other.version:
+ # versions match; pre-release drives the comparison
+ return self._cmp_prerelease(other)
+
+ return -1 if self.version < other.version else 1
+
+ def _cmp_prerelease(self, other):
+ """
+        case 1: self has prerelease, other doesn't; other is greater
+        case 2: self doesn't have prerelease, other does; self is greater
+        case 3: both or neither have prerelease; compare them!
+ """
+ if self.prerelease and not other.prerelease:
return -1
elif not self.prerelease and other.prerelease:
return 1
- elif self.prerelease and other.prerelease:
- if self.prerelease == other.prerelease:
- return 0
- elif self.prerelease < other.prerelease:
- return -1
- else:
- return 1
+
+ if self.prerelease == other.prerelease:
+ return 0
+ elif self.prerelease < other.prerelease:
+ return -1
else:
- assert False, "never get here"
+ return 1
# end class StrictVersion
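
The refactored _cmp keeps the documented ordering: a pre-release sorts below the matching final release. A small check (StrictVersion warns on construction, hence the filter; on Python 3.12+ the import would come from setuptools' vendored copy instead):

    import warnings
    from distutils.version import StrictVersion

    with warnings.catch_warnings():
        warnings.simplefilter('ignore')      # the class is deprecated
        assert StrictVersion('1.2.0') == StrictVersion('1.2')
        assert StrictVersion('1.2a1') < StrictVersion('1.2')   # case 1
        assert StrictVersion('1.2') < StrictVersion('1.2.1')
        assert StrictVersion('1.2b1') < StrictVersion('1.2b2')
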
@@ -286,7 +279,6 @@ class StrictVersion(Version):
class LooseVersion(Version):
-
"""Version numbering for anarchists and software realists.
Implements the standard interface for version number classes as
described above. A version number consists of a series of numbers,
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/versionpredicate.py b/contrib/python/setuptools/py3/setuptools/_distutils/versionpredicate.py
index d6c0c007aa..31c420168c 100644
--- a/contrib/python/setuptools/py3/setuptools/_distutils/versionpredicate.py
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/versionpredicate.py
@@ -1,9 +1,9 @@
-"""Module for parsing and testing package version predicate strings.
-"""
-import re
-from . import version
+"""Module for parsing and testing package version predicate strings."""
+
import operator
+import re
+from . import version
re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", re.ASCII)
# (package) (rest)
diff --git a/contrib/python/setuptools/py3/setuptools/_distutils/zosccompiler.py b/contrib/python/setuptools/py3/setuptools/_distutils/zosccompiler.py
new file mode 100644
index 0000000000..c7a7ca61cf
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_distutils/zosccompiler.py
@@ -0,0 +1,229 @@
+"""distutils.zosccompiler
+
+Contains the selection of the C and C++ compilers on z/OS. There are several
+different C compilers on z/OS, all of them optional, so the correct
+one needs to be chosen based on the user's input. This is compatible with
+the following compilers:
+
+IBM C/C++ For Open Enterprise Languages on z/OS 2.0
+IBM Open XL C/C++ 1.1 for z/OS
+IBM XL C/C++ V2.4.1 for z/OS 2.4 and 2.5
+IBM z/OS XL C/C++
+"""
+
+import os
+
+from . import sysconfig
+from .errors import CompileError, DistutilsExecError
+from .unixccompiler import UnixCCompiler
+
+_cc_args = {
+ 'ibm-openxl': [
+ '-m64',
+ '-fvisibility=default',
+ '-fzos-le-char-mode=ascii',
+ '-fno-short-enums',
+ ],
+ 'ibm-xlclang': [
+ '-q64',
+ '-qexportall',
+ '-qascii',
+ '-qstrict',
+ '-qnocsect',
+ '-Wa,asa,goff',
+ '-Wa,xplink',
+ '-qgonumber',
+ '-qenum=int',
+ '-Wc,DLL',
+ ],
+ 'ibm-xlc': [
+ '-q64',
+ '-qexportall',
+ '-qascii',
+ '-qstrict',
+ '-qnocsect',
+ '-Wa,asa,goff',
+ '-Wa,xplink',
+ '-qgonumber',
+ '-qenum=int',
+ '-Wc,DLL',
+ '-qlanglvl=extc99',
+ ],
+}
+
+_cxx_args = {
+ 'ibm-openxl': [
+ '-m64',
+ '-fvisibility=default',
+ '-fzos-le-char-mode=ascii',
+ '-fno-short-enums',
+ ],
+ 'ibm-xlclang': [
+ '-q64',
+ '-qexportall',
+ '-qascii',
+ '-qstrict',
+ '-qnocsect',
+ '-Wa,asa,goff',
+ '-Wa,xplink',
+ '-qgonumber',
+ '-qenum=int',
+ '-Wc,DLL',
+ ],
+ 'ibm-xlc': [
+ '-q64',
+ '-qexportall',
+ '-qascii',
+ '-qstrict',
+ '-qnocsect',
+ '-Wa,asa,goff',
+ '-Wa,xplink',
+ '-qgonumber',
+ '-qenum=int',
+ '-Wc,DLL',
+ '-qlanglvl=extended0x',
+ ],
+}
+
+_asm_args = {
+ 'ibm-openxl': ['-fasm', '-fno-integrated-as', '-Wa,--ASA', '-Wa,--GOFF'],
+ 'ibm-xlclang': [],
+ 'ibm-xlc': [],
+}
+
+_ld_args = {
+ 'ibm-openxl': [],
+ 'ibm-xlclang': ['-Wl,dll', '-q64'],
+ 'ibm-xlc': ['-Wl,dll', '-q64'],
+}
+
+
+# Python on z/OS is built with no compiler-specific options in its CFLAGS,
+# but each compiler requires its own specific options to build successfully,
+# though some of the options are common between them.
+class zOSCCompiler(UnixCCompiler):
+ src_extensions = ['.c', '.C', '.cc', '.cxx', '.cpp', '.m', '.s']
+ _cpp_extensions = ['.cc', '.cpp', '.cxx', '.C']
+ _asm_extensions = ['.s']
+
+ def _get_zos_compiler_name(self):
+ zos_compiler_names = [
+ os.path.basename(binary)
+ for envvar in ('CC', 'CXX', 'LDSHARED')
+ if (binary := os.environ.get(envvar, None))
+ ]
+ if len(zos_compiler_names) == 0:
+ return 'ibm-openxl'
+
+ zos_compilers = {}
+ for compiler in (
+ 'ibm-clang',
+ 'ibm-clang64',
+ 'ibm-clang++',
+ 'ibm-clang++64',
+ 'clang',
+ 'clang++',
+ 'clang-14',
+ ):
+ zos_compilers[compiler] = 'ibm-openxl'
+
+ for compiler in ('xlclang', 'xlclang++', 'njsc', 'njsc++'):
+ zos_compilers[compiler] = 'ibm-xlclang'
+
+ for compiler in ('xlc', 'xlC', 'xlc++'):
+ zos_compilers[compiler] = 'ibm-xlc'
+
+ return zos_compilers.get(zos_compiler_names[0], 'ibm-openxl')
+
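
The lookup only inspects the basename of the first of CC/CXX/LDSHARED that is set, and falls back to 'ibm-openxl'. A standalone rerun of that logic against a faked environment (path and abbreviated table are hypothetical; the real code reads os.environ):

    import os

    env = {'CC': '/usr/bin/xlclang'}          # hypothetical toolchain path
    names = [
        os.path.basename(binary)
        for envvar in ('CC', 'CXX', 'LDSHARED')
        if (binary := env.get(envvar))
    ]
    table = {'xlclang': 'ibm-xlclang', 'xlc': 'ibm-xlc', 'clang': 'ibm-openxl'}
    choice = table.get(names[0], 'ibm-openxl') if names else 'ibm-openxl'
    assert choice == 'ibm-xlclang'
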
+ def __init__(self, verbose=0, dry_run=0, force=0):
+ super().__init__(verbose, dry_run, force)
+ self.zos_compiler = self._get_zos_compiler_name()
+ sysconfig.customize_compiler(self)
+
+ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+ local_args = []
+ if ext in self._cpp_extensions:
+ compiler = self.compiler_cxx
+ local_args.extend(_cxx_args[self.zos_compiler])
+ elif ext in self._asm_extensions:
+ compiler = self.compiler_so
+ local_args.extend(_cc_args[self.zos_compiler])
+ local_args.extend(_asm_args[self.zos_compiler])
+ else:
+ compiler = self.compiler_so
+ local_args.extend(_cc_args[self.zos_compiler])
+ local_args.extend(cc_args)
+
+ try:
+ self.spawn(compiler + local_args + [src, '-o', obj] + extra_postargs)
+ except DistutilsExecError as msg:
+ raise CompileError(msg)
+
+ def runtime_library_dir_option(self, dir):
+ return '-L' + dir
+
+ def link(
+ self,
+ target_desc,
+ objects,
+ output_filename,
+ output_dir=None,
+ libraries=None,
+ library_dirs=None,
+ runtime_library_dirs=None,
+ export_symbols=None,
+ debug=0,
+ extra_preargs=None,
+ extra_postargs=None,
+ build_temp=None,
+ target_lang=None,
+ ):
+        # For a built module to use functions from CPython, it needs to use Python's
+        # side deck file. The side deck is located beside libpython3.xx.so.
+ ldversion = sysconfig.get_config_var('LDVERSION')
+ if sysconfig.python_build:
+ side_deck_path = os.path.join(
+ sysconfig.get_config_var('abs_builddir'),
+ f'libpython{ldversion}.x',
+ )
+ else:
+ side_deck_path = os.path.join(
+ sysconfig.get_config_var('installed_base'),
+ sysconfig.get_config_var('platlibdir'),
+ f'libpython{ldversion}.x',
+ )
+
+ if os.path.exists(side_deck_path):
+ if extra_postargs:
+ extra_postargs.append(side_deck_path)
+ else:
+ extra_postargs = [side_deck_path]
+
+        # Replace libraries with their side deck files where one exists
+ if runtime_library_dirs:
+ for dir in runtime_library_dirs:
+ for library in libraries[:]:
+ library_side_deck = os.path.join(dir, f'{library}.x')
+ if os.path.exists(library_side_deck):
+ libraries.remove(library)
+ extra_postargs.append(library_side_deck)
+ break
+
+ # Any required ld args for the given compiler
+ extra_postargs.extend(_ld_args[self.zos_compiler])
+
+ super().link(
+ target_desc,
+ objects,
+ output_filename,
+ output_dir,
+ libraries,
+ library_dirs,
+ runtime_library_dirs,
+ export_symbols,
+ debug,
+ extra_preargs,
+ extra_postargs,
+ build_temp,
+ target_lang,
+ )
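
For reference, the non-build-tree branch above resolves to a path beside the platform library directory; this probe shows where the side deck would be expected (the file itself only exists on z/OS installs, so the check prints False elsewhere):

    import os
    import sysconfig

    ldversion = sysconfig.get_config_var('LDVERSION') or '3.x'
    side_deck = os.path.join(
        sysconfig.get_config_var('installed_base'),
        sysconfig.get_config_var('platlibdir'),
        f'libpython{ldversion}.x',
    )
    print(side_deck, os.path.exists(side_deck))
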
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/backports/__init__.py b/contrib/python/setuptools/py3/setuptools/_vendor/backports/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/backports/__init__.py
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/backports/tarfile.py b/contrib/python/setuptools/py3/setuptools/_vendor/backports/tarfile.py
new file mode 100644
index 0000000000..a7a9a6e7b9
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/backports/tarfile.py
@@ -0,0 +1,2900 @@
+#!/usr/bin/env python3
+#-------------------------------------------------------------------
+# tarfile.py
+#-------------------------------------------------------------------
+# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+"""Read from and write to tar format archives.
+"""
+
+version = "0.9.0"
+__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
+__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
+
+#---------
+# Imports
+#---------
+from builtins import open as bltn_open
+import sys
+import os
+import io
+import shutil
+import stat
+import time
+import struct
+import copy
+import re
+import warnings
+
+try:
+ import pwd
+except ImportError:
+ pwd = None
+try:
+ import grp
+except ImportError:
+ grp = None
+
+# os.symlink on Windows prior to 6.0 raises NotImplementedError
+# OSError (winerror=1314) will be raised if the caller does not hold the
+# SeCreateSymbolicLinkPrivilege privilege
+symlink_exception = (AttributeError, NotImplementedError, OSError)
+
+# from tarfile import *
+__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
+ "CompressionError", "StreamError", "ExtractError", "HeaderError",
+ "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
+ "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
+ "tar_filter", "FilterError", "AbsoluteLinkError",
+ "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
+ "LinkOutsideDestinationError"]
+
+
+#---------------------------------------------------------
+# tar constants
+#---------------------------------------------------------
+NUL = b"\0" # the null character
+BLOCKSIZE = 512 # length of processing blocks
+RECORDSIZE = BLOCKSIZE * 20 # length of records
+GNU_MAGIC = b"ustar \0" # magic gnu tar string
+POSIX_MAGIC = b"ustar\x0000" # magic posix tar string
+
+LENGTH_NAME = 100 # maximum length of a filename
+LENGTH_LINK = 100 # maximum length of a linkname
+LENGTH_PREFIX = 155 # maximum length of the prefix field
+
+REGTYPE = b"0" # regular file
+AREGTYPE = b"\0" # regular file
+LNKTYPE = b"1" # link (inside tarfile)
+SYMTYPE = b"2" # symbolic link
+CHRTYPE = b"3" # character special device
+BLKTYPE = b"4" # block special device
+DIRTYPE = b"5" # directory
+FIFOTYPE = b"6" # fifo special device
+CONTTYPE = b"7" # contiguous file
+
+GNUTYPE_LONGNAME = b"L" # GNU tar longname
+GNUTYPE_LONGLINK = b"K" # GNU tar longlink
+GNUTYPE_SPARSE = b"S" # GNU tar sparse file
+
+XHDTYPE = b"x" # POSIX.1-2001 extended header
+XGLTYPE = b"g" # POSIX.1-2001 global header
+SOLARIS_XHDTYPE = b"X" # Solaris extended header
+
+USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
+GNU_FORMAT = 1 # GNU tar format
+PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
+DEFAULT_FORMAT = PAX_FORMAT
+
+#---------------------------------------------------------
+# tarfile constants
+#---------------------------------------------------------
+# File types that tarfile supports:
+SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
+ SYMTYPE, DIRTYPE, FIFOTYPE,
+ CONTTYPE, CHRTYPE, BLKTYPE,
+ GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+ GNUTYPE_SPARSE)
+
+# File types that will be treated as a regular file.
+REGULAR_TYPES = (REGTYPE, AREGTYPE,
+ CONTTYPE, GNUTYPE_SPARSE)
+
+# File types that are part of the GNU tar format.
+GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
+ GNUTYPE_SPARSE)
+
+# Fields from a pax header that override a TarInfo attribute.
+PAX_FIELDS = ("path", "linkpath", "size", "mtime",
+ "uid", "gid", "uname", "gname")
+
+# Fields from a pax header that are affected by hdrcharset.
+PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
+
+# Fields in a pax header that are numbers, all other fields
+# are treated as strings.
+PAX_NUMBER_FIELDS = {
+ "atime": float,
+ "ctime": float,
+ "mtime": float,
+ "uid": int,
+ "gid": int,
+ "size": int
+}
+
+#---------------------------------------------------------
+# initialization
+#---------------------------------------------------------
+if os.name == "nt":
+ ENCODING = "utf-8"
+else:
+ ENCODING = sys.getfilesystemencoding()
+
+#---------------------------------------------------------
+# Some useful functions
+#---------------------------------------------------------
+
+def stn(s, length, encoding, errors):
+ """Convert a string to a null-terminated bytes object.
+ """
+ if s is None:
+ raise ValueError("metadata cannot contain None")
+ s = s.encode(encoding, errors)
+ return s[:length] + (length - len(s)) * NUL
+
+def nts(s, encoding, errors):
+ """Convert a null-terminated bytes object to a string.
+ """
+ p = s.find(b"\0")
+ if p != -1:
+ s = s[:p]
+ return s.decode(encoding, errors)
+
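
stn() and nts() are inverses for strings that fit the field: pad with NULs going in, cut at the first NUL coming out. A self-contained round trip mirroring the two helpers:

    NUL = b"\0"

    def stn(s, length, encoding, errors):
        s = s.encode(encoding, errors)
        return s[:length] + (length - len(s)) * NUL

    def nts(s, encoding, errors):
        p = s.find(b"\0")
        return (s[:p] if p != -1 else s).decode(encoding, errors)

    field = stn('hello', 8, 'utf-8', 'strict')
    assert field == b'hello\x00\x00\x00'
    assert nts(field, 'utf-8', 'strict') == 'hello'
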
+def nti(s):
+ """Convert a number field to a python number.
+ """
+ # There are two possible encodings for a number field, see
+ # itn() below.
+ if s[0] in (0o200, 0o377):
+ n = 0
+ for i in range(len(s) - 1):
+ n <<= 8
+ n += s[i + 1]
+ if s[0] == 0o377:
+ n = -(256 ** (len(s) - 1) - n)
+ else:
+ try:
+ s = nts(s, "ascii", "strict")
+ n = int(s.strip() or "0", 8)
+ except ValueError:
+ raise InvalidHeaderError("invalid header")
+ return n
+
+def itn(n, digits=8, format=DEFAULT_FORMAT):
+ """Convert a python number to a number field.
+ """
+ # POSIX 1003.1-1988 requires numbers to be encoded as a string of
+    # octal digits followed by a null byte; this allows values up to
+    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
+    # that if necessary. A leading 0o200 or 0o377 byte indicates this
+    # particular encoding; the following digits-1 bytes are a big-endian
+ # base-256 representation. This allows values up to (256**(digits-1))-1.
+ # A 0o200 byte indicates a positive number, a 0o377 byte a negative
+ # number.
+ original_n = n
+ n = int(n)
+ if 0 <= n < 8 ** (digits - 1):
+ s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
+ elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
+ if n >= 0:
+ s = bytearray([0o200])
+ else:
+ s = bytearray([0o377])
+ n = 256 ** digits + n
+
+ for i in range(digits - 1):
+ s.insert(1, n & 0o377)
+ n >>= 8
+ else:
+ raise ValueError("overflow in number field")
+
+ return s
+
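
A worked sketch of the two encodings itn() chooses between: with digits=8 the octal form holds at most 8**7 - 1, beyond which the GNU form stores a 0o200 marker byte plus a big-endian base-256 payload, which is exactly what nti() unpacks above:

    # Octal form: seven octal digits plus a terminating NUL.
    assert b'%07o' % 511 + b'\x00' == b'0000777\x00'

    # GNU base-256 form for 8**7 (one past the octal maximum):
    encoded = bytes([0o200]) + (8 ** 7).to_bytes(7, 'big')
    assert int.from_bytes(encoded[1:], 'big') == 8 ** 7
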
+def calc_chksums(buf):
+ """Calculate the checksum for a member's header by summing up all
+ characters except for the chksum field which is treated as if
+ it was filled with spaces. According to the GNU tar sources,
+ some tars (Sun and NeXT) calculate chksum with signed char,
+ which will be different if there are chars in the buffer with
+ the high bit set. So we calculate two checksums, unsigned and
+ signed.
+ """
+ unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf))
+ signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf))
+ return unsigned_chksum, signed_chksum
+
+def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
+ """Copy length bytes from fileobj src to fileobj dst.
+ If length is None, copy the entire content.
+ """
+ bufsize = bufsize or 16 * 1024
+ if length == 0:
+ return
+ if length is None:
+ shutil.copyfileobj(src, dst, bufsize)
+ return
+
+ blocks, remainder = divmod(length, bufsize)
+ for b in range(blocks):
+ buf = src.read(bufsize)
+ if len(buf) < bufsize:
+ raise exception("unexpected end of data")
+ dst.write(buf)
+
+ if remainder != 0:
+ buf = src.read(remainder)
+ if len(buf) < remainder:
+ raise exception("unexpected end of data")
+ dst.write(buf)
+ return
+
+def _safe_print(s):
+ encoding = getattr(sys.stdout, 'encoding', None)
+ if encoding is not None:
+ s = s.encode(encoding, 'backslashreplace').decode(encoding)
+ print(s, end=' ')
+
+
+class TarError(Exception):
+ """Base exception."""
+ pass
+class ExtractError(TarError):
+ """General exception for extract errors."""
+ pass
+class ReadError(TarError):
+ """Exception for unreadable tar archives."""
+ pass
+class CompressionError(TarError):
+ """Exception for unavailable compression methods."""
+ pass
+class StreamError(TarError):
+ """Exception for unsupported operations on stream-like TarFiles."""
+ pass
+class HeaderError(TarError):
+ """Base exception for header errors."""
+ pass
+class EmptyHeaderError(HeaderError):
+ """Exception for empty headers."""
+ pass
+class TruncatedHeaderError(HeaderError):
+ """Exception for truncated headers."""
+ pass
+class EOFHeaderError(HeaderError):
+ """Exception for end of file headers."""
+ pass
+class InvalidHeaderError(HeaderError):
+ """Exception for invalid headers."""
+ pass
+class SubsequentHeaderError(HeaderError):
+ """Exception for missing and invalid extended headers."""
+ pass
+
+#---------------------------
+# internal stream interface
+#---------------------------
+class _LowLevelFile:
+ """Low-level file object. Supports reading and writing.
+ It is used instead of a regular file object for streaming
+ access.
+ """
+
+ def __init__(self, name, mode):
+ mode = {
+ "r": os.O_RDONLY,
+ "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
+ }[mode]
+ if hasattr(os, "O_BINARY"):
+ mode |= os.O_BINARY
+ self.fd = os.open(name, mode, 0o666)
+
+ def close(self):
+ os.close(self.fd)
+
+ def read(self, size):
+ return os.read(self.fd, size)
+
+ def write(self, s):
+ os.write(self.fd, s)
+
+class _Stream:
+ """Class that serves as an adapter between TarFile and
+ a stream-like object. The stream-like object only
+ needs to have a read() or write() method that works with bytes,
+ and the method is accessed blockwise.
+ Use of gzip or bzip2 compression is possible.
+ A stream-like object could be for example: sys.stdin.buffer,
+ sys.stdout.buffer, a socket, a tape device etc.
+
+ _Stream is intended to be used only internally.
+ """
+
+ def __init__(self, name, mode, comptype, fileobj, bufsize,
+ compresslevel):
+ """Construct a _Stream object.
+ """
+ self._extfileobj = True
+ if fileobj is None:
+ fileobj = _LowLevelFile(name, mode)
+ self._extfileobj = False
+
+ if comptype == '*':
+ # Enable transparent compression detection for the
+ # stream interface
+ fileobj = _StreamProxy(fileobj)
+ comptype = fileobj.getcomptype()
+
+ self.name = name or ""
+ self.mode = mode
+ self.comptype = comptype
+ self.fileobj = fileobj
+ self.bufsize = bufsize
+ self.buf = b""
+ self.pos = 0
+ self.closed = False
+
+ try:
+ if comptype == "gz":
+ try:
+ import zlib
+ except ImportError:
+ raise CompressionError("zlib module is not available") from None
+ self.zlib = zlib
+ self.crc = zlib.crc32(b"")
+ if mode == "r":
+ self.exception = zlib.error
+ self._init_read_gz()
+ else:
+ self._init_write_gz(compresslevel)
+
+ elif comptype == "bz2":
+ try:
+ import bz2
+ except ImportError:
+ raise CompressionError("bz2 module is not available") from None
+ if mode == "r":
+ self.dbuf = b""
+ self.cmp = bz2.BZ2Decompressor()
+ self.exception = OSError
+ else:
+ self.cmp = bz2.BZ2Compressor(compresslevel)
+
+ elif comptype == "xz":
+ try:
+ import lzma
+ except ImportError:
+ raise CompressionError("lzma module is not available") from None
+ if mode == "r":
+ self.dbuf = b""
+ self.cmp = lzma.LZMADecompressor()
+ self.exception = lzma.LZMAError
+ else:
+ self.cmp = lzma.LZMACompressor()
+
+ elif comptype != "tar":
+ raise CompressionError("unknown compression type %r" % comptype)
+
+ except:
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+ raise
+
+ def __del__(self):
+ if hasattr(self, "closed") and not self.closed:
+ self.close()
+
+ def _init_write_gz(self, compresslevel):
+ """Initialize for writing with gzip compression.
+ """
+ self.cmp = self.zlib.compressobj(compresslevel,
+ self.zlib.DEFLATED,
+ -self.zlib.MAX_WBITS,
+ self.zlib.DEF_MEM_LEVEL,
+ 0)
+ timestamp = struct.pack("<L", int(time.time()))
+ self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
+ if self.name.endswith(".gz"):
+ self.name = self.name[:-3]
+ # Honor "directory components removed" from RFC1952
+ self.name = os.path.basename(self.name)
+ # RFC1952 says we must use ISO-8859-1 for the FNAME field.
+ self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
+
+ def write(self, s):
+ """Write string s to the stream.
+ """
+ if self.comptype == "gz":
+ self.crc = self.zlib.crc32(s, self.crc)
+ self.pos += len(s)
+ if self.comptype != "tar":
+ s = self.cmp.compress(s)
+ self.__write(s)
+
+ def __write(self, s):
+ """Write string s to the stream if a whole new block
+ is ready to be written.
+ """
+ self.buf += s
+ while len(self.buf) > self.bufsize:
+ self.fileobj.write(self.buf[:self.bufsize])
+ self.buf = self.buf[self.bufsize:]
+
+ def close(self):
+ """Close the _Stream object. No operation should be
+ done on it afterwards.
+ """
+ if self.closed:
+ return
+
+ self.closed = True
+ try:
+ if self.mode == "w" and self.comptype != "tar":
+ self.buf += self.cmp.flush()
+
+ if self.mode == "w" and self.buf:
+ self.fileobj.write(self.buf)
+ self.buf = b""
+ if self.comptype == "gz":
+ self.fileobj.write(struct.pack("<L", self.crc))
+ self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
+ finally:
+ if not self._extfileobj:
+ self.fileobj.close()
+
+ def _init_read_gz(self):
+ """Initialize for reading a gzip compressed fileobj.
+ """
+ self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
+ self.dbuf = b""
+
+ # taken from gzip.GzipFile with some alterations
+ if self.__read(2) != b"\037\213":
+ raise ReadError("not a gzip file")
+ if self.__read(1) != b"\010":
+ raise CompressionError("unsupported compression method")
+
+ flag = ord(self.__read(1))
+ self.__read(6)
+
+ if flag & 4:
+ xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
+ self.read(xlen)
+ if flag & 8:
+ while True:
+ s = self.__read(1)
+ if not s or s == NUL:
+ break
+ if flag & 16:
+ while True:
+ s = self.__read(1)
+ if not s or s == NUL:
+ break
+ if flag & 2:
+ self.__read(2)
+
+ def tell(self):
+ """Return the stream's file pointer position.
+ """
+ return self.pos
+
+ def seek(self, pos=0):
+ """Set the stream's file pointer to pos. Negative seeking
+ is forbidden.
+ """
+ if pos - self.pos >= 0:
+ blocks, remainder = divmod(pos - self.pos, self.bufsize)
+ for i in range(blocks):
+ self.read(self.bufsize)
+ self.read(remainder)
+ else:
+ raise StreamError("seeking backwards is not allowed")
+ return self.pos
+
+ def read(self, size):
+ """Return the next size number of bytes from the stream."""
+ assert size is not None
+ buf = self._read(size)
+ self.pos += len(buf)
+ return buf
+
+ def _read(self, size):
+ """Return size bytes from the stream.
+ """
+ if self.comptype == "tar":
+ return self.__read(size)
+
+ c = len(self.dbuf)
+ t = [self.dbuf]
+ while c < size:
+ # Skip underlying buffer to avoid unaligned double buffering.
+ if self.buf:
+ buf = self.buf
+ self.buf = b""
+ else:
+ buf = self.fileobj.read(self.bufsize)
+ if not buf:
+ break
+ try:
+ buf = self.cmp.decompress(buf)
+ except self.exception as e:
+ raise ReadError("invalid compressed data") from e
+ t.append(buf)
+ c += len(buf)
+ t = b"".join(t)
+ self.dbuf = t[size:]
+ return t[:size]
+
+ def __read(self, size):
+ """Return size bytes from stream. If internal buffer is empty,
+ read another block from the stream.
+ """
+ c = len(self.buf)
+ t = [self.buf]
+ while c < size:
+ buf = self.fileobj.read(self.bufsize)
+ if not buf:
+ break
+ t.append(buf)
+ c += len(buf)
+ t = b"".join(t)
+ self.buf = t[size:]
+ return t[:size]
+# class _Stream
+
+class _StreamProxy(object):
+ """Small proxy class that enables transparent compression
+ detection for the Stream interface (mode 'r|*').
+ """
+
+ def __init__(self, fileobj):
+ self.fileobj = fileobj
+ self.buf = self.fileobj.read(BLOCKSIZE)
+
+ def read(self, size):
+ self.read = self.fileobj.read
+ return self.buf
+
+ def getcomptype(self):
+ if self.buf.startswith(b"\x1f\x8b\x08"):
+ return "gz"
+ elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
+ return "bz2"
+ elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
+ return "xz"
+ else:
+ return "tar"
+
+ def close(self):
+ self.fileobj.close()
+# class StreamProxy
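
The magic bytes checked in getcomptype() can be verified directly against the stdlib compressors; a self-contained re-check of the detection table:

    import bz2, gzip, lzma

    def sniff(buf):
        if buf.startswith(b'\x1f\x8b\x08'):
            return 'gz'
        if buf[0:3] == b'BZh' and buf[4:10] == b'1AY&SY':
            return 'bz2'
        if buf.startswith((b'\x5d\x00\x00\x80', b'\xfd7zXZ')):
            return 'xz'
        return 'tar'

    assert sniff(gzip.compress(b'x')) == 'gz'
    assert sniff(bz2.compress(b'x')) == 'bz2'
    assert sniff(lzma.compress(b'x')) == 'xz'
    assert sniff(b'anything else') == 'tar'
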
+
+#------------------------
+# Extraction file object
+#------------------------
+class _FileInFile(object):
+ """A thin wrapper around an existing file object that
+ provides a part of its data as an individual file
+ object.
+ """
+
+ def __init__(self, fileobj, offset, size, name, blockinfo=None):
+ self.fileobj = fileobj
+ self.offset = offset
+ self.size = size
+ self.position = 0
+ self.name = name
+ self.closed = False
+
+ if blockinfo is None:
+ blockinfo = [(0, size)]
+
+ # Construct a map with data and zero blocks.
+ self.map_index = 0
+ self.map = []
+ lastpos = 0
+ realpos = self.offset
+ for offset, size in blockinfo:
+ if offset > lastpos:
+ self.map.append((False, lastpos, offset, None))
+ self.map.append((True, offset, offset + size, realpos))
+ realpos += size
+ lastpos = offset + size
+ if lastpos < self.size:
+ self.map.append((False, lastpos, self.size, None))
+
+ def flush(self):
+ pass
+
+ def readable(self):
+ return True
+
+ def writable(self):
+ return False
+
+ def seekable(self):
+ return self.fileobj.seekable()
+
+ def tell(self):
+ """Return the current file position.
+ """
+ return self.position
+
+ def seek(self, position, whence=io.SEEK_SET):
+ """Seek to a position in the file.
+ """
+ if whence == io.SEEK_SET:
+ self.position = min(max(position, 0), self.size)
+ elif whence == io.SEEK_CUR:
+ if position < 0:
+ self.position = max(self.position + position, 0)
+ else:
+ self.position = min(self.position + position, self.size)
+ elif whence == io.SEEK_END:
+ self.position = max(min(self.size + position, self.size), 0)
+ else:
+ raise ValueError("Invalid argument")
+ return self.position
+
+ def read(self, size=None):
+ """Read data from the file.
+ """
+ if size is None:
+ size = self.size - self.position
+ else:
+ size = min(size, self.size - self.position)
+
+ buf = b""
+ while size > 0:
+ while True:
+ data, start, stop, offset = self.map[self.map_index]
+ if start <= self.position < stop:
+ break
+ else:
+ self.map_index += 1
+ if self.map_index == len(self.map):
+ self.map_index = 0
+ length = min(size, stop - self.position)
+ if data:
+ self.fileobj.seek(offset + (self.position - start))
+ b = self.fileobj.read(length)
+ if len(b) != length:
+ raise ReadError("unexpected end of data")
+ buf += b
+ else:
+ buf += NUL * length
+ size -= length
+ self.position += length
+ return buf
+
+ def readinto(self, b):
+ buf = self.read(len(b))
+ b[:len(buf)] = buf
+ return len(buf)
+
+ def close(self):
+ self.closed = True
+#class _FileInFile
+
+class ExFileObject(io.BufferedReader):
+
+ def __init__(self, tarfile, tarinfo):
+ fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
+ tarinfo.size, tarinfo.name, tarinfo.sparse)
+ super().__init__(fileobj)
+#class ExFileObject
+
+
+#-----------------------------
+# extraction filters (PEP 706)
+#-----------------------------
+
+class FilterError(TarError):
+ pass
+
+class AbsolutePathError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'member {tarinfo.name!r} has an absolute path')
+
+class OutsideDestinationError(FilterError):
+ def __init__(self, tarinfo, path):
+ self.tarinfo = tarinfo
+ self._path = path
+ super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
+ + 'which is outside the destination')
+
+class SpecialFileError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'{tarinfo.name!r} is a special file')
+
+class AbsoluteLinkError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'{tarinfo.name!r} is a link to an absolute path')
+
+class LinkOutsideDestinationError(FilterError):
+ def __init__(self, tarinfo, path):
+ self.tarinfo = tarinfo
+ self._path = path
+ super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
+ + 'which is outside the destination')
+
+def _get_filtered_attrs(member, dest_path, for_data=True):
+ new_attrs = {}
+ name = member.name
+ dest_path = os.path.realpath(dest_path)
+ # Strip leading / (tar's directory separator) from filenames.
+ # Include os.sep (target OS directory separator) as well.
+ if name.startswith(('/', os.sep)):
+ name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
+ if os.path.isabs(name):
+ # Path is absolute even after stripping.
+ # For example, 'C:/foo' on Windows.
+ raise AbsolutePathError(member)
+ # Ensure we stay in the destination
+ target_path = os.path.realpath(os.path.join(dest_path, name))
+ if os.path.commonpath([target_path, dest_path]) != dest_path:
+ raise OutsideDestinationError(member, target_path)
+ # Limit permissions (no high bits, and go-w)
+ mode = member.mode
+ if mode is not None:
+ # Strip high bits & group/other write bits
+ mode = mode & 0o755
+ if for_data:
+ # For data, handle permissions & file types
+ if member.isreg() or member.islnk():
+ if not mode & 0o100:
+ # Clear executable bits if not executable by user
+ mode &= ~0o111
+ # Ensure owner can read & write
+ mode |= 0o600
+ elif member.isdir() or member.issym():
+ # Ignore mode for directories & symlinks
+ mode = None
+ else:
+ # Reject special files
+ raise SpecialFileError(member)
+ if mode != member.mode:
+ new_attrs['mode'] = mode
+ if for_data:
+ # Ignore ownership for 'data'
+ if member.uid is not None:
+ new_attrs['uid'] = None
+ if member.gid is not None:
+ new_attrs['gid'] = None
+ if member.uname is not None:
+ new_attrs['uname'] = None
+ if member.gname is not None:
+ new_attrs['gname'] = None
+ # Check link destination for 'data'
+ if member.islnk() or member.issym():
+ if os.path.isabs(member.linkname):
+ raise AbsoluteLinkError(member)
+ if member.issym():
+ target_path = os.path.join(dest_path,
+ os.path.dirname(name),
+ member.linkname)
+ else:
+ target_path = os.path.join(dest_path,
+ member.linkname)
+ target_path = os.path.realpath(target_path)
+ if os.path.commonpath([target_path, dest_path]) != dest_path:
+ raise LinkOutsideDestinationError(member, target_path)
+ return new_attrs
+
+def fully_trusted_filter(member, dest_path):
+ return member
+
+def tar_filter(member, dest_path):
+ new_attrs = _get_filtered_attrs(member, dest_path, False)
+ if new_attrs:
+ return member.replace(**new_attrs, deep=False)
+ return member
+
+def data_filter(member, dest_path):
+ new_attrs = _get_filtered_attrs(member, dest_path, True)
+ if new_attrs:
+ return member.replace(**new_attrs, deep=False)
+ return member
+
+_NAMED_FILTERS = {
+ "fully_trusted": fully_trusted_filter,
+ "tar": tar_filter,
+ "data": data_filter,
+}
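+
+# Illustrative usage (a sketch; "archive.tar" and "dest" are hypothetical):
+# a filter may be passed to extract()/extractall() by name or as a callable,
+#
+#     with TarFile.open("archive.tar") as tf:
+#         tf.extractall(path="dest", filter="data")   # uses data_filter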
+
+#------------------
+# Exported Classes
+#------------------
+
+# Sentinel for replace() defaults, meaning "don't change the attribute"
+_KEEP = object()
+
+class TarInfo(object):
+ """Informational class which holds the details about an
+ archive member given by a tar header block.
+ TarInfo objects are returned by TarFile.getmember(),
+ TarFile.getmembers() and TarFile.gettarinfo() and are
+ usually created internally.
+ """
+
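+    # With a dict-valued __slots__, each value becomes the docstring of the
+    # corresponding member descriptor.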
+ __slots__ = dict(
+ name = 'Name of the archive member.',
+ mode = 'Permission bits.',
+ uid = 'User ID of the user who originally stored this member.',
+ gid = 'Group ID of the user who originally stored this member.',
+ size = 'Size in bytes.',
+ mtime = 'Time of last modification.',
+ chksum = 'Header checksum.',
+ type = ('File type. type is usually one of these constants: '
+ 'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
+ 'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
+        linkname = ('Name of the link target, which is only present '
+                    'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
+ uname = 'User name.',
+ gname = 'Group name.',
+ devmajor = 'Device major number.',
+ devminor = 'Device minor number.',
+ offset = 'The tar header starts here.',
+ offset_data = "The file's data starts here.",
+ pax_headers = ('A dictionary containing key-value pairs of an '
+ 'associated pax extended header.'),
+ sparse = 'Sparse member information.',
+ tarfile = None,
+ _sparse_structs = None,
+ _link_target = None,
+ )
+
+ def __init__(self, name=""):
+ """Construct a TarInfo object. name is the optional name
+ of the member.
+ """
+ self.name = name # member name
+ self.mode = 0o644 # file permissions
+ self.uid = 0 # user id
+ self.gid = 0 # group id
+ self.size = 0 # file size
+ self.mtime = 0 # modification time
+ self.chksum = 0 # header checksum
+ self.type = REGTYPE # member type
+ self.linkname = "" # link name
+ self.uname = "" # user name
+ self.gname = "" # group name
+ self.devmajor = 0 # device major number
+ self.devminor = 0 # device minor number
+
+ self.offset = 0 # the tar header starts here
+ self.offset_data = 0 # the file's data starts here
+
+ self.sparse = None # sparse member information
+ self.pax_headers = {} # pax header information
+
+ @property
+ def path(self):
+ 'In pax headers, "name" is called "path".'
+ return self.name
+
+ @path.setter
+ def path(self, name):
+ self.name = name
+
+ @property
+ def linkpath(self):
+ 'In pax headers, "linkname" is called "linkpath".'
+ return self.linkname
+
+ @linkpath.setter
+ def linkpath(self, linkname):
+ self.linkname = linkname
+
+ def __repr__(self):
+        return "<%s %r at %#x>" % (self.__class__.__name__, self.name, id(self))
+
+ def replace(self, *,
+ name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
+ uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
+ deep=True, _KEEP=_KEEP):
+ """Return a deep copy of self with the given attributes replaced.
+ """
+ if deep:
+ result = copy.deepcopy(self)
+ else:
+ result = copy.copy(self)
+ if name is not _KEEP:
+ result.name = name
+ if mtime is not _KEEP:
+ result.mtime = mtime
+ if mode is not _KEEP:
+ result.mode = mode
+ if linkname is not _KEEP:
+ result.linkname = linkname
+ if uid is not _KEEP:
+ result.uid = uid
+ if gid is not _KEEP:
+ result.gid = gid
+ if uname is not _KEEP:
+ result.uname = uname
+ if gname is not _KEEP:
+ result.gname = gname
+ return result
+
+ def get_info(self):
+ """Return the TarInfo's attributes as a dictionary.
+ """
+ if self.mode is None:
+ mode = None
+ else:
+ mode = self.mode & 0o7777
+ info = {
+ "name": self.name,
+ "mode": mode,
+ "uid": self.uid,
+ "gid": self.gid,
+ "size": self.size,
+ "mtime": self.mtime,
+ "chksum": self.chksum,
+ "type": self.type,
+ "linkname": self.linkname,
+ "uname": self.uname,
+ "gname": self.gname,
+ "devmajor": self.devmajor,
+ "devminor": self.devminor
+ }
+
+ if info["type"] == DIRTYPE and not info["name"].endswith("/"):
+ info["name"] += "/"
+
+ return info
+
+ def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
+        """Return a tar header as a bytes object of 512-byte blocks.
+ """
+ info = self.get_info()
+ for name, value in info.items():
+ if value is None:
+ raise ValueError("%s may not be None" % name)
+
+ if format == USTAR_FORMAT:
+ return self.create_ustar_header(info, encoding, errors)
+ elif format == GNU_FORMAT:
+ return self.create_gnu_header(info, encoding, errors)
+ elif format == PAX_FORMAT:
+ return self.create_pax_header(info, encoding)
+ else:
+ raise ValueError("invalid format")
+
+ def create_ustar_header(self, info, encoding, errors):
+ """Return the object as a ustar header block.
+ """
+ info["magic"] = POSIX_MAGIC
+
+ if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+ raise ValueError("linkname is too long")
+
+ if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+ info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors)
+
+ return self._create_header(info, USTAR_FORMAT, encoding, errors)
+
+ def create_gnu_header(self, info, encoding, errors):
+ """Return the object as a GNU header block sequence.
+ """
+ info["magic"] = GNU_MAGIC
+
+ buf = b""
+ if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK:
+ buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
+
+ if len(info["name"].encode(encoding, errors)) > LENGTH_NAME:
+ buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
+
+ return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
+
+ def create_pax_header(self, info, encoding):
+ """Return the object as a ustar header block. If it cannot be
+ represented this way, prepend a pax extended header sequence
+        with supplemental information.
+ """
+ info["magic"] = POSIX_MAGIC
+ pax_headers = self.pax_headers.copy()
+
+ # Test string fields for values that exceed the field length or cannot
+ # be represented in ASCII encoding.
+ for name, hname, length in (
+ ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
+ ("uname", "uname", 32), ("gname", "gname", 32)):
+
+ if hname in pax_headers:
+ # The pax header has priority.
+ continue
+
+ # Try to encode the string as ASCII.
+ try:
+ info[name].encode("ascii", "strict")
+ except UnicodeEncodeError:
+ pax_headers[hname] = info[name]
+ continue
+
+ if len(info[name]) > length:
+ pax_headers[hname] = info[name]
+
+        # Test number fields for values that exceed the field limit or that
+        # are stored as floats (e.g. a high-precision mtime).
+ for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
+ needs_pax = False
+
+ val = info[name]
+ val_is_float = isinstance(val, float)
+ val_int = round(val) if val_is_float else val
+ if not 0 <= val_int < 8 ** (digits - 1):
+ # Avoid overflow.
+ info[name] = 0
+ needs_pax = True
+ elif val_is_float:
+ # Put rounded value in ustar header, and full
+ # precision value in pax header.
+ info[name] = val_int
+ needs_pax = True
+
+ # The existing pax header has priority.
+ if needs_pax and name not in pax_headers:
+ pax_headers[name] = str(val)
+
+ # Create a pax extended header if necessary.
+ if pax_headers:
+ buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
+ else:
+ buf = b""
+
+ return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
+
+ @classmethod
+ def create_pax_global_header(cls, pax_headers):
+ """Return the object as a pax global header block sequence.
+ """
+ return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
+
+ def _posix_split_name(self, name, encoding, errors):
+ """Split a name longer than 100 chars into a prefix
+ and a name part.
+ """
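+        # Try successively longer prefixes until both the prefix and the
+        # remaining name fit their ustar fields; e.g. "very/long/path/file"
+        # is first tried as prefix "very" with name "long/path/file".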
+ components = name.split("/")
+ for i in range(1, len(components)):
+ prefix = "/".join(components[:i])
+ name = "/".join(components[i:])
+ if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \
+ len(name.encode(encoding, errors)) <= LENGTH_NAME:
+ break
+ else:
+ raise ValueError("name is too long")
+
+ return prefix, name
+
+ @staticmethod
+ def _create_header(info, format, encoding, errors):
+ """Return a header block. info is a dictionary with file
+ information, format must be one of the *_FORMAT constants.
+ """
+ has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
+ if has_device_fields:
+ devmajor = itn(info.get("devmajor", 0), 8, format)
+ devminor = itn(info.get("devminor", 0), 8, format)
+ else:
+ devmajor = stn("", 8, encoding, errors)
+ devminor = stn("", 8, encoding, errors)
+
+ # None values in metadata should cause ValueError.
+ # itn()/stn() do this for all fields except type.
+ filetype = info.get("type", REGTYPE)
+ if filetype is None:
+ raise ValueError("TarInfo.type must not be None")
+
+ parts = [
+ stn(info.get("name", ""), 100, encoding, errors),
+ itn(info.get("mode", 0) & 0o7777, 8, format),
+ itn(info.get("uid", 0), 8, format),
+ itn(info.get("gid", 0), 8, format),
+ itn(info.get("size", 0), 12, format),
+ itn(info.get("mtime", 0), 12, format),
+ b" ", # checksum field
+ filetype,
+ stn(info.get("linkname", ""), 100, encoding, errors),
+ info.get("magic", POSIX_MAGIC),
+ stn(info.get("uname", ""), 32, encoding, errors),
+ stn(info.get("gname", ""), 32, encoding, errors),
+ devmajor,
+ devminor,
+ stn(info.get("prefix", ""), 155, encoding, errors)
+ ]
+
+ buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
+ chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
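+        # The checksum field occupies bytes 148-155 of the 512-byte block
+        # (512 - 364 == 148, 512 - 357 == 155); write six octal digits plus
+        # a NUL and keep the final byte of the field as a space.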
+ buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
+ return buf
+
+ @staticmethod
+ def _create_payload(payload):
+        """Return the payload filled with zero bytes
+        up to the next 512 byte boundary.
+ """
+ blocks, remainder = divmod(len(payload), BLOCKSIZE)
+ if remainder > 0:
+ payload += (BLOCKSIZE - remainder) * NUL
+ return payload
+
+ @classmethod
+ def _create_gnu_long_header(cls, name, type, encoding, errors):
+ """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
+ for name.
+ """
+ name = name.encode(encoding, errors) + NUL
+
+ info = {}
+ info["name"] = "././@LongLink"
+ info["type"] = type
+ info["size"] = len(name)
+ info["magic"] = GNU_MAGIC
+
+ # create extended header + name blocks.
+ return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
+ cls._create_payload(name)
+
+ @classmethod
+ def _create_pax_generic_header(cls, pax_headers, type, encoding):
+ """Return a POSIX.1-2008 extended or global header sequence
+ that contains a list of keyword, value pairs. The values
+ must be strings.
+ """
+ # Check if one of the fields contains surrogate characters and thereby
+ # forces hdrcharset=BINARY, see _proc_pax() for more information.
+ binary = False
+ for keyword, value in pax_headers.items():
+ try:
+ value.encode("utf-8", "strict")
+ except UnicodeEncodeError:
+ binary = True
+ break
+
+ records = b""
+ if binary:
+ # Put the hdrcharset field at the beginning of the header.
+ records += b"21 hdrcharset=BINARY\n"
+
+ for keyword, value in pax_headers.items():
+ keyword = keyword.encode("utf-8")
+ if binary:
+ # Try to restore the original byte representation of `value'.
+                # Needless to say, the encoding must match the string.
+ value = value.encode(encoding, "surrogateescape")
+ else:
+ value = value.encode("utf-8")
+
+ l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
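+            # The length field counts its own digits, so iterate until the
+            # total stabilizes. E.g. for keyword "path" and value "x", l is
+            # 8 and the record becomes b"9 path=x\n" (8 + len("9") == 9).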
+ n = p = 0
+ while True:
+ n = l + len(str(p))
+ if n == p:
+ break
+ p = n
+ records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
+
+ # We use a hardcoded "././@PaxHeader" name like star does
+ # instead of the one that POSIX recommends.
+ info = {}
+ info["name"] = "././@PaxHeader"
+ info["type"] = type
+ info["size"] = len(records)
+ info["magic"] = POSIX_MAGIC
+
+ # Create pax header + record blocks.
+ return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
+ cls._create_payload(records)
+
+ @classmethod
+ def frombuf(cls, buf, encoding, errors):
+ """Construct a TarInfo object from a 512 byte bytes object.
+ """
+ if len(buf) == 0:
+ raise EmptyHeaderError("empty header")
+ if len(buf) != BLOCKSIZE:
+ raise TruncatedHeaderError("truncated header")
+ if buf.count(NUL) == BLOCKSIZE:
+ raise EOFHeaderError("end of file header")
+
+ chksum = nti(buf[148:156])
+ if chksum not in calc_chksums(buf):
+ raise InvalidHeaderError("bad checksum")
+
+ obj = cls()
+ obj.name = nts(buf[0:100], encoding, errors)
+ obj.mode = nti(buf[100:108])
+ obj.uid = nti(buf[108:116])
+ obj.gid = nti(buf[116:124])
+ obj.size = nti(buf[124:136])
+ obj.mtime = nti(buf[136:148])
+ obj.chksum = chksum
+ obj.type = buf[156:157]
+ obj.linkname = nts(buf[157:257], encoding, errors)
+ obj.uname = nts(buf[265:297], encoding, errors)
+ obj.gname = nts(buf[297:329], encoding, errors)
+ obj.devmajor = nti(buf[329:337])
+ obj.devminor = nti(buf[337:345])
+ prefix = nts(buf[345:500], encoding, errors)
+
+ # Old V7 tar format represents a directory as a regular
+ # file with a trailing slash.
+ if obj.type == AREGTYPE and obj.name.endswith("/"):
+ obj.type = DIRTYPE
+
+ # The old GNU sparse format occupies some of the unused
+ # space in the buffer for up to 4 sparse structures.
+ # Save them for later processing in _proc_sparse().
+ if obj.type == GNUTYPE_SPARSE:
+ pos = 386
+ structs = []
+ for i in range(4):
+ try:
+ offset = nti(buf[pos:pos + 12])
+ numbytes = nti(buf[pos + 12:pos + 24])
+ except ValueError:
+ break
+ structs.append((offset, numbytes))
+ pos += 24
+ isextended = bool(buf[482])
+ origsize = nti(buf[483:495])
+ obj._sparse_structs = (structs, isextended, origsize)
+
+ # Remove redundant slashes from directories.
+ if obj.isdir():
+ obj.name = obj.name.rstrip("/")
+
+ # Reconstruct a ustar longname.
+ if prefix and obj.type not in GNU_TYPES:
+ obj.name = prefix + "/" + obj.name
+ return obj
+
+ @classmethod
+ def fromtarfile(cls, tarfile):
+ """Return the next TarInfo object from TarFile object
+ tarfile.
+ """
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
+ obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
+ return obj._proc_member(tarfile)
+
+ #--------------------------------------------------------------------------
+ # The following are methods that are called depending on the type of a
+ # member. The entry point is _proc_member() which can be overridden in a
+ # subclass to add custom _proc_*() methods. A _proc_*() method MUST
+    # implement the following operations:
+    # 1. Set self.offset_data to the position where the data blocks begin,
+    #    if there is data that follows.
+    # 2. Set tarfile.offset to the position where the next member's header
+    #    will begin.
+    # 3. Return self or another valid TarInfo object.
+ def _proc_member(self, tarfile):
+ """Choose the right processing method depending on
+ the type and call it.
+ """
+ if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
+ return self._proc_gnulong(tarfile)
+ elif self.type == GNUTYPE_SPARSE:
+ return self._proc_sparse(tarfile)
+ elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
+ return self._proc_pax(tarfile)
+ else:
+ return self._proc_builtin(tarfile)
+
+ def _proc_builtin(self, tarfile):
+ """Process a builtin type or an unknown type which
+ will be treated as a regular file.
+ """
+ self.offset_data = tarfile.fileobj.tell()
+ offset = self.offset_data
+ if self.isreg() or self.type not in SUPPORTED_TYPES:
+ # Skip the following data blocks.
+ offset += self._block(self.size)
+ tarfile.offset = offset
+
+ # Patch the TarInfo object with saved global
+ # header information.
+ self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
+
+ # Remove redundant slashes from directories. This is to be consistent
+ # with frombuf().
+ if self.isdir():
+ self.name = self.name.rstrip("/")
+
+ return self
+
+ def _proc_gnulong(self, tarfile):
+ """Process the blocks that hold a GNU longname
+ or longlink member.
+ """
+ buf = tarfile.fileobj.read(self._block(self.size))
+
+ # Fetch the next header and process it.
+ try:
+ next = self.fromtarfile(tarfile)
+ except HeaderError as e:
+ raise SubsequentHeaderError(str(e)) from None
+
+ # Patch the TarInfo object from the next header with
+ # the longname information.
+ next.offset = self.offset
+ if self.type == GNUTYPE_LONGNAME:
+ next.name = nts(buf, tarfile.encoding, tarfile.errors)
+ elif self.type == GNUTYPE_LONGLINK:
+ next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
+
+ # Remove redundant slashes from directories. This is to be consistent
+ # with frombuf().
+ if next.isdir():
+ next.name = next.name.removesuffix("/")
+
+ return next
+
+ def _proc_sparse(self, tarfile):
+ """Process a GNU sparse header plus extra headers.
+ """
+ # We already collected some sparse structures in frombuf().
+ structs, isextended, origsize = self._sparse_structs
+ del self._sparse_structs
+
+ # Collect sparse structures from extended header blocks.
+ while isextended:
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ pos = 0
+ for i in range(21):
+ try:
+ offset = nti(buf[pos:pos + 12])
+ numbytes = nti(buf[pos + 12:pos + 24])
+ except ValueError:
+ break
+ if offset and numbytes:
+ structs.append((offset, numbytes))
+ pos += 24
+ isextended = bool(buf[504])
+ self.sparse = structs
+
+ self.offset_data = tarfile.fileobj.tell()
+ tarfile.offset = self.offset_data + self._block(self.size)
+ self.size = origsize
+ return self
+
+ def _proc_pax(self, tarfile):
+ """Process an extended or global header as described in
+ POSIX.1-2008.
+ """
+ # Read the header information.
+ buf = tarfile.fileobj.read(self._block(self.size))
+
+ # A pax header stores supplemental information for either
+ # the following file (extended) or all following files
+ # (global).
+ if self.type == XGLTYPE:
+ pax_headers = tarfile.pax_headers
+ else:
+ pax_headers = tarfile.pax_headers.copy()
+
+ # Check if the pax header contains a hdrcharset field. This tells us
+ # the encoding of the path, linkpath, uname and gname fields. Normally,
+        # these fields are UTF-8 encoded, but since POSIX.1-2008, tar
+ # implementations are allowed to store them as raw binary strings if
+ # the translation to UTF-8 fails.
+ match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
+ if match is not None:
+ pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
+
+ # For the time being, we don't care about anything other than "BINARY".
+ # The only other value that is currently allowed by the standard is
+        # "ISO-IR 10646 2000 UTF-8", in other words UTF-8.
+ hdrcharset = pax_headers.get("hdrcharset")
+ if hdrcharset == "BINARY":
+ encoding = tarfile.encoding
+ else:
+ encoding = "utf-8"
+
+        # Parse pax header information. A record looks like this:
+ # "%d %s=%s\n" % (length, keyword, value). length is the size
+ # of the complete record including the length field itself and
+ # the newline. keyword and value are both UTF-8 encoded strings.
+ regex = re.compile(br"(\d+) ([^=]+)=")
+ pos = 0
+ while match := regex.match(buf, pos):
+ length, keyword = match.groups()
+ length = int(length)
+ if length == 0:
+ raise InvalidHeaderError("invalid header")
+ value = buf[match.end(2) + 1:match.start(1) + length - 1]
+
+ # Normally, we could just use "utf-8" as the encoding and "strict"
+ # as the error handler, but we better not take the risk. For
+ # example, GNU tar <= 1.23 is known to store filenames it cannot
+ # translate to UTF-8 as raw strings (unfortunately without a
+ # hdrcharset=BINARY header).
+ # We first try the strict standard encoding, and if that fails we
+ # fall back on the user's encoding and error handler.
+ keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
+ tarfile.errors)
+ if keyword in PAX_NAME_FIELDS:
+ value = self._decode_pax_field(value, encoding, tarfile.encoding,
+ tarfile.errors)
+ else:
+ value = self._decode_pax_field(value, "utf-8", "utf-8",
+ tarfile.errors)
+
+ pax_headers[keyword] = value
+ pos += length
+
+ # Fetch the next header.
+ try:
+ next = self.fromtarfile(tarfile)
+ except HeaderError as e:
+ raise SubsequentHeaderError(str(e)) from None
+
+ # Process GNU sparse information.
+ if "GNU.sparse.map" in pax_headers:
+ # GNU extended sparse format version 0.1.
+ self._proc_gnusparse_01(next, pax_headers)
+
+ elif "GNU.sparse.size" in pax_headers:
+ # GNU extended sparse format version 0.0.
+ self._proc_gnusparse_00(next, pax_headers, buf)
+
+ elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
+ # GNU extended sparse format version 1.0.
+ self._proc_gnusparse_10(next, pax_headers, tarfile)
+
+ if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
+ # Patch the TarInfo object with the extended header info.
+ next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
+ next.offset = self.offset
+
+ if "size" in pax_headers:
+ # If the extended header replaces the size field,
+ # we need to recalculate the offset where the next
+ # header starts.
+ offset = next.offset_data
+ if next.isreg() or next.type not in SUPPORTED_TYPES:
+ offset += next._block(next.size)
+ tarfile.offset = offset
+
+ return next
+
+ def _proc_gnusparse_00(self, next, pax_headers, buf):
+ """Process a GNU tar extended sparse header, version 0.0.
+ """
+ offsets = []
+ for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
+ offsets.append(int(match.group(1)))
+ numbytes = []
+ for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
+ numbytes.append(int(match.group(1)))
+ next.sparse = list(zip(offsets, numbytes))
+
+ def _proc_gnusparse_01(self, next, pax_headers):
+ """Process a GNU tar extended sparse header, version 0.1.
+ """
+ sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
+ next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+ def _proc_gnusparse_10(self, next, pax_headers, tarfile):
+ """Process a GNU tar extended sparse header, version 1.0.
+ """
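+        # In this format, the sparse map is stored in the member's data
+        # area as newline-terminated decimal numbers: first the number of
+        # entries, then offset/size pairs, padded to a BLOCKSIZE multiple.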
+ fields = None
+ sparse = []
+ buf = tarfile.fileobj.read(BLOCKSIZE)
+ fields, buf = buf.split(b"\n", 1)
+ fields = int(fields)
+ while len(sparse) < fields * 2:
+ if b"\n" not in buf:
+ buf += tarfile.fileobj.read(BLOCKSIZE)
+ number, buf = buf.split(b"\n", 1)
+ sparse.append(int(number))
+ next.offset_data = tarfile.fileobj.tell()
+ next.sparse = list(zip(sparse[::2], sparse[1::2]))
+
+ def _apply_pax_info(self, pax_headers, encoding, errors):
+ """Replace fields with supplemental information from a previous
+ pax extended or global header.
+ """
+ for keyword, value in pax_headers.items():
+ if keyword == "GNU.sparse.name":
+ setattr(self, "path", value)
+ elif keyword == "GNU.sparse.size":
+ setattr(self, "size", int(value))
+ elif keyword == "GNU.sparse.realsize":
+ setattr(self, "size", int(value))
+ elif keyword in PAX_FIELDS:
+ if keyword in PAX_NUMBER_FIELDS:
+ try:
+ value = PAX_NUMBER_FIELDS[keyword](value)
+ except ValueError:
+ value = 0
+ if keyword == "path":
+ value = value.rstrip("/")
+ setattr(self, keyword, value)
+
+ self.pax_headers = pax_headers.copy()
+
+ def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
+ """Decode a single field from a pax record.
+ """
+ try:
+ return value.decode(encoding, "strict")
+ except UnicodeDecodeError:
+ return value.decode(fallback_encoding, fallback_errors)
+
+ def _block(self, count):
+ """Round up a byte count by BLOCKSIZE and return it,
+ e.g. _block(834) => 1024.
+ """
+ blocks, remainder = divmod(count, BLOCKSIZE)
+ if remainder:
+ blocks += 1
+ return blocks * BLOCKSIZE
+
+ def isreg(self):
+        'Return True if the TarInfo object is a regular file.'
+ return self.type in REGULAR_TYPES
+
+ def isfile(self):
+        'Return True if the TarInfo object is a regular file.'
+ return self.isreg()
+
+ def isdir(self):
+ 'Return True if it is a directory.'
+ return self.type == DIRTYPE
+
+ def issym(self):
+ 'Return True if it is a symbolic link.'
+ return self.type == SYMTYPE
+
+ def islnk(self):
+ 'Return True if it is a hard link.'
+ return self.type == LNKTYPE
+
+ def ischr(self):
+ 'Return True if it is a character device.'
+ return self.type == CHRTYPE
+
+ def isblk(self):
+ 'Return True if it is a block device.'
+ return self.type == BLKTYPE
+
+ def isfifo(self):
+ 'Return True if it is a FIFO.'
+ return self.type == FIFOTYPE
+
+ def issparse(self):
+ return self.sparse is not None
+
+ def isdev(self):
+ 'Return True if it is one of character device, block device or FIFO.'
+ return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
+# class TarInfo
+
+class TarFile(object):
+    """The TarFile class provides an interface to tar archives.
+ """
+
+ debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
+
+ dereference = False # If true, add content of linked file to the
+ # tar file, else the link.
+
+ ignore_zeros = False # If true, skips empty or invalid blocks and
+ # continues processing.
+
+ errorlevel = 1 # If 0, fatal errors only appear in debug
+ # messages (if debug >= 0). If > 0, errors
+ # are passed to the caller as exceptions.
+
+ format = DEFAULT_FORMAT # The format to use when creating an archive.
+
+ encoding = ENCODING # Encoding for 8-bit character strings.
+
+ errors = None # Error handler for unicode conversion.
+
+ tarinfo = TarInfo # The default TarInfo class to use.
+
+ fileobject = ExFileObject # The file-object for extractfile().
+
+ extraction_filter = None # The default filter for extraction.
+
+ def __init__(self, name=None, mode="r", fileobj=None, format=None,
+ tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
+ errors="surrogateescape", pax_headers=None, debug=None,
+ errorlevel=None, copybufsize=None):
+        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
+        read from an existing archive, 'a' to append data to an existing
+        file, 'w' to create a new file overwriting an existing one, or 'x'
+        to create a new file that must not exist yet. `mode' defaults to 'r'.
+        If `fileobj' is given, it is used for reading or writing data. If it
+        can be determined, `mode' is overridden by `fileobj's mode.
+        `fileobj' is not closed when TarFile is closed.
+ """
+ modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"}
+ if mode not in modes:
+ raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+ self.mode = mode
+ self._mode = modes[mode]
+
+ if not fileobj:
+ if self.mode == "a" and not os.path.exists(name):
+ # Create nonexistent files in append mode.
+ self.mode = "w"
+ self._mode = "wb"
+ fileobj = bltn_open(name, self._mode)
+ self._extfileobj = False
+ else:
+ if (name is None and hasattr(fileobj, "name") and
+ isinstance(fileobj.name, (str, bytes))):
+ name = fileobj.name
+ if hasattr(fileobj, "mode"):
+ self._mode = fileobj.mode
+ self._extfileobj = True
+ self.name = os.path.abspath(name) if name else None
+ self.fileobj = fileobj
+
+ # Init attributes.
+ if format is not None:
+ self.format = format
+ if tarinfo is not None:
+ self.tarinfo = tarinfo
+ if dereference is not None:
+ self.dereference = dereference
+ if ignore_zeros is not None:
+ self.ignore_zeros = ignore_zeros
+ if encoding is not None:
+ self.encoding = encoding
+ self.errors = errors
+
+ if pax_headers is not None and self.format == PAX_FORMAT:
+ self.pax_headers = pax_headers
+ else:
+ self.pax_headers = {}
+
+ if debug is not None:
+ self.debug = debug
+ if errorlevel is not None:
+ self.errorlevel = errorlevel
+
+ # Init datastructures.
+ self.copybufsize = copybufsize
+ self.closed = False
+ self.members = [] # list of members as TarInfo objects
+ self._loaded = False # flag if all members have been read
+ self.offset = self.fileobj.tell()
+ # current position in the archive file
+ self.inodes = {} # dictionary caching the inodes of
+ # archive members already added
+
+ try:
+ if self.mode == "r":
+ self.firstmember = None
+ self.firstmember = self.next()
+
+ if self.mode == "a":
+ # Move to the end of the archive,
+ # before the first empty block.
+ while True:
+ self.fileobj.seek(self.offset)
+ try:
+ tarinfo = self.tarinfo.fromtarfile(self)
+ self.members.append(tarinfo)
+ except EOFHeaderError:
+ self.fileobj.seek(self.offset)
+ break
+ except HeaderError as e:
+ raise ReadError(str(e)) from None
+
+ if self.mode in ("a", "w", "x"):
+ self._loaded = True
+
+ if self.pax_headers:
+ buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
+ self.fileobj.write(buf)
+ self.offset += len(buf)
+ except:
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+ raise
+
+ #--------------------------------------------------------------------------
+ # Below are the classmethods which act as alternate constructors to the
+ # TarFile class. The open() method is the only one that is needed for
+ # public use; it is the "super"-constructor and is able to select an
+ # adequate "sub"-constructor for a particular compression using the mapping
+ # from OPEN_METH.
+ #
+ # This concept allows one to subclass TarFile without losing the comfort of
+ # the super-constructor. A sub-constructor is registered and made available
+ # by adding it to the mapping in OPEN_METH.
+
+ @classmethod
+ def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
+ r"""Open a tar archive for reading, writing or appending. Return
+ an appropriate TarFile class.
+
+ mode:
+ 'r' or 'r:\*' open for reading with transparent compression
+ 'r:' open for reading exclusively uncompressed
+ 'r:gz' open for reading with gzip compression
+ 'r:bz2' open for reading with bzip2 compression
+ 'r:xz' open for reading with lzma compression
+ 'a' or 'a:' open for appending, creating the file if necessary
+ 'w' or 'w:' open for writing without compression
+ 'w:gz' open for writing with gzip compression
+ 'w:bz2' open for writing with bzip2 compression
+ 'w:xz' open for writing with lzma compression
+
+ 'x' or 'x:' create a tarfile exclusively without compression, raise
+ an exception if the file is already created
+ 'x:gz' create a gzip compressed tarfile, raise an exception
+ if the file is already created
+ 'x:bz2' create a bzip2 compressed tarfile, raise an exception
+ if the file is already created
+ 'x:xz' create an lzma compressed tarfile, raise an exception
+ if the file is already created
+
+ 'r|\*' open a stream of tar blocks with transparent compression
+ 'r|' open an uncompressed stream of tar blocks for reading
+ 'r|gz' open a gzip compressed stream of tar blocks
+ 'r|bz2' open a bzip2 compressed stream of tar blocks
+ 'r|xz' open an lzma compressed stream of tar blocks
+ 'w|' open an uncompressed stream for writing
+ 'w|gz' open a gzip compressed stream for writing
+ 'w|bz2' open a bzip2 compressed stream for writing
+ 'w|xz' open an lzma compressed stream for writing
+ """
+
+ if not name and not fileobj:
+ raise ValueError("nothing to open")
+
+ if mode in ("r", "r:*"):
+ # Find out which *open() is appropriate for opening the file.
+ def not_compressed(comptype):
+ return cls.OPEN_METH[comptype] == 'taropen'
+ error_msgs = []
+ for comptype in sorted(cls.OPEN_METH, key=not_compressed):
+ func = getattr(cls, cls.OPEN_METH[comptype])
+ if fileobj is not None:
+ saved_pos = fileobj.tell()
+ try:
+ return func(name, "r", fileobj, **kwargs)
+ except (ReadError, CompressionError) as e:
+ error_msgs.append(f'- method {comptype}: {e!r}')
+ if fileobj is not None:
+ fileobj.seek(saved_pos)
+ continue
+ error_msgs_summary = '\n'.join(error_msgs)
+ raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}")
+
+ elif ":" in mode:
+ filemode, comptype = mode.split(":", 1)
+ filemode = filemode or "r"
+ comptype = comptype or "tar"
+
+ # Select the *open() function according to
+ # given compression.
+ if comptype in cls.OPEN_METH:
+ func = getattr(cls, cls.OPEN_METH[comptype])
+ else:
+ raise CompressionError("unknown compression type %r" % comptype)
+ return func(name, filemode, fileobj, **kwargs)
+
+ elif "|" in mode:
+ filemode, comptype = mode.split("|", 1)
+ filemode = filemode or "r"
+ comptype = comptype or "tar"
+
+ if filemode not in ("r", "w"):
+ raise ValueError("mode must be 'r' or 'w'")
+
+ compresslevel = kwargs.pop("compresslevel", 9)
+ stream = _Stream(name, filemode, comptype, fileobj, bufsize,
+ compresslevel)
+ try:
+ t = cls(name, filemode, stream, **kwargs)
+ except:
+ stream.close()
+ raise
+ t._extfileobj = False
+ return t
+
+ elif mode in ("a", "w", "x"):
+ return cls.taropen(name, mode, fileobj, **kwargs)
+
+ raise ValueError("undiscernible mode")
+
+ @classmethod
+ def taropen(cls, name, mode="r", fileobj=None, **kwargs):
+ """Open uncompressed tar archive name for reading or writing.
+ """
+ if mode not in ("r", "a", "w", "x"):
+ raise ValueError("mode must be 'r', 'a', 'w' or 'x'")
+ return cls(name, mode, fileobj, **kwargs)
+
+ @classmethod
+ def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+ """Open gzip compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if mode not in ("r", "w", "x"):
+ raise ValueError("mode must be 'r', 'w' or 'x'")
+
+ try:
+ from gzip import GzipFile
+ except ImportError:
+ raise CompressionError("gzip module is not available") from None
+
+ try:
+ fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
+ except OSError as e:
+ if fileobj is not None and mode == 'r':
+ raise ReadError("not a gzip file") from e
+ raise
+
+ try:
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except OSError as e:
+ fileobj.close()
+ if mode == 'r':
+ raise ReadError("not a gzip file") from e
+ raise
+ except:
+ fileobj.close()
+ raise
+ t._extfileobj = False
+ return t
+
+ @classmethod
+ def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
+ """Open bzip2 compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if mode not in ("r", "w", "x"):
+ raise ValueError("mode must be 'r', 'w' or 'x'")
+
+ try:
+ from bz2 import BZ2File
+ except ImportError:
+ raise CompressionError("bz2 module is not available") from None
+
+ fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
+
+ try:
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except (OSError, EOFError) as e:
+ fileobj.close()
+ if mode == 'r':
+ raise ReadError("not a bzip2 file") from e
+ raise
+ except:
+ fileobj.close()
+ raise
+ t._extfileobj = False
+ return t
+
+ @classmethod
+ def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
+ """Open lzma compressed tar archive name for reading or writing.
+ Appending is not allowed.
+ """
+ if mode not in ("r", "w", "x"):
+ raise ValueError("mode must be 'r', 'w' or 'x'")
+
+ try:
+ from lzma import LZMAFile, LZMAError
+ except ImportError:
+ raise CompressionError("lzma module is not available") from None
+
+ fileobj = LZMAFile(fileobj or name, mode, preset=preset)
+
+ try:
+ t = cls.taropen(name, mode, fileobj, **kwargs)
+ except (LZMAError, EOFError) as e:
+ fileobj.close()
+ if mode == 'r':
+ raise ReadError("not an lzma file") from e
+ raise
+ except:
+ fileobj.close()
+ raise
+ t._extfileobj = False
+ return t
+
+ # All *open() methods are registered here.
+ OPEN_METH = {
+ "tar": "taropen", # uncompressed tar
+ "gz": "gzopen", # gzip compressed tar
+ "bz2": "bz2open", # bzip2 compressed tar
+ "xz": "xzopen" # lzma compressed tar
+ }
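+    # Dispatch example: mode "r:gz" splits into filemode "r" and comptype
+    # "gz", so open() resolves OPEN_METH["gz"] and calls
+    # cls.gzopen(name, "r", fileobj, **kwargs).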
+
+ #--------------------------------------------------------------------------
+ # The public methods which TarFile provides:
+
+ def close(self):
+ """Close the TarFile. In write-mode, two finishing zero blocks are
+ appended to the archive.
+ """
+ if self.closed:
+ return
+
+ self.closed = True
+ try:
+ if self.mode in ("a", "w", "x"):
+ self.fileobj.write(NUL * (BLOCKSIZE * 2))
+ self.offset += (BLOCKSIZE * 2)
+ # fill up the end with zero-blocks
+ # (like option -b20 for tar does)
+ blocks, remainder = divmod(self.offset, RECORDSIZE)
+ if remainder > 0:
+ self.fileobj.write(NUL * (RECORDSIZE - remainder))
+ finally:
+ if not self._extfileobj:
+ self.fileobj.close()
+
+ def getmember(self, name):
+        """Return a TarInfo object for member ``name``. If ``name`` cannot be
+ found in the archive, KeyError is raised. If a member occurs more
+ than once in the archive, its last occurrence is assumed to be the
+ most up-to-date version.
+ """
+ tarinfo = self._getmember(name.rstrip('/'))
+ if tarinfo is None:
+ raise KeyError("filename %r not found" % name)
+ return tarinfo
+
+ def getmembers(self):
+ """Return the members of the archive as a list of TarInfo objects. The
+ list has the same order as the members in the archive.
+ """
+ self._check()
+ if not self._loaded: # if we want to obtain a list of
+ self._load() # all members, we first have to
+ # scan the whole archive.
+ return self.members
+
+ def getnames(self):
+ """Return the members of the archive as a list of their names. It has
+ the same order as the list returned by getmembers().
+ """
+ return [tarinfo.name for tarinfo in self.getmembers()]
+
+ def gettarinfo(self, name=None, arcname=None, fileobj=None):
+ """Create a TarInfo object from the result of os.stat or equivalent
+ on an existing file. The file is either named by ``name``, or
+ specified as a file object ``fileobj`` with a file descriptor. If
+ given, ``arcname`` specifies an alternative name for the file in the
+ archive, otherwise, the name is taken from the 'name' attribute of
+ 'fileobj', or the 'name' argument. The name should be a text
+ string.
+ """
+ self._check("awx")
+
+ # When fileobj is given, replace name by
+ # fileobj's real name.
+ if fileobj is not None:
+ name = fileobj.name
+
+        # Build the name of the member in the archive.
+        # Backward slashes are converted to forward slashes,
+        # and absolute paths are turned into relative paths.
+ if arcname is None:
+ arcname = name
+ drv, arcname = os.path.splitdrive(arcname)
+ arcname = arcname.replace(os.sep, "/")
+ arcname = arcname.lstrip("/")
+
+ # Now, fill the TarInfo object with
+ # information specific for the file.
+ tarinfo = self.tarinfo()
+ tarinfo.tarfile = self # Not needed
+
+ # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
+ if fileobj is None:
+ if not self.dereference:
+ statres = os.lstat(name)
+ else:
+ statres = os.stat(name)
+ else:
+ statres = os.fstat(fileobj.fileno())
+ linkname = ""
+
+ stmd = statres.st_mode
+ if stat.S_ISREG(stmd):
+ inode = (statres.st_ino, statres.st_dev)
+ if not self.dereference and statres.st_nlink > 1 and \
+ inode in self.inodes and arcname != self.inodes[inode]:
+ # Is it a hardlink to an already
+ # archived file?
+ type = LNKTYPE
+ linkname = self.inodes[inode]
+ else:
+                # The inode is added only if it's valid.
+ # For win32 it is always 0.
+ type = REGTYPE
+ if inode[0]:
+ self.inodes[inode] = arcname
+ elif stat.S_ISDIR(stmd):
+ type = DIRTYPE
+ elif stat.S_ISFIFO(stmd):
+ type = FIFOTYPE
+ elif stat.S_ISLNK(stmd):
+ type = SYMTYPE
+ linkname = os.readlink(name)
+ elif stat.S_ISCHR(stmd):
+ type = CHRTYPE
+ elif stat.S_ISBLK(stmd):
+ type = BLKTYPE
+ else:
+ return None
+
+ # Fill the TarInfo object with all
+ # information we can get.
+ tarinfo.name = arcname
+ tarinfo.mode = stmd
+ tarinfo.uid = statres.st_uid
+ tarinfo.gid = statres.st_gid
+ if type == REGTYPE:
+ tarinfo.size = statres.st_size
+ else:
+ tarinfo.size = 0
+ tarinfo.mtime = statres.st_mtime
+ tarinfo.type = type
+ tarinfo.linkname = linkname
+ if pwd:
+ try:
+ tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+ except KeyError:
+ pass
+ if grp:
+ try:
+ tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+ except KeyError:
+ pass
+
+ if type in (CHRTYPE, BLKTYPE):
+ if hasattr(os, "major") and hasattr(os, "minor"):
+ tarinfo.devmajor = os.major(statres.st_rdev)
+ tarinfo.devminor = os.minor(statres.st_rdev)
+ return tarinfo
+
+ def list(self, verbose=True, *, members=None):
+ """Print a table of contents to sys.stdout. If ``verbose`` is False, only
+ the names of the members are printed. If it is True, an `ls -l'-like
+ output is produced. ``members`` is optional and must be a subset of the
+ list returned by getmembers().
+ """
+ self._check()
+
+ if members is None:
+ members = self
+ for tarinfo in members:
+ if verbose:
+ if tarinfo.mode is None:
+ _safe_print("??????????")
+ else:
+ _safe_print(stat.filemode(tarinfo.mode))
+ _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
+ tarinfo.gname or tarinfo.gid))
+ if tarinfo.ischr() or tarinfo.isblk():
+ _safe_print("%10s" %
+ ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
+ else:
+ _safe_print("%10d" % tarinfo.size)
+ if tarinfo.mtime is None:
+ _safe_print("????-??-?? ??:??:??")
+ else:
+ _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
+ % time.localtime(tarinfo.mtime)[:6])
+
+ _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
+
+ if verbose:
+ if tarinfo.issym():
+ _safe_print("-> " + tarinfo.linkname)
+ if tarinfo.islnk():
+ _safe_print("link to " + tarinfo.linkname)
+ print()
+
+ def add(self, name, arcname=None, recursive=True, *, filter=None):
+ """Add the file ``name`` to the archive. ``name`` may be any type of file
+ (directory, fifo, symbolic link, etc.). If given, ``arcname``
+ specifies an alternative name for the file in the archive.
+ Directories are added recursively by default. This can be avoided by
+ setting ``recursive`` to False. ``filter`` is a function
+ that expects a TarInfo object argument and returns the changed
+ TarInfo object, if it returns None the TarInfo object will be
+ excluded from the archive.
+ """
+ self._check("awx")
+
+ if arcname is None:
+ arcname = name
+
+ # Skip if somebody tries to archive the archive...
+ if self.name is not None and os.path.abspath(name) == self.name:
+ self._dbg(2, "tarfile: Skipped %r" % name)
+ return
+
+ self._dbg(1, name)
+
+ # Create a TarInfo object from the file.
+ tarinfo = self.gettarinfo(name, arcname)
+
+ if tarinfo is None:
+ self._dbg(1, "tarfile: Unsupported type %r" % name)
+ return
+
+ # Change or exclude the TarInfo object.
+ if filter is not None:
+ tarinfo = filter(tarinfo)
+ if tarinfo is None:
+ self._dbg(2, "tarfile: Excluded %r" % name)
+ return
+
+ # Append the tar header and data to the archive.
+ if tarinfo.isreg():
+ with bltn_open(name, "rb") as f:
+ self.addfile(tarinfo, f)
+
+ elif tarinfo.isdir():
+ self.addfile(tarinfo)
+ if recursive:
+ for f in sorted(os.listdir(name)):
+ self.add(os.path.join(name, f), os.path.join(arcname, f),
+ recursive, filter=filter)
+
+ else:
+ self.addfile(tarinfo)
+
+ def addfile(self, tarinfo, fileobj=None):
+ """Add the TarInfo object ``tarinfo`` to the archive. If ``fileobj`` is
+ given, it should be a binary file, and tarinfo.size bytes are read
+ from it and added to the archive. You can create TarInfo objects
+ directly, or by using gettarinfo().
+ """
+ self._check("awx")
+
+ tarinfo = copy.copy(tarinfo)
+
+ buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
+ self.fileobj.write(buf)
+ self.offset += len(buf)
+        bufsize = self.copybufsize
+ # If there's data to follow, append it.
+ if fileobj is not None:
+ copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize)
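+            # Pad the data up to a full 512-byte block: e.g. a 1000-byte
+            # file occupies two blocks (1024 bytes) in the archive.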
+ blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
+ if remainder > 0:
+ self.fileobj.write(NUL * (BLOCKSIZE - remainder))
+ blocks += 1
+ self.offset += blocks * BLOCKSIZE
+
+ self.members.append(tarinfo)
+
+ def _get_filter_function(self, filter):
+ if filter is None:
+ filter = self.extraction_filter
+ if filter is None:
+ warnings.warn(
+ 'Python 3.14 will, by default, filter extracted tar '
+ + 'archives and reject files or modify their metadata. '
+ + 'Use the filter argument to control this behavior.',
+ DeprecationWarning)
+ return fully_trusted_filter
+ if isinstance(filter, str):
+ raise TypeError(
+ 'String names are not supported for '
+ + 'TarFile.extraction_filter. Use a function such as '
+ + 'tarfile.data_filter directly.')
+ return filter
+ if callable(filter):
+ return filter
+ try:
+ return _NAMED_FILTERS[filter]
+ except KeyError:
+ raise ValueError(f"filter {filter!r} not found") from None
+
+ def extractall(self, path=".", members=None, *, numeric_owner=False,
+ filter=None):
+ """Extract all members from the archive to the current working
+ directory and set owner, modification time and permissions on
+ directories afterwards. `path' specifies a different directory
+ to extract to. `members' is optional and must be a subset of the
+ list returned by getmembers(). If `numeric_owner` is True, only
+ the numbers for user/group names are used and not the names.
+
+ The `filter` function will be called on each member just
+ before extraction.
+ It can return a changed TarInfo or None to skip the member.
+ String names of common filters are accepted.
+ """
+ directories = []
+
+ filter_function = self._get_filter_function(filter)
+ if members is None:
+ members = self
+
+ for member in members:
+ tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ if tarinfo is None:
+ continue
+ if tarinfo.isdir():
+ # For directories, delay setting attributes until later,
+ # since permissions can interfere with extraction and
+ # extracting contents can reset mtime.
+ directories.append(tarinfo)
+ self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
+ numeric_owner=numeric_owner)
+
+ # Reverse sort directories.
+ directories.sort(key=lambda a: a.name, reverse=True)
+
+ # Set correct owner, mtime and filemode on directories.
+ for tarinfo in directories:
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
+ self.utime(tarinfo, dirpath)
+ self.chmod(tarinfo, dirpath)
+ except ExtractError as e:
+ self._handle_nonfatal_error(e)
+
+ def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
+ filter=None):
+ """Extract a member from the archive to the current working directory,
+ using its full name. Its file information is extracted as accurately
+ as possible. `member' may be a filename or a TarInfo object. You can
+ specify a different directory using `path'. File attributes (owner,
+ mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
+ is True, only the numbers for user/group names are used and not
+ the names.
+
+ The `filter` function will be called before extraction.
+ It can return a changed TarInfo or None to skip the member.
+ String names of common filters are accepted.
+ """
+ filter_function = self._get_filter_function(filter)
+ tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ if tarinfo is not None:
+ self._extract_one(tarinfo, path, set_attrs, numeric_owner)
+
+ def _get_extract_tarinfo(self, member, filter_function, path):
+ """Get filtered TarInfo (or None) from member, which might be a str"""
+ if isinstance(member, str):
+ tarinfo = self.getmember(member)
+ else:
+ tarinfo = member
+
+ unfiltered = tarinfo
+ try:
+ tarinfo = filter_function(tarinfo, path)
+ except (OSError, FilterError) as e:
+ self._handle_fatal_error(e)
+ except ExtractError as e:
+ self._handle_nonfatal_error(e)
+ if tarinfo is None:
+ self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
+ return None
+ # Prepare the link target for makelink().
+ if tarinfo.islnk():
+ tarinfo = copy.copy(tarinfo)
+ tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+ return tarinfo
+
+ def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
+ """Extract from filtered tarinfo to disk"""
+ self._check("r")
+
+ try:
+ self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
+ set_attrs=set_attrs,
+ numeric_owner=numeric_owner)
+ except OSError as e:
+ self._handle_fatal_error(e)
+ except ExtractError as e:
+ self._handle_nonfatal_error(e)
+
+ def _handle_nonfatal_error(self, e):
+ """Handle non-fatal error (ExtractError) according to errorlevel"""
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+ def _handle_fatal_error(self, e):
+ """Handle "fatal" error according to self.errorlevel"""
+ if self.errorlevel > 0:
+ raise
+ elif isinstance(e, OSError):
+ if e.filename is None:
+ self._dbg(1, "tarfile: %s" % e.strerror)
+ else:
+ self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+ else:
+ self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
+
+ def extractfile(self, member):
+ """Extract a member from the archive as a file object. ``member`` may be
+ a filename or a TarInfo object. If ``member`` is a regular file or
+ a link, an io.BufferedReader object is returned. For all other
+ existing members, None is returned. If ``member`` does not appear
+ in the archive, KeyError is raised.
+ """
+ self._check("r")
+
+ if isinstance(member, str):
+ tarinfo = self.getmember(member)
+ else:
+ tarinfo = member
+
+ if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
+ # Members with unknown types are treated as regular files.
+ return self.fileobject(self, tarinfo)
+
+ elif tarinfo.islnk() or tarinfo.issym():
+ if isinstance(self.fileobj, _Stream):
+ # A small but ugly workaround for the case that someone tries
+ # to extract a (sym)link as a file-object from a non-seekable
+ # stream of tar blocks.
+ raise StreamError("cannot extract (sym)link as file object")
+ else:
+ # A (sym)link's file object is its target's file object.
+ return self.extractfile(self._find_link_target(tarinfo))
+ else:
+ # If there's no data associated with the member (directory, chrdev,
+ # blkdev, etc.), return None instead of a file object.
+ return None
+
+ def _extract_member(self, tarinfo, targetpath, set_attrs=True,
+ numeric_owner=False):
+ """Extract the TarInfo object tarinfo to a physical
+ file called targetpath.
+ """
+ # Fetch the TarInfo object for the given name
+ # and build the destination pathname, replacing
+        # forward slashes with platform-specific separators.
+ targetpath = targetpath.rstrip("/")
+ targetpath = targetpath.replace("/", os.sep)
+
+ # Create all upper directories.
+ upperdirs = os.path.dirname(targetpath)
+ if upperdirs and not os.path.exists(upperdirs):
+ # Create directories that are not part of the archive with
+ # default permissions.
+ os.makedirs(upperdirs)
+
+ if tarinfo.islnk() or tarinfo.issym():
+ self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
+ else:
+ self._dbg(1, tarinfo.name)
+
+ if tarinfo.isreg():
+ self.makefile(tarinfo, targetpath)
+ elif tarinfo.isdir():
+ self.makedir(tarinfo, targetpath)
+ elif tarinfo.isfifo():
+ self.makefifo(tarinfo, targetpath)
+ elif tarinfo.ischr() or tarinfo.isblk():
+ self.makedev(tarinfo, targetpath)
+ elif tarinfo.islnk() or tarinfo.issym():
+ self.makelink(tarinfo, targetpath)
+ elif tarinfo.type not in SUPPORTED_TYPES:
+ self.makeunknown(tarinfo, targetpath)
+ else:
+ self.makefile(tarinfo, targetpath)
+
+ if set_attrs:
+ self.chown(tarinfo, targetpath, numeric_owner)
+ if not tarinfo.issym():
+ self.chmod(tarinfo, targetpath)
+ self.utime(tarinfo, targetpath)
+
+ #--------------------------------------------------------------------------
+ # Below are the different file methods. They are called via
+ # _extract_member() when extract() is called. They can be replaced in a
+ # subclass to implement other functionality.
+
+ def makedir(self, tarinfo, targetpath):
+ """Make a directory called targetpath.
+ """
+ try:
+ if tarinfo.mode is None:
+ # Use the system's default mode
+ os.mkdir(targetpath)
+ else:
+ # Use a safe mode for the directory, the real mode is set
+ # later in _extract_member().
+ os.mkdir(targetpath, 0o700)
+ except FileExistsError:
+ if not os.path.isdir(targetpath):
+ raise
+
+ def makefile(self, tarinfo, targetpath):
+ """Make a file called targetpath.
+ """
+ source = self.fileobj
+ source.seek(tarinfo.offset_data)
+ bufsize = self.copybufsize
+ with bltn_open(targetpath, "wb") as target:
+ if tarinfo.sparse is not None:
+ for offset, size in tarinfo.sparse:
+ target.seek(offset)
+ copyfileobj(source, target, size, ReadError, bufsize)
+ target.seek(tarinfo.size)
+ target.truncate()
+ else:
+ copyfileobj(source, target, tarinfo.size, ReadError, bufsize)
+
+ def makeunknown(self, tarinfo, targetpath):
+ """Make a file from a TarInfo object with an unknown type
+ at targetpath.
+ """
+ self.makefile(tarinfo, targetpath)
+ self._dbg(1, "tarfile: Unknown file type %r, " \
+ "extracted as regular file." % tarinfo.type)
+
+ def makefifo(self, tarinfo, targetpath):
+ """Make a fifo called targetpath.
+ """
+ if hasattr(os, "mkfifo"):
+ os.mkfifo(targetpath)
+ else:
+ raise ExtractError("fifo not supported by system")
+
+ def makedev(self, tarinfo, targetpath):
+ """Make a character or block device called targetpath.
+ """
+ if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
+ raise ExtractError("special devices not supported by system")
+
+ mode = tarinfo.mode
+ if mode is None:
+ # Use mknod's default
+ mode = 0o600
+ if tarinfo.isblk():
+ mode |= stat.S_IFBLK
+ else:
+ mode |= stat.S_IFCHR
+
+ os.mknod(targetpath, mode,
+ os.makedev(tarinfo.devmajor, tarinfo.devminor))
+
+ def makelink(self, tarinfo, targetpath):
+ """Make a (symbolic) link called targetpath. If it cannot be created
+ (platform limitation), we try to make a copy of the referenced file
+ instead of a link.
+ """
+ try:
+ # For systems that support symbolic and hard links.
+ if tarinfo.issym():
+ if os.path.lexists(targetpath):
+ # Avoid FileExistsError on following os.symlink.
+ os.unlink(targetpath)
+ os.symlink(tarinfo.linkname, targetpath)
+ else:
+ if os.path.exists(tarinfo._link_target):
+ os.link(tarinfo._link_target, targetpath)
+ else:
+ self._extract_member(self._find_link_target(tarinfo),
+ targetpath)
+ except symlink_exception:
+ try:
+ self._extract_member(self._find_link_target(tarinfo),
+ targetpath)
+ except KeyError:
+ raise ExtractError("unable to resolve link inside archive") from None
+
+ def chown(self, tarinfo, targetpath, numeric_owner):
+ """Set owner of targetpath according to tarinfo. If numeric_owner
+ is True, use .gid/.uid instead of .gname/.uname. If numeric_owner
+ is False, fall back to .gid/.uid when the search based on name
+ fails.
+ """
+ if hasattr(os, "geteuid") and os.geteuid() == 0:
+ # We have to be root to do so.
+ g = tarinfo.gid
+ u = tarinfo.uid
+ if not numeric_owner:
+ try:
+ if grp and tarinfo.gname:
+ g = grp.getgrnam(tarinfo.gname)[2]
+ except KeyError:
+ pass
+ try:
+ if pwd and tarinfo.uname:
+ u = pwd.getpwnam(tarinfo.uname)[2]
+ except KeyError:
+ pass
+ if g is None:
+ g = -1
+ if u is None:
+ u = -1
+ try:
+ if tarinfo.issym() and hasattr(os, "lchown"):
+ os.lchown(targetpath, u, g)
+ else:
+ os.chown(targetpath, u, g)
+ except OSError as e:
+ raise ExtractError("could not change owner") from e
+
+ def chmod(self, tarinfo, targetpath):
+ """Set file permissions of targetpath according to tarinfo.
+ """
+ if tarinfo.mode is None:
+ return
+ try:
+ os.chmod(targetpath, tarinfo.mode)
+ except OSError as e:
+ raise ExtractError("could not change mode") from e
+
+ def utime(self, tarinfo, targetpath):
+ """Set modification time of targetpath according to tarinfo.
+ """
+ mtime = tarinfo.mtime
+ if mtime is None:
+ return
+ if not hasattr(os, 'utime'):
+ return
+ try:
+ os.utime(targetpath, (mtime, mtime))
+ except OSError as e:
+ raise ExtractError("could not change modification time") from e
+
+ #--------------------------------------------------------------------------
+ def next(self):
+ """Return the next member of the archive as a TarInfo object, when
+        TarFile is opened for reading. Return None if there are no more
+        members available.
+ """
+ self._check("ra")
+ if self.firstmember is not None:
+ m = self.firstmember
+ self.firstmember = None
+ return m
+
+ # Advance the file pointer.
+ if self.offset != self.fileobj.tell():
+ if self.offset == 0:
+ return None
+ self.fileobj.seek(self.offset - 1)
+ if not self.fileobj.read(1):
+ raise ReadError("unexpected end of data")
+
+ # Read the next block.
+ tarinfo = None
+ while True:
+ try:
+ tarinfo = self.tarinfo.fromtarfile(self)
+ except EOFHeaderError as e:
+ if self.ignore_zeros:
+ self._dbg(2, "0x%X: %s" % (self.offset, e))
+ self.offset += BLOCKSIZE
+ continue
+ except InvalidHeaderError as e:
+ if self.ignore_zeros:
+ self._dbg(2, "0x%X: %s" % (self.offset, e))
+ self.offset += BLOCKSIZE
+ continue
+ elif self.offset == 0:
+ raise ReadError(str(e)) from None
+ except EmptyHeaderError:
+ if self.offset == 0:
+ raise ReadError("empty file") from None
+ except TruncatedHeaderError as e:
+ if self.offset == 0:
+ raise ReadError(str(e)) from None
+ except SubsequentHeaderError as e:
+ raise ReadError(str(e)) from None
+ except Exception as e:
+ # Map zlib stream corruption to ReadError; if zlib is
+ # unavailable, re-raise the original exception unchanged.
+ try:
+ import zlib
+ if isinstance(e, zlib.error):
+ raise ReadError(f'zlib error: {e}') from None
+ else:
+ raise e
+ except ImportError:
+ raise e
+ break
+
+ if tarinfo is not None:
+ self.members.append(tarinfo)
+ else:
+ self._loaded = True
+
+ return tarinfo
+
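
For orientation, a minimal sketch of driving this loop by hand; 'sample.tar' is a hypothetical archive, and the stdlib module shown exposes the same next() API as this backport:

    import tarfile  # stdlib; the backport mirrors this API

    with tarfile.open('sample.tar') as tf:  # hypothetical archive
        member = tf.next()
        while member is not None:
            print(member.name, member.size)
            member = tf.next()
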
+ #--------------------------------------------------------------------------
+ # Little helper methods:
+
+ def _getmember(self, name, tarinfo=None, normalize=False):
+ """Find an archive member by name from bottom to top.
+ If tarinfo is given, it is used as the starting point.
+ """
+ # Ensure that all members have been loaded.
+ members = self.getmembers()
+
+ # Limit the member search list up to tarinfo.
+ skipping = False
+ if tarinfo is not None:
+ try:
+ index = members.index(tarinfo)
+ except ValueError:
+ # The given starting point might be a (modified) copy.
+ # We'll later skip members until we find an equivalent.
+ skipping = True
+ else:
+ # Happy fast path
+ members = members[:index]
+
+ if normalize:
+ name = os.path.normpath(name)
+
+ for member in reversed(members):
+ if skipping:
+ if tarinfo.offset == member.offset:
+ skipping = False
+ continue
+ if normalize:
+ member_name = os.path.normpath(member.name)
+ else:
+ member_name = member.name
+
+ if name == member_name:
+ return member
+
+ if skipping:
+ # Starting point was not found
+ raise ValueError(tarinfo)
+
+ def _load(self):
+ """Read through the entire archive file and look for readable
+ members.
+ """
+ while self.next() is not None:
+ pass
+ self._loaded = True
+
+ def _check(self, mode=None):
+ """Check if TarFile is still open, and if the operation's mode
+ corresponds to TarFile's mode.
+ """
+ if self.closed:
+ raise OSError("%s is closed" % self.__class__.__name__)
+ if mode is not None and self.mode not in mode:
+ raise OSError("bad operation for mode %r" % self.mode)
+
+ def _find_link_target(self, tarinfo):
+ """Find the target member of a symlink or hardlink member in the
+ archive.
+ """
+ if tarinfo.issym():
+ # Always search the entire archive.
+ linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
+ limit = None
+ else:
+ # Search the archive before the link, because a hard link is
+ # just a reference to an already archived file.
+ linkname = tarinfo.linkname
+ limit = tarinfo
+
+ member = self._getmember(linkname, tarinfo=limit, normalize=True)
+ if member is None:
+ raise KeyError("linkname %r not found" % linkname)
+ return member
+
+ def __iter__(self):
+ """Provide an iterator object.
+ """
+ if self._loaded:
+ yield from self.members
+ return
+
+ # Yield items using TarFile's next() method.
+ # When all members have been read, set TarFile as _loaded.
+ index = 0
+ # Fix for SF #1100429: Under rare circumstances it can
+ # happen that getmembers() is called during iteration,
+ # which will have already exhausted the next() method.
+ if self.firstmember is not None:
+ tarinfo = self.next()
+ index += 1
+ yield tarinfo
+
+ while True:
+ if index < len(self.members):
+ tarinfo = self.members[index]
+ elif not self._loaded:
+ tarinfo = self.next()
+ if not tarinfo:
+ self._loaded = True
+ return
+ else:
+ return
+ index += 1
+ yield tarinfo
+
+ def _dbg(self, level, msg):
+ """Write debugging output to sys.stderr.
+ """
+ if level <= self.debug:
+ print(msg, file=sys.stderr)
+
+ def __enter__(self):
+ self._check()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ if type is None:
+ self.close()
+ else:
+ # An exception occurred. We must not call close() because
+ # it would try to write end-of-archive blocks and padding.
+ if not self._extfileobj:
+ self.fileobj.close()
+ self.closed = True
+
+#--------------------
+# exported functions
+#--------------------
+
+def is_tarfile(name):
+ """Return True if name points to a tar archive that we
+ are able to handle, else return False.
+
+ 'name' should be a string, file, or file-like object.
+ """
+ try:
+ if hasattr(name, "read"):
+ pos = name.tell()
+ t = open(fileobj=name)
+ name.seek(pos)
+ else:
+ t = open(name)
+ t.close()
+ return True
+ except TarError:
+ return False
+
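
A short sketch of both accepted call styles; 'example.tar' is hypothetical, and the import path mirrors the vendored layout used elsewhere in this change:

    from setuptools.extern.backports import tarfile as backport

    print(backport.is_tarfile('example.tar'))   # by path
    with open('example.tar', 'rb') as f:        # builtin open, not TarFile.open
        print(backport.is_tarfile(f))           # by file object; position is restored
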
+open = TarFile.open
+
+
+def main():
+ import argparse
+
+ description = 'A simple command-line interface for the tarfile module.'
+ parser = argparse.ArgumentParser(description=description)
+ parser.add_argument('-v', '--verbose', action='store_true', default=False,
+ help='Verbose output')
+ parser.add_argument('--filter', metavar='<filtername>',
+ choices=_NAMED_FILTERS,
+ help='Filter for extraction')
+
+ group = parser.add_mutually_exclusive_group(required=True)
+ group.add_argument('-l', '--list', metavar='<tarfile>',
+ help='Show listing of a tarfile')
+ group.add_argument('-e', '--extract', nargs='+',
+ metavar=('<tarfile>', '<output_dir>'),
+ help='Extract tarfile into target dir')
+ group.add_argument('-c', '--create', nargs='+',
+ metavar=('<name>', '<file>'),
+ help='Create tarfile from sources')
+ group.add_argument('-t', '--test', metavar='<tarfile>',
+ help='Test if a tarfile is valid')
+
+ args = parser.parse_args()
+
+ if args.filter and args.extract is None:
+ parser.exit(1, '--filter is only valid for extraction\n')
+
+ if args.test is not None:
+ src = args.test
+ if is_tarfile(src):
+ with open(src, 'r') as tar:
+ # The first getmembers() call reads and validates every member;
+ # the second returns the cached list for display.
+ tar.getmembers()
+ print(tar.getmembers(), file=sys.stderr)
+ if args.verbose:
+ print('{!r} is a tar archive.'.format(src))
+ else:
+ parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+ elif args.list is not None:
+ src = args.list
+ if is_tarfile(src):
+ with TarFile.open(src, 'r:*') as tf:
+ tf.list(verbose=args.verbose)
+ else:
+ parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+ elif args.extract is not None:
+ if len(args.extract) == 1:
+ src = args.extract[0]
+ curdir = os.curdir
+ elif len(args.extract) == 2:
+ src, curdir = args.extract
+ else:
+ parser.exit(1, parser.format_help())
+
+ if is_tarfile(src):
+ with TarFile.open(src, 'r:*') as tf:
+ tf.extractall(path=curdir, filter=args.filter)
+ if args.verbose:
+ if curdir == '.':
+ msg = '{!r} file is extracted.'.format(src)
+ else:
+ msg = ('{!r} file is extracted '
+ 'into {!r} directory.').format(src, curdir)
+ print(msg)
+ else:
+ parser.exit(1, '{!r} is not a tar archive.\n'.format(src))
+
+ elif args.create is not None:
+ tar_name = args.create.pop(0)
+ _, ext = os.path.splitext(tar_name)
+ compressions = {
+ # gz
+ '.gz': 'gz',
+ '.tgz': 'gz',
+ # xz
+ '.xz': 'xz',
+ '.txz': 'xz',
+ # bz2
+ '.bz2': 'bz2',
+ '.tbz': 'bz2',
+ '.tbz2': 'bz2',
+ '.tb2': 'bz2',
+ }
+ tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w'
+ tar_files = args.create
+
+ with TarFile.open(tar_name, tar_mode) as tf:
+ for file_name in tar_files:
+ tf.add(file_name)
+
+ if args.verbose:
+ print('{!r} file created.'.format(tar_name))
+
+if __name__ == '__main__':
+ main()
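
The --create branch maps directly onto the programmatic API; a minimal sketch with hypothetical file names:

    from setuptools.extern.backports import tarfile

    # Equivalent of: python -m tarfile -c demo.tgz a.txt b.txt
    # '.tgz' selects mode 'w:gz' via the compressions table above.
    with tarfile.TarFile.open('demo.tgz', 'w:gz') as tf:
        for file_name in ('a.txt', 'b.txt'):  # hypothetical input files
            tf.add(file_name)
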
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/context.py b/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/context.py
index b0d1ef37cb..0322c45d4a 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/context.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/context.py
@@ -1,15 +1,26 @@
-import os
-import subprocess
+from __future__ import annotations
+
import contextlib
import functools
-import tempfile
-import shutil
import operator
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import urllib.request
import warnings
+from typing import Iterator
+
+
+if sys.version_info < (3, 12):
+ from setuptools.extern.backports import tarfile
+else:
+ import tarfile
@contextlib.contextmanager
-def pushd(dir):
+def pushd(dir: str | os.PathLike) -> Iterator[str | os.PathLike]:
"""
>>> tmp_path = getfixture('tmp_path')
>>> with pushd(tmp_path):
@@ -26,33 +37,88 @@ def pushd(dir):
@contextlib.contextmanager
-def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
+def tarball(
+ url, target_dir: str | os.PathLike | None = None
+) -> Iterator[str | os.PathLike]:
"""
- Get a tarball, extract it, change to that directory, yield, then
- clean up.
- `runner` is the function to invoke commands.
- `pushd` is a context manager for changing the directory.
+ Get a tarball, extract it, yield, then clean up.
+
+ >>> import urllib.request
+ >>> url = getfixture('tarfile_served')
+ >>> target = getfixture('tmp_path') / 'out'
+ >>> tb = tarball(url, target_dir=target)
+ >>> import pathlib
+ >>> with tb as extracted:
+ ... contents = pathlib.Path(extracted, 'contents.txt').read_text(encoding='utf-8')
+ >>> assert not os.path.exists(extracted)
"""
if target_dir is None:
target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
- if runner is None:
- runner = functools.partial(subprocess.check_call, shell=True)
- else:
- warnings.warn("runner parameter is deprecated", DeprecationWarning)
# In the tar command, use --strip-components=1 to strip the first path and
# then use -C to cause the files to be extracted to {target_dir}. This
# ensures that we always know where the files were extracted.
- runner('mkdir {target_dir}'.format(**vars()))
+ os.mkdir(target_dir)
try:
- getter = 'wget {url} -O -'
- extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
- cmd = ' | '.join((getter, extract))
- runner(cmd.format(compression=infer_compression(url), **vars()))
- with pushd(target_dir):
- yield target_dir
+ req = urllib.request.urlopen(url)
+ with tarfile.open(fileobj=req, mode='r|*') as tf:
+ tf.extractall(path=target_dir, filter=strip_first_component)
+ yield target_dir
finally:
- runner('rm -Rf {target_dir}'.format(**vars()))
+ shutil.rmtree(target_dir)
+
+
+def strip_first_component(
+ member: tarfile.TarInfo,
+ path,
+) -> tarfile.TarInfo:
+ _, member.name = member.name.split('/', 1)
+ return member
+
+
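
A sketch of the new download-and-extract flow; the URL is hypothetical, and the extracted directory is removed when the block exits:

    import os
    from jaraco import context  # vendored in this tree under setuptools._vendor

    url = 'https://example.com/project-1.0.tar.gz'  # hypothetical URL
    with context.tarball(url, target_dir='project-src') as extracted:
        print(os.listdir(extracted))
    # 'project-src' has been removed again by this point
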
+def _compose(*cmgrs):
+ """
+ Compose any number of dependent context managers into a single one.
+
+ The last, innermost context manager may take arbitrary arguments, but
+ each successive context manager should accept the result from the
+ previous as a single parameter.
+
+ Like :func:`jaraco.functools.compose`, behavior works from right to
+ left, so the context managers should be indicated from outermost to
+ innermost.
+
+ For example, to create a context manager that changes to a temporary
+ directory:
+
+ >>> temp_dir_as_cwd = _compose(pushd, temp_dir)
+ >>> with temp_dir_as_cwd() as dir:
+ ... assert os.path.samefile(os.getcwd(), dir)
+ """
+
+ def compose_two(inner, outer):
+ def composed(*args, **kwargs):
+ with inner(*args, **kwargs) as saved, outer(saved) as res:
+ yield res
+
+ return contextlib.contextmanager(composed)
+
+ return functools.reduce(compose_two, reversed(cmgrs))
+
+
+tarball_cwd = _compose(pushd, tarball)
+
+
+@contextlib.contextmanager
+def tarball_context(*args, **kwargs):
+ warnings.warn(
+ "tarball_context is deprecated. Use tarball or tarball_cwd instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ pushd_ctx = kwargs.pop('pushd', pushd)
+ with tarball(*args, **kwargs) as tball, pushd_ctx(tball) as dir:
+ yield dir
def infer_compression(url):
@@ -68,6 +134,11 @@ def infer_compression(url):
>>> infer_compression('file.xz')
'J'
"""
+ warnings.warn(
+ "infer_compression is deprecated with no replacement",
+ DeprecationWarning,
+ stacklevel=2,
+ )
# cheat and just assume it's the last two characters
compression_indicator = url[-2:]
mapping = dict(gz='z', bz='j', xz='J')
@@ -84,7 +155,7 @@ def temp_dir(remover=shutil.rmtree):
>>> import pathlib
>>> with temp_dir() as the_dir:
... assert os.path.isdir(the_dir)
- ... _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents')
+ ... _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents', encoding='utf-8')
>>> assert not os.path.exists(the_dir)
"""
temp_dir = tempfile.mkdtemp()
@@ -113,15 +184,23 @@ def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
yield repo_dir
-@contextlib.contextmanager
def null():
"""
A null context suitable to stand in for a meaningful context.
>>> with null() as value:
... assert value is None
+
+ This context is most useful when dealing with two or more code
+ branches but only some need a context. Wrap the others in a null
+ context to provide symmetry across all options.
"""
- yield
+ warnings.warn(
+ "null is deprecated. Use contextlib.nullcontext",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return contextlib.nullcontext()
class ExceptionTrap:
@@ -267,13 +346,7 @@ class on_interrupt(contextlib.ContextDecorator):
... on_interrupt('ignore')(do_interrupt)()
"""
- def __init__(
- self,
- action='error',
- # py3.7 compat
- # /,
- code=1,
- ):
+ def __init__(self, action='error', /, code=1):
self.action = action
self.code = code
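
With the py3.7 shim gone, action is genuinely positional-only; a minimal sketch, where the wrapped function is hypothetical and the 'ignore' action follows the doctest above:

    from jaraco.context import on_interrupt  # vendored in this tree

    @on_interrupt('ignore')  # action must now be passed positionally
    def pause():
        input('press Ctrl-C to exercise the handler: ')

    pause()  # KeyboardInterrupt is swallowed rather than propagated
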
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools.py b/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/__init__.py
index ebf7a36137..130b87a485 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/__init__.py
@@ -1,18 +1,14 @@
+import collections.abc
import functools
-import time
import inspect
-import collections
-import types
import itertools
+import operator
+import time
+import types
import warnings
import setuptools.extern.more_itertools
-from typing import Callable, TypeVar
-
-
-CallableT = TypeVar("CallableT", bound=Callable[..., object])
-
def compose(*funcs):
"""
@@ -38,24 +34,6 @@ def compose(*funcs):
return functools.reduce(compose_two, funcs)
-def method_caller(method_name, *args, **kwargs):
- """
- Return a function that will call a named method on the
- target object with optional positional and keyword
- arguments.
-
- >>> lower = method_caller('lower')
- >>> lower('MyString')
- 'mystring'
- """
-
- def call_method(target):
- func = getattr(target, method_name)
- return func(*args, **kwargs)
-
- return call_method
-
-
def once(func):
"""
Decorate func so it's only ever called the first time.
@@ -98,12 +76,7 @@ def once(func):
return wrapper
-def method_cache(
- method: CallableT,
- cache_wrapper: Callable[
- [CallableT], CallableT
- ] = functools.lru_cache(), # type: ignore[assignment]
-) -> CallableT:
+def method_cache(method, cache_wrapper=functools.lru_cache()):
"""
Wrap lru_cache to support storing the cache data in the object instances.
@@ -171,21 +144,17 @@ def method_cache(
for another implementation and additional justification.
"""
- def wrapper(self: object, *args: object, **kwargs: object) -> object:
+ def wrapper(self, *args, **kwargs):
# it's the first call, replace the method with a cached, bound method
- bound_method: CallableT = types.MethodType( # type: ignore[assignment]
- method, self
- )
+ bound_method = types.MethodType(method, self)
cached_method = cache_wrapper(bound_method)
setattr(self, method.__name__, cached_method)
return cached_method(*args, **kwargs)
# Support cache clear even before cache has been created.
- wrapper.cache_clear = lambda: None # type: ignore[attr-defined]
+ wrapper.cache_clear = lambda: None
- return ( # type: ignore[return-value]
- _special_method_cache(method, cache_wrapper) or wrapper
- )
+ return _special_method_cache(method, cache_wrapper) or wrapper
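
A sketch of the per-instance caching this provides; the class and values are hypothetical:

    from jaraco.functools import method_cache  # vendored in this tree

    class Lookup:
        @method_cache
        def compute(self, n):
            print('computing', n)
            return n * 2

    obj = Lookup()
    obj.compute(3)  # prints 'computing 3' and returns 6
    obj.compute(3)  # served from the per-instance cache; no print
    obj.compute.cache_clear()  # available whether or not the cache exists yet
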
def _special_method_cache(method, cache_wrapper):
@@ -201,12 +170,13 @@ def _special_method_cache(method, cache_wrapper):
"""
name = method.__name__
special_names = '__getattr__', '__getitem__'
+
if name not in special_names:
- return
+ return None
wrapper_name = '__cached' + name
- def proxy(self, *args, **kwargs):
+ def proxy(self, /, *args, **kwargs):
if wrapper_name not in vars(self):
bound = types.MethodType(method, self)
cache = cache_wrapper(bound)
@@ -243,7 +213,7 @@ def result_invoke(action):
r"""
Decorate a function with an action function that is
invoked on the results returned from the decorated
- function (for its side-effect), then return the original
+ function (for its side effect), then return the original
result.
>>> @result_invoke(print)
@@ -267,7 +237,7 @@ def result_invoke(action):
return wrap
-def invoke(f, *args, **kwargs):
+def invoke(f, /, *args, **kwargs):
"""
Call a function for its side effect after initialization.
@@ -302,25 +272,15 @@ def invoke(f, *args, **kwargs):
Use functools.partial to pass parameters to the initial call
>>> @functools.partial(invoke, name='bingo')
- ... def func(name): print("called with", name)
+ ... def func(name): print('called with', name)
called with bingo
"""
f(*args, **kwargs)
return f
-def call_aside(*args, **kwargs):
- """
- Deprecated name for invoke.
- """
- warnings.warn("call_aside is deprecated, use invoke", DeprecationWarning)
- return invoke(*args, **kwargs)
-
-
class Throttler:
- """
- Rate-limit a function (or other callable)
- """
+ """Rate-limit a function (or other callable)."""
def __init__(self, func, max_rate=float('Inf')):
if isinstance(func, Throttler):
@@ -337,20 +297,20 @@ class Throttler:
return self.func(*args, **kwargs)
def _wait(self):
- "ensure at least 1/max_rate seconds from last call"
+ """Ensure at least 1/max_rate seconds from last call."""
elapsed = time.time() - self.last_called
must_wait = 1 / self.max_rate - elapsed
time.sleep(max(0, must_wait))
self.last_called = time.time()
- def __get__(self, obj, type=None):
+ def __get__(self, obj, owner=None):
return first_invoke(self._wait, functools.partial(self.func, obj))
def first_invoke(func1, func2):
"""
Return a function that when invoked will invoke func1 without
- any parameters (for its side-effect) and then invoke func2
+ any parameters (for its side effect) and then invoke func2
with whatever parameters were passed, returning its result.
"""
@@ -361,6 +321,17 @@ def first_invoke(func1, func2):
return wrapper
+method_caller = first_invoke(
+ lambda: warnings.warn(
+ '`jaraco.functools.method_caller` is deprecated, '
+ 'use `operator.methodcaller` instead',
+ DeprecationWarning,
+ stacklevel=3,
+ ),
+ operator.methodcaller,
+)
+
+
def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
"""
Given a callable func, trap the indicated exceptions
@@ -369,7 +340,7 @@ def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
to propagate.
"""
attempts = itertools.count() if retries == float('inf') else range(retries)
- for attempt in attempts:
+ for _ in attempts:
try:
return func()
except trap:
@@ -406,7 +377,7 @@ def retry(*r_args, **r_kwargs):
def print_yielded(func):
"""
- Convert a generator into a function that prints all yielded elements
+ Convert a generator into a function that prints all yielded elements.
>>> @print_yielded
... def x():
@@ -422,7 +393,7 @@ def print_yielded(func):
def pass_none(func):
"""
- Wrap func so it's not called if its first param is None
+ Wrap func so it's not called if its first param is None.
>>> print_text = pass_none(print)
>>> print_text('text')
@@ -431,9 +402,10 @@ def pass_none(func):
"""
@functools.wraps(func)
- def wrapper(param, *args, **kwargs):
+ def wrapper(param, /, *args, **kwargs):
if param is not None:
return func(param, *args, **kwargs)
+ return None
return wrapper
@@ -507,7 +479,7 @@ def save_method_args(method):
args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')
@functools.wraps(method)
- def wrapper(self, *args, **kwargs):
+ def wrapper(self, /, *args, **kwargs):
attr_name = '_saved_' + method.__name__
attr = args_and_kwargs(args, kwargs)
setattr(self, attr_name, attr)
@@ -554,3 +526,108 @@ def except_(*exceptions, replace=None, use=None):
return wrapper
return decorate
+
+
+def identity(x):
+ """
+ Return the argument.
+
+ >>> o = object()
+ >>> identity(o) is o
+ True
+ """
+ return x
+
+
+def bypass_when(check, *, _op=identity):
+ """
+ Decorate a function to return its parameter when ``check``.
+
+ >>> bypassed = [] # False
+
+ >>> @bypass_when(bypassed)
+ ... def double(x):
+ ... return x * 2
+ >>> double(2)
+ 4
+ >>> bypassed[:] = [object()] # True
+ >>> double(2)
+ 2
+ """
+
+ def decorate(func):
+ @functools.wraps(func)
+ def wrapper(param, /):
+ return param if _op(check) else func(param)
+
+ return wrapper
+
+ return decorate
+
+
+def bypass_unless(check):
+ """
+ Decorate a function to return its parameter unless ``check``.
+
+ >>> enabled = [object()] # True
+
+ >>> @bypass_unless(enabled)
+ ... def double(x):
+ ... return x * 2
+ >>> double(2)
+ 4
+ >>> del enabled[:] # False
+ >>> double(2)
+ 2
+ """
+ return bypass_when(check, _op=operator.not_)
+
+
+@functools.singledispatch
+def _splat_inner(args, func):
+ """Splat args to func."""
+ return func(*args)
+
+
+@_splat_inner.register
+def _(args: collections.abc.Mapping, func):
+ """Splat kargs to func as kwargs."""
+ return func(**args)
+
+
+def splat(func):
+ """
+ Wrap func to expect its parameters to be passed positionally in a tuple.
+
+ Has a similar effect to that of ``itertools.starmap`` over
+ simple ``map``.
+
+ >>> pairs = [(-1, 1), (0, 2)]
+ >>> setuptools.extern.more_itertools.consume(itertools.starmap(print, pairs))
+ -1 1
+ 0 2
+ >>> setuptools.extern.more_itertools.consume(map(splat(print), pairs))
+ -1 1
+ 0 2
+
+ The approach generalizes to other iterators that don't have a "star"
+ equivalent, such as a "starfilter".
+
+ >>> list(filter(splat(operator.add), pairs))
+ [(0, 2)]
+
+ Splat also accepts a mapping argument.
+
+ >>> def is_nice(msg, code):
+ ... return "smile" in msg or code == 0
+ >>> msgs = [
+ ... dict(msg='smile!', code=20),
+ ... dict(msg='error :(', code=1),
+ ... dict(msg='unknown', code=0),
+ ... ]
+ >>> for msg in filter(splat(is_nice), msgs):
+ ... print(msg)
+ {'msg': 'smile!', 'code': 20}
+ {'msg': 'unknown', 'code': 0}
+ """
+ return functools.wraps(func)(functools.partial(_splat_inner, func=func))
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/__init__.pyi b/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/__init__.pyi
new file mode 100644
index 0000000000..c2b9ab1757
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/__init__.pyi
@@ -0,0 +1,128 @@
+from collections.abc import Callable, Hashable, Iterator
+from functools import partial
+from operator import methodcaller
+import sys
+from typing import (
+ Any,
+ Generic,
+ Protocol,
+ TypeVar,
+ overload,
+)
+
+if sys.version_info >= (3, 10):
+ from typing import Concatenate, ParamSpec
+else:
+ from typing_extensions import Concatenate, ParamSpec
+
+_P = ParamSpec('_P')
+_R = TypeVar('_R')
+_T = TypeVar('_T')
+_R1 = TypeVar('_R1')
+_R2 = TypeVar('_R2')
+_V = TypeVar('_V')
+_S = TypeVar('_S')
+_R_co = TypeVar('_R_co', covariant=True)
+
+class _OnceCallable(Protocol[_P, _R]):
+ saved_result: _R
+ reset: Callable[[], None]
+ def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
+
+class _ProxyMethodCacheWrapper(Protocol[_R_co]):
+ cache_clear: Callable[[], None]
+ def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
+
+class _MethodCacheWrapper(Protocol[_R_co]):
+ def cache_clear(self) -> None: ...
+ def __call__(self, *args: Hashable, **kwargs: Hashable) -> _R_co: ...
+
+# `compose()` overloads below will cover most use cases.
+
+@overload
+def compose(
+ __func1: Callable[[_R], _T],
+ __func2: Callable[_P, _R],
+ /,
+) -> Callable[_P, _T]: ...
+@overload
+def compose(
+ __func1: Callable[[_R], _T],
+ __func2: Callable[[_R1], _R],
+ __func3: Callable[_P, _R1],
+ /,
+) -> Callable[_P, _T]: ...
+@overload
+def compose(
+ __func1: Callable[[_R], _T],
+ __func2: Callable[[_R2], _R],
+ __func3: Callable[[_R1], _R2],
+ __func4: Callable[_P, _R1],
+ /,
+) -> Callable[_P, _T]: ...
+def once(func: Callable[_P, _R]) -> _OnceCallable[_P, _R]: ...
+def method_cache(
+ method: Callable[..., _R],
+ cache_wrapper: Callable[[Callable[..., _R]], _MethodCacheWrapper[_R]] = ...,
+) -> _MethodCacheWrapper[_R] | _ProxyMethodCacheWrapper[_R]: ...
+def apply(
+ transform: Callable[[_R], _T]
+) -> Callable[[Callable[_P, _R]], Callable[_P, _T]]: ...
+def result_invoke(
+ action: Callable[[_R], Any]
+) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]: ...
+def invoke(
+ f: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs
+) -> Callable[_P, _R]: ...
+def call_aside(
+ f: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs
+) -> Callable[_P, _R]: ...
+
+class Throttler(Generic[_R]):
+ last_called: float
+ func: Callable[..., _R]
+ max_rate: float
+ def __init__(
+ self, func: Callable[..., _R] | Throttler[_R], max_rate: float = ...
+ ) -> None: ...
+ def reset(self) -> None: ...
+ def __call__(self, *args: Any, **kwargs: Any) -> _R: ...
+ def __get__(self, obj: Any, owner: type[Any] | None = ...) -> Callable[..., _R]: ...
+
+def first_invoke(
+ func1: Callable[..., Any], func2: Callable[_P, _R]
+) -> Callable[_P, _R]: ...
+
+method_caller: Callable[..., methodcaller]
+
+def retry_call(
+ func: Callable[..., _R],
+ cleanup: Callable[..., None] = ...,
+ retries: int | float = ...,
+ trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
+) -> _R: ...
+def retry(
+ cleanup: Callable[..., None] = ...,
+ retries: int | float = ...,
+ trap: type[BaseException] | tuple[type[BaseException], ...] = ...,
+) -> Callable[[Callable[..., _R]], Callable[..., _R]]: ...
+def print_yielded(func: Callable[_P, Iterator[Any]]) -> Callable[_P, None]: ...
+def pass_none(
+ func: Callable[Concatenate[_T, _P], _R]
+) -> Callable[Concatenate[_T, _P], _R]: ...
+def assign_params(
+ func: Callable[..., _R], namespace: dict[str, Any]
+) -> partial[_R]: ...
+def save_method_args(
+ method: Callable[Concatenate[_S, _P], _R]
+) -> Callable[Concatenate[_S, _P], _R]: ...
+def except_(
+ *exceptions: type[BaseException], replace: Any = ..., use: Any = ...
+) -> Callable[[Callable[_P, Any]], Callable[_P, Any]]: ...
+def identity(x: _T) -> _T: ...
+def bypass_when(
+ check: _V, *, _op: Callable[[_V], Any] = ...
+) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
+def bypass_unless(
+ check: Any,
+) -> Callable[[Callable[[_T], _R]], Callable[[_T], _T | _R]]: ...
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/py.typed b/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/jaraco/functools/py.typed
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/__init__.py b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/__init__.py
index 13cadc7f04..e7c0aa12ca 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/__init__.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/__init__.py
@@ -6,10 +6,10 @@ __title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
-__version__ = "23.1"
+__version__ = "24.0"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
__license__ = "BSD-2-Clause or Apache-2.0"
-__copyright__ = "2014-2019 %s" % __author__
+__copyright__ = "2014 %s" % __author__
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_manylinux.py b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_manylinux.py
index 449c655be6..ad62505f3f 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_manylinux.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_manylinux.py
@@ -5,7 +5,7 @@ import os
import re
import sys
import warnings
-from typing import Dict, Generator, Iterator, NamedTuple, Optional, Tuple
+from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
from ._elffile import EIClass, EIData, ELFFile, EMachine
@@ -50,12 +50,21 @@ def _is_linux_i686(executable: str) -> bool:
)
-def _have_compatible_abi(executable: str, arch: str) -> bool:
- if arch == "armv7l":
+def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
+ if "armv7l" in archs:
return _is_linux_armhf(executable)
- if arch == "i686":
+ if "i686" in archs:
return _is_linux_i686(executable)
- return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
+ allowed_archs = {
+ "x86_64",
+ "aarch64",
+ "ppc64",
+ "ppc64le",
+ "s390x",
+ "loongarch64",
+ "riscv64",
+ }
+ return any(arch in allowed_archs for arch in archs)
# If glibc ever changes its major version, we need to know what the last
@@ -81,7 +90,7 @@ def _glibc_version_string_confstr() -> Optional[str]:
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
try:
# Should be a string like "glibc 2.17".
- version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
+ version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
assert version_string is not None
_, version = version_string.rsplit()
except (AssertionError, AttributeError, OSError, ValueError):
@@ -167,13 +176,13 @@ def _get_glibc_version() -> Tuple[int, int]:
# From PEP 513, PEP 600
-def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
sys_glibc = _get_glibc_version()
if sys_glibc < version:
return False
# Check for presence of _manylinux module.
try:
- import _manylinux # noqa
+ import _manylinux
except ImportError:
return True
if hasattr(_manylinux, "manylinux_compatible"):
@@ -203,12 +212,22 @@ _LEGACY_MANYLINUX_MAP = {
}
-def platform_tags(linux: str, arch: str) -> Iterator[str]:
- if not _have_compatible_abi(sys.executable, arch):
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
+ """Generate manylinux tags compatible to the current platform.
+
+ :param archs: Sequence of compatible architectures.
+ The first one shall be the closest to the actual architecture and be the part of
+ platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+ The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+ be manylinux-compatible.
+
+ :returns: An iterator of compatible manylinux tags.
+ """
+ if not _have_compatible_abi(sys.executable, archs):
return
# Oldest glibc to be supported regardless of architecture is (2, 17).
too_old_glibc2 = _GLibCVersion(2, 16)
- if arch in {"x86_64", "i686"}:
+ if set(archs) & {"x86_64", "i686"}:
# On x86/i686 also oldest glibc to be supported is (2, 5).
too_old_glibc2 = _GLibCVersion(2, 4)
current_glibc = _GLibCVersion(*_get_glibc_version())
@@ -222,19 +241,20 @@ def platform_tags(linux: str, arch: str) -> Iterator[str]:
for glibc_major in range(current_glibc.major - 1, 1, -1):
glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
- for glibc_max in glibc_max_list:
- if glibc_max.major == too_old_glibc2.major:
- min_minor = too_old_glibc2.minor
- else:
- # For other glibc major versions oldest supported is (x, 0).
- min_minor = -1
- for glibc_minor in range(glibc_max.minor, min_minor, -1):
- glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
- tag = "manylinux_{}_{}".format(*glibc_version)
- if _is_compatible(tag, arch, glibc_version):
- yield linux.replace("linux", tag)
- # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
- if glibc_version in _LEGACY_MANYLINUX_MAP:
- legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
- if _is_compatible(legacy_tag, arch, glibc_version):
- yield linux.replace("linux", legacy_tag)
+ for arch in archs:
+ for glibc_max in glibc_max_list:
+ if glibc_max.major == too_old_glibc2.major:
+ min_minor = too_old_glibc2.minor
+ else:
+ # For other glibc major versions oldest supported is (x, 0).
+ min_minor = -1
+ for glibc_minor in range(glibc_max.minor, min_minor, -1):
+ glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
+ tag = "manylinux_{}_{}".format(*glibc_version)
+ if _is_compatible(arch, glibc_version):
+ yield f"{tag}_{arch}"
+ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
+ if glibc_version in _LEGACY_MANYLINUX_MAP:
+ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
+ if _is_compatible(arch, glibc_version):
+ yield f"{legacy_tag}_{arch}"
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_musllinux.py b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_musllinux.py
index 706ba600a9..86419df9d7 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_musllinux.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_musllinux.py
@@ -8,7 +8,7 @@ import functools
import re
import subprocess
import sys
-from typing import Iterator, NamedTuple, Optional
+from typing import Iterator, NamedTuple, Optional, Sequence
from ._elffile import ELFFile
@@ -47,24 +47,27 @@ def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
return None
if ld is None or "musl" not in ld:
return None
- proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
+ proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
return _parse_musl_version(proc.stderr)
-def platform_tags(arch: str) -> Iterator[str]:
+def platform_tags(archs: Sequence[str]) -> Iterator[str]:
"""Generate musllinux tags compatible to the current platform.
- :param arch: Should be the part of platform tag after the ``linux_``
- prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
- prerequisite for the current platform to be musllinux-compatible.
+ :param archs: Sequence of compatible architectures.
+ The first one shall be the closest to the actual architecture and be the part of
+ the platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
+ The ``linux_`` prefix is assumed as a prerequisite for the current platform to
+ be musllinux-compatible.
:returns: An iterator of compatible musllinux tags.
"""
sys_musl = _get_musl_version(sys.executable)
if sys_musl is None: # Python not dynamically linked against musl.
return
- for minor in range(sys_musl.minor, -1, -1):
- yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
+ for arch in archs:
+ for minor in range(sys_musl.minor, -1, -1):
+ yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
if __name__ == "__main__": # pragma: no cover
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_parser.py b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_parser.py
index 5a18b758fe..684df75457 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_parser.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/_parser.py
@@ -252,7 +252,13 @@ def _parse_version_many(tokenizer: Tokenizer) -> str:
# Recursive descent parser for marker expression
# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
- return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES))
+ return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
+
+
+def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
+ retval = _parse_marker(tokenizer)
+ tokenizer.expect("END", expected="end of marker expression")
+ return retval
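
The new END expectation turns trailing garbage into a hard error instead of it being silently ignored; a sketch using the private parser directly:

    from packaging._parser import parse_marker       # private helpers; illustration only
    from packaging._tokenizer import ParserSyntaxError

    parse_marker('python_version >= "3.8"')          # parses cleanly
    try:
        parse_marker('python_version >= "3.8" junk')
    except ParserSyntaxError as exc:
        print(exc)                                   # expected end of marker expression
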
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
@@ -318,10 +324,7 @@ def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
def process_env_var(env_var: str) -> Variable:
- if (
- env_var == "platform_python_implementation"
- or env_var == "python_implementation"
- ):
+ if env_var in ("platform_python_implementation", "python_implementation"):
return Variable("platform_python_implementation")
else:
return Variable(env_var)
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/metadata.py b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/metadata.py
index e76a60c395..fb27493079 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/metadata.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/metadata.py
@@ -5,23 +5,77 @@ import email.parser
import email.policy
import sys
import typing
-from typing import Dict, List, Optional, Tuple, Union, cast
-
-if sys.version_info >= (3, 8): # pragma: no cover
- from typing import TypedDict
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generic,
+ List,
+ Optional,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+from . import requirements, specifiers, utils, version as version_module
+
+T = typing.TypeVar("T")
+if sys.version_info[:2] >= (3, 8): # pragma: no cover
+ from typing import Literal, TypedDict
else: # pragma: no cover
if typing.TYPE_CHECKING:
- from typing_extensions import TypedDict
+ from typing_extensions import Literal, TypedDict
else:
try:
- from typing_extensions import TypedDict
+ from typing_extensions import Literal, TypedDict
except ImportError:
+ class Literal:
+ def __init_subclass__(*_args, **_kwargs):
+ pass
+
class TypedDict:
def __init_subclass__(*_args, **_kwargs):
pass
+try:
+ ExceptionGroup
+except NameError: # pragma: no cover
+
+ class ExceptionGroup(Exception): # noqa: N818
+ """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
+
+ If :external:exc:`ExceptionGroup` is already defined by Python itself,
+ that version is used instead.
+ """
+
+ message: str
+ exceptions: List[Exception]
+
+ def __init__(self, message: str, exceptions: List[Exception]) -> None:
+ self.message = message
+ self.exceptions = exceptions
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
+
+else: # pragma: no cover
+ ExceptionGroup = ExceptionGroup
+
+
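
With the shim in place, validation failures can be handled uniformly on any supported Python; a sketch using the Metadata class defined further down:

    from packaging.metadata import ExceptionGroup, Metadata

    try:
        Metadata.from_raw({'metadata_version': '2.3'})  # name and version missing
    except ExceptionGroup as group:
        for err in group.exceptions:
            print(err.field, '->', err)
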
+class InvalidMetadata(ValueError):
+ """A metadata field contains invalid data."""
+
+ field: str
+ """The name of the field that contains invalid data."""
+
+ def __init__(self, field: str, message: str) -> None:
+ self.field = field
+ super().__init__(message)
+
+
# The RawMetadata class attempts to make as few assumptions about the underlying
# serialization formats as possible. The idea is that as long as a serialization
+# format offers some very basic primitives in *some* way then we can support
@@ -33,7 +87,8 @@ class RawMetadata(TypedDict, total=False):
provided). The key is lower-case and underscores are used instead of dashes
compared to the equivalent core metadata field. Any core metadata field that
can be specified multiple times or can hold multiple values in a single
- field have a key with a plural name.
+ field has a key with a plural name. See :class:`Metadata` whose attributes
+ match the keys of this dictionary.
Core metadata fields that can be specified multiple times are stored as a
list or dict depending on which is appropriate for the field. Any fields
@@ -77,7 +132,7 @@ class RawMetadata(TypedDict, total=False):
# but got stuck without ever being able to build consensus on
# it and ultimately ended up withdrawn.
#
- # However, a number of tools had started emiting METADATA with
+ # However, a number of tools had started emitting METADATA with
# `2.0` Metadata-Version, so for historical reasons, this version
# was skipped.
@@ -110,7 +165,7 @@ _STRING_FIELDS = {
"version",
}
-_LIST_STRING_FIELDS = {
+_LIST_FIELDS = {
"classifiers",
"dynamic",
"obsoletes",
@@ -125,6 +180,10 @@ _LIST_STRING_FIELDS = {
"supported_platforms",
}
+_DICT_FIELDS = {
+ "project_urls",
+}
+
def _parse_keywords(data: str) -> List[str]:
"""Split a string of comma-separate keyboards into a list of keywords."""
@@ -230,10 +289,11 @@ _EMAIL_TO_RAW_MAPPING = {
"supported-platform": "supported_platforms",
"version": "version",
}
+_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
- """Parse a distribution's metadata.
+ """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
This function returns a two-item tuple of dicts. The first dict is of
recognized fields from the core metadata specification. Fields that can be
@@ -267,7 +327,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
# We use get_all() here, even for fields that aren't multiple use,
# because otherwise someone could have e.g. two Name fields, and we
# would just silently ignore it rather than doing something about it.
- headers = parsed.get_all(name)
+ headers = parsed.get_all(name) or []
# The way the email module works when parsing bytes is that it
# unconditionally decodes the bytes as ascii using the surrogateescape
@@ -349,7 +409,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
# If this is one of our list of string fields, then we can just assign
# the value, since email *only* has strings, and our get_all() call
# above ensures that this is a list.
- elif raw_name in _LIST_STRING_FIELDS:
+ elif raw_name in _LIST_FIELDS:
raw[raw_name] = value
# Special Case: Keywords
# The keywords field is implemented in the metadata spec as a str,
@@ -406,3 +466,360 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st
# way this function is implemented, our `TypedDict` can only have valid key
# names.
return cast(RawMetadata, raw), unparsed
+
+
+_NOT_FOUND = object()
+
+
+# Keep the two values in sync.
+_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
+
+_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
+
+
+class _Validator(Generic[T]):
+ """Validate a metadata field.
+
+ All _process_*() methods correspond to a core metadata field. The method is
+ called with the field's raw value. If the raw value is valid it is returned
+ in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
+ If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
+ as appropriate).
+ """
+
+ name: str
+ raw_name: str
+ added: _MetadataVersion
+
+ def __init__(
+ self,
+ *,
+ added: _MetadataVersion = "1.0",
+ ) -> None:
+ self.added = added
+
+ def __set_name__(self, _owner: "Metadata", name: str) -> None:
+ self.name = name
+ self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
+
+ def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
+ # With Python 3.8, the caching can be replaced with functools.cached_property().
+ # No need to check the cache as attribute lookup will resolve into the
+ # instance's __dict__ before __get__ is called.
+ cache = instance.__dict__
+ value = instance._raw.get(self.name)
+
+ # To make the _process_* methods easier, we'll check if the value is None
+ # and if this field is NOT a required attribute, and if both of those
+ # things are true, we'll skip the converter. This will mean that the
+ # converters never have to deal with the None union.
+ if self.name in _REQUIRED_ATTRS or value is not None:
+ try:
+ converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
+ except AttributeError:
+ pass
+ else:
+ value = converter(value)
+
+ cache[self.name] = value
+ try:
+ del instance._raw[self.name] # type: ignore[misc]
+ except KeyError:
+ pass
+
+ return cast(T, value)
+
+ def _invalid_metadata(
+ self, msg: str, cause: Optional[Exception] = None
+ ) -> InvalidMetadata:
+ exc = InvalidMetadata(
+ self.raw_name, msg.format_map({"field": repr(self.raw_name)})
+ )
+ exc.__cause__ = cause
+ return exc
+
+ def _process_metadata_version(self, value: str) -> _MetadataVersion:
+ # Implicitly makes Metadata-Version required.
+ if value not in _VALID_METADATA_VERSIONS:
+ raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
+ return cast(_MetadataVersion, value)
+
+ def _process_name(self, value: str) -> str:
+ if not value:
+ raise self._invalid_metadata("{field} is a required field")
+ # Validate the name as a side-effect.
+ try:
+ utils.canonicalize_name(value, validate=True)
+ except utils.InvalidName as exc:
+ raise self._invalid_metadata(
+ f"{value!r} is invalid for {{field}}", cause=exc
+ )
+ else:
+ return value
+
+ def _process_version(self, value: str) -> version_module.Version:
+ if not value:
+ raise self._invalid_metadata("{field} is a required field")
+ try:
+ return version_module.parse(value)
+ except version_module.InvalidVersion as exc:
+ raise self._invalid_metadata(
+ f"{value!r} is invalid for {{field}}", cause=exc
+ )
+
+ def _process_summary(self, value: str) -> str:
+ """Check the field contains no newlines."""
+ if "\n" in value:
+ raise self._invalid_metadata("{field} must be a single line")
+ return value
+
+ def _process_description_content_type(self, value: str) -> str:
+ content_types = {"text/plain", "text/x-rst", "text/markdown"}
+ message = email.message.EmailMessage()
+ message["content-type"] = value
+
+ content_type, parameters = (
+ # Defaults to `text/plain` if parsing failed.
+ message.get_content_type().lower(),
+ message["content-type"].params,
+ )
+ # Check if content-type is valid or defaulted to `text/plain` and thus was
+ # not parseable.
+ if content_type not in content_types or content_type not in value.lower():
+ raise self._invalid_metadata(
+ f"{{field}} must be one of {list(content_types)}, not {value!r}"
+ )
+
+ charset = parameters.get("charset", "UTF-8")
+ if charset != "UTF-8":
+ raise self._invalid_metadata(
+ f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
+ )
+
+ markdown_variants = {"GFM", "CommonMark"}
+ variant = parameters.get("variant", "GFM") # Use an acceptable default.
+ if content_type == "text/markdown" and variant not in markdown_variants:
+ raise self._invalid_metadata(
+ f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
+ f"not {variant!r}",
+ )
+ return value
+
+ def _process_dynamic(self, value: List[str]) -> List[str]:
+ for dynamic_field in map(str.lower, value):
+ if dynamic_field in {"name", "version", "metadata-version"}:
+ raise self._invalid_metadata(
+ f"{value!r} is not allowed as a dynamic field"
+ )
+ elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
+ raise self._invalid_metadata(f"{value!r} is not a valid dynamic field")
+ return list(map(str.lower, value))
+
+ def _process_provides_extra(
+ self,
+ value: List[str],
+ ) -> List[utils.NormalizedName]:
+ normalized_names = []
+ try:
+ for name in value:
+ normalized_names.append(utils.canonicalize_name(name, validate=True))
+ except utils.InvalidName as exc:
+ raise self._invalid_metadata(
+ f"{name!r} is invalid for {{field}}", cause=exc
+ )
+ else:
+ return normalized_names
+
+ def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
+ try:
+ return specifiers.SpecifierSet(value)
+ except specifiers.InvalidSpecifier as exc:
+ raise self._invalid_metadata(
+ f"{value!r} is invalid for {{field}}", cause=exc
+ )
+
+ def _process_requires_dist(
+ self,
+ value: List[str],
+ ) -> List[requirements.Requirement]:
+ reqs = []
+ try:
+ for req in value:
+ reqs.append(requirements.Requirement(req))
+ except requirements.InvalidRequirement as exc:
+ raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
+ else:
+ return reqs
+
+
+class Metadata:
+ """Representation of distribution metadata.
+
+ Compared to :class:`RawMetadata`, this class provides objects representing
+ metadata fields instead of only using built-in types. Any invalid metadata
+ will cause :exc:`InvalidMetadata` to be raised (with a
+ :py:attr:`~BaseException.__cause__` attribute as appropriate).
+ """
+
+ _raw: RawMetadata
+
+ @classmethod
+ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
+ """Create an instance from :class:`RawMetadata`.
+
+ If *validate* is true, all metadata will be validated. All exceptions
+ related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+ """
+ ins = cls()
+ ins._raw = data.copy() # Mutations occur due to caching enriched values.
+
+ if validate:
+ exceptions: List[Exception] = []
+ try:
+ metadata_version = ins.metadata_version
+ metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
+ except InvalidMetadata as metadata_version_exc:
+ exceptions.append(metadata_version_exc)
+ metadata_version = None
+
+ # Make sure to check both the fields that are present and the required
+ # fields (so their absence can be reported).
+ fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
+ # Remove fields that have already been checked.
+ fields_to_check -= {"metadata_version"}
+
+ for key in fields_to_check:
+ try:
+ if metadata_version:
+ # Can't use getattr() as that triggers descriptor protocol which
+ # will fail due to no value for the instance argument.
+ try:
+ field_metadata_version = cls.__dict__[key].added
+ except KeyError:
+ exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
+ exceptions.append(exc)
+ continue
+ field_age = _VALID_METADATA_VERSIONS.index(
+ field_metadata_version
+ )
+ if field_age > metadata_age:
+ field = _RAW_TO_EMAIL_MAPPING[key]
+ exc = InvalidMetadata(
+ field,
+ "{field} introduced in metadata version "
+ "{field_metadata_version}, not {metadata_version}",
+ )
+ exceptions.append(exc)
+ continue
+ getattr(ins, key)
+ except InvalidMetadata as exc:
+ exceptions.append(exc)
+
+ if exceptions:
+ raise ExceptionGroup("invalid metadata", exceptions)
+
+ return ins
+
+ @classmethod
+ def from_email(
+ cls, data: Union[bytes, str], *, validate: bool = True
+ ) -> "Metadata":
+ """Parse metadata from email headers.
+
+ If *validate* is true, the metadata will be validated. All exceptions
+ related to validation will be gathered and raised as an :class:`ExceptionGroup`.
+ """
+ raw, unparsed = parse_email(data)
+
+ if validate:
+ exceptions: List[Exception] = []
+ for unparsed_key in unparsed:
+ if unparsed_key in _EMAIL_TO_RAW_MAPPING:
+ message = f"{unparsed_key!r} has invalid data"
+ else:
+ message = f"unrecognized field: {unparsed_key!r}"
+ exceptions.append(InvalidMetadata(unparsed_key, message))
+
+ if exceptions:
+ raise ExceptionGroup("unparsed", exceptions)
+
+ try:
+ return cls.from_raw(raw, validate=validate)
+ except ExceptionGroup as exc_group:
+ raise ExceptionGroup(
+ "invalid or unparsed metadata", exc_group.exceptions
+ ) from None
+
+ metadata_version: _Validator[_MetadataVersion] = _Validator()
+ """:external:ref:`core-metadata-metadata-version`
+ (required; validated to be a valid metadata version)"""
+ name: _Validator[str] = _Validator()
+ """:external:ref:`core-metadata-name`
+ (required; validated using :func:`~packaging.utils.canonicalize_name` and its
+ *validate* parameter)"""
+ version: _Validator[version_module.Version] = _Validator()
+ """:external:ref:`core-metadata-version` (required)"""
+ dynamic: _Validator[Optional[List[str]]] = _Validator(
+ added="2.2",
+ )
+ """:external:ref:`core-metadata-dynamic`
+ (validated against core metadata field names and lowercased)"""
+ platforms: _Validator[Optional[List[str]]] = _Validator()
+ """:external:ref:`core-metadata-platform`"""
+ supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+ """:external:ref:`core-metadata-supported-platform`"""
+ summary: _Validator[Optional[str]] = _Validator()
+ """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
+ description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body
+ """:external:ref:`core-metadata-description`"""
+ description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
+ """:external:ref:`core-metadata-description-content-type` (validated)"""
+ keywords: _Validator[Optional[List[str]]] = _Validator()
+ """:external:ref:`core-metadata-keywords`"""
+ home_page: _Validator[Optional[str]] = _Validator()
+ """:external:ref:`core-metadata-home-page`"""
+ download_url: _Validator[Optional[str]] = _Validator(added="1.1")
+ """:external:ref:`core-metadata-download-url`"""
+ author: _Validator[Optional[str]] = _Validator()
+ """:external:ref:`core-metadata-author`"""
+ author_email: _Validator[Optional[str]] = _Validator()
+ """:external:ref:`core-metadata-author-email`"""
+ maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-maintainer`"""
+ maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-maintainer-email`"""
+ license: _Validator[Optional[str]] = _Validator()
+ """:external:ref:`core-metadata-license`"""
+ classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+ """:external:ref:`core-metadata-classifier`"""
+ requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
+ added="1.2"
+ )
+ """:external:ref:`core-metadata-requires-dist`"""
+ requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
+ added="1.2"
+ )
+ """:external:ref:`core-metadata-requires-python`"""
+ # Because `Requires-External` allows for non-PEP 440 version specifiers, we
+ # don't do any processing on the values.
+ requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-requires-external`"""
+ project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-project-url`"""
+ # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
+ # regardless of metadata version.
+ provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
+ added="2.1",
+ )
+ """:external:ref:`core-metadata-provides-extra`"""
+ provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-provides-dist`"""
+ obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
+ """:external:ref:`core-metadata-obsoletes-dist`"""
+ requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+ """``Requires`` (deprecated)"""
+ provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+ """``Provides`` (deprecated)"""
+ obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
+ """``Obsoletes`` (deprecated)"""
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/requirements.py b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/requirements.py
index f34bfa85c8..bdc43a7e98 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/requirements.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/requirements.py
@@ -2,13 +2,13 @@
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
-import urllib.parse
-from typing import Any, List, Optional, Set
+from typing import Any, Iterator, Optional, Set
from ._parser import parse_requirement as _parse_requirement
from ._tokenizer import ParserSyntaxError
from .markers import Marker, _normalize_extra_values
from .specifiers import SpecifierSet
+from .utils import canonicalize_name
class InvalidRequirement(ValueError):
@@ -37,57 +37,52 @@ class Requirement:
raise InvalidRequirement(str(e)) from e
self.name: str = parsed.name
- if parsed.url:
- parsed_url = urllib.parse.urlparse(parsed.url)
- if parsed_url.scheme == "file":
- if urllib.parse.urlunparse(parsed_url) != parsed.url:
- raise InvalidRequirement("Invalid URL given")
- elif not (parsed_url.scheme and parsed_url.netloc) or (
- not parsed_url.scheme and not parsed_url.netloc
- ):
- raise InvalidRequirement(f"Invalid URL: {parsed.url}")
- self.url: Optional[str] = parsed.url
- else:
- self.url = None
- self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
+ self.url: Optional[str] = parsed.url or None
+ self.extras: Set[str] = set(parsed.extras or [])
self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
self.marker: Optional[Marker] = None
if parsed.marker is not None:
self.marker = Marker.__new__(Marker)
self.marker._markers = _normalize_extra_values(parsed.marker)
- def __str__(self) -> str:
- parts: List[str] = [self.name]
+ def _iter_parts(self, name: str) -> Iterator[str]:
+ yield name
if self.extras:
formatted_extras = ",".join(sorted(self.extras))
- parts.append(f"[{formatted_extras}]")
+ yield f"[{formatted_extras}]"
if self.specifier:
- parts.append(str(self.specifier))
+ yield str(self.specifier)
if self.url:
- parts.append(f"@ {self.url}")
+ yield f"@ {self.url}"
if self.marker:
- parts.append(" ")
+ yield " "
if self.marker:
- parts.append(f"; {self.marker}")
+ yield f"; {self.marker}"
- return "".join(parts)
+ def __str__(self) -> str:
+ return "".join(self._iter_parts(self.name))
def __repr__(self) -> str:
return f"<Requirement('{self}')>"
def __hash__(self) -> int:
- return hash((self.__class__.__name__, str(self)))
+ return hash(
+ (
+ self.__class__.__name__,
+ *self._iter_parts(canonicalize_name(self.name)),
+ )
+ )
def __eq__(self, other: Any) -> bool:
if not isinstance(other, Requirement):
return NotImplemented
return (
- self.name == other.name
+ canonicalize_name(self.name) == canonicalize_name(other.name)
and self.extras == other.extras
and self.specifier == other.specifier
and self.url == other.url
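
Equality and hashing now agree on PEP 503 name normalization; a quick sketch:

    from packaging.requirements import Requirement

    a = Requirement('Foo.Bar==1.0')
    b = Requirement('foo-bar==1.0')
    assert a == b                # names now compare canonicalized
    assert hash(a) == hash(b)    # consistent with __eq__ for sets and dict keys
    assert str(a) != str(b)      # the original spelling is still preserved
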
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/specifiers.py b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/specifiers.py
index ba8fe37b7f..2d015bab59 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/specifiers.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/specifiers.py
@@ -11,17 +11,7 @@
import abc
import itertools
import re
-from typing import (
- Callable,
- Iterable,
- Iterator,
- List,
- Optional,
- Set,
- Tuple,
- TypeVar,
- Union,
-)
+from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
from .utils import canonicalize_version
from .version import Version
@@ -383,7 +373,7 @@ class Specifier(BaseSpecifier):
# We want everything but the last item in the version, but we want to
# ignore suffix segments.
- prefix = ".".join(
+ prefix = _version_join(
list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
)
@@ -404,13 +394,13 @@ class Specifier(BaseSpecifier):
)
# Get the normalized version string ignoring the trailing .*
normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
- # Split the spec out by dots, and pretend that there is an implicit
- # dot in between a release segment and a pre-release segment.
+ # Split the spec out by bangs and dots, and pretend that there is
+ # an implicit dot in between a release segment and a pre-release segment.
split_spec = _version_split(normalized_spec)
- # Split the prospective version out by dots, and pretend that there
- # is an implicit dot in between a release segment and a pre-release
- # segment.
+ # Split the prospective version out by bangs and dots, and pretend
+ # that there is an implicit dot in between a release segment and
+ # a pre-release segment.
split_prospective = _version_split(normalized_prospective)
# 0-pad the prospective version before shortening it to get the correct
@@ -644,8 +634,19 @@ _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
def _version_split(version: str) -> List[str]:
+ """Split version into components.
+
+ The split components are intended for version comparison. The logic does
+ not attempt to retain the original version string, so joining the
+ components back with :func:`_version_join` may not produce the original
+ version string.
+ """
result: List[str] = []
- for item in version.split("."):
+
+ epoch, _, rest = version.rpartition("!")
+ result.append(epoch or "0")
+
+ for item in rest.split("."):
match = _prefix_regex.search(item)
if match:
result.extend(match.groups())
@@ -654,6 +655,17 @@ def _version_split(version: str) -> List[str]:
return result
+def _version_join(components: List[str]) -> str:
+ """Join split version components into a version string.
+
+ This function assumes the input came from :func:`_version_split`, where the
+ first component must be the epoch (either empty or numeric), and all other
+ components numeric.
+ """
+ epoch, *rest = components
+ return f"{epoch}!{'.'.join(rest)}"
+
+
def _is_not_suffix(segment: str) -> bool:
return not any(
segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
@@ -675,7 +687,10 @@ def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
- return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
+ return (
+ list(itertools.chain.from_iterable(left_split)),
+ list(itertools.chain.from_iterable(right_split)),
+ )
class SpecifierSet(BaseSpecifier):
@@ -707,14 +722,8 @@ class SpecifierSet(BaseSpecifier):
# strip each item to remove leading/trailing whitespace.
split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
- # Parsed each individual specifier, attempting first to make it a
- # Specifier.
- parsed: Set[Specifier] = set()
- for specifier in split_specifiers:
- parsed.add(Specifier(specifier))
-
- # Turn our parsed specifiers into a frozen set and save them for later.
- self._specs = frozenset(parsed)
+ # Make each individual specifier a Specifier and save in a frozen set for later.
+ self._specs = frozenset(map(Specifier, split_specifiers))
# Store our prereleases value so we can use it later to determine if
# we accept prereleases or not.
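The specifiers.py changes make prefix matching (clauses like ==1!2.0.*) epoch-aware: _version_split() now peels the epoch off at the "!" (defaulting to "0"), and the new _version_join() reassembles it explicitly instead of the old plain ".".join(...). A sketch of the intended round trip, using the private helpers exactly as defined in the hunks above:

    from packaging.specifiers import _version_join, _version_split

    assert _version_split("2.0.1") == ["0", "2", "0", "1"]   # implicit epoch 0
    assert _version_split("1!2.0.1rc1") == ["1", "2", "0", "1", "rc1"]
    assert _version_join(["1", "2", "0", "1"]) == "1!2.0.1"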
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/tags.py b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/tags.py
index 76d243414d..89f1926137 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/tags.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/tags.py
@@ -4,6 +4,8 @@
import logging
import platform
+import re
+import struct
import subprocess
import sys
import sysconfig
@@ -37,7 +39,7 @@ INTERPRETER_SHORT_NAMES: Dict[str, str] = {
}
-_32_BIT_INTERPRETER = sys.maxsize <= 2**32
+_32_BIT_INTERPRETER = struct.calcsize("P") == 4
class Tag:
@@ -123,20 +125,37 @@ def _normalize_string(string: str) -> str:
return string.replace(".", "_").replace("-", "_").replace(" ", "_")
-def _abi3_applies(python_version: PythonVersion) -> bool:
+def _is_threaded_cpython(abis: List[str]) -> bool:
+ """
+ Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
+
+ The threaded builds are indicated by a "t" in the abiflags.
+ """
+ if len(abis) == 0:
+ return False
+ # expect e.g., cp313
+ m = re.match(r"cp\d+(.*)", abis[0])
+ if not m:
+ return False
+ abiflags = m.group(1)
+ return "t" in abiflags
+
+
+def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
"""
Determine if the Python version supports abi3.
- PEP 384 was first implemented in Python 3.2.
+ PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
+ builds do not support abi3.
"""
- return len(python_version) > 1 and tuple(python_version) >= (3, 2)
+ return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
py_version = tuple(py_version) # To allow for version comparison.
abis = []
version = _version_nodot(py_version[:2])
- debug = pymalloc = ucs4 = ""
+ threading = debug = pymalloc = ucs4 = ""
with_debug = _get_config_var("Py_DEBUG", warn)
has_refcount = hasattr(sys, "gettotalrefcount")
# Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
@@ -145,6 +164,8 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
has_ext = "_d.pyd" in EXTENSION_SUFFIXES
if with_debug or (with_debug is None and (has_refcount or has_ext)):
debug = "d"
+ if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
+ threading = "t"
if py_version < (3, 8):
with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
if with_pymalloc or with_pymalloc is None:
@@ -158,13 +179,8 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
elif debug:
# Debug builds can also load "normal" extension modules.
# We can also assume no UCS-4 or pymalloc requirement.
- abis.append(f"cp{version}")
- abis.insert(
- 0,
- "cp{version}{debug}{pymalloc}{ucs4}".format(
- version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
- ),
- )
+ abis.append(f"cp{version}{threading}")
+ abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
return abis
@@ -212,11 +228,14 @@ def cpython_tags(
for abi in abis:
for platform_ in platforms:
yield Tag(interpreter, abi, platform_)
- if _abi3_applies(python_version):
+
+ threading = _is_threaded_cpython(abis)
+ use_abi3 = _abi3_applies(python_version, threading)
+ if use_abi3:
yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
- if _abi3_applies(python_version):
+ if use_abi3:
for minor_version in range(python_version[1] - 1, 1, -1):
for platform_ in platforms:
interpreter = "cp{version}".format(
@@ -406,7 +425,7 @@ def mac_platforms(
check=True,
env={"SYSTEM_VERSION_COMPAT": "0"},
stdout=subprocess.PIPE,
- universal_newlines=True,
+ text=True,
).stdout
version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
else:
@@ -469,15 +488,21 @@ def mac_platforms(
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
linux = _normalize_string(sysconfig.get_platform())
+ if not linux.startswith("linux_"):
+ # we should never be here, just yield the sysconfig one and return
+ yield linux
+ return
if is_32bit:
if linux == "linux_x86_64":
linux = "linux_i686"
elif linux == "linux_aarch64":
- linux = "linux_armv7l"
+ linux = "linux_armv8l"
_, arch = linux.split("_", 1)
- yield from _manylinux.platform_tags(linux, arch)
- yield from _musllinux.platform_tags(arch)
- yield linux
+ archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
+ yield from _manylinux.platform_tags(archs)
+ yield from _musllinux.platform_tags(archs)
+ for arch in archs:
+ yield f"linux_{arch}"
def _generic_platforms() -> Iterator[str]:
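Two behavioural updates land in tags.py: free-threaded CPython builds (PEP 703, cp313t-style ABIs) are now recognized and excluded from abi3 tagging, and 32-bit interpreters on aarch64 report linux_armv8l and fan out to both armv8l and armv7l platform tags. The ABI detection is simple enough to restate standalone; this mirrors _is_threaded_cpython() from the hunk above, applied to a single ABI string:

    import re

    def is_threaded_abi(abi: str) -> bool:
        # A "t" in the abiflags of e.g. "cp313t" marks a --disable-gil build.
        m = re.match(r"cp\d+(.*)", abi)
        return bool(m) and "t" in m.group(1)

    assert is_threaded_abi("cp313t")
    assert not is_threaded_abi("cp313")
    assert not is_threaded_abi("abi3")   # non-cpNNN ABIs never match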
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/utils.py b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/utils.py
index 33c613b749..c2c2f75aa8 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/utils.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/utils.py
@@ -12,6 +12,12 @@ BuildTag = Union[Tuple[()], Tuple[int, str]]
NormalizedName = NewType("NormalizedName", str)
+class InvalidName(ValueError):
+ """
+ An invalid distribution name; users should refer to the packaging user guide.
+ """
+
+
class InvalidWheelFilename(ValueError):
"""
An invalid wheel filename was found, users should refer to PEP 427.
@@ -24,17 +30,28 @@ class InvalidSdistFilename(ValueError):
"""
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+ r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
_canonicalize_regex = re.compile(r"[-_.]+")
+_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
-def canonicalize_name(name: str) -> NormalizedName:
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+ if validate and not _validate_regex.match(name):
+ raise InvalidName(f"name is invalid: {name!r}")
# This is taken from PEP 503.
value = _canonicalize_regex.sub("-", name).lower()
return cast(NormalizedName, value)
+def is_normalized_name(name: str) -> bool:
+ return _normalized_regex.match(name) is not None
+
+
def canonicalize_version(
version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
@@ -100,11 +117,18 @@ def parse_wheel_filename(
parts = filename.split("-", dashes - 2)
name_part = parts[0]
- # See PEP 427 for the rules on escaping the project name
+ # See PEP 427 for the rules on escaping the project name.
if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
raise InvalidWheelFilename(f"Invalid project name: {filename}")
name = canonicalize_name(name_part)
- version = Version(parts[1])
+
+ try:
+ version = Version(parts[1])
+ except InvalidVersion as e:
+ raise InvalidWheelFilename(
+ f"Invalid wheel filename (invalid version): {filename}"
+ ) from e
+
if dashes == 5:
build_part = parts[2]
build_match = _build_tag_regex.match(build_part)
@@ -137,5 +161,12 @@ def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
name = canonicalize_name(name_part)
- version = Version(version_part)
+
+ try:
+ version = Version(version_part)
+ except InvalidVersion as e:
+ raise InvalidSdistFilename(
+ f"Invalid sdist filename (invalid version): {filename}"
+ ) from e
+
return (name, version)
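utils.py grows opt-in name validation (canonicalize_name(..., validate=True) raising the new InvalidName) plus an is_normalized_name() predicate, and both filename parsers now translate InvalidVersion into the filename-specific exception instead of letting it escape. A behaviour sketch, again assuming the vendored module is importable under its upstream name:

    from packaging.utils import InvalidName, canonicalize_name, is_normalized_name

    assert canonicalize_name("Foo.Bar_baz") == "foo-bar-baz"  # PEP 503 folding
    assert is_normalized_name("foo-bar-baz")
    assert not is_normalized_name("Foo.Bar_baz")              # not normalized

    try:
        canonicalize_name("foo!!bar", validate=True)  # "!" is not a legal name character
    except InvalidName:
        pass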
diff --git a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/version.py b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/version.py
index b30e8cbf84..5faab9bd0d 100644
--- a/contrib/python/setuptools/py3/setuptools/_vendor/packaging/version.py
+++ b/contrib/python/setuptools/py3/setuptools/_vendor/packaging/version.py
@@ -7,37 +7,39 @@
from packaging.version import parse, Version
"""
-import collections
import itertools
import re
-from typing import Any, Callable, Optional, SupportsInt, Tuple, Union
+from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
-InfiniteTypes = Union[InfinityType, NegativeInfinityType]
-PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
-SubLocalType = Union[InfiniteTypes, int, str]
-LocalType = Union[
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
NegativeInfinityType,
- Tuple[
- Union[
- SubLocalType,
- Tuple[SubLocalType, str],
- Tuple[NegativeInfinityType, SubLocalType],
- ],
- ...,
- ],
+ Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
]
CmpKey = Tuple[
- int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
+ int,
+ Tuple[int, ...],
+ CmpPrePostDevType,
+ CmpPrePostDevType,
+ CmpPrePostDevType,
+ CmpLocalType,
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
-_Version = collections.namedtuple(
- "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
-)
+
+class _Version(NamedTuple):
+ epoch: int
+ release: Tuple[int, ...]
+ dev: Optional[Tuple[str, int]]
+ pre: Optional[Tuple[str, int]]
+ post: Optional[Tuple[str, int]]
+ local: Optional[LocalType]
def parse(version: str) -> "Version":
@@ -117,7 +119,7 @@ _VERSION_PATTERN = r"""
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
- (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+ (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
@@ -269,8 +271,7 @@ class Version(_BaseVersion):
>>> Version("1!2.0.0").epoch
1
"""
- _epoch: int = self._version.epoch
- return _epoch
+ return self._version.epoch
@property
def release(self) -> Tuple[int, ...]:
@@ -286,8 +287,7 @@ class Version(_BaseVersion):
Includes trailing zeroes but not the epoch or any pre-release / development /
post-release suffixes.
"""
- _release: Tuple[int, ...] = self._version.release
- return _release
+ return self._version.release
@property
def pre(self) -> Optional[Tuple[str, int]]:
@@ -302,8 +302,7 @@ class Version(_BaseVersion):
>>> Version("1.2.3rc1").pre
('rc', 1)
"""
- _pre: Optional[Tuple[str, int]] = self._version.pre
- return _pre
+ return self._version.pre
@property
def post(self) -> Optional[int]:
@@ -451,7 +450,7 @@ class Version(_BaseVersion):
def _parse_letter_version(
- letter: str, number: Union[str, bytes, SupportsInt]
+ letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
) -> Optional[Tuple[str, int]]:
if letter:
@@ -489,7 +488,7 @@ def _parse_letter_version(
_local_version_separators = re.compile(r"[\._-]")
-def _parse_local_version(local: str) -> Optional[LocalType]:
+def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
@@ -507,7 +506,7 @@ def _cmpkey(
pre: Optional[Tuple[str, int]],
post: Optional[Tuple[str, int]],
dev: Optional[Tuple[str, int]],
- local: Optional[Tuple[SubLocalType]],
+ local: Optional[LocalType],
) -> CmpKey:
# When we compare a release version, we want to compare it with all of the
@@ -524,7 +523,7 @@ def _cmpkey(
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
- _pre: PrePostDevType = NegativeInfinity
+ _pre: CmpPrePostDevType = NegativeInfinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
@@ -534,21 +533,21 @@ def _cmpkey(
# Versions without a post segment should sort before those with one.
if post is None:
- _post: PrePostDevType = NegativeInfinity
+ _post: CmpPrePostDevType = NegativeInfinity
else:
_post = post
# Versions without a development segment should sort after those with one.
if dev is None:
- _dev: PrePostDevType = Infinity
+ _dev: CmpPrePostDevType = Infinity
else:
_dev = dev
if local is None:
# Versions without a local segment should sort before those with one.
- _local: LocalType = NegativeInfinity
+ _local: CmpLocalType = NegativeInfinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
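The version.py changes are typing cleanups: the comparison-key aliases are split off from the public LocalType, the redundant annotated locals in the properties are dropped, and the untyped collections.namedtuple becomes a class-based NamedTuple. The last change is behaviour-preserving; a typed NamedTuple keeps the namedtuple API (immutability, unpacking, _replace) while giving each field a checkable type, roughly:

    from typing import NamedTuple, Optional, Tuple, Union

    class _Version(NamedTuple):  # same shape as the definition in the hunk above
        epoch: int
        release: Tuple[int, ...]
        dev: Optional[Tuple[str, int]]
        pre: Optional[Tuple[str, int]]
        post: Optional[Tuple[str, int]]
        local: Optional[Tuple[Union[int, str], ...]]

    v = _Version(1, (2, 0), None, ("rc", 1), None, None)
    assert v.epoch == 1 and v.pre == ("rc", 1)
    assert v._replace(epoch=0).epoch == 0   # namedtuple semantics still hold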
diff --git a/contrib/python/setuptools/py3/setuptools/command/easy_install.py b/contrib/python/setuptools/py3/setuptools/command/easy_install.py
index 858fb20f83..87a68c292a 100644
--- a/contrib/python/setuptools/py3/setuptools/command/easy_install.py
+++ b/contrib/python/setuptools/py3/setuptools/command/easy_install.py
@@ -565,7 +565,7 @@ class easy_install(Command):
msg += '\n' + self.__access_msg
raise DistutilsError(msg)
- def check_pth_processing(self):
+ def check_pth_processing(self): # noqa: C901
"""Empirically verify whether .pth files are supported in inst. dir"""
instdir = self.install_dir
log.info("Checking .pth file support in %s", instdir)
diff --git a/contrib/python/setuptools/py3/setuptools/extern/__init__.py b/contrib/python/setuptools/py3/setuptools/extern/__init__.py
index 67c4a4552f..427b27cb80 100644
--- a/contrib/python/setuptools/py3/setuptools/extern/__init__.py
+++ b/contrib/python/setuptools/py3/setuptools/extern/__init__.py
@@ -80,5 +80,6 @@ names = (
'jaraco',
'typing_extensions',
'tomli',
+ 'backports',
)
VendorImporter(__name__, names, 'setuptools._vendor').install()
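Registering 'backports' here is what lets the newly vendored backports.tarfile (added elsewhere in this commit) resolve through the extern alias. If VendorImporter treats it like the other names, usage would look like the following; treat the exact import path as an assumption rather than documented API:

    # Hypothetical: resolves to setuptools._vendor.backports.tarfile,
    # falling back to a real top-level 'backports' package if one is installed.
    from setuptools.extern.backports import tarfile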
diff --git a/contrib/python/setuptools/py3/setuptools/monkey.py b/contrib/python/setuptools/py3/setuptools/monkey.py
index fd07d91dec..1f8d8ffe0f 100644
--- a/contrib/python/setuptools/py3/setuptools/monkey.py
+++ b/contrib/python/setuptools/py3/setuptools/monkey.py
@@ -95,6 +95,7 @@ def _patch_distribution_metadata():
'write_pkg_file',
'read_pkg_file',
'get_metadata_version',
+ 'get_fullname',
):
new_val = getattr(_core_metadata, attr)
setattr(distutils.dist.DistributionMetadata, attr, new_val)
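Adding 'get_fullname' extends the existing monkey-patch loop: each named function from setuptools' _core_metadata module is copied onto distutils' DistributionMetadata, so the distutils class picks up setuptools' implementation. The pattern, restated in isolation (module and attribute names taken from the hunk above):

    import distutils.dist
    from setuptools import _core_metadata

    for attr in ('write_pkg_file', 'read_pkg_file',
                 'get_metadata_version', 'get_fullname'):
        setattr(distutils.dist.DistributionMetadata, attr,
                getattr(_core_metadata, attr))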
diff --git a/contrib/python/setuptools/py3/ya.make b/contrib/python/setuptools/py3/ya.make
index 4e2b62dbce..a3a05780af 100644
--- a/contrib/python/setuptools/py3/ya.make
+++ b/contrib/python/setuptools/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(69.2.0)
+VERSION(69.5.1)
LICENSE(MIT)
@@ -24,6 +24,8 @@ PY_SRCS(
_distutils_hack/override.py
pkg_resources/__init__.py
pkg_resources/_vendor/__init__.py
+ pkg_resources/_vendor/backports/__init__.py
+ pkg_resources/_vendor/backports/tarfile.py
pkg_resources/_vendor/importlib_resources/__init__.py
pkg_resources/_vendor/importlib_resources/_adapters.py
pkg_resources/_vendor/importlib_resources/_common.py
@@ -35,7 +37,8 @@ PY_SRCS(
pkg_resources/_vendor/importlib_resources/simple.py
pkg_resources/_vendor/jaraco/__init__.py
pkg_resources/_vendor/jaraco/context.py
- pkg_resources/_vendor/jaraco/functools.py
+ pkg_resources/_vendor/jaraco/functools/__init__.py
+ pkg_resources/_vendor/jaraco/functools/__init__.pyi
pkg_resources/_vendor/jaraco/text/__init__.py
pkg_resources/_vendor/more_itertools/__init__.py
pkg_resources/_vendor/more_itertools/__init__.pyi
@@ -73,6 +76,7 @@ PY_SRCS(
setuptools/_distutils/__init__.py
setuptools/_distutils/_collections.py
setuptools/_distutils/_functools.py
+ setuptools/_distutils/_itertools.py
setuptools/_distutils/_log.py
setuptools/_distutils/_macos_compat.py
setuptools/_distutils/_modified.py
@@ -100,10 +104,11 @@ PY_SRCS(
setuptools/_distutils/command/install_headers.py
setuptools/_distutils/command/install_lib.py
setuptools/_distutils/command/install_scripts.py
- setuptools/_distutils/command/py37compat.py
setuptools/_distutils/command/register.py
setuptools/_distutils/command/sdist.py
setuptools/_distutils/command/upload.py
+ setuptools/_distutils/compat/__init__.py
+ setuptools/_distutils/compat/py38.py
setuptools/_distutils/config.py
setuptools/_distutils/core.py
setuptools/_distutils/cygwinccompiler.py
@@ -128,6 +133,7 @@ PY_SRCS(
setuptools/_distutils/util.py
setuptools/_distutils/version.py
setuptools/_distutils/versionpredicate.py
+ setuptools/_distutils/zosccompiler.py
setuptools/_entry_points.py
setuptools/_imp.py
setuptools/_importlib.py
@@ -136,6 +142,8 @@ PY_SRCS(
setuptools/_path.py
setuptools/_reqs.py
setuptools/_vendor/__init__.py
+ setuptools/_vendor/backports/__init__.py
+ setuptools/_vendor/backports/tarfile.py
setuptools/_vendor/importlib_metadata/__init__.py
setuptools/_vendor/importlib_metadata/_adapters.py
setuptools/_vendor/importlib_metadata/_collections.py
@@ -156,7 +164,8 @@ PY_SRCS(
setuptools/_vendor/importlib_resources/simple.py
setuptools/_vendor/jaraco/__init__.py
setuptools/_vendor/jaraco/context.py
- setuptools/_vendor/jaraco/functools.py
+ setuptools/_vendor/jaraco/functools/__init__.py
+ setuptools/_vendor/jaraco/functools/__init__.pyi
setuptools/_vendor/jaraco/text/__init__.py
setuptools/_vendor/more_itertools/__init__.py
setuptools/_vendor/more_itertools/__init__.pyi
@@ -258,11 +267,13 @@ RESOURCE_FILES(
.dist-info/entry_points.txt
.dist-info/top_level.txt
pkg_resources/_vendor/importlib_resources/py.typed
+ pkg_resources/_vendor/jaraco/functools/py.typed
pkg_resources/_vendor/more_itertools/py.typed
pkg_resources/_vendor/packaging/py.typed
pkg_resources/_vendor/platformdirs/py.typed
setuptools/_vendor/importlib_metadata/py.typed
setuptools/_vendor/importlib_resources/py.typed
+ setuptools/_vendor/jaraco/functools/py.typed
setuptools/_vendor/more_itertools/py.typed
setuptools/_vendor/packaging/py.typed
setuptools/_vendor/tomli/py.typed