aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/python/pip
diff options
context:
space:
mode:
authorrobot-piglet <robot-piglet@yandex-team.com>2024-08-12 09:28:59 +0300
committerrobot-piglet <robot-piglet@yandex-team.com>2024-08-12 09:38:32 +0300
commit257897ca64dd6a9c71b917b128ec453a36d6d8a1 (patch)
treeaa22a80b0de2e8b7f4de8f3f5593fac9942af14a /contrib/python/pip
parent480038df25b9267a11b390666e5077cad0529a5e (diff)
downloadydb-257897ca64dd6a9c71b917b128ec453a36d6d8a1.tar.gz
Intermediate changes
Diffstat (limited to 'contrib/python/pip')
-rw-r--r--contrib/python/pip/.dist-info/METADATA2
-rw-r--r--contrib/python/pip/AUTHORS.txt11
-rw-r--r--contrib/python/pip/pip/__init__.py2
-rw-r--r--contrib/python/pip/pip/_internal/build_env.py5
-rw-r--r--contrib/python/pip/pip/_internal/cli/base_command.py125
-rw-r--r--contrib/python/pip/pip/_internal/cli/cmdoptions.py3
-rw-r--r--contrib/python/pip/pip/_internal/cli/index_command.py50
-rw-r--r--contrib/python/pip/pip/_internal/cli/progress_bars.py2
-rw-r--r--contrib/python/pip/pip/_internal/commands/check.py19
-rw-r--r--contrib/python/pip/pip/_internal/commands/freeze.py1
-rw-r--r--contrib/python/pip/pip/_internal/commands/install.py28
-rw-r--r--contrib/python/pip/pip/_internal/index/package_finder.py29
-rw-r--r--contrib/python/pip/pip/_internal/metadata/importlib/_compat.py38
-rw-r--r--contrib/python/pip/pip/_internal/metadata/importlib/_dists.py27
-rw-r--r--contrib/python/pip/pip/_internal/metadata/importlib/_envs.py12
-rw-r--r--contrib/python/pip/pip/_internal/network/auth.py4
-rw-r--r--contrib/python/pip/pip/_internal/network/download.py6
-rw-r--r--contrib/python/pip/pip/_internal/network/utils.py6
-rw-r--r--contrib/python/pip/pip/_internal/operations/check.py34
-rw-r--r--contrib/python/pip/pip/_internal/operations/install/wheel.py26
-rw-r--r--contrib/python/pip/pip/_internal/pyproject.py14
-rw-r--r--contrib/python/pip/pip/_internal/req/constructors.py6
-rw-r--r--contrib/python/pip/pip/_internal/req/req_install.py22
-rw-r--r--contrib/python/pip/pip/_internal/resolution/resolvelib/factory.py3
-rw-r--r--contrib/python/pip/pip/_internal/resolution/resolvelib/reporter.py1
-rw-r--r--contrib/python/pip/pip/_internal/self_outdated_check.py24
-rw-r--r--contrib/python/pip/pip/_internal/utils/filesystem.py8
-rw-r--r--contrib/python/pip/pip/_internal/utils/glibc.py17
-rw-r--r--contrib/python/pip/pip/_internal/utils/hashes.py2
-rw-r--r--contrib/python/pip/pip/_internal/utils/logging.py2
-rw-r--r--contrib/python/pip/pip/_internal/utils/misc.py19
-rw-r--r--contrib/python/pip/pip/_internal/utils/packaging.py2
-rw-r--r--contrib/python/pip/pip/_internal/utils/retry.py42
-rw-r--r--contrib/python/pip/pip/_internal/utils/temp_dir.py2
-rw-r--r--contrib/python/pip/pip/_vendor/__init__.py6
-rw-r--r--contrib/python/pip/pip/_vendor/certifi/__init__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/certifi/cacert.pem64
-rw-r--r--contrib/python/pip/pip/_vendor/pkg_resources/__init__.py949
-rw-r--r--contrib/python/pip/pip/_vendor/platformdirs/android.py47
-rw-r--r--contrib/python/pip/pip/_vendor/platformdirs/version.py4
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/__init__.py4
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/__main__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/cmdline.py24
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/console.py10
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/filter.py5
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/filters/__init__.py8
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatter.py7
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/__init__.py13
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/bbcode.py4
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/groff.py6
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/html.py75
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/img.py21
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/irc.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/latex.py51
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/other.py7
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/pangomarkup.py4
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/rtf.py253
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/svg.py37
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/terminal.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/terminal256.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/lexer.py42
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/lexers/__init__.py23
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/lexers/_mapping.py21
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/lexers/python.py40
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/modeline.py8
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/plugin.py22
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/regexopt.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/scanner.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/sphinxext.py24
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/style.py4
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/styles/__init__.py6
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/styles/_mapping.py1
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/token.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/unistring.py10
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/util.py32
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/__init__.py608
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/_asyncio.py94
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/_utils.py76
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/after.py51
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/before.py46
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/before_sleep.py71
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/nap.py43
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/py.typed0
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/retry.py272
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/stop.py103
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/tornadoweb.py59
-rw-r--r--contrib/python/pip/pip/_vendor/tenacity/wait.py228
-rw-r--r--contrib/python/pip/pip/_vendor/typing_extensions.py501
-rw-r--r--contrib/python/pip/pip/_vendor/vendor.txt11
-rw-r--r--contrib/python/pip/ya.make15
90 files changed, 1993 insertions, 2627 deletions
diff --git a/contrib/python/pip/.dist-info/METADATA b/contrib/python/pip/.dist-info/METADATA
index 8fb9410b57..6141107f90 100644
--- a/contrib/python/pip/.dist-info/METADATA
+++ b/contrib/python/pip/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: pip
-Version: 24.1.2
+Version: 24.2
Summary: The PyPA recommended tool for installing Python packages.
Author-email: The pip developers <distutils-sig@python.org>
License: MIT
diff --git a/contrib/python/pip/AUTHORS.txt b/contrib/python/pip/AUTHORS.txt
index 10317a284b..dda2ac30f8 100644
--- a/contrib/python/pip/AUTHORS.txt
+++ b/contrib/python/pip/AUTHORS.txt
@@ -105,6 +105,7 @@ Bogdan Opanchuk
BorisZZZ
Brad Erickson
Bradley Ayers
+Branch Vincent
Brandon L. Reiss
Brandt Bucher
Brannon Dorsey
@@ -131,11 +132,13 @@ Carol Willing
Carter Thayer
Cass
Chandrasekhar Atina
+Charlie Marsh
Chih-Hsuan Yen
Chris Brinker
Chris Hunt
Chris Jerdonek
Chris Kuehl
+Chris Markiewicz
Chris McDonough
Chris Pawley
Chris Pryer
@@ -234,6 +237,7 @@ Dos Moonen
Douglas Thor
DrFeathers
Dustin Ingram
+Dustin Rodrigues
Dwayne Bailey
Ed Morley
Edgar Ramírez
@@ -365,12 +369,14 @@ Jeff Dairiki
Jeff Widman
Jelmer Vernooij
jenix21
+Jeremy Fleischman
Jeremy Stanley
Jeremy Zafran
Jesse Rittner
Jiashuo Li
Jim Fisher
Jim Garrison
+Jinzhe Zeng
Jiun Bae
Jivan Amara
Joe Bylund
@@ -391,6 +397,7 @@ Jorge Niedbalski
Joseph Bylund
Joseph Long
Josh Bronson
+Josh Cannon
Josh Hansen
Josh Schneier
Joshua
@@ -425,6 +432,7 @@ konstin
kpinc
Krishna Oza
Kumar McMillan
+Kuntal Majumder
Kurt McKee
Kyle Persohn
lakshmanaram
@@ -513,6 +521,7 @@ Miro Hrončok
Monica Baluna
montefra
Monty Taylor
+morotti
mrKazzila
Muha Ajjan
Nadav Wexler
@@ -625,6 +634,7 @@ Richard Jones
Richard Si
Ricky Ng-Adam
Rishi
+rmorotti
RobberPhex
Robert Collins
Robert McGibbon
@@ -700,6 +710,7 @@ Stéphane Klein
Sumana Harihareswara
Surbhi Sharma
Sviatoslav Sydorenko
+Sviatoslav Sydorenko (Святослав Сидоренко)
Swat009
Sylvain
Takayuki SHIMIZUKAWA
diff --git a/contrib/python/pip/pip/__init__.py b/contrib/python/pip/pip/__init__.py
index 60c2d0e20b..640e922f53 100644
--- a/contrib/python/pip/pip/__init__.py
+++ b/contrib/python/pip/pip/__init__.py
@@ -1,6 +1,6 @@
from typing import List, Optional
-__version__ = "24.1.2"
+__version__ = "24.2"
def main(args: Optional[List[str]] = None) -> int:
diff --git a/contrib/python/pip/pip/_internal/build_env.py b/contrib/python/pip/pip/_internal/build_env.py
index 838de86474..be1e0ca85d 100644
--- a/contrib/python/pip/pip/_internal/build_env.py
+++ b/contrib/python/pip/pip/_internal/build_env.py
@@ -12,7 +12,6 @@ from types import TracebackType
from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
from pip._vendor.certifi import where
-from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.version import Version
from pip import __file__ as pip_location
@@ -20,6 +19,7 @@ from pip._internal.cli.spinners import open_spinner
from pip._internal.locations import get_platlib, get_purelib, get_scheme
from pip._internal.metadata import get_default_environment, get_environment
from pip._internal.utils.logging import VERBOSE
+from pip._internal.utils.packaging import get_requirement
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
@@ -184,7 +184,7 @@ class BuildEnvironment:
else get_default_environment()
)
for req_str in reqs:
- req = Requirement(req_str)
+ req = get_requirement(req_str)
# We're explicitly evaluating with an empty extra value, since build
# environments are not provided any mechanism to select specific extras.
if req.marker is not None and not req.marker.evaluate({"extra": ""}):
@@ -241,6 +241,7 @@ class BuildEnvironment:
"--prefix",
prefix.path,
"--no-warn-script-location",
+ "--disable-pip-version-check",
]
if logger.getEffectiveLevel() <= logging.DEBUG:
args.append("-vv")
diff --git a/contrib/python/pip/pip/_internal/cli/base_command.py b/contrib/python/pip/pip/_internal/cli/base_command.py
index 09f8c75ff8..bc1ab65949 100644
--- a/contrib/python/pip/pip/_internal/cli/base_command.py
+++ b/contrib/python/pip/pip/_internal/cli/base_command.py
@@ -1,6 +1,5 @@
"""Base Command class, and related routines"""
-import functools
import logging
import logging.config
import optparse
@@ -8,8 +7,9 @@ import os
import sys
import traceback
from optparse import Values
-from typing import Any, Callable, List, Optional, Tuple
+from typing import List, Optional, Tuple
+from pip._vendor.rich import reconfigure
from pip._vendor.rich import traceback as rich_traceback
from pip._internal.cli import cmdoptions
@@ -90,6 +90,63 @@ class Command(CommandContextMixIn):
def run(self, options: Values, args: List[str]) -> int:
raise NotImplementedError
+ def _run_wrapper(self, level_number: int, options: Values, args: List[str]) -> int:
+ def _inner_run() -> int:
+ try:
+ return self.run(options, args)
+ finally:
+ self.handle_pip_version_check(options)
+
+ if options.debug_mode:
+ rich_traceback.install(show_locals=True)
+ return _inner_run()
+
+ try:
+ status = _inner_run()
+ assert isinstance(status, int)
+ return status
+ except DiagnosticPipError as exc:
+ logger.error("%s", exc, extra={"rich": True})
+ logger.debug("Exception information:", exc_info=True)
+
+ return ERROR
+ except PreviousBuildDirError as exc:
+ logger.critical(str(exc))
+ logger.debug("Exception information:", exc_info=True)
+
+ return PREVIOUS_BUILD_DIR_ERROR
+ except (
+ InstallationError,
+ BadCommand,
+ NetworkConnectionError,
+ ) as exc:
+ logger.critical(str(exc))
+ logger.debug("Exception information:", exc_info=True)
+
+ return ERROR
+ except CommandError as exc:
+ logger.critical("%s", exc)
+ logger.debug("Exception information:", exc_info=True)
+
+ return ERROR
+ except BrokenStdoutLoggingError:
+ # Bypass our logger and write any remaining messages to
+ # stderr because stdout no longer works.
+ print("ERROR: Pipe to stdout was broken", file=sys.stderr)
+ if level_number <= logging.DEBUG:
+ traceback.print_exc(file=sys.stderr)
+
+ return ERROR
+ except KeyboardInterrupt:
+ logger.critical("Operation cancelled by user")
+ logger.debug("Exception information:", exc_info=True)
+
+ return ERROR
+ except BaseException:
+ logger.critical("Exception:", exc_info=True)
+
+ return UNKNOWN_ERROR
+
def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
# factored out for testability
return self.parser.parse_args(args)
@@ -115,6 +172,7 @@ class Command(CommandContextMixIn):
# Set verbosity so that it can be used elsewhere.
self.verbosity = options.verbose - options.quiet
+ reconfigure(no_color=options.no_color)
level_number = setup_logging(
verbosity=self.verbosity,
no_color=options.no_color,
@@ -170,65 +228,4 @@ class Command(CommandContextMixIn):
)
options.cache_dir = None
- def intercepts_unhandled_exc(
- run_func: Callable[..., int]
- ) -> Callable[..., int]:
- @functools.wraps(run_func)
- def exc_logging_wrapper(*args: Any) -> int:
- try:
- status = run_func(*args)
- assert isinstance(status, int)
- return status
- except DiagnosticPipError as exc:
- logger.error("%s", exc, extra={"rich": True})
- logger.debug("Exception information:", exc_info=True)
-
- return ERROR
- except PreviousBuildDirError as exc:
- logger.critical(str(exc))
- logger.debug("Exception information:", exc_info=True)
-
- return PREVIOUS_BUILD_DIR_ERROR
- except (
- InstallationError,
- BadCommand,
- NetworkConnectionError,
- ) as exc:
- logger.critical(str(exc))
- logger.debug("Exception information:", exc_info=True)
-
- return ERROR
- except CommandError as exc:
- logger.critical("%s", exc)
- logger.debug("Exception information:", exc_info=True)
-
- return ERROR
- except BrokenStdoutLoggingError:
- # Bypass our logger and write any remaining messages to
- # stderr because stdout no longer works.
- print("ERROR: Pipe to stdout was broken", file=sys.stderr)
- if level_number <= logging.DEBUG:
- traceback.print_exc(file=sys.stderr)
-
- return ERROR
- except KeyboardInterrupt:
- logger.critical("Operation cancelled by user")
- logger.debug("Exception information:", exc_info=True)
-
- return ERROR
- except BaseException:
- logger.critical("Exception:", exc_info=True)
-
- return UNKNOWN_ERROR
-
- return exc_logging_wrapper
-
- try:
- if not options.debug_mode:
- run = intercepts_unhandled_exc(self.run)
- else:
- run = self.run
- rich_traceback.install(show_locals=True)
- return run(options, args)
- finally:
- self.handle_pip_version_check(options)
+ return self._run_wrapper(level_number, options, args)
diff --git a/contrib/python/pip/pip/_internal/cli/cmdoptions.py b/contrib/python/pip/pip/_internal/cli/cmdoptions.py
index a47f8a3f46..0b7cff77bd 100644
--- a/contrib/python/pip/pip/_internal/cli/cmdoptions.py
+++ b/contrib/python/pip/pip/_internal/cli/cmdoptions.py
@@ -996,6 +996,7 @@ no_python_version_warning: Callable[..., Option] = partial(
# Features that are now always on. A warning is printed if they are used.
ALWAYS_ENABLED_FEATURES = [
+ "truststore", # always on since 24.2
"no-binary-enable-wheel-cache", # always on since 23.1
]
@@ -1008,7 +1009,6 @@ use_new_feature: Callable[..., Option] = partial(
default=[],
choices=[
"fast-deps",
- "truststore",
]
+ ALWAYS_ENABLED_FEATURES,
help="Enable new functionality, that may be backward incompatible.",
@@ -1023,6 +1023,7 @@ use_deprecated_feature: Callable[..., Option] = partial(
default=[],
choices=[
"legacy-resolver",
+ "legacy-certs",
],
help=("Enable deprecated functionality, that will be removed in the future."),
)
diff --git a/contrib/python/pip/pip/_internal/cli/index_command.py b/contrib/python/pip/pip/_internal/cli/index_command.py
index 4ff7b2c3a5..226f8da1e9 100644
--- a/contrib/python/pip/pip/_internal/cli/index_command.py
+++ b/contrib/python/pip/pip/_internal/cli/index_command.py
@@ -12,9 +12,10 @@ import sys
from optparse import Values
from typing import TYPE_CHECKING, List, Optional
+from pip._vendor import certifi
+
from pip._internal.cli.base_command import Command
from pip._internal.cli.command_context import CommandContextMixIn
-from pip._internal.exceptions import CommandError
if TYPE_CHECKING:
from ssl import SSLContext
@@ -26,7 +27,8 @@ logger = logging.getLogger(__name__)
def _create_truststore_ssl_context() -> Optional["SSLContext"]:
if sys.version_info < (3, 10):
- raise CommandError("The truststore feature is only available for Python 3.10+")
+ logger.debug("Disabling truststore because Python version isn't 3.10+")
+ return None
try:
import ssl
@@ -36,10 +38,13 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]:
try:
from pip._vendor import truststore
- except ImportError as e:
- raise CommandError(f"The truststore feature is unavailable: {e}")
+ except ImportError:
+ logger.warning("Disabling truststore because platform isn't supported")
+ return None
- return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ ctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ ctx.load_verify_locations(certifi.where())
+ return ctx
class SessionCommandMixin(CommandContextMixIn):
@@ -80,20 +85,14 @@ class SessionCommandMixin(CommandContextMixIn):
options: Values,
retries: Optional[int] = None,
timeout: Optional[int] = None,
- fallback_to_certifi: bool = False,
) -> "PipSession":
from pip._internal.network.session import PipSession
cache_dir = options.cache_dir
assert not cache_dir or os.path.isabs(cache_dir)
- if "truststore" in options.features_enabled:
- try:
- ssl_context = _create_truststore_ssl_context()
- except Exception:
- if not fallback_to_certifi:
- raise
- ssl_context = None
+ if "legacy-certs" not in options.deprecated_features_enabled:
+ ssl_context = _create_truststore_ssl_context()
else:
ssl_context = None
@@ -157,16 +156,15 @@ class IndexGroupCommand(Command, SessionCommandMixin):
if options.disable_pip_version_check or options.no_index:
return
- # Otherwise, check if we're using the latest version of pip available.
- session = self._build_session(
- options,
- retries=0,
- timeout=min(5, options.timeout),
- # This is set to ensure the function does not fail when truststore is
- # specified in use-feature but cannot be loaded. This usually raises a
- # CommandError and shows a nice user-facing error, but this function is not
- # called in that try-except block.
- fallback_to_certifi=True,
- )
- with session:
- _pip_self_version_check(session, options)
+ try:
+ # Otherwise, check if we're using the latest version of pip available.
+ session = self._build_session(
+ options,
+ retries=0,
+ timeout=min(5, options.timeout),
+ )
+ with session:
+ _pip_self_version_check(session, options)
+ except Exception:
+ logger.warning("There was an error checking the latest version of pip.")
+ logger.debug("See below for error", exc_info=True)
diff --git a/contrib/python/pip/pip/_internal/cli/progress_bars.py b/contrib/python/pip/pip/_internal/cli/progress_bars.py
index b842b1b316..883359c9ce 100644
--- a/contrib/python/pip/pip/_internal/cli/progress_bars.py
+++ b/contrib/python/pip/pip/_internal/cli/progress_bars.py
@@ -49,7 +49,7 @@ def _rich_progress_bar(
TimeRemainingColumn(),
)
- progress = Progress(*columns, refresh_per_second=30)
+ progress = Progress(*columns, refresh_per_second=5)
task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
with progress:
for chunk in iterable:
diff --git a/contrib/python/pip/pip/_internal/commands/check.py b/contrib/python/pip/pip/_internal/commands/check.py
index 584df9f55c..f54a16dc0a 100644
--- a/contrib/python/pip/pip/_internal/commands/check.py
+++ b/contrib/python/pip/pip/_internal/commands/check.py
@@ -4,10 +4,13 @@ from typing import List
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
+from pip._internal.metadata import get_default_environment
from pip._internal.operations.check import (
check_package_set,
+ check_unsupported,
create_package_set_from_installed,
)
+from pip._internal.utils.compatibility_tags import get_supported
from pip._internal.utils.misc import write_output
logger = logging.getLogger(__name__)
@@ -16,12 +19,19 @@ logger = logging.getLogger(__name__)
class CheckCommand(Command):
"""Verify installed packages have compatible dependencies."""
+ ignore_require_venv = True
usage = """
%prog [options]"""
def run(self, options: Values, args: List[str]) -> int:
package_set, parsing_probs = create_package_set_from_installed()
missing, conflicting = check_package_set(package_set)
+ unsupported = list(
+ check_unsupported(
+ get_default_environment().iter_installed_distributions(),
+ get_supported(),
+ )
+ )
for project_name in missing:
version = package_set[project_name].version
@@ -44,8 +54,13 @@ class CheckCommand(Command):
dep_name,
dep_version,
)
-
- if missing or conflicting or parsing_probs:
+ for package in unsupported:
+ write_output(
+ "%s %s is not supported on this platform",
+ package.raw_name,
+ package.version,
+ )
+ if missing or conflicting or parsing_probs or unsupported:
return ERROR
else:
write_output("No broken requirements found.")
diff --git a/contrib/python/pip/pip/_internal/commands/freeze.py b/contrib/python/pip/pip/_internal/commands/freeze.py
index fd9d88a8b0..885fdfeb83 100644
--- a/contrib/python/pip/pip/_internal/commands/freeze.py
+++ b/contrib/python/pip/pip/_internal/commands/freeze.py
@@ -29,6 +29,7 @@ class FreezeCommand(Command):
packages are listed in a case-insensitive sorted order.
"""
+ ignore_require_venv = True
usage = """
%prog [options]"""
log_streams = ("ext://sys.stderr", "ext://sys.stderr")
diff --git a/contrib/python/pip/pip/_internal/commands/install.py b/contrib/python/pip/pip/_internal/commands/install.py
index d5b06c8c78..ad45a2f2a5 100644
--- a/contrib/python/pip/pip/_internal/commands/install.py
+++ b/contrib/python/pip/pip/_internal/commands/install.py
@@ -7,6 +7,7 @@ import site
from optparse import SUPPRESS_HELP, Values
from typing import List, Optional
+from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.rich import print_json
from pip._internal.cache import WheelCache
@@ -370,6 +371,7 @@ class InstallCommand(RequirementCommand):
force_reinstall=options.force_reinstall,
upgrade_strategy=upgrade_strategy,
use_pep517=options.use_pep517,
+ py_version_info=options.python_version,
)
self.trace_basic_info(finder)
@@ -472,17 +474,21 @@ class InstallCommand(RequirementCommand):
)
env = get_environment(lib_locations)
+ # Display a summary of installed packages, with extra care to
+ # display a package name as it was requested by the user.
installed.sort(key=operator.attrgetter("name"))
- items = []
- for result in installed:
- item = result.name
- try:
- installed_dist = env.get_distribution(item)
- if installed_dist is not None:
- item = f"{item}-{installed_dist.version}"
- except Exception:
- pass
- items.append(item)
+ summary = []
+ installed_versions = {}
+ for distribution in env.iter_all_distributions():
+ installed_versions[distribution.canonical_name] = distribution.version
+ for package in installed:
+ display_name = package.name
+ version = installed_versions.get(canonicalize_name(display_name), None)
+ if version:
+ text = f"{display_name}-{version}"
+ else:
+ text = display_name
+ summary.append(text)
if conflicts is not None:
self._warn_about_conflicts(
@@ -490,7 +496,7 @@ class InstallCommand(RequirementCommand):
resolver_variant=self.determine_resolver_variant(options),
)
- installed_desc = " ".join(items)
+ installed_desc = " ".join(summary)
if installed_desc:
write_output(
"Successfully installed %s",
diff --git a/contrib/python/pip/pip/_internal/index/package_finder.py b/contrib/python/pip/pip/_internal/index/package_finder.py
index fb270f22f8..0d65ce35f3 100644
--- a/contrib/python/pip/pip/_internal/index/package_finder.py
+++ b/contrib/python/pip/pip/_internal/index/package_finder.py
@@ -452,24 +452,23 @@ class CandidateEvaluator:
# Using None infers from the specifier instead.
allow_prereleases = self._allow_all_prereleases or None
specifier = self._specifier
- versions = {
- str(v)
- for v in specifier.filter(
- # We turn the version object into a str here because otherwise
- # when we're debundled but setuptools isn't, Python will see
- # packaging.version.Version and
- # pkg_resources._vendor.packaging.version.Version as different
- # types. This way we'll use a str as a common data interchange
- # format. If we stop using the pkg_resources provided specifier
- # and start using our own, we can drop the cast to str().
- (str(c.version) for c in candidates),
+
+ # We turn the version object into a str here because otherwise
+ # when we're debundled but setuptools isn't, Python will see
+ # packaging.version.Version and
+ # pkg_resources._vendor.packaging.version.Version as different
+ # types. This way we'll use a str as a common data interchange
+ # format. If we stop using the pkg_resources provided specifier
+ # and start using our own, we can drop the cast to str().
+ candidates_and_versions = [(c, str(c.version)) for c in candidates]
+ versions = set(
+ specifier.filter(
+ (v for _, v in candidates_and_versions),
prereleases=allow_prereleases,
)
- }
-
- # Again, converting version to str to deal with debundling.
- applicable_candidates = [c for c in candidates if str(c.version) in versions]
+ )
+ applicable_candidates = [c for c, v in candidates_and_versions if v in versions]
filtered_applicable_candidates = filter_unallowed_hashes(
candidates=applicable_candidates,
hashes=self._hashes,
diff --git a/contrib/python/pip/pip/_internal/metadata/importlib/_compat.py b/contrib/python/pip/pip/_internal/metadata/importlib/_compat.py
index 593bff23ed..ec1e815cdb 100644
--- a/contrib/python/pip/pip/_internal/metadata/importlib/_compat.py
+++ b/contrib/python/pip/pip/_internal/metadata/importlib/_compat.py
@@ -1,5 +1,8 @@
import importlib.metadata
-from typing import Any, Optional, Protocol, cast
+import os
+from typing import Any, Optional, Protocol, Tuple, cast
+
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
class BadMetadata(ValueError):
@@ -43,13 +46,40 @@ def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]:
return getattr(d, "_path", None)
-def get_dist_name(dist: importlib.metadata.Distribution) -> str:
- """Get the distribution's project name.
+def parse_name_and_version_from_info_directory(
+ dist: importlib.metadata.Distribution,
+) -> Tuple[Optional[str], Optional[str]]:
+ """Get a name and version from the metadata directory name.
+
+ This is much faster than reading distribution metadata.
+ """
+ info_location = get_info_location(dist)
+ if info_location is None:
+ return None, None
+
+ stem, suffix = os.path.splitext(info_location.name)
+ if suffix == ".dist-info":
+ name, sep, version = stem.partition("-")
+ if sep:
+ return name, version
+
+ if suffix == ".egg-info":
+ name = stem.split("-", 1)[0]
+ return name, None
+
+ return None, None
+
+
+def get_dist_canonical_name(dist: importlib.metadata.Distribution) -> NormalizedName:
+ """Get the distribution's normalized name.
The ``name`` attribute is only available in Python 3.10 or later. We are
targeting exactly that, but Mypy does not know this.
"""
+ if name := parse_name_and_version_from_info_directory(dist)[0]:
+ return canonicalize_name(name)
+
name = cast(Any, dist).name
if not isinstance(name, str):
raise BadMetadata(dist, reason="invalid metadata entry 'name'")
- return name
+ return canonicalize_name(name)
diff --git a/contrib/python/pip/pip/_internal/metadata/importlib/_dists.py b/contrib/python/pip/pip/_internal/metadata/importlib/_dists.py
index f65ccb1e70..36cd326232 100644
--- a/contrib/python/pip/pip/_internal/metadata/importlib/_dists.py
+++ b/contrib/python/pip/pip/_internal/metadata/importlib/_dists.py
@@ -1,6 +1,5 @@
import email.message
import importlib.metadata
-import os
import pathlib
import zipfile
from typing import (
@@ -27,10 +26,15 @@ from pip._internal.metadata.base import (
Wheel,
)
from pip._internal.utils.misc import normalize_path
+from pip._internal.utils.packaging import get_requirement
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
-from ._compat import BasePath, get_dist_name
+from ._compat import (
+ BasePath,
+ get_dist_canonical_name,
+ parse_name_and_version_from_info_directory,
+)
class WheelDistribution(importlib.metadata.Distribution):
@@ -153,25 +157,14 @@ class Distribution(BaseDistribution):
return None
return normalize_path(str(self._installed_location))
- def _get_dist_name_from_location(self) -> Optional[str]:
- """Try to get the name from the metadata directory name.
-
- This is much faster than reading metadata.
- """
- if self._info_location is None:
- return None
- stem, suffix = os.path.splitext(self._info_location.name)
- if suffix not in (".dist-info", ".egg-info"):
- return None
- return stem.split("-", 1)[0]
-
@property
def canonical_name(self) -> NormalizedName:
- name = self._get_dist_name_from_location() or get_dist_name(self._dist)
- return canonicalize_name(name)
+ return get_dist_canonical_name(self._dist)
@property
def version(self) -> Version:
+ if version := parse_name_and_version_from_info_directory(self._dist)[1]:
+ return parse_version(version)
return parse_version(self._dist.version)
@property
@@ -219,7 +212,7 @@ class Distribution(BaseDistribution):
for req_string in self.metadata.get_all("Requires-Dist", []):
# strip() because email.message.Message.get_all() may return a leading \n
# in case a long header was wrapped.
- req = Requirement(req_string.strip())
+ req = get_requirement(req_string.strip())
if not req.marker:
yield req
elif not extras and req.marker.evaluate({"extra": ""}):
diff --git a/contrib/python/pip/pip/_internal/metadata/importlib/_envs.py b/contrib/python/pip/pip/_internal/metadata/importlib/_envs.py
index 2df738fc73..70cb7a6009 100644
--- a/contrib/python/pip/pip/_internal/metadata/importlib/_envs.py
+++ b/contrib/python/pip/pip/_internal/metadata/importlib/_envs.py
@@ -15,7 +15,7 @@ from pip._internal.models.wheel import Wheel
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filetypes import WHEEL_EXTENSION
-from ._compat import BadMetadata, BasePath, get_dist_name, get_info_location
+from ._compat import BadMetadata, BasePath, get_dist_canonical_name, get_info_location
from ._dists import Distribution
logger = logging.getLogger(__name__)
@@ -61,14 +61,13 @@ class _DistributionFinder:
for dist in importlib.metadata.distributions(path=[location]):
info_location = get_info_location(dist)
try:
- raw_name = get_dist_name(dist)
+ name = get_dist_canonical_name(dist)
except BadMetadata as e:
logger.warning("Skipping %s due to %s", info_location, e.reason)
continue
- normalized_name = canonicalize_name(raw_name)
- if normalized_name in self._found_names:
+ if name in self._found_names:
continue
- self._found_names.add(normalized_name)
+ self._found_names.add(name)
yield dist, info_location
def find(self, location: str) -> Iterator[BaseDistribution]:
@@ -181,9 +180,10 @@ class Environment(BaseEnvironment):
yield from finder.find_linked(location)
def get_distribution(self, name: str) -> Optional[BaseDistribution]:
+ canonical_name = canonicalize_name(name)
matches = (
distribution
for distribution in self.iter_all_distributions()
- if distribution.canonical_name == canonicalize_name(name)
+ if distribution.canonical_name == canonical_name
)
return next(matches, None)
diff --git a/contrib/python/pip/pip/_internal/network/auth.py b/contrib/python/pip/pip/_internal/network/auth.py
index 4705b55a7a..1a2606ed08 100644
--- a/contrib/python/pip/pip/_internal/network/auth.py
+++ b/contrib/python/pip/pip/_internal/network/auth.py
@@ -271,6 +271,10 @@ class MultiDomainBasicAuth(AuthBase):
try:
return self.keyring_provider.get_auth_info(url, username)
except Exception as exc:
+ # Log the full exception (with stacktrace) at debug, so it'll only
+ # show up when running in verbose mode.
+ logger.debug("Keyring is skipped due to an exception", exc_info=True)
+ # Always log a shortened version of the exception.
logger.warning(
"Keyring is skipped due to an exception: %s",
str(exc),
diff --git a/contrib/python/pip/pip/_internal/network/download.py b/contrib/python/pip/pip/_internal/network/download.py
index 032fdd0314..5c3bce3d2f 100644
--- a/contrib/python/pip/pip/_internal/network/download.py
+++ b/contrib/python/pip/pip/_internal/network/download.py
@@ -7,7 +7,7 @@ import mimetypes
import os
from typing import Iterable, Optional, Tuple
-from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+from pip._vendor.requests.models import Response
from pip._internal.cli.progress_bars import get_download_progress_renderer
from pip._internal.exceptions import NetworkConnectionError
@@ -56,12 +56,12 @@ def _prepare_download(
show_progress = False
elif not total_length:
show_progress = True
- elif total_length > (40 * 1000):
+ elif total_length > (512 * 1024):
show_progress = True
else:
show_progress = False
- chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)
+ chunks = response_chunks(resp)
if not show_progress:
return chunks
diff --git a/contrib/python/pip/pip/_internal/network/utils.py b/contrib/python/pip/pip/_internal/network/utils.py
index 134848ae52..bba4c265e8 100644
--- a/contrib/python/pip/pip/_internal/network/utils.py
+++ b/contrib/python/pip/pip/_internal/network/utils.py
@@ -1,6 +1,6 @@
from typing import Dict, Generator
-from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
+from pip._vendor.requests.models import Response
from pip._internal.exceptions import NetworkConnectionError
@@ -25,6 +25,8 @@ from pip._internal.exceptions import NetworkConnectionError
# possible to make this work.
HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"}
+DOWNLOAD_CHUNK_SIZE = 256 * 1024
+
def raise_for_status(resp: Response) -> None:
http_error_msg = ""
@@ -55,7 +57,7 @@ def raise_for_status(resp: Response) -> None:
def response_chunks(
- response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
+ response: Response, chunk_size: int = DOWNLOAD_CHUNK_SIZE
) -> Generator[bytes, None, None]:
"""Given a requests Response, provide the data chunks."""
try:
diff --git a/contrib/python/pip/pip/_internal/operations/check.py b/contrib/python/pip/pip/_internal/operations/check.py
index 623db76e22..4b6fbc4c37 100644
--- a/contrib/python/pip/pip/_internal/operations/check.py
+++ b/contrib/python/pip/pip/_internal/operations/check.py
@@ -2,14 +2,30 @@
"""
import logging
-from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
+from contextlib import suppress
+from email.parser import Parser
+from functools import reduce
+from typing import (
+ Callable,
+ Dict,
+ FrozenSet,
+ Generator,
+ Iterable,
+ List,
+ NamedTuple,
+ Optional,
+ Set,
+ Tuple,
+)
from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.tags import Tag, parse_tag
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import Version
from pip._internal.distributions import make_distribution_for_install_requirement
from pip._internal.metadata import get_default_environment
+from pip._internal.metadata.base import BaseDistribution
from pip._internal.req.req_install import InstallRequirement
logger = logging.getLogger(__name__)
@@ -113,6 +129,22 @@ def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDet
)
+def check_unsupported(
+ packages: Iterable[BaseDistribution],
+ supported_tags: Iterable[Tag],
+) -> Generator[BaseDistribution, None, None]:
+ for p in packages:
+ with suppress(FileNotFoundError):
+ wheel_file = p.read_text("WHEEL")
+ wheel_tags: FrozenSet[Tag] = reduce(
+ frozenset.union,
+ map(parse_tag, Parser().parsestr(wheel_file).get_all("Tag", [])),
+ frozenset(),
+ )
+ if wheel_tags.isdisjoint(supported_tags):
+ yield p
+
+
def _simulate_installation_of(
to_install: List[InstallRequirement], package_set: PackageSet
) -> Set[NormalizedName]:
diff --git a/contrib/python/pip/pip/_internal/operations/install/wheel.py b/contrib/python/pip/pip/_internal/operations/install/wheel.py
index a02a193d22..aef42aa9ee 100644
--- a/contrib/python/pip/pip/_internal/operations/install/wheel.py
+++ b/contrib/python/pip/pip/_internal/operations/install/wheel.py
@@ -358,12 +358,6 @@ class ZipBackedFile:
return self._zip_file.getinfo(self.src_record_path)
def save(self) -> None:
- # directory creation is lazy and after file filtering
- # to ensure we don't install empty dirs; empty dirs can't be
- # uninstalled.
- parent_dir = os.path.dirname(self.dest_path)
- ensure_dir(parent_dir)
-
# When we open the output file below, any existing file is truncated
# before we start writing the new contents. This is fine in most
# cases, but can cause a segfault if pip has loaded a shared
@@ -377,9 +371,13 @@ class ZipBackedFile:
zipinfo = self._getinfo()
- with self._zip_file.open(zipinfo) as f:
- with open(self.dest_path, "wb") as dest:
- shutil.copyfileobj(f, dest)
+ # optimization: the file is created by open(),
+ # skip the decompression when there is 0 bytes to decompress.
+ with open(self.dest_path, "wb") as dest:
+ if zipinfo.file_size > 0:
+ with self._zip_file.open(zipinfo) as f:
+ blocksize = min(zipinfo.file_size, 1024 * 1024)
+ shutil.copyfileobj(f, dest, blocksize)
if zip_item_is_executable(zipinfo):
set_extracted_file_to_default_mode_plus_executable(self.dest_path)
@@ -421,7 +419,7 @@ class PipScriptMaker(ScriptMaker):
return super().make(specification, options)
-def _install_wheel(
+def _install_wheel( # noqa: C901, PLR0915 function is too long
name: str,
wheel_zip: ZipFile,
wheel_path: str,
@@ -580,7 +578,15 @@ def _install_wheel(
script_scheme_files = map(ScriptFile, script_scheme_files)
files = chain(files, script_scheme_files)
+ existing_parents = set()
for file in files:
+ # directory creation is lazy and after file filtering
+ # to ensure we don't install empty dirs; empty dirs can't be
+ # uninstalled.
+ parent_dir = os.path.dirname(file.dest_path)
+ if parent_dir not in existing_parents:
+ ensure_dir(parent_dir)
+ existing_parents.add(parent_dir)
file.save()
record_installed(file.src_record_path, file.dest_path, file.changed)
diff --git a/contrib/python/pip/pip/_internal/pyproject.py b/contrib/python/pip/pip/_internal/pyproject.py
index 8de36b873e..2a9cad4803 100644
--- a/contrib/python/pip/pip/_internal/pyproject.py
+++ b/contrib/python/pip/pip/_internal/pyproject.py
@@ -1,16 +1,22 @@
import importlib.util
import os
+import sys
from collections import namedtuple
from typing import Any, List, Optional
-from pip._vendor import tomli
-from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+if sys.version_info >= (3, 11):
+ import tomllib
+else:
+ from pip._vendor import tomli as tomllib
+
+from pip._vendor.packaging.requirements import InvalidRequirement
from pip._internal.exceptions import (
InstallationError,
InvalidPyProjectBuildRequires,
MissingPyProjectBuildRequires,
)
+from pip._internal.utils.packaging import get_requirement
def _is_list_of_str(obj: Any) -> bool:
@@ -61,7 +67,7 @@ def load_pyproject_toml(
if has_pyproject:
with open(pyproject_toml, encoding="utf-8") as f:
- pp_toml = tomli.loads(f.read())
+ pp_toml = tomllib.loads(f.read())
build_system = pp_toml.get("build-system")
else:
build_system = None
@@ -151,7 +157,7 @@ def load_pyproject_toml(
# Each requirement must be valid as per PEP 508
for requirement in requires:
try:
- Requirement(requirement)
+ get_requirement(requirement)
except InvalidRequirement as error:
raise InvalidPyProjectBuildRequires(
package=req_name,
diff --git a/contrib/python/pip/pip/_internal/req/constructors.py b/contrib/python/pip/pip/_internal/req/constructors.py
index b8e170f2a7..d73236e05c 100644
--- a/contrib/python/pip/pip/_internal/req/constructors.py
+++ b/contrib/python/pip/pip/_internal/req/constructors.py
@@ -81,7 +81,7 @@ def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requireme
pre is not None and post is not None
), f"regex group selection for requirement {req} failed, this should never happen"
extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else ""
- return Requirement(f"{pre}{extras}{post}")
+ return get_requirement(f"{pre}{extras}{post}")
def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
@@ -163,7 +163,7 @@ def check_first_requirement_in_file(filename: str) -> None:
# If there is a line continuation, drop it, and append the next line.
if line.endswith("\\"):
line = line[:-2].strip() + next(lines, "")
- Requirement(line)
+ get_requirement(line)
return
@@ -205,7 +205,7 @@ def parse_req_from_editable(editable_req: str) -> RequirementParts:
if name is not None:
try:
- req: Optional[Requirement] = Requirement(name)
+ req: Optional[Requirement] = get_requirement(name)
except InvalidRequirement as exc:
raise InstallationError(f"Invalid requirement: {name!r}: {exc}")
else:
diff --git a/contrib/python/pip/pip/_internal/req/req_install.py b/contrib/python/pip/pip/_internal/req/req_install.py
index 213278588d..834bc51335 100644
--- a/contrib/python/pip/pip/_internal/req/req_install.py
+++ b/contrib/python/pip/pip/_internal/req/req_install.py
@@ -52,6 +52,7 @@ from pip._internal.utils.misc import (
redact_auth_from_requirement,
redact_auth_from_url,
)
+from pip._internal.utils.packaging import get_requirement
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.unpacking import unpack_file
@@ -395,7 +396,7 @@ class InstallRequirement:
else:
op = "==="
- self.req = Requirement(
+ self.req = get_requirement(
"".join(
[
self.metadata["Name"],
@@ -421,7 +422,7 @@ class InstallRequirement:
metadata_name,
self.name,
)
- self.req = Requirement(metadata_name)
+ self.req = get_requirement(metadata_name)
def check_if_exists(self, use_user_site: bool) -> None:
"""Find an installed distribution that satisfies or conflicts
@@ -824,6 +825,21 @@ class InstallRequirement:
)
if self.editable and not self.is_wheel:
+ deprecated(
+ reason=(
+ f"Legacy editable install of {self} (setup.py develop) "
+ "is deprecated."
+ ),
+ replacement=(
+ "to add a pyproject.toml or enable --use-pep517, "
+ "and use setuptools >= 64. "
+ "If the resulting installation is not behaving as expected, "
+ "try using --config-settings editable_mode=compat. "
+ "Please consult the setuptools documentation for more information"
+ ),
+ gone_in="25.0",
+ issue=11457,
+ )
if self.config_settings:
logger.warning(
"--config-settings ignored for legacy editable install of %s. "
@@ -909,7 +925,7 @@ def check_legacy_setup_py_options(
reason="--build-option and --global-option are deprecated.",
issue=11859,
replacement="to use --config-settings",
- gone_in="24.2",
+ gone_in="25.0",
)
logger.warning(
"Implying --no-binary=:all: due to the presence of "
diff --git a/contrib/python/pip/pip/_internal/resolution/resolvelib/factory.py b/contrib/python/pip/pip/_internal/resolution/resolvelib/factory.py
index 1f31d834b0..145bdbf71a 100644
--- a/contrib/python/pip/pip/_internal/resolution/resolvelib/factory.py
+++ b/contrib/python/pip/pip/_internal/resolution/resolvelib/factory.py
@@ -121,6 +121,7 @@ class Factory:
self._extras_candidate_cache: Dict[
Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate
] = {}
+ self._supported_tags_cache = get_supported()
if not ignore_installed:
env = get_default_environment()
@@ -608,7 +609,7 @@ class Factory:
return self._wheel_cache.get_cache_entry(
link=link,
package_name=name,
- supported_tags=get_supported(),
+ supported_tags=self._supported_tags_cache,
)
def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]:
diff --git a/contrib/python/pip/pip/_internal/resolution/resolvelib/reporter.py b/contrib/python/pip/pip/_internal/resolution/resolvelib/reporter.py
index 12adeff7b6..0594569d85 100644
--- a/contrib/python/pip/pip/_internal/resolution/resolvelib/reporter.py
+++ b/contrib/python/pip/pip/_internal/resolution/resolvelib/reporter.py
@@ -66,6 +66,7 @@ class PipDebuggingReporter(BaseReporter):
def ending_round(self, index: int, state: Any) -> None:
logger.info("Reporter.ending_round(%r, state)", index)
+ logger.debug("Reporter.ending_round(%r, %r)", index, state)
def ending(self, state: Any) -> None:
logger.info("Reporter.ending(%r)", state)
diff --git a/contrib/python/pip/pip/_internal/self_outdated_check.py b/contrib/python/pip/pip/_internal/self_outdated_check.py
index 2185f2fb10..f9a91af9d8 100644
--- a/contrib/python/pip/pip/_internal/self_outdated_check.py
+++ b/contrib/python/pip/pip/_internal/self_outdated_check.py
@@ -232,17 +232,13 @@ def pip_self_version_check(session: PipSession, options: optparse.Values) -> Non
if not installed_dist:
return
- try:
- upgrade_prompt = _self_version_check_logic(
- state=SelfCheckState(cache_dir=options.cache_dir),
- current_time=datetime.datetime.now(datetime.timezone.utc),
- local_version=installed_dist.version,
- get_remote_version=functools.partial(
- _get_current_remote_pip_version, session, options
- ),
- )
- if upgrade_prompt is not None:
- logger.warning("%s", upgrade_prompt, extra={"rich": True})
- except Exception:
- logger.warning("There was an error checking the latest version of pip.")
- logger.debug("See below for error", exc_info=True)
+ upgrade_prompt = _self_version_check_logic(
+ state=SelfCheckState(cache_dir=options.cache_dir),
+ current_time=datetime.datetime.now(datetime.timezone.utc),
+ local_version=installed_dist.version,
+ get_remote_version=functools.partial(
+ _get_current_remote_pip_version, session, options
+ ),
+ )
+ if upgrade_prompt is not None:
+ logger.warning("%s", upgrade_prompt, extra={"rich": True})
diff --git a/contrib/python/pip/pip/_internal/utils/filesystem.py b/contrib/python/pip/pip/_internal/utils/filesystem.py
index 83c2df75b9..22e356cdd7 100644
--- a/contrib/python/pip/pip/_internal/utils/filesystem.py
+++ b/contrib/python/pip/pip/_internal/utils/filesystem.py
@@ -7,10 +7,9 @@ from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from typing import Any, BinaryIO, Generator, List, Union, cast
-from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
-
from pip._internal.utils.compat import get_path_uid
from pip._internal.utils.misc import format_size
+from pip._internal.utils.retry import retry
def check_path_owner(path: str) -> bool:
@@ -65,10 +64,7 @@ def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, Non
os.fsync(result.fileno())
-# Tenacity raises RetryError by default, explicitly raise the original exception
-_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25))
-
-replace = _replace_retry(os.replace)
+replace = retry(stop_after_delay=1, wait=0.25)(os.replace)
# test_writable_dir and _test_writable_dir_win are copied from Flit,
diff --git a/contrib/python/pip/pip/_internal/utils/glibc.py b/contrib/python/pip/pip/_internal/utils/glibc.py
index 81342afa44..998868ff2a 100644
--- a/contrib/python/pip/pip/_internal/utils/glibc.py
+++ b/contrib/python/pip/pip/_internal/utils/glibc.py
@@ -40,7 +40,20 @@ def glibc_version_string_ctypes() -> Optional[str]:
# manpage says, "If filename is NULL, then the returned handle is for the
# main program". This way we can let the linker do the work to figure out
# which libc our process is actually using.
- process_namespace = ctypes.CDLL(None)
+ #
+ # We must also handle the special case where the executable is not a
+ # dynamically linked executable. This can occur when using musl libc,
+ # for example. In this situation, dlopen() will error, leading to an
+ # OSError. Interestingly, at least in the case of musl, there is no
+ # errno set on the OSError. The single string argument used to construct
+ # OSError comes from libc itself and is therefore not portable to
+ # hard code here. In any case, failure to call dlopen() means we
+ # can't proceed, so we bail on our attempt.
+ try:
+ process_namespace = ctypes.CDLL(None)
+ except OSError:
+ return None
+
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
@@ -50,7 +63,7 @@ def glibc_version_string_ctypes() -> Optional[str]:
# Call gnu_get_libc_version, which returns a string like "2.5"
gnu_get_libc_version.restype = ctypes.c_char_p
- version_str = gnu_get_libc_version()
+ version_str: str = gnu_get_libc_version()
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
diff --git a/contrib/python/pip/pip/_internal/utils/hashes.py b/contrib/python/pip/pip/_internal/utils/hashes.py
index c073b09dd9..535e94fca0 100644
--- a/contrib/python/pip/pip/_internal/utils/hashes.py
+++ b/contrib/python/pip/pip/_internal/utils/hashes.py
@@ -33,7 +33,7 @@ class Hashes:
if hashes is not None:
for alg, keys in hashes.items():
# Make sure values are always sorted (to ease equality checks)
- allowed[alg] = sorted(keys)
+ allowed[alg] = [k.lower() for k in sorted(keys)]
self._allowed = allowed
def __and__(self, other: "Hashes") -> "Hashes":
diff --git a/contrib/python/pip/pip/_internal/utils/logging.py b/contrib/python/pip/pip/_internal/utils/logging.py
index 90df257821..41f6eb51a2 100644
--- a/contrib/python/pip/pip/_internal/utils/logging.py
+++ b/contrib/python/pip/pip/_internal/utils/logging.py
@@ -154,8 +154,8 @@ class RichPipStreamHandler(RichHandler):
style: Optional[Style] = None
# If we are given a diagnostic error to present, present it with indentation.
- assert isinstance(record.args, tuple)
if getattr(record, "rich", False):
+ assert isinstance(record.args, tuple)
(rich_renderable,) = record.args
assert isinstance(
rich_renderable, (ConsoleRenderable, RichCast, str)
diff --git a/contrib/python/pip/pip/_internal/utils/misc.py b/contrib/python/pip/pip/_internal/utils/misc.py
index 48771c0991..3707e87268 100644
--- a/contrib/python/pip/pip/_internal/utils/misc.py
+++ b/contrib/python/pip/pip/_internal/utils/misc.py
@@ -1,7 +1,6 @@
import errno
import getpass
import hashlib
-import io
import logging
import os
import posixpath
@@ -36,12 +35,12 @@ from typing import (
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
-from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
from pip import __version__
from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment
from pip._internal.locations import get_major_minor_version
from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.retry import retry
from pip._internal.utils.virtualenv import running_under_virtualenv
__all__ = [
@@ -70,6 +69,8 @@ NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
OnExc = Callable[[FunctionType, Path, BaseException], Any]
OnErr = Callable[[FunctionType, Path, ExcInfo], Any]
+FILE_CHUNK_SIZE = 1024 * 1024
+
def get_pip_version() -> str:
pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
@@ -120,12 +121,9 @@ def get_prog() -> str:
# Retry every half second for up to 3 seconds
-# Tenacity raises RetryError by default, explicitly raise the original exception
-@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
+@retry(stop_after_delay=3, wait=0.5)
def rmtree(
- dir: str,
- ignore_errors: bool = False,
- onexc: Optional[OnExc] = None,
+ dir: str, ignore_errors: bool = False, onexc: Optional[OnExc] = None
) -> None:
if ignore_errors:
onexc = _onerror_ignore
@@ -149,7 +147,7 @@ def _onerror_ignore(*_args: Any) -> None:
def _onerror_reraise(*_args: Any) -> None:
- raise
+ raise # noqa: PLE0704 - Bare exception used to reraise existing exception
def rmtree_errorhandler(
@@ -314,7 +312,7 @@ def is_installable_dir(path: str) -> bool:
def read_chunks(
- file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE
+ file: BinaryIO, size: int = FILE_CHUNK_SIZE
) -> Generator[bytes, None, None]:
"""Yield pieces of data from a file-like object until EOF."""
while True:
@@ -644,8 +642,7 @@ def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]:
def partition(
- pred: Callable[[T], bool],
- iterable: Iterable[T],
+ pred: Callable[[T], bool], iterable: Iterable[T]
) -> Tuple[Iterable[T], Iterable[T]]:
"""
Use a predicate to partition entries into false entries and true entries,
diff --git a/contrib/python/pip/pip/_internal/utils/packaging.py b/contrib/python/pip/pip/_internal/utils/packaging.py
index b9f6af4d17..4b8fa0fe39 100644
--- a/contrib/python/pip/pip/_internal/utils/packaging.py
+++ b/contrib/python/pip/pip/_internal/utils/packaging.py
@@ -34,7 +34,7 @@ def check_requires_python(
return python_version in requires_python_specifier
-@functools.lru_cache(maxsize=512)
+@functools.lru_cache(maxsize=2048)
def get_requirement(req_string: str) -> Requirement:
"""Construct a packaging.Requirement object with caching"""
# Parsing requirement strings is expensive, and is also expected to happen
diff --git a/contrib/python/pip/pip/_internal/utils/retry.py b/contrib/python/pip/pip/_internal/utils/retry.py
new file mode 100644
index 0000000000..abfe07286e
--- /dev/null
+++ b/contrib/python/pip/pip/_internal/utils/retry.py
@@ -0,0 +1,42 @@
+import functools
+from time import perf_counter, sleep
+from typing import Callable, TypeVar
+
+from pip._vendor.typing_extensions import ParamSpec
+
+T = TypeVar("T")
+P = ParamSpec("P")
+
+
+def retry(
+ wait: float, stop_after_delay: float
+) -> Callable[[Callable[P, T]], Callable[P, T]]:
+ """Decorator to automatically retry a function on error.
+
+ If the function raises, the function is recalled with the same arguments
+ until it returns or the time limit is reached. When the time limit is
+ surpassed, the last exception raised is reraised.
+
+ :param wait: The time to wait after an error before retrying, in seconds.
+ :param stop_after_delay: The time limit after which retries will cease,
+ in seconds.
+ """
+
+ def wrapper(func: Callable[P, T]) -> Callable[P, T]:
+
+ @functools.wraps(func)
+ def retry_wrapped(*args: P.args, **kwargs: P.kwargs) -> T:
+ # The performance counter is monotonic on all platforms we care
+ # about and has much better resolution than time.monotonic().
+ start_time = perf_counter()
+ while True:
+ try:
+ return func(*args, **kwargs)
+ except Exception:
+ if perf_counter() - start_time > stop_after_delay:
+ raise
+ sleep(wait)
+
+ return retry_wrapped
+
+ return wrapper
diff --git a/contrib/python/pip/pip/_internal/utils/temp_dir.py b/contrib/python/pip/pip/_internal/utils/temp_dir.py
index 4eec5f37f7..06668e8ab2 100644
--- a/contrib/python/pip/pip/_internal/utils/temp_dir.py
+++ b/contrib/python/pip/pip/_internal/utils/temp_dir.py
@@ -208,7 +208,7 @@ class TempDirectory:
if self.ignore_cleanup_errors:
try:
- # first try with tenacity; retrying to handle ephemeral errors
+ # first try with @retry; retrying to handle ephemeral errors
rmtree(self._path, ignore_errors=False)
except OSError:
# last pass ignore/log all errors
diff --git a/contrib/python/pip/pip/_vendor/__init__.py b/contrib/python/pip/pip/_vendor/__init__.py
index 50537ab9de..561089ccc0 100644
--- a/contrib/python/pip/pip/_vendor/__init__.py
+++ b/contrib/python/pip/pip/_vendor/__init__.py
@@ -65,10 +65,10 @@ if DEBUNDLED:
vendored("packaging")
vendored("packaging.version")
vendored("packaging.specifiers")
- vendored("pep517")
vendored("pkg_resources")
vendored("platformdirs")
vendored("progress")
+ vendored("pyproject_hooks")
vendored("requests")
vendored("requests.exceptions")
vendored("requests.packages")
@@ -110,7 +110,7 @@ if DEBUNDLED:
vendored("rich.style")
vendored("rich.text")
vendored("rich.traceback")
- vendored("tenacity")
- vendored("tomli")
+ if sys.version_info < (3, 11):
+ vendored("tomli")
vendored("truststore")
vendored("urllib3")
diff --git a/contrib/python/pip/pip/_vendor/certifi/__init__.py b/contrib/python/pip/pip/_vendor/certifi/__init__.py
index 1c91f3ec93..d321f1bc3a 100644
--- a/contrib/python/pip/pip/_vendor/certifi/__init__.py
+++ b/contrib/python/pip/pip/_vendor/certifi/__init__.py
@@ -1,4 +1,4 @@
from .core import contents, where
__all__ = ["contents", "where"]
-__version__ = "2024.02.02"
+__version__ = "2024.07.04"
diff --git a/contrib/python/pip/pip/_vendor/certifi/cacert.pem b/contrib/python/pip/pip/_vendor/certifi/cacert.pem
index fac3c31909..a6581589ba 100644
--- a/contrib/python/pip/pip/_vendor/certifi/cacert.pem
+++ b/contrib/python/pip/pip/_vendor/certifi/cacert.pem
@@ -3485,46 +3485,6 @@ DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ
+RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A=
-----END CERTIFICATE-----
-# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
-# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
-# Label: "GLOBALTRUST 2020"
-# Serial: 109160994242082918454945253
-# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8
-# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2
-# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a
------BEGIN CERTIFICATE-----
-MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG
-A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw
-FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx
-MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u
-aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq
-hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b
-RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z
-YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3
-QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw
-yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+
-BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ
-SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH
-r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0
-4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me
-dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw
-q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2
-nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
-AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu
-H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA
-VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC
-XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd
-6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf
-+I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi
-kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7
-wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB
-TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C
-MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn
-4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I
-aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy
-qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg==
------END CERTIFICATE-----
-
# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
# Label: "ANF Secure Server Root CA"
@@ -4812,3 +4772,27 @@ X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
-----END CERTIFICATE-----
+
+# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Label: "FIRMAPROFESIONAL CA ROOT-A WEB"
+# Serial: 65916896770016886708751106294915943533
+# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3
+# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5
+# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a
+-----BEGIN CERTIFICATE-----
+MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf
+e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C
+cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O
+BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO
+PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw
+hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG
+XSaQpYXFuXqUPoeovQA=
+-----END CERTIFICATE-----
diff --git a/contrib/python/pip/pip/_vendor/pkg_resources/__init__.py b/contrib/python/pip/pip/_vendor/pkg_resources/__init__.py
index 417a537d6f..57ce7f1006 100644
--- a/contrib/python/pip/pip/_vendor/pkg_resources/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pkg_resources/__init__.py
@@ -1,3 +1,6 @@
+# TODO: Add Generic type annotations to initialized collections.
+# For now we'd simply use implicit Any/Unknown which would add redundant annotations
+# mypy: disable-error-code="var-annotated"
"""
Package resource API
--------------------
@@ -17,9 +20,11 @@ This module is deprecated. Users are directed to :mod:`importlib.resources`,
:mod:`importlib.metadata` and :pypi:`packaging` instead.
"""
+from __future__ import annotations
+
import sys
-if sys.version_info < (3, 8):
+if sys.version_info < (3, 8): # noqa: UP036 # Check for unsupported versions
raise RuntimeError("Python 3.8 or later is required")
import os
@@ -27,7 +32,24 @@ import io
import time
import re
import types
-from typing import List, Protocol
+from typing import (
+ Any,
+ Literal,
+ Dict,
+ Iterator,
+ Mapping,
+ MutableSequence,
+ NamedTuple,
+ NoReturn,
+ Tuple,
+ Union,
+ TYPE_CHECKING,
+ Protocol,
+ Callable,
+ Iterable,
+ TypeVar,
+ overload,
+)
import zipfile
import zipimport
import warnings
@@ -46,6 +68,7 @@ import inspect
import ntpath
import posixpath
import importlib
+import importlib.abc
import importlib.machinery
from pkgutil import get_importer
@@ -53,6 +76,8 @@ import _imp
# capture these to bypass sandboxing
from os import utime
+from os import open as os_open
+from os.path import isdir, split
try:
from os import mkdir, rename, unlink
@@ -62,51 +87,59 @@ except ImportError:
# no write support, probably under GAE
WRITE_SUPPORT = False
-from os import open as os_open
-from os.path import isdir, split
-
from pip._internal.utils._jaraco_text import (
yield_lines,
drop_comment,
join_continuation,
)
-
-from pip._vendor import platformdirs
-from pip._vendor import packaging
-
-__import__('pip._vendor.packaging.version')
-__import__('pip._vendor.packaging.specifiers')
-__import__('pip._vendor.packaging.requirements')
-__import__('pip._vendor.packaging.markers')
-__import__('pip._vendor.packaging.utils')
-
-# declare some globals that will be defined later to
-# satisfy the linters.
-require = None
-working_set = None
-add_activation_listener = None
-cleanup_resources = None
-resource_stream = None
-set_extraction_path = None
-resource_isdir = None
-resource_string = None
-iter_entry_points = None
-resource_listdir = None
-resource_filename = None
-resource_exists = None
-_distribution_finders = None
-_namespace_handlers = None
-_namespace_packages = None
-
-
-warnings.warn(
- "pkg_resources is deprecated as an API. "
- "See https://setuptools.pypa.io/en/latest/pkg_resources.html",
- DeprecationWarning,
- stacklevel=2,
+from pip._vendor.packaging import markers as _packaging_markers
+from pip._vendor.packaging import requirements as _packaging_requirements
+from pip._vendor.packaging import utils as _packaging_utils
+from pip._vendor.packaging import version as _packaging_version
+from pip._vendor.platformdirs import user_cache_dir as _user_cache_dir
+
+if TYPE_CHECKING:
+ from _typeshed import BytesPath, StrPath, StrOrBytesPath
+ from pip._vendor.typing_extensions import Self
+
+
+# Patch: Remove deprecation warning from vendored pkg_resources.
+# Setting PYTHONWARNINGS=error to verify builds produce no warnings
+# causes immediate exceptions.
+# See https://github.com/pypa/pip/issues/12243
+
+
+_T = TypeVar("_T")
+_DistributionT = TypeVar("_DistributionT", bound="Distribution")
+# Type aliases
+_NestedStr = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]
+_InstallerTypeT = Callable[["Requirement"], "_DistributionT"]
+_InstallerType = Callable[["Requirement"], Union["Distribution", None]]
+_PkgReqType = Union[str, "Requirement"]
+_EPDistType = Union["Distribution", _PkgReqType]
+_MetadataType = Union["IResourceProvider", None]
+_ResolvedEntryPoint = Any # Can be any attribute in the module
+_ResourceStream = Any # TODO / Incomplete: A readable file-like object
+# Any object works, but let's indicate we expect something like a module (optionally has __loader__ or __file__)
+_ModuleLike = Union[object, types.ModuleType]
+# Any: Should be _ModuleLike but we end up with issues where _ModuleLike doesn't have _ZipLoaderModule's __loader__
+_ProviderFactoryType = Callable[[Any], "IResourceProvider"]
+_DistFinderType = Callable[[_T, str, bool], Iterable["Distribution"]]
+_NSHandlerType = Callable[[_T, str, str, types.ModuleType], Union[str, None]]
+_AdapterT = TypeVar(
+ "_AdapterT", _DistFinderType[Any], _ProviderFactoryType, _NSHandlerType[Any]
)
+# Use _typeshed.importlib.LoaderProtocol once available https://github.com/python/typeshed/pull/11890
+class _LoaderProtocol(Protocol):
+ def load_module(self, fullname: str, /) -> types.ModuleType: ...
+
+
+class _ZipLoaderModule(Protocol):
+ __loader__: zipimport.zipimporter
+
+
_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
@@ -117,18 +150,18 @@ class PEP440Warning(RuntimeWarning):
"""
-parse_version = packaging.version.Version
+parse_version = _packaging_version.Version
-_state_vars = {}
+_state_vars: dict[str, str] = {}
-def _declare_state(vartype, **kw):
- globals().update(kw)
- _state_vars.update(dict.fromkeys(kw, vartype))
+def _declare_state(vartype: str, varname: str, initial_value: _T) -> _T:
+ _state_vars[varname] = vartype
+ return initial_value
-def __getstate__():
+def __getstate__() -> dict[str, Any]:
state = {}
g = globals()
for k, v in _state_vars.items():
@@ -136,7 +169,7 @@ def __getstate__():
return state
-def __setstate__(state):
+def __setstate__(state: dict[str, Any]) -> dict[str, Any]:
g = globals()
for k, v in state.items():
g['_sset_' + _state_vars[k]](k, g[k], v)
@@ -291,17 +324,17 @@ class VersionConflict(ResolutionError):
_template = "{self.dist} is installed but {self.req} is required"
@property
- def dist(self):
+ def dist(self) -> Distribution:
return self.args[0]
@property
- def req(self):
+ def req(self) -> Requirement:
return self.args[1]
def report(self):
return self._template.format(**locals())
- def with_context(self, required_by):
+ def with_context(self, required_by: set[Distribution | str]):
"""
If required_by is non-empty, return a version of self that is a
ContextualVersionConflict.
@@ -321,7 +354,7 @@ class ContextualVersionConflict(VersionConflict):
_template = VersionConflict._template + ' by {self.required_by}'
@property
- def required_by(self):
+ def required_by(self) -> set[str]:
return self.args[2]
@@ -334,11 +367,11 @@ class DistributionNotFound(ResolutionError):
)
@property
- def req(self):
+ def req(self) -> Requirement:
return self.args[0]
@property
- def requirers(self):
+ def requirers(self) -> set[str] | None:
return self.args[1]
@property
@@ -358,7 +391,7 @@ class UnknownExtra(ResolutionError):
"""Distribution doesn't have an "extra feature" of the given name"""
-_provider_factories = {}
+_provider_factories: dict[type[_ModuleLike], _ProviderFactoryType] = {}
PY_MAJOR = '{}.{}'.format(*sys.version_info)
EGG_DIST = 3
@@ -368,7 +401,9 @@ CHECKOUT_DIST = 0
DEVELOP_DIST = -1
-def register_loader_type(loader_type, provider_factory):
+def register_loader_type(
+ loader_type: type[_ModuleLike], provider_factory: _ProviderFactoryType
+):
"""Register `provider_factory` to make providers for `loader_type`
`loader_type` is the type or class of a PEP 302 ``module.__loader__``,
@@ -378,7 +413,11 @@ def register_loader_type(loader_type, provider_factory):
_provider_factories[loader_type] = provider_factory
-def get_provider(moduleOrReq):
+@overload
+def get_provider(moduleOrReq: str) -> IResourceProvider: ...
+@overload
+def get_provider(moduleOrReq: Requirement) -> Distribution: ...
+def get_provider(moduleOrReq: str | Requirement) -> IResourceProvider | Distribution:
"""Return an IResourceProvider for the named module or requirement"""
if isinstance(moduleOrReq, Requirement):
return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
@@ -440,7 +479,7 @@ darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
get_platform = get_build_platform
-def compatible_platforms(provided, required):
+def compatible_platforms(provided: str | None, required: str | None):
"""Can code for the `provided` platform run on the `required` platform?
Returns true if either platform is ``None``, or the platforms are equal.
@@ -489,89 +528,106 @@ def compatible_platforms(provided, required):
return False
-def get_distribution(dist):
+@overload
+def get_distribution(dist: _DistributionT) -> _DistributionT: ...
+@overload
+def get_distribution(dist: _PkgReqType) -> Distribution: ...
+def get_distribution(dist: Distribution | _PkgReqType) -> Distribution:
"""Return a current distribution object for a Requirement or string"""
if isinstance(dist, str):
dist = Requirement.parse(dist)
if isinstance(dist, Requirement):
- dist = get_provider(dist)
+ # Bad type narrowing, dist has to be a Requirement here, so get_provider has to return Distribution
+ dist = get_provider(dist) # type: ignore[assignment]
if not isinstance(dist, Distribution):
- raise TypeError("Expected string, Requirement, or Distribution", dist)
+ raise TypeError("Expected str, Requirement, or Distribution", dist)
return dist
-def load_entry_point(dist, group, name):
+def load_entry_point(dist: _EPDistType, group: str, name: str) -> _ResolvedEntryPoint:
"""Return `name` entry point of `group` for `dist` or raise ImportError"""
return get_distribution(dist).load_entry_point(group, name)
-def get_entry_map(dist, group=None):
+@overload
+def get_entry_map(
+ dist: _EPDistType, group: None = None
+) -> dict[str, dict[str, EntryPoint]]: ...
+@overload
+def get_entry_map(dist: _EPDistType, group: str) -> dict[str, EntryPoint]: ...
+def get_entry_map(dist: _EPDistType, group: str | None = None):
"""Return the entry point map for `group`, or the full entry map"""
return get_distribution(dist).get_entry_map(group)
-def get_entry_info(dist, group, name):
+def get_entry_info(dist: _EPDistType, group: str, name: str):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return get_distribution(dist).get_entry_info(group, name)
class IMetadataProvider(Protocol):
- def has_metadata(self, name) -> bool:
+ def has_metadata(self, name: str) -> bool:
"""Does the package's distribution contain the named metadata?"""
- def get_metadata(self, name):
+ def get_metadata(self, name: str) -> str:
"""The named metadata resource as a string"""
- def get_metadata_lines(self, name):
+ def get_metadata_lines(self, name: str) -> Iterator[str]:
"""Yield named metadata resource as list of non-blank non-comment lines
Leading and trailing whitespace is stripped from each line, and lines
with ``#`` as the first non-blank character are omitted."""
- def metadata_isdir(self, name) -> bool:
+ def metadata_isdir(self, name: str) -> bool:
"""Is the named metadata a directory? (like ``os.path.isdir()``)"""
- def metadata_listdir(self, name):
+ def metadata_listdir(self, name: str) -> list[str]:
"""List of metadata names in the directory (like ``os.listdir()``)"""
- def run_script(self, script_name, namespace):
+ def run_script(self, script_name: str, namespace: dict[str, Any]) -> None:
"""Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider, Protocol):
"""An object that provides access to package resources"""
- def get_resource_filename(self, manager, resource_name):
+ def get_resource_filename(
+ self, manager: ResourceManager, resource_name: str
+ ) -> str:
"""Return a true filesystem path for `resource_name`
- `manager` must be an ``IResourceManager``"""
+ `manager` must be a ``ResourceManager``"""
- def get_resource_stream(self, manager, resource_name):
+ def get_resource_stream(
+ self, manager: ResourceManager, resource_name: str
+ ) -> _ResourceStream:
"""Return a readable file-like object for `resource_name`
- `manager` must be an ``IResourceManager``"""
+ `manager` must be a ``ResourceManager``"""
- def get_resource_string(self, manager, resource_name) -> bytes:
+ def get_resource_string(
+ self, manager: ResourceManager, resource_name: str
+ ) -> bytes:
"""Return the contents of `resource_name` as :obj:`bytes`
- `manager` must be an ``IResourceManager``"""
+ `manager` must be a ``ResourceManager``"""
- def has_resource(self, resource_name):
+ def has_resource(self, resource_name: str) -> bool:
"""Does the package contain the named resource?"""
- def resource_isdir(self, resource_name):
+ def resource_isdir(self, resource_name: str) -> bool:
"""Is the named resource a directory? (like ``os.path.isdir()``)"""
- def resource_listdir(self, resource_name):
+ def resource_listdir(self, resource_name: str) -> list[str]:
"""List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet:
"""A collection of active distributions on sys.path (or a similar list)"""
- def __init__(self, entries=None):
+ def __init__(self, entries: Iterable[str] | None = None):
"""Create working set from list of path entries (default=sys.path)"""
- self.entries = []
+ self.entries: list[str] = []
self.entry_keys = {}
self.by_key = {}
self.normalized_to_canonical_keys = {}
@@ -625,7 +681,7 @@ class WorkingSet:
sys.path[:] = ws.entries
return ws
- def add_entry(self, entry):
+ def add_entry(self, entry: str):
"""Add a path item to ``.entries``, finding any distributions on it
``find_distributions(entry, True)`` is used to find distributions
@@ -640,11 +696,11 @@ class WorkingSet:
for dist in find_distributions(entry, True):
self.add(dist, entry, False)
- def __contains__(self, dist):
+ def __contains__(self, dist: Distribution) -> bool:
"""True if `dist` is the active distribution for its project"""
return self.by_key.get(dist.key) == dist
- def find(self, req):
+ def find(self, req: Requirement) -> Distribution | None:
"""Find a distribution matching requirement `req`
If there is an active distribution for the requested project, this
@@ -668,7 +724,7 @@ class WorkingSet:
raise VersionConflict(dist, req)
return dist
- def iter_entry_points(self, group, name=None):
+ def iter_entry_points(self, group: str, name: str | None = None):
"""Yield entry point objects from `group` matching `name`
If `name` is None, yields all entry points in `group` from all
@@ -682,7 +738,7 @@ class WorkingSet:
if name is None or name == entry.name
)
- def run_script(self, requires, script_name):
+ def run_script(self, requires: str, script_name: str):
"""Locate distribution for `requires` and run `script_name` script"""
ns = sys._getframe(1).f_globals
name = ns['__name__']
@@ -690,13 +746,13 @@ class WorkingSet:
ns['__name__'] = name
self.require(requires)[0].run_script(script_name, ns)
- def __iter__(self):
+ def __iter__(self) -> Iterator[Distribution]:
"""Yield distributions for non-duplicate projects in the working set
The yield order is the order in which the items' path entries were
added to the working set.
"""
- seen = {}
+ seen = set()
for item in self.entries:
if item not in self.entry_keys:
# workaround a cache issue
@@ -704,10 +760,16 @@ class WorkingSet:
for key in self.entry_keys[item]:
if key not in seen:
- seen[key] = 1
+ seen.add(key)
yield self.by_key[key]
- def add(self, dist, entry=None, insert=True, replace=False):
+ def add(
+ self,
+ dist: Distribution,
+ entry: str | None = None,
+ insert: bool = True,
+ replace: bool = False,
+ ):
"""Add `dist` to working set, associated with `entry`
If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
@@ -731,7 +793,7 @@ class WorkingSet:
return
self.by_key[dist.key] = dist
- normalized_name = packaging.utils.canonicalize_name(dist.key)
+ normalized_name = _packaging_utils.canonicalize_name(dist.key)
self.normalized_to_canonical_keys[normalized_name] = dist.key
if dist.key not in keys:
keys.append(dist.key)
@@ -739,14 +801,42 @@ class WorkingSet:
keys2.append(dist.key)
self._added_new(dist)
+ @overload
def resolve(
self,
- requirements,
- env=None,
- installer=None,
- replace_conflicting=False,
- extras=None,
- ):
+ requirements: Iterable[Requirement],
+ env: Environment | None,
+ installer: _InstallerTypeT[_DistributionT],
+ replace_conflicting: bool = False,
+ extras: tuple[str, ...] | None = None,
+ ) -> list[_DistributionT]: ...
+ @overload
+ def resolve(
+ self,
+ requirements: Iterable[Requirement],
+ env: Environment | None = None,
+ *,
+ installer: _InstallerTypeT[_DistributionT],
+ replace_conflicting: bool = False,
+ extras: tuple[str, ...] | None = None,
+ ) -> list[_DistributionT]: ...
+ @overload
+ def resolve(
+ self,
+ requirements: Iterable[Requirement],
+ env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ replace_conflicting: bool = False,
+ extras: tuple[str, ...] | None = None,
+ ) -> list[Distribution]: ...
+ def resolve(
+ self,
+ requirements: Iterable[Requirement],
+ env: Environment | None = None,
+ installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+ replace_conflicting: bool = False,
+ extras: tuple[str, ...] | None = None,
+ ) -> list[Distribution] | list[_DistributionT]:
"""List all distributions needed to (recursively) meet `requirements`
`requirements` must be a sequence of ``Requirement`` objects. `env`,
@@ -774,7 +864,7 @@ class WorkingSet:
# set up the stack
requirements = list(requirements)[::-1]
# set of processed requirements
- processed = {}
+ processed = set()
# key -> dist
best = {}
to_activate = []
@@ -808,14 +898,14 @@ class WorkingSet:
required_by[new_requirement].add(req.project_name)
req_extras[new_requirement] = req.extras
- processed[req] = True
+ processed.add(req)
# return list of distros to activate
return to_activate
def _resolve_dist(
self, req, best, replace_conflicting, env, installer, required_by, to_activate
- ):
+ ) -> Distribution:
dist = best.get(req.key)
if dist is None:
# Find the best distribution and add it to the map
@@ -844,7 +934,41 @@ class WorkingSet:
raise VersionConflict(dist, req).with_context(dependent_req)
return dist
- def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True):
+ @overload
+ def find_plugins(
+ self,
+ plugin_env: Environment,
+ full_env: Environment | None,
+ installer: _InstallerTypeT[_DistributionT],
+ fallback: bool = True,
+ ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ...
+ @overload
+ def find_plugins(
+ self,
+ plugin_env: Environment,
+ full_env: Environment | None = None,
+ *,
+ installer: _InstallerTypeT[_DistributionT],
+ fallback: bool = True,
+ ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ...
+ @overload
+ def find_plugins(
+ self,
+ plugin_env: Environment,
+ full_env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ fallback: bool = True,
+ ) -> tuple[list[Distribution], dict[Distribution, Exception]]: ...
+ def find_plugins(
+ self,
+ plugin_env: Environment,
+ full_env: Environment | None = None,
+ installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+ fallback: bool = True,
+ ) -> tuple[
+ list[Distribution] | list[_DistributionT],
+ dict[Distribution, Exception],
+ ]:
"""Find all activatable distributions in `plugin_env`
Example usage::
@@ -883,8 +1007,8 @@ class WorkingSet:
# scan project names in alphabetic order
plugin_projects.sort()
- error_info = {}
- distributions = {}
+ error_info: dict[Distribution, Exception] = {}
+ distributions: dict[Distribution, Exception | None] = {}
if full_env is None:
env = Environment(self.entries)
@@ -920,12 +1044,12 @@ class WorkingSet:
# success, no need to try any more versions of this project
break
- distributions = list(distributions)
- distributions.sort()
+ sorted_distributions = list(distributions)
+ sorted_distributions.sort()
- return distributions, error_info
+ return sorted_distributions, error_info
- def require(self, *requirements):
+ def require(self, *requirements: _NestedStr):
"""Ensure that distributions matching `requirements` are activated
`requirements` must be a string or a (possibly-nested) sequence
@@ -941,7 +1065,9 @@ class WorkingSet:
return needed
- def subscribe(self, callback, existing=True):
+ def subscribe(
+ self, callback: Callable[[Distribution], object], existing: bool = True
+ ):
"""Invoke `callback` for all distributions
If `existing=True` (default),
@@ -977,12 +1103,12 @@ class WorkingSet:
self.callbacks = callbacks[:]
-class _ReqExtras(dict):
+class _ReqExtras(Dict["Requirement", Tuple[str, ...]]):
"""
Map each requirement to the extras that demanded it.
"""
- def markers_pass(self, req, extras=None):
+ def markers_pass(self, req: Requirement, extras: tuple[str, ...] | None = None):
"""
Evaluate markers for req against each extra that
demanded it.
@@ -1001,7 +1127,10 @@ class Environment:
"""Searchable snapshot of distributions on a search path"""
def __init__(
- self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR
+ self,
+ search_path: Iterable[str] | None = None,
+ platform: str | None = get_supported_platform(),
+ python: str | None = PY_MAJOR,
):
"""Snapshot distributions available on a search path
@@ -1024,7 +1153,7 @@ class Environment:
self.python = python
self.scan(search_path)
- def can_add(self, dist):
+ def can_add(self, dist: Distribution):
"""Is distribution `dist` acceptable for this environment?
The distribution must match the platform and python version
@@ -1038,11 +1167,11 @@ class Environment:
)
return py_compat and compatible_platforms(dist.platform, self.platform)
- def remove(self, dist):
+ def remove(self, dist: Distribution):
"""Remove `dist` from the environment"""
self._distmap[dist.key].remove(dist)
- def scan(self, search_path=None):
+ def scan(self, search_path: Iterable[str] | None = None):
"""Scan `search_path` for distributions usable in this environment
Any distributions found are added to the environment.
@@ -1057,7 +1186,7 @@ class Environment:
for dist in find_distributions(item):
self.add(dist)
- def __getitem__(self, project_name):
+ def __getitem__(self, project_name: str) -> list[Distribution]:
"""Return a newest-to-oldest list of distributions for `project_name`
Uses case-insensitive `project_name` comparison, assuming all the
@@ -1068,7 +1197,7 @@ class Environment:
distribution_key = project_name.lower()
return self._distmap.get(distribution_key, [])
- def add(self, dist):
+ def add(self, dist: Distribution):
"""Add `dist` if we ``can_add()`` it and it has not already been added"""
if self.can_add(dist) and dist.has_version():
dists = self._distmap.setdefault(dist.key, [])
@@ -1076,7 +1205,29 @@ class Environment:
dists.append(dist)
dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
- def best_match(self, req, working_set, installer=None, replace_conflicting=False):
+ @overload
+ def best_match(
+ self,
+ req: Requirement,
+ working_set: WorkingSet,
+ installer: _InstallerTypeT[_DistributionT],
+ replace_conflicting: bool = False,
+ ) -> _DistributionT: ...
+ @overload
+ def best_match(
+ self,
+ req: Requirement,
+ working_set: WorkingSet,
+ installer: _InstallerType | None = None,
+ replace_conflicting: bool = False,
+ ) -> Distribution | None: ...
+ def best_match(
+ self,
+ req: Requirement,
+ working_set: WorkingSet,
+ installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+ replace_conflicting: bool = False,
+ ) -> Distribution | None:
"""Find distribution best matching `req` and usable on `working_set`
This calls the ``find(req)`` method of the `working_set` to see if a
@@ -1103,7 +1254,32 @@ class Environment:
# try to download/install
return self.obtain(req, installer)
- def obtain(self, requirement, installer=None):
+ @overload
+ def obtain(
+ self,
+ requirement: Requirement,
+ installer: _InstallerTypeT[_DistributionT],
+ ) -> _DistributionT: ...
+ @overload
+ def obtain(
+ self,
+ requirement: Requirement,
+ installer: Callable[[Requirement], None] | None = None,
+ ) -> None: ...
+ @overload
+ def obtain(
+ self,
+ requirement: Requirement,
+ installer: _InstallerType | None = None,
+ ) -> Distribution | None: ...
+ def obtain(
+ self,
+ requirement: Requirement,
+ installer: Callable[[Requirement], None]
+ | _InstallerType
+ | None
+ | _InstallerTypeT[_DistributionT] = None,
+ ) -> Distribution | None:
"""Obtain a distribution matching `requirement` (e.g. via download)
Obtain a distro that matches requirement (e.g. via download). In the
@@ -1114,13 +1290,13 @@ class Environment:
to the `installer` argument."""
return installer(requirement) if installer else None
- def __iter__(self):
+ def __iter__(self) -> Iterator[str]:
"""Yield the unique project names of the available distributions"""
for key in self._distmap.keys():
if self[key]:
yield key
- def __iadd__(self, other):
+ def __iadd__(self, other: Distribution | Environment):
"""In-place addition of a distribution or environment"""
if isinstance(other, Distribution):
self.add(other)
@@ -1132,7 +1308,7 @@ class Environment:
raise TypeError("Can't add %r to environment" % (other,))
return self
- def __add__(self, other):
+ def __add__(self, other: Distribution | Environment):
"""Add an environment or distribution to an environment"""
new = self.__class__([], platform=None, python=None)
for env in self, other:
@@ -1159,46 +1335,54 @@ class ExtractionError(RuntimeError):
The exception instance that caused extraction to fail
"""
+ manager: ResourceManager
+ cache_path: str
+ original_error: BaseException | None
+
class ResourceManager:
"""Manage resource extraction and packages"""
- extraction_path = None
+ extraction_path: str | None = None
def __init__(self):
self.cached_files = {}
- def resource_exists(self, package_or_requirement, resource_name):
+ def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str):
"""Does the named resource exist?"""
return get_provider(package_or_requirement).has_resource(resource_name)
- def resource_isdir(self, package_or_requirement, resource_name):
+ def resource_isdir(self, package_or_requirement: _PkgReqType, resource_name: str):
"""Is the named resource an existing directory?"""
return get_provider(package_or_requirement).resource_isdir(resource_name)
- def resource_filename(self, package_or_requirement, resource_name):
+ def resource_filename(
+ self, package_or_requirement: _PkgReqType, resource_name: str
+ ):
"""Return a true filesystem path for specified resource"""
return get_provider(package_or_requirement).get_resource_filename(
self, resource_name
)
- def resource_stream(self, package_or_requirement, resource_name):
+ def resource_stream(self, package_or_requirement: _PkgReqType, resource_name: str):
"""Return a readable file-like object for specified resource"""
return get_provider(package_or_requirement).get_resource_stream(
self, resource_name
)
- def resource_string(self, package_or_requirement, resource_name) -> bytes:
+ def resource_string(
+ self, package_or_requirement: _PkgReqType, resource_name: str
+ ) -> bytes:
"""Return specified resource as :obj:`bytes`"""
return get_provider(package_or_requirement).get_resource_string(
self, resource_name
)
- def resource_listdir(self, package_or_requirement, resource_name):
+ def resource_listdir(self, package_or_requirement: _PkgReqType, resource_name: str):
"""List the contents of the named resource directory"""
return get_provider(package_or_requirement).resource_listdir(resource_name)
- def extraction_error(self):
+ def extraction_error(self) -> NoReturn:
"""Give an error message for problems extracting file(s)"""
old_exc = sys.exc_info()[1]
@@ -1228,7 +1412,7 @@ class ResourceManager:
err.original_error = old_exc
raise err
- def get_cache_path(self, archive_name, names=()):
+ def get_cache_path(self, archive_name: str, names: Iterable[StrPath] = ()):
"""Return absolute location in cache for `archive_name` and `names`
The parent directory of the resulting path will be created if it does
@@ -1250,7 +1434,7 @@ class ResourceManager:
self._warn_unsafe_extraction_path(extract_path)
- self.cached_files[target_path] = 1
+ self.cached_files[target_path] = True
return target_path
@staticmethod
@@ -1280,7 +1464,7 @@ class ResourceManager:
).format(**locals())
warnings.warn(msg, UserWarning)
- def postprocess(self, tempname, filename):
+ def postprocess(self, tempname: StrOrBytesPath, filename: StrOrBytesPath):
"""Perform any platform-specific postprocessing of `tempname`
This is where Mac header rewrites should be done; other platforms don't
@@ -1300,7 +1484,7 @@ class ResourceManager:
mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
os.chmod(tempname, mode)
- def set_extraction_path(self, path):
+ def set_extraction_path(self, path: str):
"""Set the base path where resources will be extracted to, if needed.
If you do not call this routine before any extractions take place, the
@@ -1324,7 +1508,7 @@ class ResourceManager:
self.extraction_path = path
- def cleanup_resources(self, force=False) -> List[str]:
+ def cleanup_resources(self, force: bool = False) -> list[str]:
"""
Delete all extracted resource files and directories, returning a list
of the file and directory names that could not be successfully removed.
@@ -1339,18 +1523,16 @@ class ResourceManager:
return []
-def get_default_cache():
+def get_default_cache() -> str:
"""
Return the ``PYTHON_EGG_CACHE`` environment variable
or a platform-relevant user cache dir for an app
named "Python-Eggs".
"""
- return os.environ.get('PYTHON_EGG_CACHE') or platformdirs.user_cache_dir(
- appname='Python-Eggs'
- )
+ return os.environ.get('PYTHON_EGG_CACHE') or _user_cache_dir(appname='Python-Eggs')
-def safe_name(name):
+def safe_name(name: str):
"""Convert an arbitrary string to a standard distribution name
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
@@ -1358,14 +1540,14 @@ def safe_name(name):
return re.sub('[^A-Za-z0-9.]+', '-', name)
-def safe_version(version):
+def safe_version(version: str):
"""
Convert an arbitrary string to a standard version string
"""
try:
# normalize the version
- return str(packaging.version.Version(version))
- except packaging.version.InvalidVersion:
+ return str(_packaging_version.Version(version))
+ except _packaging_version.InvalidVersion:
version = version.replace(' ', '.')
return re.sub('[^A-Za-z0-9.]+', '-', version)
@@ -1402,7 +1584,7 @@ def _safe_segment(segment):
return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
-def safe_extra(extra):
+def safe_extra(extra: str):
"""Convert an arbitrary string to a standard 'extra' name
Any runs of non-alphanumeric characters are replaced with a single '_',
@@ -1411,7 +1593,7 @@ def safe_extra(extra):
return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
-def to_filename(name):
+def to_filename(name: str):
"""Convert a project or version name to its filename-escaped form
Any '-' characters are currently replaced with '_'.
@@ -1419,7 +1601,7 @@ def to_filename(name):
return name.replace('-', '_')
-def invalid_marker(text):
+def invalid_marker(text: str):
"""
Validate text as a PEP 508 environment marker; return an exception
if invalid or False otherwise.
@@ -1433,7 +1615,7 @@ def invalid_marker(text):
return False
-def evaluate_marker(text, extra=None):
+def evaluate_marker(text: str, extra: str | None = None) -> bool:
"""
Evaluate a PEP 508 environment marker.
Return a boolean indicating the marker result in this environment.
@@ -1442,46 +1624,48 @@ def evaluate_marker(text, extra=None):
This implementation uses the 'pyparsing' module.
"""
try:
- marker = packaging.markers.Marker(text)
+ marker = _packaging_markers.Marker(text)
return marker.evaluate()
- except packaging.markers.InvalidMarker as e:
+ except _packaging_markers.InvalidMarker as e:
raise SyntaxError(e) from e
class NullProvider:
"""Try to implement resources and metadata for arbitrary PEP 302 loaders"""
- egg_name = None
- egg_info = None
- loader = None
+ egg_name: str | None = None
+ egg_info: str | None = None
+ loader: _LoaderProtocol | None = None
- def __init__(self, module):
+ def __init__(self, module: _ModuleLike):
self.loader = getattr(module, '__loader__', None)
self.module_path = os.path.dirname(getattr(module, '__file__', ''))
- def get_resource_filename(self, manager, resource_name):
+ def get_resource_filename(self, manager: ResourceManager, resource_name: str):
return self._fn(self.module_path, resource_name)
- def get_resource_stream(self, manager, resource_name):
+ def get_resource_stream(self, manager: ResourceManager, resource_name: str):
return io.BytesIO(self.get_resource_string(manager, resource_name))
- def get_resource_string(self, manager, resource_name) -> bytes:
+ def get_resource_string(
+ self, manager: ResourceManager, resource_name: str
+ ) -> bytes:
return self._get(self._fn(self.module_path, resource_name))
- def has_resource(self, resource_name):
+ def has_resource(self, resource_name: str):
return self._has(self._fn(self.module_path, resource_name))
def _get_metadata_path(self, name):
return self._fn(self.egg_info, name)
- def has_metadata(self, name) -> bool:
+ def has_metadata(self, name: str) -> bool:
if not self.egg_info:
return False
path = self._get_metadata_path(name)
return self._has(path)
- def get_metadata(self, name):
+ def get_metadata(self, name: str):
if not self.egg_info:
return ""
path = self._get_metadata_path(name)
@@ -1494,24 +1678,24 @@ class NullProvider:
exc.reason += ' in {} file at path: {}'.format(name, path)
raise
- def get_metadata_lines(self, name):
+ def get_metadata_lines(self, name: str) -> Iterator[str]:
return yield_lines(self.get_metadata(name))
- def resource_isdir(self, resource_name):
+ def resource_isdir(self, resource_name: str):
return self._isdir(self._fn(self.module_path, resource_name))
- def metadata_isdir(self, name) -> bool:
+ def metadata_isdir(self, name: str) -> bool:
return bool(self.egg_info and self._isdir(self._fn(self.egg_info, name)))
- def resource_listdir(self, resource_name):
+ def resource_listdir(self, resource_name: str):
return self._listdir(self._fn(self.module_path, resource_name))
- def metadata_listdir(self, name):
+ def metadata_listdir(self, name: str) -> list[str]:
if self.egg_info:
return self._listdir(self._fn(self.egg_info, name))
return []
- def run_script(self, script_name, namespace):
+ def run_script(self, script_name: str, namespace: dict[str, Any]):
script = 'scripts/' + script_name
if not self.has_metadata(script):
raise ResolutionError(
@@ -1519,13 +1703,13 @@ class NullProvider:
**locals()
),
)
+
script_text = self.get_metadata(script).replace('\r\n', '\n')
script_text = script_text.replace('\r', '\n')
script_filename = self._fn(self.egg_info, script)
namespace['__file__'] = script_filename
if os.path.exists(script_filename):
- with open(script_filename) as fid:
- source = fid.read()
+ source = _read_utf8_with_fallback(script_filename)
code = compile(source, script_filename, 'exec')
exec(code, namespace, namespace)
else:
@@ -1550,12 +1734,16 @@ class NullProvider:
"Can't perform this operation for unregistered loader type"
)
- def _listdir(self, path):
+ def _listdir(self, path) -> list[str]:
raise NotImplementedError(
"Can't perform this operation for unregistered loader type"
)
- def _fn(self, base, resource_name):
+ def _fn(self, base: str | None, resource_name: str):
+ if base is None:
+ raise TypeError(
+ "`base` parameter in `_fn` is `None`. Either override this method or check the parameter first."
+ )
self._validate_resource_path(resource_name)
if resource_name:
return os.path.join(base, *resource_name.split('/'))
@@ -1618,6 +1806,7 @@ is not allowed.
os.path.pardir in path.split(posixpath.sep)
or posixpath.isabs(path)
or ntpath.isabs(path)
+ or path.startswith("\\")
)
if not invalid:
return
@@ -1625,7 +1814,7 @@ is not allowed.
msg = "Use of .. or absolute path in a resource path is not allowed."
# Aggressively disallow Windows absolute paths
- if ntpath.isabs(path) and not posixpath.isabs(path):
+ if (path.startswith("\\") or ntpath.isabs(path)) and not posixpath.isabs(path):
raise ValueError(msg)
# for compatibility, warn; in future
@@ -1636,8 +1825,9 @@ is not allowed.
)
def _get(self, path) -> bytes:
- if hasattr(self.loader, 'get_data'):
- return self.loader.get_data(path)
+ if hasattr(self.loader, 'get_data') and self.loader:
+ # Already checked get_data exists
+ return self.loader.get_data(path) # type: ignore[attr-defined]
raise NotImplementedError(
"Can't perform this operation for loaders without 'get_data()'"
)
@@ -1660,7 +1850,7 @@ def _parents(path):
class EggProvider(NullProvider):
"""Provider based on a virtual filesystem"""
- def __init__(self, module):
+ def __init__(self, module: _ModuleLike):
super().__init__(module)
self._setup_prefix()
@@ -1671,7 +1861,7 @@ class EggProvider(NullProvider):
egg = next(eggs, None)
egg and self._set_egg(egg)
- def _set_egg(self, path):
+ def _set_egg(self, path: str):
self.egg_name = os.path.basename(path)
self.egg_info = os.path.join(path, 'EGG-INFO')
self.egg_root = path
@@ -1689,7 +1879,7 @@ class DefaultProvider(EggProvider):
def _listdir(self, path):
return os.listdir(path)
- def get_resource_stream(self, manager, resource_name):
+ def get_resource_stream(self, manager: object, resource_name: str):
return open(self._fn(self.module_path, resource_name), 'rb')
def _get(self, path) -> bytes:
@@ -1713,7 +1903,8 @@ DefaultProvider._register()
class EmptyProvider(NullProvider):
"""Provider that returns nothing for all requests"""
- module_path = None
+ # A special case, we don't want all Providers inheriting from NullProvider to have a potentially None module_path
+ module_path: str | None = None # type: ignore[assignment]
_isdir = _has = lambda self, path: False
@@ -1730,13 +1921,14 @@ class EmptyProvider(NullProvider):
empty_provider = EmptyProvider()
-class ZipManifests(dict):
+class ZipManifests(Dict[str, "MemoizedZipManifests.manifest_mod"]):
"""
zip manifest builder
"""
+ # `path` could be `StrPath | IO[bytes]` but that violates the LSP for `MemoizedZipManifests.load`
@classmethod
- def build(cls, path):
+ def build(cls, path: str):
"""
Build a dictionary similar to the zipimport directory
caches, except instead of tuples, store ZipInfo objects.
@@ -1762,9 +1954,11 @@ class MemoizedZipManifests(ZipManifests):
Memoized zipfile manifests.
"""
- manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
+ class manifest_mod(NamedTuple):
+ manifest: dict[str, zipfile.ZipInfo]
+ mtime: float
- def load(self, path):
+ def load(self, path: str) -> dict[str, zipfile.ZipInfo]: # type: ignore[override] # ZipManifests.load is a classmethod
"""
Load a manifest at path or return a suitable manifest already loaded.
"""
@@ -1781,10 +1975,12 @@ class MemoizedZipManifests(ZipManifests):
class ZipProvider(EggProvider):
"""Resource support for zips and eggs"""
- eagers = None
+ eagers: list[str] | None = None
_zip_manifests = MemoizedZipManifests()
+ # ZipProvider's loader should always be a zipimporter or equivalent
+ loader: zipimport.zipimporter
- def __init__(self, module):
+ def __init__(self, module: _ZipLoaderModule):
super().__init__(module)
self.zip_pre = self.loader.archive + os.sep
@@ -1810,7 +2006,7 @@ class ZipProvider(EggProvider):
def zipinfo(self):
return self._zip_manifests.load(self.loader.archive)
- def get_resource_filename(self, manager, resource_name):
+ def get_resource_filename(self, manager: ResourceManager, resource_name: str):
if not self.egg_name:
raise NotImplementedError(
"resource_filename() only supported for .egg, not .zip"
@@ -1833,7 +2029,7 @@ class ZipProvider(EggProvider):
return timestamp, size
# FIXME: 'ZipProvider._extract_resource' is too complex (12)
- def _extract_resource(self, manager, zip_path): # noqa: C901
+ def _extract_resource(self, manager: ResourceManager, zip_path) -> str: # noqa: C901
if zip_path in self._index():
for name in self._index()[zip_path]:
last = self._extract_resource(manager, os.path.join(zip_path, name))
@@ -1844,9 +2040,13 @@ class ZipProvider(EggProvider):
if not WRITE_SUPPORT:
raise OSError(
- '"os.rename" and "os.unlink" are not supported ' 'on this platform'
+ '"os.rename" and "os.unlink" are not supported on this platform'
)
try:
+ if not self.egg_name:
+ raise OSError(
+ '"egg_name" is empty. This likely means no egg could be found from the "module_path".'
+ )
real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))
if self._is_current(real_path, zip_path):
@@ -1935,10 +2135,10 @@ class ZipProvider(EggProvider):
def _listdir(self, fspath):
return list(self._index().get(self._zipinfo_name(fspath), ()))
- def _eager_to_zip(self, resource_name):
+ def _eager_to_zip(self, resource_name: str):
return self._zipinfo_name(self._fn(self.egg_root, resource_name))
- def _resource_to_zip(self, resource_name):
+ def _resource_to_zip(self, resource_name: str):
return self._zipinfo_name(self._fn(self.module_path, resource_name))
@@ -1957,16 +2157,16 @@ class FileMetadata(EmptyProvider):
the provided location.
"""
- def __init__(self, path):
+ def __init__(self, path: StrPath):
self.path = path
def _get_metadata_path(self, name):
return self.path
- def has_metadata(self, name) -> bool:
+ def has_metadata(self, name: str) -> bool:
return name == 'PKG-INFO' and os.path.isfile(self.path)
- def get_metadata(self, name):
+ def get_metadata(self, name: str):
if name != 'PKG-INFO':
raise KeyError("No metadata except PKG-INFO is available")
@@ -1982,7 +2182,7 @@ class FileMetadata(EmptyProvider):
msg = tmpl.format(**locals())
warnings.warn(msg)
- def get_metadata_lines(self, name):
+ def get_metadata_lines(self, name: str) -> Iterator[str]:
return yield_lines(self.get_metadata(name))
@@ -2006,7 +2206,7 @@ class PathMetadata(DefaultProvider):
dist = Distribution.from_filename(egg_path, metadata=metadata)
"""
- def __init__(self, path, egg_info):
+ def __init__(self, path: str, egg_info: str):
self.module_path = path
self.egg_info = egg_info
@@ -2014,7 +2214,7 @@ class PathMetadata(DefaultProvider):
class EggMetadata(ZipProvider):
"""Metadata provider for .egg files"""
- def __init__(self, importer):
+ def __init__(self, importer: zipimport.zipimporter):
"""Create a metadata provider from a zipimporter"""
self.zip_pre = importer.archive + os.sep
@@ -2026,10 +2226,12 @@ class EggMetadata(ZipProvider):
self._setup_prefix()
-_declare_state('dict', _distribution_finders={})
+_distribution_finders: dict[type, _DistFinderType[Any]] = _declare_state(
+ 'dict', '_distribution_finders', {}
+)
-def register_finder(importer_type, distribution_finder):
+def register_finder(importer_type: type[_T], distribution_finder: _DistFinderType[_T]):
"""Register `distribution_finder` to find distributions in sys.path items
`importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2039,14 +2241,16 @@ def register_finder(importer_type, distribution_finder):
_distribution_finders[importer_type] = distribution_finder
-def find_distributions(path_item, only=False):
+def find_distributions(path_item: str, only: bool = False):
"""Yield distributions accessible via `path_item`"""
importer = get_importer(path_item)
finder = _find_adapter(_distribution_finders, importer)
return finder(importer, path_item, only)
-def find_eggs_in_zip(importer, path_item, only=False):
+def find_eggs_in_zip(
+ importer: zipimport.zipimporter, path_item: str, only: bool = False
+) -> Iterator[Distribution]:
"""
Find eggs in zip files; possibly multiple nested eggs.
"""
@@ -2075,14 +2279,16 @@ def find_eggs_in_zip(importer, path_item, only=False):
register_finder(zipimport.zipimporter, find_eggs_in_zip)
-def find_nothing(importer, path_item, only=False):
+def find_nothing(
+ importer: object | None, path_item: str | None, only: bool | None = False
+):
return ()
register_finder(object, find_nothing)
-def find_on_path(importer, path_item, only=False):
+def find_on_path(importer: object | None, path_item, only=False):
"""Yield distributions accessible on a sys.path directory"""
path_item = _normalize_cached(path_item)
@@ -2137,7 +2343,7 @@ class NoDists:
return iter(())
-def safe_listdir(path):
+def safe_listdir(path: StrOrBytesPath):
"""
Attempt to list contents of path, but suppress some exceptions.
"""
@@ -2153,13 +2359,13 @@ def safe_listdir(path):
return ()
-def distributions_from_metadata(path):
+def distributions_from_metadata(path: str):
root = os.path.dirname(path)
if os.path.isdir(path):
if len(os.listdir(path)) == 0:
# empty metadata dir; skip
return
- metadata = PathMetadata(root, path)
+ metadata: _MetadataType = PathMetadata(root, path)
else:
metadata = FileMetadata(path)
entry = os.path.basename(path)
@@ -2175,11 +2381,10 @@ def non_empty_lines(path):
"""
Yield non-empty lines from file at path
"""
- with open(path) as f:
- for line in f:
- line = line.strip()
- if line:
- yield line
+ for line in _read_utf8_with_fallback(path).splitlines():
+ line = line.strip()
+ if line:
+ yield line
def resolve_egg_link(path):
@@ -2200,11 +2405,17 @@ if hasattr(pkgutil, 'ImpImporter'):
register_finder(importlib.machinery.FileFinder, find_on_path)
-_declare_state('dict', _namespace_handlers={})
-_declare_state('dict', _namespace_packages={})
+_namespace_handlers: dict[type, _NSHandlerType[Any]] = _declare_state(
+ 'dict', '_namespace_handlers', {}
+)
+_namespace_packages: dict[str | None, list[str]] = _declare_state(
+ 'dict', '_namespace_packages', {}
+)
-def register_namespace_handler(importer_type, namespace_handler):
+def register_namespace_handler(
+ importer_type: type[_T], namespace_handler: _NSHandlerType[_T]
+):
"""Register `namespace_handler` to declare namespace packages
`importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
@@ -2259,7 +2470,7 @@ def _handle_ns(packageName, path_item):
return subpath
-def _rebuild_mod_path(orig_path, package_name, module):
+def _rebuild_mod_path(orig_path, package_name, module: types.ModuleType):
"""
Rebuild module.__path__ ensuring that all entries are ordered
corresponding to their sys.path order
@@ -2293,7 +2504,7 @@ def _rebuild_mod_path(orig_path, package_name, module):
module.__path__ = new_path
-def declare_namespace(packageName):
+def declare_namespace(packageName: str):
"""Declare that package 'packageName' is a namespace package"""
msg = (
@@ -2310,7 +2521,7 @@ def declare_namespace(packageName):
if packageName in _namespace_packages:
return
- path = sys.path
+ path: MutableSequence[str] = sys.path
parent, _, _ = packageName.rpartition('.')
if parent:
@@ -2336,7 +2547,7 @@ def declare_namespace(packageName):
_imp.release_lock()
-def fixup_namespace_packages(path_item, parent=None):
+def fixup_namespace_packages(path_item: str, parent: str | None = None):
"""Ensure that previously-declared namespace packages include path_item"""
_imp.acquire_lock()
try:
@@ -2348,7 +2559,12 @@ def fixup_namespace_packages(path_item, parent=None):
_imp.release_lock()
-def file_ns_handler(importer, path_item, packageName, module):
+def file_ns_handler(
+ importer: object,
+ path_item: StrPath,
+ packageName: str,
+ module: types.ModuleType,
+):
"""Compute an ns-package subpath for a filesystem or zipfile importer"""
subpath = os.path.join(path_item, packageName.split('.')[-1])
@@ -2368,19 +2584,28 @@ register_namespace_handler(zipimport.zipimporter, file_ns_handler)
register_namespace_handler(importlib.machinery.FileFinder, file_ns_handler)
-def null_ns_handler(importer, path_item, packageName, module):
+def null_ns_handler(
+ importer: object,
+ path_item: str | None,
+ packageName: str | None,
+ module: _ModuleLike | None,
+):
return None
register_namespace_handler(object, null_ns_handler)
-def normalize_path(filename):
+@overload
+def normalize_path(filename: StrPath) -> str: ...
+@overload
+def normalize_path(filename: BytesPath) -> bytes: ...
+def normalize_path(filename: StrOrBytesPath):
"""Normalize a file/dir name for comparison purposes"""
return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
-def _cygwin_patch(filename): # pragma: nocover
+def _cygwin_patch(filename: StrOrBytesPath): # pragma: nocover
"""
Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
symlink components. Using
@@ -2391,9 +2616,19 @@ def _cygwin_patch(filename): # pragma: nocover
return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
-@functools.lru_cache(maxsize=None)
-def _normalize_cached(filename):
- return normalize_path(filename)
+if TYPE_CHECKING:
+ # https://github.com/python/mypy/issues/16261
+ # https://github.com/python/typeshed/issues/6347
+ @overload
+ def _normalize_cached(filename: StrPath) -> str: ...
+ @overload
+ def _normalize_cached(filename: BytesPath) -> bytes: ...
+ def _normalize_cached(filename: StrOrBytesPath) -> str | bytes: ...
+else:
+
+ @functools.lru_cache(maxsize=None)
+ def _normalize_cached(filename):
+ return normalize_path(filename)
def _is_egg_path(path):
@@ -2446,7 +2681,14 @@ EGG_NAME = re.compile(
class EntryPoint:
"""Object representing an advertised importable object"""
- def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+ def __init__(
+ self,
+ name: str,
+ module_name: str,
+ attrs: Iterable[str] = (),
+ extras: Iterable[str] = (),
+ dist: Distribution | None = None,
+ ):
if not MODULE(module_name):
raise ValueError("Invalid module name", module_name)
self.name = name
@@ -2466,7 +2708,26 @@ class EntryPoint:
def __repr__(self):
return "EntryPoint.parse(%r)" % str(self)
- def load(self, require=True, *args, **kwargs):
+ @overload
+ def load(
+ self,
+ require: Literal[True] = True,
+ env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ ) -> _ResolvedEntryPoint: ...
+ @overload
+ def load(
+ self,
+ require: Literal[False],
+ *args: Any,
+ **kwargs: Any,
+ ) -> _ResolvedEntryPoint: ...
+ def load(
+ self,
+ require: bool = True,
+ *args: Environment | _InstallerType | None,
+ **kwargs: Environment | _InstallerType | None,
+ ) -> _ResolvedEntryPoint:
"""
Require packages for this EntryPoint, then resolve it.
"""
@@ -2478,10 +2739,12 @@ class EntryPoint:
stacklevel=2,
)
if require:
- self.require(*args, **kwargs)
+ # We could pass `env` and `installer` directly,
+ # but keeping `*args` and `**kwargs` for backwards compatibility
+ self.require(*args, **kwargs) # type: ignore
return self.resolve()
- def resolve(self):
+ def resolve(self) -> _ResolvedEntryPoint:
"""
Resolve the entry point from its module and attrs.
"""
@@ -2491,9 +2754,14 @@ class EntryPoint:
except AttributeError as exc:
raise ImportError(str(exc)) from exc
- def require(self, env=None, installer=None):
- if self.extras and not self.dist:
- raise UnknownExtra("Can't require() without a distribution", self)
+ def require(
+ self,
+ env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ ):
+ if not self.dist:
+ error_cls = UnknownExtra if self.extras else AttributeError
+ raise error_cls("Can't require() without a distribution", self)
# Get the requirements for this entry point with all its extras and
# then resolve them. We have to pass `extras` along when resolving so
@@ -2514,7 +2782,7 @@ class EntryPoint:
)
@classmethod
- def parse(cls, src, dist=None):
+ def parse(cls, src: str, dist: Distribution | None = None):
"""Parse a single entry point from string `src`
Entry point syntax follows the form::
@@ -2539,15 +2807,20 @@ class EntryPoint:
return ()
req = Requirement.parse('x' + extras_spec)
if req.specs:
- raise ValueError()
+ raise ValueError
return req.extras
@classmethod
- def parse_group(cls, group, lines, dist=None):
+ def parse_group(
+ cls,
+ group: str,
+ lines: _NestedStr,
+ dist: Distribution | None = None,
+ ):
"""Parse an entry point group"""
if not MODULE(group):
raise ValueError("Invalid group name", group)
- this = {}
+ this: dict[str, Self] = {}
for line in yield_lines(lines):
ep = cls.parse(line, dist)
if ep.name in this:
@@ -2556,14 +2829,19 @@ class EntryPoint:
return this
@classmethod
- def parse_map(cls, data, dist=None):
+ def parse_map(
+ cls,
+ data: str | Iterable[str] | dict[str, str | Iterable[str]],
+ dist: Distribution | None = None,
+ ):
"""Parse a map of entry point groups"""
+ _data: Iterable[tuple[str | None, str | Iterable[str]]]
if isinstance(data, dict):
- data = data.items()
+ _data = data.items()
else:
- data = split_sections(data)
- maps = {}
- for group, lines in data:
+ _data = split_sections(data)
+ maps: dict[str, dict[str, Self]] = {}
+ for group, lines in _data:
if group is None:
if not lines:
continue
@@ -2597,13 +2875,13 @@ class Distribution:
def __init__(
self,
- location=None,
- metadata=None,
- project_name=None,
- version=None,
- py_version=PY_MAJOR,
- platform=None,
- precedence=EGG_DIST,
+ location: str | None = None,
+ metadata: _MetadataType = None,
+ project_name: str | None = None,
+ version: str | None = None,
+ py_version: str | None = PY_MAJOR,
+ platform: str | None = None,
+ precedence: int = EGG_DIST,
):
self.project_name = safe_name(project_name or 'Unknown')
if version is not None:
@@ -2615,7 +2893,13 @@ class Distribution:
self._provider = metadata or empty_provider
@classmethod
- def from_location(cls, location, basename, metadata=None, **kw):
+ def from_location(
+ cls,
+ location: str,
+ basename: StrPath,
+ metadata: _MetadataType = None,
+ **kw: int, # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
+ ) -> Distribution:
project_name, version, py_version, platform = [None] * 4
basename, ext = os.path.splitext(basename)
if ext.lower() in _distributionImpl:
@@ -2653,25 +2937,25 @@ class Distribution:
def __hash__(self):
return hash(self.hashcmp)
- def __lt__(self, other):
+ def __lt__(self, other: Distribution):
return self.hashcmp < other.hashcmp
- def __le__(self, other):
+ def __le__(self, other: Distribution):
return self.hashcmp <= other.hashcmp
- def __gt__(self, other):
+ def __gt__(self, other: Distribution):
return self.hashcmp > other.hashcmp
- def __ge__(self, other):
+ def __ge__(self, other: Distribution):
return self.hashcmp >= other.hashcmp
- def __eq__(self, other):
+ def __eq__(self, other: object):
if not isinstance(other, self.__class__):
# It's not a Distribution, so they are not equal
return False
return self.hashcmp == other.hashcmp
- def __ne__(self, other):
+ def __ne__(self, other: object):
return not self == other
# These properties have to be lazy so that we don't have to load any
@@ -2691,12 +2975,12 @@ class Distribution:
if not hasattr(self, "_parsed_version"):
try:
self._parsed_version = parse_version(self.version)
- except packaging.version.InvalidVersion as ex:
+ except _packaging_version.InvalidVersion as ex:
info = f"(package: {self.project_name})"
if hasattr(ex, "add_note"):
ex.add_note(info) # PEP 678
raise
- raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None
+ raise _packaging_version.InvalidVersion(f"{str(ex)} {info}") from None
return self._parsed_version
@@ -2704,7 +2988,7 @@ class Distribution:
def _forgiving_parsed_version(self):
try:
return self.parsed_version
- except packaging.version.InvalidVersion as ex:
+ except _packaging_version.InvalidVersion as ex:
self._parsed_version = parse_version(_forgiving_version(self.version))
notes = "\n".join(getattr(ex, "__notes__", [])) # PEP 678
@@ -2754,14 +3038,14 @@ class Distribution:
return self.__dep_map
@staticmethod
- def _filter_extras(dm):
+ def _filter_extras(dm: dict[str | None, list[Requirement]]):
"""
Given a mapping of extras to dependencies, strip off
environment markers and filter out any dependencies
not matching the markers.
"""
for extra in list(filter(None, dm)):
- new_extra = extra
+ new_extra: str | None = extra
reqs = dm.pop(extra)
new_extra, _, marker = extra.partition(':')
fails_marker = marker and (
@@ -2781,10 +3065,10 @@ class Distribution:
dm.setdefault(extra, []).extend(parse_requirements(reqs))
return dm
- def requires(self, extras=()):
+ def requires(self, extras: Iterable[str] = ()):
"""List of Requirements needed for this distro if `extras` are used"""
dm = self._dep_map
- deps = []
+ deps: list[Requirement] = []
deps.extend(dm.get(None, ()))
for ext in extras:
try:
@@ -2820,12 +3104,12 @@ class Distribution:
lines = self._get_metadata(self.PKG_INFO)
return _version_from_file(lines)
- def activate(self, path=None, replace=False):
+ def activate(self, path: list[str] | None = None, replace: bool = False):
"""Ensure distribution is importable on `path` (default=sys.path)"""
if path is None:
path = sys.path
self.insert_on(path, replace=replace)
- if path is sys.path:
+ if path is sys.path and self.location is not None:
fixup_namespace_packages(self.location)
for pkg in self._get_metadata('namespace_packages.txt'):
if pkg in sys.modules:
@@ -2870,45 +3154,57 @@ class Distribution:
)
@classmethod
- def from_filename(cls, filename, metadata=None, **kw):
+ def from_filename(
+ cls,
+ filename: StrPath,
+ metadata: _MetadataType = None,
+ **kw: int, # We could set `precedence` explicitly, but keeping this as `**kw` for full backwards and subclassing compatibility
+ ):
return cls.from_location(
_normalize_cached(filename), os.path.basename(filename), metadata, **kw
)
def as_requirement(self):
"""Return a ``Requirement`` that matches this distribution exactly"""
- if isinstance(self.parsed_version, packaging.version.Version):
+ if isinstance(self.parsed_version, _packaging_version.Version):
spec = "%s==%s" % (self.project_name, self.parsed_version)
else:
spec = "%s===%s" % (self.project_name, self.parsed_version)
return Requirement.parse(spec)
- def load_entry_point(self, group, name):
+ def load_entry_point(self, group: str, name: str) -> _ResolvedEntryPoint:
"""Return the `name` entry point of `group` or raise ImportError"""
ep = self.get_entry_info(group, name)
if ep is None:
raise ImportError("Entry point %r not found" % ((group, name),))
return ep.load()
- def get_entry_map(self, group=None):
+ @overload
+ def get_entry_map(self, group: None = None) -> dict[str, dict[str, EntryPoint]]: ...
+ @overload
+ def get_entry_map(self, group: str) -> dict[str, EntryPoint]: ...
+ def get_entry_map(self, group: str | None = None):
"""Return the entry point map for `group`, or the full entry map"""
- try:
- ep_map = self._ep_map
- except AttributeError:
- ep_map = self._ep_map = EntryPoint.parse_map(
+ if not hasattr(self, "_ep_map"):
+ self._ep_map = EntryPoint.parse_map(
self._get_metadata('entry_points.txt'), self
)
if group is not None:
- return ep_map.get(group, {})
- return ep_map
+ return self._ep_map.get(group, {})
+ return self._ep_map
- def get_entry_info(self, group, name):
+ def get_entry_info(self, group: str, name: str):
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
return self.get_entry_map(group).get(name)
# FIXME: 'Distribution.insert_on' is too complex (13)
- def insert_on(self, path, loc=None, replace=False): # noqa: C901
+ def insert_on( # noqa: C901
+ self,
+ path: list[str],
+ loc=None,
+ replace: bool = False,
+ ):
"""Ensure self.location is on path
If replace=False (default):
@@ -3013,13 +3309,14 @@ class Distribution:
return False
return True
- def clone(self, **kw):
+ def clone(self, **kw: str | int | IResourceProvider | None):
"""Copy this distribution, substituting in any changed keyword args"""
names = 'project_name version py_version platform location precedence'
for attr in names.split():
kw.setdefault(attr, getattr(self, attr, None))
kw.setdefault('metadata', self._provider)
- return self.__class__(**kw)
+ # Unsafely unpacking. But keeping **kw for backwards and subclassing compatibility
+ return self.__class__(**kw) # type:ignore[arg-type]
@property
def extras(self):
@@ -3072,11 +3369,11 @@ class DistInfoDistribution(Distribution):
self.__dep_map = self._compute_dependencies()
return self.__dep_map
- def _compute_dependencies(self):
+ def _compute_dependencies(self) -> dict[str | None, list[Requirement]]:
"""Recompute this distribution's dependencies."""
- dm = self.__dep_map = {None: []}
+ self.__dep_map: dict[str | None, list[Requirement]] = {None: []}
- reqs = []
+ reqs: list[Requirement] = []
# Including any condition expressions
for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
reqs.extend(parse_requirements(req))
@@ -3087,13 +3384,15 @@ class DistInfoDistribution(Distribution):
yield req
common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
- dm[None].extend(common)
+ self.__dep_map[None].extend(common)
for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
s_extra = safe_extra(extra.strip())
- dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common]
+ self.__dep_map[s_extra] = [
+ r for r in reqs_for_extra(extra) if r not in common
+ ]
- return dm
+ return self.__dep_map
_distributionImpl = {
@@ -3116,7 +3415,7 @@ def issue_warning(*args, **kw):
warnings.warn(stacklevel=level + 1, *args, **kw)
-def parse_requirements(strs):
+def parse_requirements(strs: _NestedStr):
"""
Yield ``Requirement`` objects for each specification in `strs`.
@@ -3125,19 +3424,20 @@ def parse_requirements(strs):
return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs))))
-class RequirementParseError(packaging.requirements.InvalidRequirement):
+class RequirementParseError(_packaging_requirements.InvalidRequirement):
"Compatibility wrapper for InvalidRequirement"
-class Requirement(packaging.requirements.Requirement):
- def __init__(self, requirement_string):
+class Requirement(_packaging_requirements.Requirement):
+ def __init__(self, requirement_string: str):
"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
super().__init__(requirement_string)
self.unsafe_name = self.name
project_name = safe_name(self.name)
self.project_name, self.key = project_name, project_name.lower()
self.specs = [(spec.operator, spec.version) for spec in self.specifier]
- self.extras = tuple(map(safe_extra, self.extras))
+ # packaging.requirements.Requirement uses a set for its extras. We use a variable-length tuple
+ self.extras: tuple[str] = tuple(map(safe_extra, self.extras))
self.hashCmp = (
self.key,
self.url,
@@ -3147,13 +3447,13 @@ class Requirement(packaging.requirements.Requirement):
)
self.__hash = hash(self.hashCmp)
- def __eq__(self, other):
+ def __eq__(self, other: object):
return isinstance(other, Requirement) and self.hashCmp == other.hashCmp
def __ne__(self, other):
return not self == other
- def __contains__(self, item):
+ def __contains__(self, item: Distribution | str | tuple[str, ...]) -> bool:
if isinstance(item, Distribution):
if item.key != self.key:
return False
@@ -3172,7 +3472,7 @@ class Requirement(packaging.requirements.Requirement):
return "Requirement.parse(%r)" % str(self)
@staticmethod
- def parse(s):
+ def parse(s: str | Iterable[str]):
(req,) = parse_requirements(s)
return req
@@ -3187,7 +3487,7 @@ def _always_object(classes):
return classes
-def _find_adapter(registry, ob):
+def _find_adapter(registry: Mapping[type, _AdapterT], ob: object) -> _AdapterT:
"""Return an adapter factory for `ob` from `registry`"""
types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
for t in types:
@@ -3198,7 +3498,7 @@ def _find_adapter(registry, ob):
raise TypeError(f"Could not find adapter for {registry} and {ob}")
-def ensure_directory(path):
+def ensure_directory(path: StrOrBytesPath):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
os.makedirs(dirname, exist_ok=True)
@@ -3217,7 +3517,7 @@ def _bypass_ensure_directory(path):
pass
-def split_sections(s):
+def split_sections(s: _NestedStr) -> Iterator[tuple[str | None, list[str]]]:
"""Split a string or iterable thereof into (section, content) pairs
Each ``section`` is a stripped version of the section header ("[section]")
@@ -3261,6 +3561,47 @@ def _mkstemp(*args, **kw):
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
+class PkgResourcesDeprecationWarning(Warning):
+ """
+ Base class for warning about deprecations in ``pkg_resources``
+
+ This class is not derived from ``DeprecationWarning``, and as such is
+ visible by default.
+ """
+
+
+# Ported from ``setuptools`` to avoid introducing an import inter-dependency:
+_LOCALE_ENCODING = "locale" if sys.version_info >= (3, 10) else None
+
+
+def _read_utf8_with_fallback(file: str, fallback_encoding=_LOCALE_ENCODING) -> str:
+ """See setuptools.unicode_utils._read_utf8_with_fallback"""
+ try:
+ with open(file, "r", encoding="utf-8") as f:
+ return f.read()
+ except UnicodeDecodeError: # pragma: no cover
+ msg = f"""\
+ ********************************************************************************
+ `encoding="utf-8"` fails with {file!r}, trying `encoding={fallback_encoding!r}`.
+
+ This fallback behaviour is considered **deprecated** and future versions of
+ `setuptools/pkg_resources` may not implement it.
+
+ Please encode {file!r} with "utf-8" to ensure future builds will succeed.
+
+ If this file was produced by `setuptools` itself, cleaning up the cached files
+ and re-building/re-installing the package with a newer version of `setuptools`
+ (e.g. by updating `build-system.requires` in its `pyproject.toml`)
+ might solve the problem.
+ ********************************************************************************
+ """
+ # TODO: Add a deadline?
+ # See comment in setuptools.unicode_utils._Utf8EncodingNeeded
+ warnings.warn(msg, PkgResourcesDeprecationWarning, stacklevel=2)
+ with open(file, "r", encoding=fallback_encoding) as f:
+ return f.read()
+
+
# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
f(*args, **kwargs)
@@ -3279,15 +3620,6 @@ def _initialize(g=globals()):
)
-class PkgResourcesDeprecationWarning(Warning):
- """
- Base class for warning about deprecations in ``pkg_resources``
-
- This class is not derived from ``DeprecationWarning``, and as such is
- visible by default.
- """
-
-
@_call_aside
def _initialize_master_working_set():
"""
@@ -3301,8 +3633,7 @@ def _initialize_master_working_set():
Invocation by other packages is unsupported and done
at their own risk.
"""
- working_set = WorkingSet._build_master()
- _declare_state('object', working_set=working_set)
+ working_set = _declare_state('object', 'working_set', WorkingSet._build_master())
require = working_set.require
iter_entry_points = working_set.iter_entry_points
@@ -3323,3 +3654,23 @@ def _initialize_master_working_set():
# match order
list(map(working_set.add_entry, sys.path))
globals().update(locals())
+
+
+if TYPE_CHECKING:
+ # All of these are set by the @_call_aside methods above
+ __resource_manager = ResourceManager() # Won't exist at runtime
+ resource_exists = __resource_manager.resource_exists
+ resource_isdir = __resource_manager.resource_isdir
+ resource_filename = __resource_manager.resource_filename
+ resource_stream = __resource_manager.resource_stream
+ resource_string = __resource_manager.resource_string
+ resource_listdir = __resource_manager.resource_listdir
+ set_extraction_path = __resource_manager.set_extraction_path
+ cleanup_resources = __resource_manager.cleanup_resources
+
+ working_set = WorkingSet()
+ require = working_set.require
+ iter_entry_points = working_set.iter_entry_points
+ add_activation_listener = working_set.subscribe
+ run_script = working_set.run_script
+ run_main = run_script
diff --git a/contrib/python/pip/pip/_vendor/platformdirs/android.py b/contrib/python/pip/pip/_vendor/platformdirs/android.py
index fefafd3297..afd3141c72 100644
--- a/contrib/python/pip/pip/_vendor/platformdirs/android.py
+++ b/contrib/python/pip/pip/_vendor/platformdirs/android.py
@@ -6,7 +6,7 @@ import os
import re
import sys
from functools import lru_cache
-from typing import cast
+from typing import TYPE_CHECKING, cast
from .api import PlatformDirsABC
@@ -117,16 +117,33 @@ class Android(PlatformDirsABC):
@lru_cache(maxsize=1)
-def _android_folder() -> str | None:
+def _android_folder() -> str | None: # noqa: C901, PLR0912
""":return: base folder for the Android OS or None if it cannot be found"""
- try:
- # First try to get a path to android app via pyjnius
- from jnius import autoclass # noqa: PLC0415
-
- context = autoclass("android.content.Context")
- result: str | None = context.getFilesDir().getParentFile().getAbsolutePath()
- except Exception: # noqa: BLE001
- # if fails find an android folder looking a path on the sys.path
+ result: str | None = None
+ # type checker isn't happy with our "import android", just don't do this when type checking see
+ # https://stackoverflow.com/a/61394121
+ if not TYPE_CHECKING:
+ try:
+ # First try to get a path to android app using python4android (if available)...
+ from android import mActivity # noqa: PLC0415
+
+ context = cast("android.content.Context", mActivity.getApplicationContext()) # noqa: F821
+ result = context.getFilesDir().getParentFile().getAbsolutePath()
+ except Exception: # noqa: BLE001
+ result = None
+ if result is None:
+ try:
+ # ...and fall back to using plain pyjnius, if python4android isn't available or doesn't deliver any useful
+ # result...
+ from jnius import autoclass # noqa: PLC0415
+
+ context = autoclass("android.content.Context")
+ result = context.getFilesDir().getParentFile().getAbsolutePath()
+ except Exception: # noqa: BLE001
+ result = None
+ if result is None:
+ # and if that fails, too, find an android folder looking at path on the sys.path
+ # warning: only works for apps installed under /data, not adopted storage etc.
pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
for path in sys.path:
if pattern.match(path):
@@ -134,6 +151,16 @@ def _android_folder() -> str | None:
break
else:
result = None
+ if result is None:
+ # one last try: find an android folder looking at path on the sys.path taking adopted storage paths into
+ # account
+ pattern = re.compile(r"/mnt/expand/[a-fA-F0-9-]{36}/(data|user/\d+)/(.+)/files")
+ for path in sys.path:
+ if pattern.match(path):
+ result = path.split("/files")[0]
+ break
+ else:
+ result = None
return result
diff --git a/contrib/python/pip/pip/_vendor/platformdirs/version.py b/contrib/python/pip/pip/_vendor/platformdirs/version.py
index c418cd0c9a..6483ddce0b 100644
--- a/contrib/python/pip/pip/_vendor/platformdirs/version.py
+++ b/contrib/python/pip/pip/_vendor/platformdirs/version.py
@@ -12,5 +12,5 @@ __version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE
-__version__ = version = '4.2.1'
-__version_tuple__ = version_tuple = (4, 2, 1)
+__version__ = version = '4.2.2'
+__version_tuple__ = version_tuple = (4, 2, 2)
diff --git a/contrib/python/pip/pip/_vendor/pygments/__init__.py b/contrib/python/pip/pip/_vendor/pygments/__init__.py
index 5b8a3f9548..60ae9bb850 100644
--- a/contrib/python/pip/pip/_vendor/pygments/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/__init__.py
@@ -21,12 +21,12 @@
.. _Pygments master branch:
https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from io import StringIO, BytesIO
-__version__ = '2.17.2'
+__version__ = '2.18.0'
__docformat__ = 'restructuredtext'
__all__ = ['lex', 'format', 'highlight']
diff --git a/contrib/python/pip/pip/_vendor/pygments/__main__.py b/contrib/python/pip/pip/_vendor/pygments/__main__.py
index 2f7f8cbad0..dcc6e5add7 100644
--- a/contrib/python/pip/pip/_vendor/pygments/__main__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/__main__.py
@@ -4,7 +4,7 @@
Main entry point for ``python -m pygments``.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/cmdline.py b/contrib/python/pip/pip/_vendor/pygments/cmdline.py
index 29b5608f33..0a7072eff3 100644
--- a/contrib/python/pip/pip/_vendor/pygments/cmdline.py
+++ b/contrib/python/pip/pip/_vendor/pygments/cmdline.py
@@ -4,7 +4,7 @@
Command line interface.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -68,19 +68,19 @@ def _print_help(what, name):
try:
if what == 'lexer':
cls = get_lexer_by_name(name)
- print("Help on the %s lexer:" % cls.name)
+ print(f"Help on the {cls.name} lexer:")
print(dedent(cls.__doc__))
elif what == 'formatter':
cls = find_formatter_class(name)
- print("Help on the %s formatter:" % cls.name)
+ print(f"Help on the {cls.name} formatter:")
print(dedent(cls.__doc__))
elif what == 'filter':
cls = find_filter_class(name)
- print("Help on the %s filter:" % name)
+ print(f"Help on the {name} filter:")
print(dedent(cls.__doc__))
return 0
except (AttributeError, ValueError):
- print("%s not found!" % what, file=sys.stderr)
+ print(f"{what} not found!", file=sys.stderr)
return 1
@@ -97,7 +97,7 @@ def _print_list(what):
info.append(tup)
info.sort()
for i in info:
- print(('* %s\n %s %s') % i)
+ print(('* {}\n {} {}').format(*i))
elif what == 'formatter':
print()
@@ -112,7 +112,7 @@ def _print_list(what):
info.append(tup)
info.sort()
for i in info:
- print(('* %s\n %s %s') % i)
+ print(('* {}\n {} {}').format(*i))
elif what == 'filter':
print()
@@ -122,7 +122,7 @@ def _print_list(what):
for name in get_all_filters():
cls = find_filter_class(name)
print("* " + name + ':')
- print(" %s" % docstring_headline(cls))
+ print(f" {docstring_headline(cls)}")
elif what == 'style':
print()
@@ -132,7 +132,7 @@ def _print_list(what):
for name in get_all_styles():
cls = get_style_by_name(name)
print("* " + name + ':')
- print(" %s" % docstring_headline(cls))
+ print(f" {docstring_headline(cls)}")
def _print_list_as_json(requested_items):
@@ -185,8 +185,8 @@ def main_inner(parser, argns):
return 0
if argns.V:
- print('Pygments version %s, (c) 2006-2023 by Georg Brandl, Matthäus '
- 'Chajdas and contributors.' % __version__)
+ print(f'Pygments version {__version__}, (c) 2006-2024 by Georg Brandl, Matthäus '
+ 'Chajdas and contributors.')
return 0
def is_only_option(opt):
@@ -659,7 +659,7 @@ def main(args=sys.argv):
msg = info[-1].strip()
if len(info) >= 3:
# extract relevant file and position info
- msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:]
+ msg += '\n (f{})'.format(info[-2].split('\n')[0].strip()[1:])
print(file=sys.stderr)
print('*** Error while highlighting:', file=sys.stderr)
print(msg, file=sys.stderr)
diff --git a/contrib/python/pip/pip/_vendor/pygments/console.py b/contrib/python/pip/pip/_vendor/pygments/console.py
index deb4937f74..4c1a06219c 100644
--- a/contrib/python/pip/pip/_vendor/pygments/console.py
+++ b/contrib/python/pip/pip/_vendor/pygments/console.py
@@ -4,7 +4,7 @@
Format colored console output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -27,12 +27,12 @@ light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brig
"brightmagenta", "brightcyan", "white"]
x = 30
-for d, l in zip(dark_colors, light_colors):
- codes[d] = esc + "%im" % x
- codes[l] = esc + "%im" % (60 + x)
+for dark, light in zip(dark_colors, light_colors):
+ codes[dark] = esc + "%im" % x
+ codes[light] = esc + "%im" % (60 + x)
x += 1
-del d, l, x
+del dark, light, x
codes["white"] = codes["bold"]
diff --git a/contrib/python/pip/pip/_vendor/pygments/filter.py b/contrib/python/pip/pip/_vendor/pygments/filter.py
index dafa08d156..aa6f76041b 100644
--- a/contrib/python/pip/pip/_vendor/pygments/filter.py
+++ b/contrib/python/pip/pip/_vendor/pygments/filter.py
@@ -4,7 +4,7 @@
Module that implements the default filter.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -62,8 +62,7 @@ class FunctionFilter(Filter):
def __init__(self, **options):
if not hasattr(self, 'function'):
- raise TypeError('%r used without bound function' %
- self.__class__.__name__)
+ raise TypeError(f'{self.__class__.__name__!r} used without bound function')
Filter.__init__(self, **options)
def filter(self, lexer, stream):
diff --git a/contrib/python/pip/pip/_vendor/pygments/filters/__init__.py b/contrib/python/pip/pip/_vendor/pygments/filters/__init__.py
index 5aa9ecbb80..9255ca224d 100644
--- a/contrib/python/pip/pip/_vendor/pygments/filters/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/filters/__init__.py
@@ -5,7 +5,7 @@
Module containing filter lookup functions and default
filters.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -39,7 +39,7 @@ def get_filter_by_name(filtername, **options):
if cls:
return cls(**options)
else:
- raise ClassNotFound('filter %r not found' % filtername)
+ raise ClassNotFound(f'filter {filtername!r} not found')
def get_all_filters():
@@ -79,9 +79,9 @@ class CodeTagFilter(Filter):
Filter.__init__(self, **options)
tags = get_list_opt(options, 'codetags',
['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE'])
- self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
+ self.tag_re = re.compile(r'\b({})\b'.format('|'.join([
re.escape(tag) for tag in tags if tag
- ]))
+ ])))
def filter(self, lexer, stream):
regex = self.tag_re
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatter.py b/contrib/python/pip/pip/_vendor/pygments/formatter.py
index 3ca4892fa3..d2666037f7 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatter.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatter.py
@@ -4,7 +4,7 @@
Base formatter class.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -122,3 +122,8 @@ class Formatter:
# wrap the outfile in a StreamWriter
outfile = codecs.lookup(self.encoding)[3](outfile)
return self.format_unencoded(tokensource, outfile)
+
+ # Allow writing Formatter[str] or Formatter[bytes]. That's equivalent to
+ # Formatter. This helps when using third-party type stubs from typeshed.
+ def __class_getitem__(cls, name):
+ return cls
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/__init__.py b/contrib/python/pip/pip/_vendor/pygments/formatters/__init__.py
index 6abb45ac71..f19e9931f0 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/__init__.py
@@ -4,7 +4,7 @@
Pygments formatters.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -77,7 +77,7 @@ def get_formatter_by_name(_alias, **options):
"""
cls = find_formatter_class(_alias)
if cls is None:
- raise ClassNotFound("no formatter found for name %r" % _alias)
+ raise ClassNotFound(f"no formatter found for name {_alias!r}")
return cls(**options)
@@ -103,17 +103,16 @@ def load_formatter_from_file(filename, formattername="CustomFormatter", **option
exec(f.read(), custom_namespace)
# Retrieve the class `formattername` from that namespace
if formattername not in custom_namespace:
- raise ClassNotFound('no valid %s class found in %s' %
- (formattername, filename))
+            raise ClassNotFound(f'no valid {formattername} class found in {filename}')
formatter_class = custom_namespace[formattername]
# And finally instantiate it with the options
return formatter_class(**options)
except OSError as err:
- raise ClassNotFound('cannot read %s: %s' % (filename, err))
+        raise ClassNotFound(f'cannot read {filename}: {err}')
except ClassNotFound:
raise
except Exception as err:
- raise ClassNotFound('error when loading custom formatter: %s' % err)
+ raise ClassNotFound(f'error when loading custom formatter: {err}')
def get_formatter_for_filename(fn, **options):
@@ -135,7 +134,7 @@ def get_formatter_for_filename(fn, **options):
for filename in cls.filenames:
if _fn_matches(fn, filename):
return cls(**options)
- raise ClassNotFound("no formatter found for file name %r" % fn)
+ raise ClassNotFound(f"no formatter found for file name {fn!r}")
class _automodule(types.ModuleType):
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/bbcode.py b/contrib/python/pip/pip/_vendor/pygments/formatters/bbcode.py
index c4db8f4ef2..5a05bd961d 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/bbcode.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/bbcode.py
@@ -4,7 +4,7 @@
BBcode formatter.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -60,7 +60,7 @@ class BBCodeFormatter(Formatter):
for ttype, ndef in self.style:
start = end = ''
if ndef['color']:
- start += '[color=#%s]' % ndef['color']
+ start += '[color=#{}]'.format(ndef['color'])
end = '[/color]' + end
if ndef['bold']:
start += '[b]'
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/groff.py b/contrib/python/pip/pip/_vendor/pygments/formatters/groff.py
index 30a528e668..5c8a958f8d 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/groff.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/groff.py
@@ -4,7 +4,7 @@
Formatter for groff output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -63,7 +63,7 @@ class GroffFormatter(Formatter):
for ttype, ndef in self.style:
start = end = ''
if ndef['color']:
- start += '\\m[%s]' % ndef['color']
+ start += '\\m[{}]'.format(ndef['color'])
end = '\\m[]' + end
if ndef['bold']:
start += bold
@@ -72,7 +72,7 @@ class GroffFormatter(Formatter):
start += italic
end = regular + end
if ndef['bgcolor']:
- start += '\\M[%s]' % ndef['bgcolor']
+ start += '\\M[{}]'.format(ndef['bgcolor'])
end = '\\M[]' + end
self.styles[ttype] = start, end
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/html.py b/contrib/python/pip/pip/_vendor/pygments/formatters/html.py
index 0cadcb228e..7aa938f511 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/html.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/html.py
@@ -4,7 +4,7 @@
Formatter for HTML output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -62,7 +62,7 @@ def _get_ttype_class(ttype):
CSSFILE_TEMPLATE = '''\
/*
generated by Pygments <https://pygments.org/>
-Copyright 2006-2023 by the Pygments team.
+Copyright 2006-2024 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
*/
%(styledefs)s
@@ -73,7 +73,7 @@ DOC_HEADER = '''\
"http://www.w3.org/TR/html4/strict.dtd">
<!--
generated by Pygments <https://pygments.org/>
-Copyright 2006-2023 by the Pygments team.
+Copyright 2006-2024 by the Pygments team.
Licensed under the BSD license, see LICENSE for details.
-->
<html>
@@ -488,7 +488,7 @@ class HtmlFormatter(Formatter):
name = self._get_css_class(ttype)
style = ''
if ndef['color']:
- style += 'color: %s; ' % webify(ndef['color'])
+ style += 'color: {}; '.format(webify(ndef['color']))
if ndef['bold']:
style += 'font-weight: bold; '
if ndef['italic']:
@@ -496,9 +496,9 @@ class HtmlFormatter(Formatter):
if ndef['underline']:
style += 'text-decoration: underline; '
if ndef['bgcolor']:
- style += 'background-color: %s; ' % webify(ndef['bgcolor'])
+ style += 'background-color: {}; '.format(webify(ndef['bgcolor']))
if ndef['border']:
- style += 'border: 1px solid %s; ' % webify(ndef['border'])
+ style += 'border: 1px solid {}; '.format(webify(ndef['border']))
if style:
t2c[ttype] = name
# save len(ttype) to enable ordering the styles by
@@ -530,7 +530,7 @@ class HtmlFormatter(Formatter):
styles.sort()
lines = [
- '%s { %s } /* %s */' % (prefix(cls), style, repr(ttype)[6:])
+ f'{prefix(cls)} {{ {style} }} /* {repr(ttype)[6:]} */'
for (level, ttype, cls, style) in styles
]
@@ -548,24 +548,24 @@ class HtmlFormatter(Formatter):
if Text in self.ttype2class:
text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
lines.insert(
- 0, '%s{ background: %s;%s }' % (
+ 0, '{}{{ background: {};{} }}'.format(
prefix(''), bg_color, text_style
)
)
if hl_color is not None:
lines.insert(
- 0, '%s { background-color: %s }' % (prefix('hll'), hl_color)
+ 0, '{} {{ background-color: {} }}'.format(prefix('hll'), hl_color)
)
return lines
def get_linenos_style_defs(self):
lines = [
- 'pre { %s }' % self._pre_style,
- 'td.linenos .normal { %s }' % self._linenos_style,
- 'span.linenos { %s }' % self._linenos_style,
- 'td.linenos .special { %s }' % self._linenos_special_style,
- 'span.linenos.special { %s }' % self._linenos_special_style,
+ f'pre {{ {self._pre_style} }}',
+ f'td.linenos .normal {{ {self._linenos_style} }}',
+ f'span.linenos {{ {self._linenos_style} }}',
+ f'td.linenos .special {{ {self._linenos_special_style} }}',
+ f'span.linenos.special {{ {self._linenos_special_style} }}',
]
return lines
@@ -594,17 +594,15 @@ class HtmlFormatter(Formatter):
@property
def _linenos_style(self):
- return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % (
- self.style.line_number_color,
- self.style.line_number_background_color
- )
+ color = self.style.line_number_color
+ background_color = self.style.line_number_background_color
+ return f'color: {color}; background-color: {background_color}; padding-left: 5px; padding-right: 5px;'
@property
def _linenos_special_style(self):
- return 'color: %s; background-color: %s; padding-left: 5px; padding-right: 5px;' % (
- self.style.line_number_special_color,
- self.style.line_number_special_background_color
- )
+ color = self.style.line_number_special_color
+ background_color = self.style.line_number_special_background_color
+ return f'color: {color}; background-color: {background_color}; padding-left: 5px; padding-right: 5px;'
def _decodeifneeded(self, value):
if isinstance(value, bytes):
@@ -685,9 +683,9 @@ class HtmlFormatter(Formatter):
if nocls:
if special_line:
- style = ' style="%s"' % self._linenos_special_style
+ style = f' style="{self._linenos_special_style}"'
else:
- style = ' style="%s"' % self._linenos_style
+ style = f' style="{self._linenos_style}"'
else:
if special_line:
style = ' class="special"'
@@ -695,7 +693,7 @@ class HtmlFormatter(Formatter):
style = ' class="normal"'
if style:
- line = '<span%s>%s</span>' % (style, line)
+ line = f'<span{style}>{line}</span>'
lines.append(line)
@@ -744,9 +742,9 @@ class HtmlFormatter(Formatter):
if nocls:
if special_line:
- style = ' style="%s"' % self._linenos_special_style
+ style = f' style="{self._linenos_special_style}"'
else:
- style = ' style="%s"' % self._linenos_style
+ style = f' style="{self._linenos_style}"'
else:
if special_line:
style = ' class="linenos special"'
@@ -754,7 +752,7 @@ class HtmlFormatter(Formatter):
style = ' class="linenos"'
if style:
- linenos = '<span%s>%s</span>' % (style, line)
+ linenos = f'<span{style}>{line}</span>'
else:
linenos = line
@@ -791,13 +789,13 @@ class HtmlFormatter(Formatter):
style = []
if (self.noclasses and not self.nobackground and
self.style.background_color is not None):
- style.append('background: %s' % (self.style.background_color,))
+ style.append(f'background: {self.style.background_color}')
if self.cssstyles:
style.append(self.cssstyles)
style = '; '.join(style)
- yield 0, ('<div' + (self.cssclass and ' class="%s"' % self.cssclass) +
- (style and (' style="%s"' % style)) + '>')
+ yield 0, ('<div' + (self.cssclass and f' class="{self.cssclass}"') +
+ (style and (f' style="{style}"')) + '>')
yield from inner
yield 0, '</div>\n'
@@ -814,7 +812,7 @@ class HtmlFormatter(Formatter):
# the empty span here is to keep leading empty lines from being
# ignored by HTML parsers
- yield 0, ('<pre' + (style and ' style="%s"' % style) + '><span></span>')
+ yield 0, ('<pre' + (style and f' style="{style}"') + '><span></span>')
yield from inner
yield 0, '</pre>'
@@ -843,18 +841,18 @@ class HtmlFormatter(Formatter):
try:
cspan = self.span_element_openers[ttype]
except KeyError:
- title = ' title="%s"' % '.'.join(ttype) if self.debug_token_types else ''
+ title = ' title="{}"'.format('.'.join(ttype)) if self.debug_token_types else ''
if nocls:
css_style = self._get_css_inline_styles(ttype)
if css_style:
css_style = self.class2style[css_style][0]
- cspan = '<span style="%s"%s>' % (css_style, title)
+ cspan = f'<span style="{css_style}"{title}>'
else:
cspan = ''
else:
css_class = self._get_css_classes(ttype)
if css_class:
- cspan = '<span class="%s"%s>' % (css_class, title)
+ cspan = f'<span class="{css_class}"{title}>'
else:
cspan = ''
self.span_element_openers[ttype] = cspan
@@ -927,11 +925,10 @@ class HtmlFormatter(Formatter):
if self.noclasses:
style = ''
if self.style.highlight_color is not None:
- style = (' style="background-color: %s"' %
- (self.style.highlight_color,))
- yield 1, '<span%s>%s</span>' % (style, value)
+ style = (f' style="background-color: {self.style.highlight_color}"')
+ yield 1, f'<span{style}>{value}</span>'
else:
- yield 1, '<span class="hll">%s</span>' % value
+ yield 1, f'<span class="hll">{value}</span>'
else:
yield 1, value
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/img.py b/contrib/python/pip/pip/_vendor/pygments/formatters/img.py
index 9e66b66916..7542cfad9d 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/img.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/img.py
@@ -4,7 +4,7 @@
Formatter for Pixmap output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import os
@@ -90,7 +90,7 @@ class FontManager:
self._create_nix()
def _get_nix_font_path(self, name, style):
- proc = subprocess.Popen(['fc-list', "%s:style=%s" % (name, style), 'file'],
+ proc = subprocess.Popen(['fc-list', f"{name}:style={style}", 'file'],
stdout=subprocess.PIPE, stderr=None)
stdout, _ = proc.communicate()
if proc.returncode == 0:
@@ -110,8 +110,7 @@ class FontManager:
self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
break
else:
- raise FontNotFound('No usable fonts named: "%s"' %
- self.font_name)
+ raise FontNotFound(f'No usable fonts named: "{self.font_name}"')
for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
for stylename in STYLES[style]:
path = self._get_nix_font_path(self.font_name, stylename)
@@ -142,8 +141,7 @@ class FontManager:
self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
break
else:
- raise FontNotFound('No usable fonts named: "%s"' %
- self.font_name)
+ raise FontNotFound(f'No usable fonts named: "{self.font_name}"')
for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
for stylename in STYLES[style]:
path = self._get_mac_font_path(font_map, self.font_name, stylename)
@@ -160,15 +158,14 @@ class FontManager:
for suffix in ('', ' (TrueType)'):
for style in styles:
try:
- valname = '%s%s%s' % (basename, style and ' '+style, suffix)
+ valname = '{}{}{}'.format(basename, style and ' '+style, suffix)
val, _ = _winreg.QueryValueEx(key, valname)
return val
except OSError:
continue
else:
if fail:
- raise FontNotFound('Font %s (%s) not found in registry' %
- (basename, styles[0]))
+ raise FontNotFound(f'Font {basename} ({styles[0]}) not found in registry')
return None
def _create_win(self):
@@ -633,7 +630,11 @@ class ImageFormatter(Formatter):
fill=self.hl_color)
for pos, value, font, text_fg, text_bg in self.drawables:
if text_bg:
- text_size = draw.textsize(text=value, font=font)
+ # see deprecations https://pillow.readthedocs.io/en/stable/releasenotes/9.2.0.html#font-size-and-offset-methods
+ if hasattr(draw, 'textsize'):
+ text_size = draw.textsize(text=value, font=font)
+ else:
+ text_size = font.getbbox(value)[2:]
draw.rectangle([pos[0], pos[1], pos[0] + text_size[0], pos[1] + text_size[1]], fill=text_bg)
draw.text(pos, value, font=font, fill=text_fg)
im.save(outfile, self.image_format.upper())
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/irc.py b/contrib/python/pip/pip/_vendor/pygments/formatters/irc.py
index 2144d439e0..468c287605 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/irc.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/irc.py
@@ -4,7 +4,7 @@
Formatter for IRC output
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/latex.py b/contrib/python/pip/pip/_vendor/pygments/formatters/latex.py
index ca539b40f6..0ec9089b93 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/latex.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/latex.py
@@ -4,7 +4,7 @@
Formatter for LaTeX fancyvrb output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -23,21 +23,21 @@ def escape_tex(text, commandprefix):
return text.replace('\\', '\x00'). \
replace('{', '\x01'). \
replace('}', '\x02'). \
- replace('\x00', r'\%sZbs{}' % commandprefix). \
- replace('\x01', r'\%sZob{}' % commandprefix). \
- replace('\x02', r'\%sZcb{}' % commandprefix). \
- replace('^', r'\%sZca{}' % commandprefix). \
- replace('_', r'\%sZus{}' % commandprefix). \
- replace('&', r'\%sZam{}' % commandprefix). \
- replace('<', r'\%sZlt{}' % commandprefix). \
- replace('>', r'\%sZgt{}' % commandprefix). \
- replace('#', r'\%sZsh{}' % commandprefix). \
- replace('%', r'\%sZpc{}' % commandprefix). \
- replace('$', r'\%sZdl{}' % commandprefix). \
- replace('-', r'\%sZhy{}' % commandprefix). \
- replace("'", r'\%sZsq{}' % commandprefix). \
- replace('"', r'\%sZdq{}' % commandprefix). \
- replace('~', r'\%sZti{}' % commandprefix)
+ replace('\x00', rf'\{commandprefix}Zbs{{}}'). \
+ replace('\x01', rf'\{commandprefix}Zob{{}}'). \
+ replace('\x02', rf'\{commandprefix}Zcb{{}}'). \
+ replace('^', rf'\{commandprefix}Zca{{}}'). \
+ replace('_', rf'\{commandprefix}Zus{{}}'). \
+ replace('&', rf'\{commandprefix}Zam{{}}'). \
+ replace('<', rf'\{commandprefix}Zlt{{}}'). \
+ replace('>', rf'\{commandprefix}Zgt{{}}'). \
+ replace('#', rf'\{commandprefix}Zsh{{}}'). \
+ replace('%', rf'\{commandprefix}Zpc{{}}'). \
+ replace('$', rf'\{commandprefix}Zdl{{}}'). \
+ replace('-', rf'\{commandprefix}Zhy{{}}'). \
+ replace("'", rf'\{commandprefix}Zsq{{}}'). \
+ replace('"', rf'\{commandprefix}Zdq{{}}'). \
+ replace('~', rf'\{commandprefix}Zti{{}}')
DOC_TEMPLATE = r'''
@@ -304,17 +304,14 @@ class LatexFormatter(Formatter):
if ndef['mono']:
cmndef += r'\let\$$@ff=\textsf'
if ndef['color']:
- cmndef += (r'\def\$$@tc##1{\textcolor[rgb]{%s}{##1}}' %
- rgbcolor(ndef['color']))
+ cmndef += (r'\def\$$@tc##1{{\textcolor[rgb]{{{}}}{{##1}}}}'.format(rgbcolor(ndef['color'])))
if ndef['border']:
- cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{\string -\fboxrule}'
- r'\fcolorbox[rgb]{%s}{%s}{\strut ##1}}}' %
- (rgbcolor(ndef['border']),
+ cmndef += (r'\def\$$@bc##1{{{{\setlength{{\fboxsep}}{{\string -\fboxrule}}'
+ r'\fcolorbox[rgb]{{{}}}{{{}}}{{\strut ##1}}}}}}'.format(rgbcolor(ndef['border']),
rgbcolor(ndef['bgcolor'])))
elif ndef['bgcolor']:
- cmndef += (r'\def\$$@bc##1{{\setlength{\fboxsep}{0pt}'
- r'\colorbox[rgb]{%s}{\strut ##1}}}' %
- rgbcolor(ndef['bgcolor']))
+ cmndef += (r'\def\$$@bc##1{{{{\setlength{{\fboxsep}}{{0pt}}'
+ r'\colorbox[rgb]{{{}}}{{\strut ##1}}}}}}'.format(rgbcolor(ndef['bgcolor'])))
if cmndef == '':
continue
cmndef = cmndef.replace('$$', cp)
@@ -329,7 +326,7 @@ class LatexFormatter(Formatter):
cp = self.commandprefix
styles = []
for name, definition in self.cmd2def.items():
- styles.append(r'\@namedef{%s@tok@%s}{%s}' % (cp, name, definition))
+ styles.append(rf'\@namedef{{{cp}@tok@{name}}}{{{definition}}}')
return STYLE_TEMPLATE % {'cp': self.commandprefix,
'styles': '\n'.join(styles)}
@@ -410,10 +407,10 @@ class LatexFormatter(Formatter):
spl = value.split('\n')
for line in spl[:-1]:
if line:
- outfile.write("\\%s{%s}{%s}" % (cp, styleval, line))
+ outfile.write(f"\\{cp}{{{styleval}}}{{{line}}}")
outfile.write('\n')
if spl[-1]:
- outfile.write("\\%s{%s}{%s}" % (cp, styleval, spl[-1]))
+ outfile.write(f"\\{cp}{{{styleval}}}{{{spl[-1]}}}")
else:
outfile.write(value)
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/other.py b/contrib/python/pip/pip/_vendor/pygments/formatters/other.py
index 990ead4802..de8d9dcf89 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/other.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/other.py
@@ -4,7 +4,7 @@
Other formatters: NullFormatter, RawTokenFormatter.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -74,8 +74,7 @@ class RawTokenFormatter(Formatter):
try:
colorize(self.error_color, '')
except KeyError:
- raise ValueError("Invalid color %r specified" %
- self.error_color)
+ raise ValueError(f"Invalid color {self.error_color!r} specified")
def format(self, tokensource, outfile):
try:
@@ -147,7 +146,7 @@ class TestcaseFormatter(Formatter):
outbuf = []
for ttype, value in tokensource:
rawbuf.append(value)
- outbuf.append('%s(%s, %r),\n' % (indentation, ttype, value))
+ outbuf.append(f'{indentation}({ttype}, {value!r}),\n')
before = TESTCASE_BEFORE % (''.join(rawbuf),)
during = ''.join(outbuf)
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/pangomarkup.py b/contrib/python/pip/pip/_vendor/pygments/formatters/pangomarkup.py
index 6bb325d078..dfed53ab76 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/pangomarkup.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/pangomarkup.py
@@ -4,7 +4,7 @@
Formatter for Pango markup output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -45,7 +45,7 @@ class PangoMarkupFormatter(Formatter):
start = ''
end = ''
if style['color']:
- start += '<span fgcolor="#%s">' % style['color']
+ start += '<span fgcolor="#{}">'.format(style['color'])
end = '</span>' + end
if style['bold']:
start += '<b>'
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/rtf.py b/contrib/python/pip/pip/_vendor/pygments/formatters/rtf.py
index 125189c6fa..eca2a41a1c 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/rtf.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/rtf.py
@@ -4,12 +4,14 @@
A formatter that generates RTF files.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from collections import OrderedDict
from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.util import get_int_opt, surrogatepair
+from pip._vendor.pygments.style import _ansimap
+from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, surrogatepair
__all__ = ['RtfFormatter']
@@ -42,6 +44,59 @@ class RtfFormatter(Formatter):
default is 24 half-points, giving a size 12 font.
.. versionadded:: 2.0
+
+ `linenos`
+ Turn on line numbering (default: ``False``).
+
+ .. versionadded:: 2.18
+
+ `lineno_fontsize`
+ Font size for line numbers. Size is specified in half points
+ (default: `fontsize`).
+
+ .. versionadded:: 2.18
+
+ `lineno_padding`
+ Number of spaces between the (inline) line numbers and the
+ source code (default: ``2``).
+
+ .. versionadded:: 2.18
+
+ `linenostart`
+ The line number for the first line (default: ``1``).
+
+ .. versionadded:: 2.18
+
+ `linenostep`
+ If set to a number n > 1, only every nth line number is printed.
+
+ .. versionadded:: 2.18
+
+ `lineno_color`
+ Color for line numbers specified as a hex triplet, e.g. ``'5e5e5e'``.
+ Defaults to the style's line number color if it is a hex triplet,
+ otherwise ansi bright black.
+
+ .. versionadded:: 2.18
+
+ `hl_lines`
+ Specify a list of lines to be highlighted, as line numbers separated by
+ spaces, e.g. ``'3 7 8'``. The line numbers are relative to the input
+ (i.e. the first line is line 1) unless `hl_linenostart` is set.
+
+ .. versionadded:: 2.18
+
+ `hl_color`
+ Color for highlighting the lines specified in `hl_lines`, specified as
+ a hex triplet (default: style's `highlight_color`).
+
+ .. versionadded:: 2.18
+
+ `hl_linenostart`
+ If set to ``True`` line numbers in `hl_lines` are specified
+ relative to `linenostart` (default ``False``).
+
+ .. versionadded:: 2.18
"""
name = 'RTF'
aliases = ['rtf']
@@ -62,6 +117,40 @@ class RtfFormatter(Formatter):
Formatter.__init__(self, **options)
self.fontface = options.get('fontface') or ''
self.fontsize = get_int_opt(options, 'fontsize', 0)
+ self.linenos = get_bool_opt(options, 'linenos', False)
+ self.lineno_fontsize = get_int_opt(options, 'lineno_fontsize',
+ self.fontsize)
+ self.lineno_padding = get_int_opt(options, 'lineno_padding', 2)
+ self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
+ self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
+ self.hl_linenostart = get_bool_opt(options, 'hl_linenostart', False)
+
+ self.hl_color = options.get('hl_color', '')
+ if not self.hl_color:
+ self.hl_color = self.style.highlight_color
+
+ self.hl_lines = []
+ for lineno in get_list_opt(options, 'hl_lines', []):
+ try:
+ lineno = int(lineno)
+ if self.hl_linenostart:
+ lineno = lineno - self.linenostart + 1
+ self.hl_lines.append(lineno)
+ except ValueError:
+ pass
+
+ self.lineno_color = options.get('lineno_color', '')
+ if not self.lineno_color:
+ if self.style.line_number_color == 'inherit':
+ # style color is the css value 'inherit'
+ # default to ansi bright-black
+ self.lineno_color = _ansimap['ansibrightblack']
+ else:
+ # style color is assumed to be a hex triplet as other
+ # colors in pygments/style.py
+ self.lineno_color = self.style.line_number_color
+
+ self.color_mapping = self._create_color_mapping()
def _escape(self, text):
return text.replace('\\', '\\\\') \
@@ -90,43 +179,145 @@ class RtfFormatter(Formatter):
# Force surrogate pairs
buf.append('{\\u%d}{\\u%d}' % surrogatepair(cn))
- return ''.join(buf).replace('\n', '\\par\n')
+ return ''.join(buf).replace('\n', '\\par')
- def format_unencoded(self, tokensource, outfile):
- # rtf 1.8 header
- outfile.write('{\\rtf1\\ansi\\uc0\\deff0'
- '{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
- '{\\colortbl;' % (self.fontface and
- ' ' + self._escape(self.fontface) or
- ''))
-
- # convert colors and save them in a mapping to access them later.
- color_mapping = {}
+ @staticmethod
+ def hex_to_rtf_color(hex_color):
+ if hex_color[0] == "#":
+ hex_color = hex_color[1:]
+
+ return '\\red%d\\green%d\\blue%d;' % (
+ int(hex_color[0:2], 16),
+ int(hex_color[2:4], 16),
+ int(hex_color[4:6], 16)
+ )
+
+ def _split_tokens_on_newlines(self, tokensource):
+ """
+ Split tokens containing newline characters into multiple token
+ each representing a line of the input file. Needed for numbering
+ lines of e.g. multiline comments.
+ """
+ for ttype, value in tokensource:
+ if value == '\n':
+ yield (ttype, value)
+ elif "\n" in value:
+ lines = value.split("\n")
+ for line in lines[:-1]:
+ yield (ttype, line+"\n")
+ if lines[-1]:
+ yield (ttype, lines[-1])
+ else:
+ yield (ttype, value)
+
+ def _create_color_mapping(self):
+ """
+ Create a mapping of style hex colors to index/offset in
+ the RTF color table.
+ """
+ color_mapping = OrderedDict()
offset = 1
+
+ if self.linenos:
+ color_mapping[self.lineno_color] = offset
+ offset += 1
+
+ if self.hl_lines:
+ color_mapping[self.hl_color] = offset
+ offset += 1
+
for _, style in self.style:
for color in style['color'], style['bgcolor'], style['border']:
if color and color not in color_mapping:
color_mapping[color] = offset
- outfile.write('\\red%d\\green%d\\blue%d;' % (
- int(color[0:2], 16),
- int(color[2:4], 16),
- int(color[4:6], 16)
- ))
offset += 1
- outfile.write('}\\f0 ')
+
+ return color_mapping
+
+ @property
+ def _lineno_template(self):
+ if self.lineno_fontsize != self.fontsize:
+ return '{{\\fs{} \\cf{} %s{}}}'.format(self.lineno_fontsize,
+ self.color_mapping[self.lineno_color],
+ " " * self.lineno_padding)
+
+ return '{{\\cf{} %s{}}}'.format(self.color_mapping[self.lineno_color],
+ " " * self.lineno_padding)
+
+ @property
+ def _hl_open_str(self):
+ return rf'{{\highlight{self.color_mapping[self.hl_color]} '
+
+ @property
+ def _rtf_header(self):
+ lines = []
+ # rtf 1.8 header
+ lines.append('{\\rtf1\\ansi\\uc0\\deff0'
+ '{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
+ % (self.fontface and ' '
+ + self._escape(self.fontface) or ''))
+
+ # color table
+ lines.append('{\\colortbl;')
+ for color, _ in self.color_mapping.items():
+ lines.append(self.hex_to_rtf_color(color))
+ lines.append('}')
+
+ # font and fontsize
+ lines.append('\\f0\\sa0')
if self.fontsize:
- outfile.write('\\fs%d' % self.fontsize)
+ lines.append('\\fs%d' % self.fontsize)
+
+ # ensure Libre Office Writer imports and renders consecutive
+ # space characters the same width, needed for line numbering.
+ # https://bugs.documentfoundation.org/show_bug.cgi?id=144050
+ lines.append('\\dntblnsbdb')
+
+ return lines
+
+ def format_unencoded(self, tokensource, outfile):
+ for line in self._rtf_header:
+ outfile.write(line + "\n")
+
+ tokensource = self._split_tokens_on_newlines(tokensource)
+
+ # first pass of tokens to count lines, needed for line numbering
+ if self.linenos:
+ line_count = 0
+ tokens = [] # for copying the token source generator
+ for ttype, value in tokensource:
+ tokens.append((ttype, value))
+ if value.endswith("\n"):
+ line_count += 1
+
+ # width of line number strings (for padding with spaces)
+ linenos_width = len(str(line_count+self.linenostart-1))
+
+ tokensource = tokens
# highlight stream
+ lineno = 1
+ start_new_line = True
for ttype, value in tokensource:
+ if start_new_line and lineno in self.hl_lines:
+ outfile.write(self._hl_open_str)
+
+ if start_new_line and self.linenos:
+ if (lineno-self.linenostart+1)%self.linenostep == 0:
+ current_lineno = lineno + self.linenostart - 1
+ lineno_str = str(current_lineno).rjust(linenos_width)
+ else:
+ lineno_str = "".rjust(linenos_width)
+ outfile.write(self._lineno_template % lineno_str)
+
while not self.style.styles_token(ttype) and ttype.parent:
ttype = ttype.parent
style = self.style.style_for_token(ttype)
buf = []
if style['bgcolor']:
- buf.append('\\cb%d' % color_mapping[style['bgcolor']])
+ buf.append('\\cb%d' % self.color_mapping[style['bgcolor']])
if style['color']:
- buf.append('\\cf%d' % color_mapping[style['color']])
+ buf.append('\\cf%d' % self.color_mapping[style['color']])
if style['bold']:
buf.append('\\b')
if style['italic']:
@@ -135,12 +326,24 @@ class RtfFormatter(Formatter):
buf.append('\\ul')
if style['border']:
buf.append('\\chbrdr\\chcfpat%d' %
- color_mapping[style['border']])
+ self.color_mapping[style['border']])
start = ''.join(buf)
if start:
- outfile.write('{%s ' % start)
+ outfile.write(f'{{{start} ')
outfile.write(self._escape_text(value))
if start:
outfile.write('}')
+ start_new_line = False
+
+ # complete line of input
+ if value.endswith("\n"):
+ # close line highlighting
+ if lineno in self.hl_lines:
+ outfile.write('}')
+ # newline in RTF file after closing }
+ outfile.write("\n")
+
+ start_new_line = True
+ lineno += 1
- outfile.write('}')
+ outfile.write('}\n')
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/svg.py b/contrib/python/pip/pip/_vendor/pygments/formatters/svg.py
index a8727ed859..d3e018ffd8 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/svg.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/svg.py
@@ -4,7 +4,7 @@
Formatter for SVG output.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -60,11 +60,11 @@ class SvgFormatter(Formatter):
`linenostep`
If set to a number n > 1, only every nth line number is printed.
-
+
`linenowidth`
Maximum width devoted to line numbers (default: ``3*ystep``, sufficient
- for up to 4-digit line numbers. Increase width for longer code blocks).
-
+ for up to 4-digit line numbers. Increase width for longer code blocks).
+
`xoffset`
Starting offset in X direction, defaults to ``0``.
@@ -97,10 +97,11 @@ class SvgFormatter(Formatter):
self.fontsize = options.get('fontsize', '14px')
self.xoffset = get_int_opt(options, 'xoffset', 0)
fs = self.fontsize.strip()
- if fs.endswith('px'): fs = fs[:-2].strip()
+ if fs.endswith('px'):
+ fs = fs[:-2].strip()
try:
int_fs = int(fs)
- except:
+ except ValueError:
int_fs = 20
self.yoffset = get_int_opt(options, 'yoffset', int_fs)
self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
@@ -122,30 +123,27 @@ class SvgFormatter(Formatter):
y = self.yoffset
if not self.nowrap:
if self.encoding:
- outfile.write('<?xml version="1.0" encoding="%s"?>\n' %
- self.encoding)
+ outfile.write(f'<?xml version="1.0" encoding="{self.encoding}"?>\n')
else:
outfile.write('<?xml version="1.0"?>\n')
outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
'"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
'svg10.dtd">\n')
outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
- outfile.write('<g font-family="%s" font-size="%s">\n' %
- (self.fontfamily, self.fontsize))
-
- counter = self.linenostart
+ outfile.write(f'<g font-family="{self.fontfamily}" font-size="{self.fontsize}">\n')
+
+ counter = self.linenostart
counter_step = self.linenostep
counter_style = self._get_style(Comment)
line_x = x
-
+
if self.linenos:
if counter % counter_step == 0:
- outfile.write('<text x="%s" y="%s" %s text-anchor="end">%s</text>' %
- (x+self.linenowidth,y,counter_style,counter))
+ outfile.write(f'<text x="{x+self.linenowidth}" y="{y}" {counter_style} text-anchor="end">{counter}</text>')
line_x += self.linenowidth + self.ystep
counter += 1
- outfile.write('<text x="%s" y="%s" xml:space="preserve">' % (line_x, y))
+ outfile.write(f'<text x="{line_x}" y="{y}" xml:space="preserve">')
for ttype, value in tokensource:
style = self._get_style(ttype)
tspan = style and '<tspan' + style + '>' or ''
@@ -159,11 +157,10 @@ class SvgFormatter(Formatter):
y += self.ystep
outfile.write('</text>\n')
if self.linenos and counter % counter_step == 0:
- outfile.write('<text x="%s" y="%s" text-anchor="end" %s>%s</text>' %
- (x+self.linenowidth,y,counter_style,counter))
-
+ outfile.write(f'<text x="{x+self.linenowidth}" y="{y}" text-anchor="end" {counter_style}>{counter}</text>')
+
counter += 1
- outfile.write('<text x="%s" y="%s" ' 'xml:space="preserve">' % (line_x,y))
+ outfile.write(f'<text x="{line_x}" y="{y}" ' 'xml:space="preserve">')
outfile.write(tspan + parts[-1] + tspanend)
outfile.write('</text>')
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/terminal.py b/contrib/python/pip/pip/_vendor/pygments/formatters/terminal.py
index abb8770811..51b902d3e2 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/terminal.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/terminal.py
@@ -4,7 +4,7 @@
Formatter for terminal output with ANSI sequences.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/terminal256.py b/contrib/python/pip/pip/_vendor/pygments/formatters/terminal256.py
index 0cfe5d1612..5f254051a8 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/terminal256.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/terminal256.py
@@ -10,7 +10,7 @@
Formatter version 1.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/lexer.py b/contrib/python/pip/pip/_vendor/pygments/lexer.py
index 26c5fb31ff..1348be5878 100644
--- a/contrib/python/pip/pip/_vendor/pygments/lexer.py
+++ b/contrib/python/pip/pip/_vendor/pygments/lexer.py
@@ -4,7 +4,7 @@
Base lexer classes.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -67,10 +67,12 @@ class Lexer(metaclass=LexerMeta):
:no-value:
.. autoattribute:: priority
- Lexers included in Pygments should have an additional attribute:
+ Lexers included in Pygments should have two additional attributes:
.. autoattribute:: url
:no-value:
+ .. autoattribute:: version_added
+ :no-value:
Lexers included in Pygments may have additional attributes:
@@ -130,9 +132,12 @@ class Lexer(metaclass=LexerMeta):
priority = 0
#: URL of the language specification/definition. Used in the Pygments
- #: documentation.
+ #: documentation. Set to an empty string to disable.
url = None
+ #: Version of Pygments in which the lexer was added.
+ version_added = None
+
#: Example file name. Relative to the ``tests/examplefiles`` directory.
#: This is used by the documentation generator to show an example.
_example = None
@@ -169,10 +174,9 @@ class Lexer(metaclass=LexerMeta):
def __repr__(self):
if self.options:
- return '<pygments.lexers.%s with %r>' % (self.__class__.__name__,
- self.options)
+ return f'<pygments.lexers.{self.__class__.__name__} with {self.options!r}>'
else:
- return '<pygments.lexers.%s>' % self.__class__.__name__
+ return f'<pygments.lexers.{self.__class__.__name__}>'
def add_filter(self, filter_, **options):
"""
@@ -508,7 +512,7 @@ class RegexLexerMeta(LexerMeta):
def _process_token(cls, token):
"""Preprocess the token component of a token definition."""
assert type(token) is _TokenType or callable(token), \
- 'token type must be simple type or callable, not %r' % (token,)
+ f'token type must be simple type or callable, not {token!r}'
return token
def _process_new_state(cls, new_state, unprocessed, processed):
@@ -524,14 +528,14 @@ class RegexLexerMeta(LexerMeta):
elif new_state[:5] == '#pop:':
return -int(new_state[5:])
else:
- assert False, 'unknown new state %r' % new_state
+ assert False, f'unknown new state {new_state!r}'
elif isinstance(new_state, combined):
# combine a new state from existing ones
tmp_state = '_tmp_%d' % cls._tmpname
cls._tmpname += 1
itokens = []
for istate in new_state:
- assert istate != new_state, 'circular state ref %r' % istate
+ assert istate != new_state, f'circular state ref {istate!r}'
itokens.extend(cls._process_state(unprocessed,
processed, istate))
processed[tmp_state] = itokens
@@ -544,12 +548,12 @@ class RegexLexerMeta(LexerMeta):
'unknown new state ' + istate
return new_state
else:
- assert False, 'unknown new state def %r' % new_state
+ assert False, f'unknown new state def {new_state!r}'
def _process_state(cls, unprocessed, processed, state):
"""Preprocess a single state definition."""
- assert type(state) is str, "wrong state name %r" % state
- assert state[0] != '#', "invalid state name %r" % state
+ assert isinstance(state, str), f"wrong state name {state!r}"
+ assert state[0] != '#', f"invalid state name {state!r}"
if state in processed:
return processed[state]
tokens = processed[state] = []
@@ -557,7 +561,7 @@ class RegexLexerMeta(LexerMeta):
for tdef in unprocessed[state]:
if isinstance(tdef, include):
# it's a state reference
- assert tdef != state, "circular state reference %r" % state
+ assert tdef != state, f"circular state reference {state!r}"
tokens.extend(cls._process_state(unprocessed, processed,
str(tdef)))
continue
@@ -571,13 +575,12 @@ class RegexLexerMeta(LexerMeta):
tokens.append((re.compile('').match, None, new_state))
continue
- assert type(tdef) is tuple, "wrong rule def %r" % tdef
+ assert type(tdef) is tuple, f"wrong rule def {tdef!r}"
try:
rex = cls._process_regex(tdef[0], rflags, state)
except Exception as err:
- raise ValueError("uncompilable regex %r in state %r of %r: %s" %
- (tdef[0], state, cls, err)) from err
+ raise ValueError(f"uncompilable regex {tdef[0]!r} in state {state!r} of {cls!r}: {err}") from err
token = cls._process_token(tdef[1])
@@ -738,7 +741,7 @@ class RegexLexer(Lexer, metaclass=RegexLexerMeta):
elif new_state == '#push':
statestack.append(statestack[-1])
else:
- assert False, "wrong state def: %r" % new_state
+ assert False, f"wrong state def: {new_state!r}"
statetokens = tokendefs[statestack[-1]]
break
else:
@@ -770,8 +773,7 @@ class LexerContext:
self.stack = stack or ['root']
def __repr__(self):
- return 'LexerContext(%r, %r, %r)' % (
- self.text, self.pos, self.stack)
+ return f'LexerContext({self.text!r}, {self.pos!r}, {self.stack!r})'
class ExtendedRegexLexer(RegexLexer):
@@ -826,7 +828,7 @@ class ExtendedRegexLexer(RegexLexer):
elif new_state == '#push':
ctx.stack.append(ctx.stack[-1])
else:
- assert False, "wrong state def: %r" % new_state
+ assert False, f"wrong state def: {new_state!r}"
statetokens = tokendefs[ctx.stack[-1]]
break
else:
diff --git a/contrib/python/pip/pip/_vendor/pygments/lexers/__init__.py b/contrib/python/pip/pip/_vendor/pygments/lexers/__init__.py
index 0c176dfbfd..ac88645a1b 100644
--- a/contrib/python/pip/pip/_vendor/pygments/lexers/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/lexers/__init__.py
@@ -4,7 +4,7 @@
Pygments lexers.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -93,7 +93,7 @@ def find_lexer_class_by_name(_alias):
.. versionadded:: 2.2
"""
if not _alias:
- raise ClassNotFound('no lexer for alias %r found' % _alias)
+ raise ClassNotFound(f'no lexer for alias {_alias!r} found')
# lookup builtin lexers
for module_name, name, aliases, _, _ in LEXERS.values():
if _alias.lower() in aliases:
@@ -104,7 +104,7 @@ def find_lexer_class_by_name(_alias):
for cls in find_plugin_lexers():
if _alias.lower() in cls.aliases:
return cls
- raise ClassNotFound('no lexer for alias %r found' % _alias)
+ raise ClassNotFound(f'no lexer for alias {_alias!r} found')
def get_lexer_by_name(_alias, **options):
@@ -117,7 +117,7 @@ def get_lexer_by_name(_alias, **options):
found.
"""
if not _alias:
- raise ClassNotFound('no lexer for alias %r found' % _alias)
+ raise ClassNotFound(f'no lexer for alias {_alias!r} found')
# lookup builtin lexers
for module_name, name, aliases, _, _ in LEXERS.values():
@@ -129,7 +129,7 @@ def get_lexer_by_name(_alias, **options):
for cls in find_plugin_lexers():
if _alias.lower() in cls.aliases:
return cls(**options)
- raise ClassNotFound('no lexer for alias %r found' % _alias)
+ raise ClassNotFound(f'no lexer for alias {_alias!r} found')
def load_lexer_from_file(filename, lexername="CustomLexer", **options):
@@ -154,17 +154,16 @@ def load_lexer_from_file(filename, lexername="CustomLexer", **options):
exec(f.read(), custom_namespace)
# Retrieve the class `lexername` from that namespace
if lexername not in custom_namespace:
- raise ClassNotFound('no valid %s class found in %s' %
- (lexername, filename))
+ raise ClassNotFound(f'no valid {lexername} class found in {filename}')
lexer_class = custom_namespace[lexername]
# And finally instantiate it with the options
return lexer_class(**options)
except OSError as err:
- raise ClassNotFound('cannot read %s: %s' % (filename, err))
+ raise ClassNotFound(f'cannot read {filename}: {err}')
except ClassNotFound:
raise
except Exception as err:
- raise ClassNotFound('error when loading custom lexer: %s' % err)
+ raise ClassNotFound(f'error when loading custom lexer: {err}')
def find_lexer_class_for_filename(_fn, code=None):
@@ -225,7 +224,7 @@ def get_lexer_for_filename(_fn, code=None, **options):
"""
res = find_lexer_class_for_filename(_fn, code)
if not res:
- raise ClassNotFound('no lexer for filename %r found' % _fn)
+ raise ClassNotFound(f'no lexer for filename {_fn!r} found')
return res(**options)
@@ -245,7 +244,7 @@ def get_lexer_for_mimetype(_mime, **options):
for cls in find_plugin_lexers():
if _mime in cls.mimetypes:
return cls(**options)
- raise ClassNotFound('no lexer for mimetype %r found' % _mime)
+ raise ClassNotFound(f'no lexer for mimetype {_mime!r} found')
def _iter_lexerclasses(plugins=True):
@@ -280,7 +279,7 @@ def guess_lexer_for_filename(_fn, _text, **options):
matching_lexers.add(lexer)
primary[lexer] = False
if not matching_lexers:
- raise ClassNotFound('no lexer for filename %r found' % fn)
+ raise ClassNotFound(f'no lexer for filename {fn!r} found')
if len(matching_lexers) == 1:
return matching_lexers.pop()(**options)
result = []
diff --git a/contrib/python/pip/pip/_vendor/pygments/lexers/_mapping.py b/contrib/python/pip/pip/_vendor/pygments/lexers/_mapping.py
index 1ff2b282a1..f3e5c460db 100644
--- a/contrib/python/pip/pip/_vendor/pygments/lexers/_mapping.py
+++ b/contrib/python/pip/pip/_vendor/pygments/lexers/_mapping.py
@@ -46,7 +46,7 @@ LEXERS = {
'BSTLexer': ('pip._vendor.pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
'BareLexer': ('pip._vendor.pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()),
'BaseMakefileLexer': ('pip._vendor.pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
- 'BashLexer': ('pip._vendor.pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', '.kshrc', 'kshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
+ 'BashLexer': ('pip._vendor.pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell', 'openrc'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', '.kshrc', 'kshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
'BashSessionLexer': ('pip._vendor.pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
'BatchLexer': ('pip._vendor.pygments.lexers.shell', 'Batchfile', ('batch', 'bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
'BddLexer': ('pip._vendor.pygments.lexers.bdd', 'Bdd', ('bdd',), ('*.feature',), ('text/x-bdd',)),
@@ -128,7 +128,7 @@ LEXERS = {
'DaxLexer': ('pip._vendor.pygments.lexers.dax', 'Dax', ('dax',), ('*.dax',), ()),
'DebianControlLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()),
'DelphiLexer': ('pip._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
- 'DesktopLexer': ('pip._vendor.pygments.lexers.configs', 'Desktop file', ('desktop',), ('*.desktop',), ()),
+ 'DesktopLexer': ('pip._vendor.pygments.lexers.configs', 'Desktop file', ('desktop',), ('*.desktop',), ('application/x-desktop',)),
'DevicetreeLexer': ('pip._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)),
'DgLexer': ('pip._vendor.pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
'DiffLexer': ('pip._vendor.pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
@@ -216,8 +216,8 @@ LEXERS = {
'HtmlSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
'HttpLexer': ('pip._vendor.pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
'HxmlLexer': ('pip._vendor.pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
- 'HyLexer': ('pip._vendor.pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
- 'HybrisLexer': ('pip._vendor.pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
+ 'HyLexer': ('pip._vendor.pygments.lexers.lisp', 'Hy', ('hylang', 'hy'), ('*.hy',), ('text/x-hy', 'application/x-hy')),
+ 'HybrisLexer': ('pip._vendor.pygments.lexers.scripting', 'Hybris', ('hybris',), ('*.hyb',), ('text/x-hybris', 'application/x-hybris')),
'IDLLexer': ('pip._vendor.pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
'IconLexer': ('pip._vendor.pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()),
'IdrisLexer': ('pip._vendor.pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
@@ -234,6 +234,7 @@ LEXERS = {
'JMESPathLexer': ('pip._vendor.pygments.lexers.jmespath', 'JMESPath', ('jmespath', 'jp'), ('*.jp',), ()),
'JSLTLexer': ('pip._vendor.pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)),
'JagsLexer': ('pip._vendor.pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
+ 'JanetLexer': ('pip._vendor.pygments.lexers.lisp', 'Janet', ('janet',), ('*.janet', '*.jdn'), ('text/x-janet', 'application/x-janet')),
'JasminLexer': ('pip._vendor.pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
'JavaLexer': ('pip._vendor.pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
'JavascriptDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Django/Jinja', ('javascript+django', 'js+django', 'javascript+jinja', 'js+jinja'), ('*.js.j2', '*.js.jinja2'), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
@@ -271,6 +272,7 @@ LEXERS = {
'LdaprcLexer': ('pip._vendor.pygments.lexers.ldap', 'LDAP configuration file', ('ldapconf', 'ldaprc'), ('.ldaprc', 'ldaprc', 'ldap.conf'), ('text/x-ldapconf',)),
'LdifLexer': ('pip._vendor.pygments.lexers.ldap', 'LDIF', ('ldif',), ('*.ldif',), ('text/x-ldif',)),
'Lean3Lexer': ('pip._vendor.pygments.lexers.lean', 'Lean', ('lean', 'lean3'), ('*.lean',), ('text/x-lean', 'text/x-lean3')),
+ 'Lean4Lexer': ('pip._vendor.pygments.lexers.lean', 'Lean4', ('lean4',), ('*.lean',), ('text/x-lean4',)),
'LessCssLexer': ('pip._vendor.pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
'LighttpdConfLexer': ('pip._vendor.pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)),
'LilyPondLexer': ('pip._vendor.pygments.lexers.lilypond', 'LilyPond', ('lilypond',), ('*.ly',), ()),
@@ -287,6 +289,7 @@ LEXERS = {
'LogosLexer': ('pip._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
'LogtalkLexer': ('pip._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
'LuaLexer': ('pip._vendor.pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
+ 'LuauLexer': ('pip._vendor.pygments.lexers.scripting', 'Luau', ('luau',), ('*.luau',), ()),
'MCFunctionLexer': ('pip._vendor.pygments.lexers.minecraft', 'MCFunction', ('mcfunction', 'mcf'), ('*.mcfunction',), ('text/mcfunction',)),
'MCSchemaLexer': ('pip._vendor.pygments.lexers.minecraft', 'MCSchema', ('mcschema',), ('*.mcschema',), ('text/mcschema',)),
'MIMELexer': ('pip._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
@@ -314,6 +317,7 @@ LEXERS = {
'ModelicaLexer': ('pip._vendor.pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
'Modula2Lexer': ('pip._vendor.pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
'MoinWikiLexer': ('pip._vendor.pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
+ 'MojoLexer': ('pip._vendor.pygments.lexers.mojo', 'Mojo', ('mojo', '🔥'), ('*.mojo', '*.🔥'), ('text/x-mojo', 'application/x-mojo')),
'MonkeyLexer': ('pip._vendor.pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
'MonteLexer': ('pip._vendor.pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
'MoonScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'MoonScript', ('moonscript', 'moon'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
@@ -362,6 +366,7 @@ LEXERS = {
'OpaLexer': ('pip._vendor.pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
'OpenEdgeLexer': ('pip._vendor.pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
'OpenScadLexer': ('pip._vendor.pygments.lexers.openscad', 'OpenSCAD', ('openscad',), ('*.scad',), ('application/x-openscad',)),
+ 'OrgLexer': ('pip._vendor.pygments.lexers.markup', 'Org Mode', ('org', 'orgmode', 'org-mode'), ('*.org',), ('text/org',)),
'OutputLexer': ('pip._vendor.pygments.lexers.special', 'Text output', ('output',), (), ()),
'PacmanConfLexer': ('pip._vendor.pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
'PanLexer': ('pip._vendor.pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
@@ -390,6 +395,7 @@ LEXERS = {
'ProcfileLexer': ('pip._vendor.pygments.lexers.procfile', 'Procfile', ('procfile',), ('Procfile',), ()),
'PrologLexer': ('pip._vendor.pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
'PromQLLexer': ('pip._vendor.pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
+ 'PromelaLexer': ('pip._vendor.pygments.lexers.c_like', 'Promela', ('promela',), ('*.pml', '*.prom', '*.prm', '*.promela', '*.pr', '*.pm'), ('text/x-promela',)),
'PropertiesLexer': ('pip._vendor.pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
'ProtoBufLexer': ('pip._vendor.pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
'PrqlLexer': ('pip._vendor.pygments.lexers.prql', 'PRQL', ('prql',), ('*.prql',), ('application/prql', 'application/x-prql')),
@@ -400,7 +406,7 @@ LEXERS = {
'PyPyLogLexer': ('pip._vendor.pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
'Python2Lexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
'Python2TracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
- 'PythonConsoleLexer': ('pip._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
+ 'PythonConsoleLexer': ('pip._vendor.pygments.lexers.python', 'Python console session', ('pycon', 'python-console'), (), ('text/x-python-doctest',)),
'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
'PythonTracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
'PythonUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'Python+UL4', ('py+ul4',), ('*.pyul4',), ()),
@@ -473,6 +479,7 @@ LEXERS = {
'SnobolLexer': ('pip._vendor.pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
'SnowballLexer': ('pip._vendor.pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
'SolidityLexer': ('pip._vendor.pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
+ 'SoongLexer': ('pip._vendor.pygments.lexers.soong', 'Soong', ('androidbp', 'bp', 'soong'), ('Android.bp',), ()),
'SophiaLexer': ('pip._vendor.pygments.lexers.sophia', 'Sophia', ('sophia',), ('*.aes',), ()),
'SourcePawnLexer': ('pip._vendor.pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
'SourcesListLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()),
@@ -494,6 +501,7 @@ LEXERS = {
'TAPLexer': ('pip._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
'TNTLexer': ('pip._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()),
'TOMLLexer': ('pip._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ('application/toml',)),
+ 'TactLexer': ('pip._vendor.pygments.lexers.tact', 'Tact', ('tact',), ('*.tact',), ()),
'Tads3Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
'TalLexer': ('pip._vendor.pygments.lexers.tal', 'Tal', ('tal', 'uxntal'), ('*.tal',), ('text/x-uxntal',)),
'TasmLexer': ('pip._vendor.pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
@@ -523,6 +531,7 @@ LEXERS = {
'TypoScriptCssDataLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
'TypoScriptHtmlDataLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
'TypoScriptLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)),
+ 'TypstLexer': ('pip._vendor.pygments.lexers.typst', 'Typst', ('typst',), ('*.typ',), ('text/x-typst',)),
'UL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'UL4', ('ul4',), ('*.ul4',), ()),
'UcodeLexer': ('pip._vendor.pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()),
'UniconLexer': ('pip._vendor.pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)),
@@ -537,7 +546,7 @@ LEXERS = {
'VGLLexer': ('pip._vendor.pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
'ValaLexer': ('pip._vendor.pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
'VbNetAspxLexer': ('pip._vendor.pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
- 'VbNetLexer': ('pip._vendor.pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet', 'lobas', 'oobas', 'sobas'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
+ 'VbNetLexer': ('pip._vendor.pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet', 'lobas', 'oobas', 'sobas', 'visual-basic', 'visualbasic'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
'VelocityHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
'VelocityLexer': ('pip._vendor.pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
'VelocityXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
diff --git a/contrib/python/pip/pip/_vendor/pygments/lexers/python.py b/contrib/python/pip/pip/_vendor/pygments/lexers/python.py
index e2ce58f5a1..b2d07f2080 100644
--- a/contrib/python/pip/pip/_vendor/pygments/lexers/python.py
+++ b/contrib/python/pip/pip/_vendor/pygments/lexers/python.py
@@ -4,15 +4,14 @@
Lexers for Python and related languages.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
-import re
import keyword
-from pip._vendor.pygments.lexer import DelegatingLexer, Lexer, RegexLexer, include, \
- bygroups, using, default, words, combined, do_insertions, this, line_re
+from pip._vendor.pygments.lexer import DelegatingLexer, RegexLexer, include, \
+ bygroups, using, default, words, combined, this
from pip._vendor.pygments.util import get_bool_opt, shebang_matches
from pip._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic, Other, Error, Whitespace
@@ -27,8 +26,6 @@ class PythonLexer(RegexLexer):
"""
For Python source code (version 3.x).
- .. versionadded:: 0.10
-
.. versionchanged:: 2.5
This is now the default ``PythonLexer``. It is still available as the
alias ``Python3Lexer``.
@@ -61,8 +58,9 @@ class PythonLexer(RegexLexer):
]
mimetypes = ['text/x-python', 'application/x-python',
'text/x-python3', 'application/x-python3']
+ version_added = '0.10'
- uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
+ uni_name = f"[{uni.xid_start}][{uni.xid_continue}]*"
def innerstring_rules(ttype):
return [
@@ -224,7 +222,8 @@ class PythonLexer(RegexLexer):
r'(match|case)\b' # a possible keyword
r'(?![ \t]*(?:' # not followed by...
r'[:,;=^&|@~)\]}]|(?:' + # characters and keywords that mean this isn't
- r'|'.join(keyword.kwlist) + r')\b))', # pattern matching
+ # pattern matching (but None/True/False is ok)
+ r'|'.join(k for k in keyword.kwlist if k[0].islower()) + r')\b))',
bygroups(Text, Keyword), 'soft-keywords-inner'),
],
'soft-keywords-inner': [
@@ -429,6 +428,7 @@ class Python2Lexer(RegexLexer):
aliases = ['python2', 'py2']
filenames = [] # now taken over by PythonLexer (3.x)
mimetypes = ['text/x-python2', 'application/x-python2']
+ version_added = ''
def innerstring_rules(ttype):
return [
@@ -637,7 +637,7 @@ class Python2Lexer(RegexLexer):
class _PythonConsoleLexerBase(RegexLexer):
name = 'Python console session'
- aliases = ['pycon']
+ aliases = ['pycon', 'python-console']
mimetypes = ['text/x-python-doctest']
"""Auxiliary lexer for `PythonConsoleLexer`.
@@ -696,8 +696,10 @@ class PythonConsoleLexer(DelegatingLexer):
"""
name = 'Python console session'
- aliases = ['pycon']
+ aliases = ['pycon', 'python-console']
mimetypes = ['text/x-python-doctest']
+ url = 'https://python.org'
+ version_added = ''
def __init__(self, **options):
python3 = get_bool_opt(options, 'python3', True)
@@ -721,8 +723,6 @@ class PythonTracebackLexer(RegexLexer):
"""
For Python 3.x tracebacks, with support for chained exceptions.
- .. versionadded:: 1.0
-
.. versionchanged:: 2.5
This is now the default ``PythonTracebackLexer``. It is still available
as the alias ``Python3TracebackLexer``.
@@ -732,6 +732,8 @@ class PythonTracebackLexer(RegexLexer):
aliases = ['pytb', 'py3tb']
filenames = ['*.pytb', '*.py3tb']
mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
+ url = 'https://python.org'
+ version_added = '1.0'
tokens = {
'root': [
@@ -778,8 +780,6 @@ class Python2TracebackLexer(RegexLexer):
"""
For Python tracebacks.
- .. versionadded:: 0.7
-
.. versionchanged:: 2.5
This class has been renamed from ``PythonTracebackLexer``.
``PythonTracebackLexer`` now refers to the Python 3 variant.
@@ -789,6 +789,8 @@ class Python2TracebackLexer(RegexLexer):
aliases = ['py2tb']
filenames = ['*.py2tb']
mimetypes = ['text/x-python2-traceback']
+ url = 'https://python.org'
+ version_added = '0.7'
tokens = {
'root': [
@@ -825,8 +827,6 @@ class Python2TracebackLexer(RegexLexer):
class CythonLexer(RegexLexer):
"""
For Pyrex and Cython source code.
-
- .. versionadded:: 1.1
"""
name = 'Cython'
@@ -834,6 +834,7 @@ class CythonLexer(RegexLexer):
aliases = ['cython', 'pyx', 'pyrex']
filenames = ['*.pyx', '*.pxd', '*.pxi']
mimetypes = ['text/x-cython', 'application/x-cython']
+ version_added = '1.1'
tokens = {
'root': [
@@ -1007,13 +1008,13 @@ class DgLexer(RegexLexer):
Lexer for dg,
a functional and object-oriented programming language
running on the CPython 3 VM.
-
- .. versionadded:: 1.6
"""
name = 'dg'
aliases = ['dg']
filenames = ['*.dg']
mimetypes = ['text/x-dg']
+ url = 'http://pyos.github.io/dg'
+ version_added = '1.6'
tokens = {
'root': [
@@ -1104,13 +1105,12 @@ class DgLexer(RegexLexer):
class NumPyLexer(PythonLexer):
"""
A Python lexer recognizing Numerical Python builtins.
-
- .. versionadded:: 0.10
"""
name = 'NumPy'
url = 'https://numpy.org/'
aliases = ['numpy']
+ version_added = '0.10'
# override the mimetypes to not inherit them from python
mimetypes = []
diff --git a/contrib/python/pip/pip/_vendor/pygments/modeline.py b/contrib/python/pip/pip/_vendor/pygments/modeline.py
index 7b6f6a324b..e4d9fe167b 100644
--- a/contrib/python/pip/pip/_vendor/pygments/modeline.py
+++ b/contrib/python/pip/pip/_vendor/pygments/modeline.py
@@ -4,7 +4,7 @@
A simple modeline parser (based on pymodeline).
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -19,7 +19,7 @@ modeline_re = re.compile(r'''
''', re.VERBOSE)
-def get_filetype_from_line(l):
+def get_filetype_from_line(l): # noqa: E741
m = modeline_re.search(l)
if m:
return m.group(1)
@@ -30,8 +30,8 @@ def get_filetype_from_buffer(buf, max_lines=5):
Scan the buffer for modelines and return filetype if one is found.
"""
lines = buf.splitlines()
- for l in lines[-1:-max_lines-1:-1]:
- ret = get_filetype_from_line(l)
+ for line in lines[-1:-max_lines-1:-1]:
+ ret = get_filetype_from_line(line)
if ret:
return ret
for i in range(max_lines, -1, -1):
diff --git a/contrib/python/pip/pip/_vendor/pygments/plugin.py b/contrib/python/pip/pip/_vendor/pygments/plugin.py
index 7b722d58db..2e462f2c2f 100644
--- a/contrib/python/pip/pip/_vendor/pygments/plugin.py
+++ b/contrib/python/pip/pip/_vendor/pygments/plugin.py
@@ -2,12 +2,7 @@
pygments.plugin
~~~~~~~~~~~~~~~
- Pygments plugin interface. By default, this tries to use
- ``importlib.metadata``, which is in the Python standard
- library since Python 3.8, or its ``importlib_metadata``
- backport for earlier versions of Python. It falls back on
- ``pkg_resources`` if not found. Finally, if ``pkg_resources``
- is not found either, no plugins are loaded at all.
+ Pygments plugin interface.
lexer plugins::
@@ -34,9 +29,10 @@
yourfilter = yourfilter:YourFilter
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
+from importlib.metadata import entry_points
LEXER_ENTRY_POINT = 'pygments.lexers'
FORMATTER_ENTRY_POINT = 'pygments.formatters'
@@ -45,18 +41,6 @@ FILTER_ENTRY_POINT = 'pygments.filters'
def iter_entry_points(group_name):
- try:
- from importlib.metadata import entry_points
- except ImportError:
- try:
- from importlib_metadata import entry_points
- except ImportError:
- try:
- from pip._vendor.pkg_resources import iter_entry_points
- except (ImportError, OSError):
- return []
- else:
- return iter_entry_points(group_name)
groups = entry_points()
if hasattr(groups, 'select'):
# New interface in Python 3.10 and newer versions of the
diff --git a/contrib/python/pip/pip/_vendor/pygments/regexopt.py b/contrib/python/pip/pip/_vendor/pygments/regexopt.py
index 45223eccc1..c44eedbf2a 100644
--- a/contrib/python/pip/pip/_vendor/pygments/regexopt.py
+++ b/contrib/python/pip/pip/_vendor/pygments/regexopt.py
@@ -5,7 +5,7 @@
An algorithm that generates optimized regexes for matching long lists of
literal strings.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/scanner.py b/contrib/python/pip/pip/_vendor/pygments/scanner.py
index 32a2f30329..112da34917 100644
--- a/contrib/python/pip/pip/_vendor/pygments/scanner.py
+++ b/contrib/python/pip/pip/_vendor/pygments/scanner.py
@@ -11,7 +11,7 @@
Have a look at the `DelphiLexer` to get an idea of how to use
this scanner.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
diff --git a/contrib/python/pip/pip/_vendor/pygments/sphinxext.py b/contrib/python/pip/pip/_vendor/pygments/sphinxext.py
index fc0b0270bf..34077a2aee 100644
--- a/contrib/python/pip/pip/_vendor/pygments/sphinxext.py
+++ b/contrib/python/pip/pip/_vendor/pygments/sphinxext.py
@@ -5,7 +5,7 @@
Sphinx extension to generate automatic documentation of lexers,
formatters and filters.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -33,6 +33,8 @@ LEXERDOC = '''
%s
+ %s
+
'''
FMTERDOC = '''
@@ -119,11 +121,11 @@ class PygmentsDoc(Directive):
def write_row(*columns):
"""Format a table row"""
out = []
- for l, c in zip(column_lengths, columns):
- if c:
- out.append(c.ljust(l))
+ for length, col in zip(column_lengths, columns):
+ if col:
+ out.append(col.ljust(length))
else:
- out.append(' '*l)
+ out.append(' '*length)
return ' '.join(out)
@@ -160,7 +162,7 @@ class PygmentsDoc(Directive):
self.filenames.add(mod.__file__)
cls = getattr(mod, classname)
if not cls.__doc__:
- print("Warning: %s does not have a docstring." % classname)
+ print(f"Warning: {classname} does not have a docstring.")
docstring = cls.__doc__
if isinstance(docstring, bytes):
docstring = docstring.decode('utf8')
@@ -182,12 +184,18 @@ class PygmentsDoc(Directive):
for line in content.splitlines():
docstring += f' {line}\n'
+ if cls.version_added:
+ version_line = f'.. versionadded:: {cls.version_added}'
+ else:
+ version_line = ''
+
modules.setdefault(module, []).append((
classname,
', '.join(data[2]) or 'None',
', '.join(data[3]).replace('*', '\\*').replace('_', '\\') or 'None',
', '.join(data[4]) or 'None',
- docstring))
+ docstring,
+ version_line))
if module not in moduledocstrings:
moddoc = mod.__doc__
if isinstance(moddoc, bytes):
@@ -196,7 +204,7 @@ class PygmentsDoc(Directive):
for module, lexers in sorted(modules.items(), key=lambda x: x[0]):
if moduledocstrings[module] is None:
- raise Exception("Missing docstring for %s" % (module,))
+ raise Exception(f"Missing docstring for {module}")
heading = moduledocstrings[module].splitlines()[4].strip().rstrip('.')
out.append(MODULEDOC % (module, heading, '-'*len(heading)))
for data in lexers:
diff --git a/contrib/python/pip/pip/_vendor/pygments/style.py b/contrib/python/pip/pip/_vendor/pygments/style.py
index f2f72d3bc5..076e63f831 100644
--- a/contrib/python/pip/pip/_vendor/pygments/style.py
+++ b/contrib/python/pip/pip/_vendor/pygments/style.py
@@ -4,7 +4,7 @@
Basic style object.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -76,7 +76,7 @@ class StyleMeta(type):
return ''
elif text.startswith('var') or text.startswith('calc'):
return text
- assert False, "wrong color format %r" % text
+ assert False, f"wrong color format {text!r}"
_styles = obj._styles = {}
diff --git a/contrib/python/pip/pip/_vendor/pygments/styles/__init__.py b/contrib/python/pip/pip/_vendor/pygments/styles/__init__.py
index 23b55468e2..712f6e6993 100644
--- a/contrib/python/pip/pip/_vendor/pygments/styles/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/styles/__init__.py
@@ -4,7 +4,7 @@
Contains built-in styles.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -44,13 +44,13 @@ def get_style_by_name(name):
try:
mod = __import__(mod, None, None, [cls])
except ImportError:
- raise ClassNotFound("Could not find style module %r" % mod +
+ raise ClassNotFound(f"Could not find style module {mod!r}" +
(builtin and ", though it should be builtin")
+ ".")
try:
return getattr(mod, cls)
except AttributeError:
- raise ClassNotFound("Could not find style class %r in style module." % cls)
+ raise ClassNotFound(f"Could not find style class {cls!r} in style module.")
def get_all_styles():
diff --git a/contrib/python/pip/pip/_vendor/pygments/styles/_mapping.py b/contrib/python/pip/pip/_vendor/pygments/styles/_mapping.py
index 04c7ddfbb0..49a7fae92d 100644
--- a/contrib/python/pip/pip/_vendor/pygments/styles/_mapping.py
+++ b/contrib/python/pip/pip/_vendor/pygments/styles/_mapping.py
@@ -9,6 +9,7 @@ STYLES = {
'AutumnStyle': ('pygments.styles.autumn', 'autumn', ()),
'BlackWhiteStyle': ('pygments.styles.bw', 'bw', ()),
'BorlandStyle': ('pygments.styles.borland', 'borland', ()),
+ 'CoffeeStyle': ('pygments.styles.coffee', 'coffee', ()),
'ColorfulStyle': ('pygments.styles.colorful', 'colorful', ()),
'DefaultStyle': ('pygments.styles.default', 'default', ()),
'DraculaStyle': ('pygments.styles.dracula', 'dracula', ()),
diff --git a/contrib/python/pip/pip/_vendor/pygments/token.py b/contrib/python/pip/pip/_vendor/pygments/token.py
index bdf2e8e2e1..f78018a7aa 100644
--- a/contrib/python/pip/pip/_vendor/pygments/token.py
+++ b/contrib/python/pip/pip/_vendor/pygments/token.py
@@ -4,7 +4,7 @@
Basic token types and the standard tokens.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/unistring.py b/contrib/python/pip/pip/_vendor/pygments/unistring.py
index 39f6baeedf..e2c3523e4b 100644
--- a/contrib/python/pip/pip/_vendor/pygments/unistring.py
+++ b/contrib/python/pip/pip/_vendor/pygments/unistring.py
@@ -7,7 +7,7 @@
Inspired by chartypes_create.py from the MoinMoin project.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -104,7 +104,7 @@ def _handle_runs(char_list): # pragma: no cover
if a == b:
yield a
else:
- yield '%s-%s' % (a, b)
+ yield f'{a}-{b}'
if __name__ == '__main__': # pragma: no cover
@@ -141,13 +141,13 @@ if __name__ == '__main__': # pragma: no cover
for cat in sorted(categories):
val = ''.join(_handle_runs(categories[cat]))
- fp.write('%s = %a\n\n' % (cat, val))
+ fp.write(f'{cat} = {val!a}\n\n')
cats = sorted(categories)
cats.remove('xid_start')
cats.remove('xid_continue')
- fp.write('cats = %r\n\n' % cats)
+ fp.write(f'cats = {cats!r}\n\n')
- fp.write('# Generated from unidata %s\n\n' % (unicodedata.unidata_version,))
+ fp.write(f'# Generated from unidata {unicodedata.unidata_version}\n\n')
fp.write(footer)
diff --git a/contrib/python/pip/pip/_vendor/pygments/util.py b/contrib/python/pip/pip/_vendor/pygments/util.py
index 941fdb9ec7..83cf104925 100644
--- a/contrib/python/pip/pip/_vendor/pygments/util.py
+++ b/contrib/python/pip/pip/_vendor/pygments/util.py
@@ -4,7 +4,7 @@
Utility functions.
- :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -46,8 +46,7 @@ def get_choice_opt(options, optname, allowed, default=None, normcase=False):
if normcase:
string = string.lower()
if string not in allowed:
- raise OptionError('Value for option %s must be one of %s' %
- (optname, ', '.join(map(str, allowed))))
+ raise OptionError('Value for option {} must be one of {}'.format(optname, ', '.join(map(str, allowed))))
return string
@@ -69,17 +68,15 @@ def get_bool_opt(options, optname, default=None):
elif isinstance(string, int):
return bool(string)
elif not isinstance(string, str):
- raise OptionError('Invalid type %r for option %s; use '
- '1/0, yes/no, true/false, on/off' % (
- string, optname))
+ raise OptionError(f'Invalid type {string!r} for option {optname}; use '
+ '1/0, yes/no, true/false, on/off')
elif string.lower() in ('1', 'yes', 'true', 'on'):
return True
elif string.lower() in ('0', 'no', 'false', 'off'):
return False
else:
- raise OptionError('Invalid value %r for option %s; use '
- '1/0, yes/no, true/false, on/off' % (
- string, optname))
+ raise OptionError(f'Invalid value {string!r} for option {optname}; use '
+ '1/0, yes/no, true/false, on/off')
def get_int_opt(options, optname, default=None):
@@ -88,13 +85,11 @@ def get_int_opt(options, optname, default=None):
try:
return int(string)
except TypeError:
- raise OptionError('Invalid type %r for option %s; you '
- 'must give an integer value' % (
- string, optname))
+ raise OptionError(f'Invalid type {string!r} for option {optname}; you '
+ 'must give an integer value')
except ValueError:
- raise OptionError('Invalid value %r for option %s; you '
- 'must give an integer value' % (
- string, optname))
+ raise OptionError(f'Invalid value {string!r} for option {optname}; you '
+ 'must give an integer value')
def get_list_opt(options, optname, default=None):
"""
@@ -108,9 +103,8 @@ def get_list_opt(options, optname, default=None):
elif isinstance(val, (list, tuple)):
return list(val)
else:
- raise OptionError('Invalid type %r for option %s; you '
- 'must give a list value' % (
- val, optname))
+ raise OptionError(f'Invalid type {val!r} for option {optname}; you '
+ 'must give a list value')
def docstring_headline(obj):
@@ -181,7 +175,7 @@ def shebang_matches(text, regex):
if x and not x.startswith('-')][-1]
except IndexError:
return False
- regex = re.compile(r'^%s(\.(exe|cmd|bat|bin))?$' % regex, re.IGNORECASE)
+ regex = re.compile(rf'^{regex}(\.(exe|cmd|bat|bin))?$', re.IGNORECASE)
if regex.search(found) is not None:
return True
return False
diff --git a/contrib/python/pip/pip/_vendor/tenacity/__init__.py b/contrib/python/pip/pip/_vendor/tenacity/__init__.py
deleted file mode 100644
index c1b0310bdf..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/__init__.py
+++ /dev/null
@@ -1,608 +0,0 @@
-# Copyright 2016-2018 Julien Danjou
-# Copyright 2017 Elisey Zanko
-# Copyright 2016 Étienne Bersac
-# Copyright 2016 Joshua Harlow
-# Copyright 2013-2014 Ray Holder
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import functools
-import sys
-import threading
-import time
-import typing as t
-import warnings
-from abc import ABC, abstractmethod
-from concurrent import futures
-from inspect import iscoroutinefunction
-
-# Import all built-in retry strategies for easier usage.
-from .retry import retry_base # noqa
-from .retry import retry_all # noqa
-from .retry import retry_always # noqa
-from .retry import retry_any # noqa
-from .retry import retry_if_exception # noqa
-from .retry import retry_if_exception_type # noqa
-from .retry import retry_if_exception_cause_type # noqa
-from .retry import retry_if_not_exception_type # noqa
-from .retry import retry_if_not_result # noqa
-from .retry import retry_if_result # noqa
-from .retry import retry_never # noqa
-from .retry import retry_unless_exception_type # noqa
-from .retry import retry_if_exception_message # noqa
-from .retry import retry_if_not_exception_message # noqa
-
-# Import all nap strategies for easier usage.
-from .nap import sleep # noqa
-from .nap import sleep_using_event # noqa
-
-# Import all built-in stop strategies for easier usage.
-from .stop import stop_after_attempt # noqa
-from .stop import stop_after_delay # noqa
-from .stop import stop_all # noqa
-from .stop import stop_any # noqa
-from .stop import stop_never # noqa
-from .stop import stop_when_event_set # noqa
-
-# Import all built-in wait strategies for easier usage.
-from .wait import wait_chain # noqa
-from .wait import wait_combine # noqa
-from .wait import wait_exponential # noqa
-from .wait import wait_fixed # noqa
-from .wait import wait_incrementing # noqa
-from .wait import wait_none # noqa
-from .wait import wait_random # noqa
-from .wait import wait_random_exponential # noqa
-from .wait import wait_random_exponential as wait_full_jitter # noqa
-from .wait import wait_exponential_jitter # noqa
-
-# Import all built-in before strategies for easier usage.
-from .before import before_log # noqa
-from .before import before_nothing # noqa
-
-# Import all built-in after strategies for easier usage.
-from .after import after_log # noqa
-from .after import after_nothing # noqa
-
-# Import all built-in after strategies for easier usage.
-from .before_sleep import before_sleep_log # noqa
-from .before_sleep import before_sleep_nothing # noqa
-
-# Replace a conditional import with a hard-coded None so that pip does
-# not attempt to use tornado even if it is present in the environment.
-# If tornado is non-None, tenacity will attempt to execute some code
-# that is sensitive to the version of tornado, which could break pip
-# if an old version is found.
-tornado = None # type: ignore
-
-if t.TYPE_CHECKING:
- import types
-
- from .retry import RetryBaseT
- from .stop import StopBaseT
- from .wait import WaitBaseT
-
-
-WrappedFnReturnT = t.TypeVar("WrappedFnReturnT")
-WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable[..., t.Any])
-
-
-class TryAgain(Exception):
- """Always retry the executed function when raised."""
-
-
-NO_RESULT = object()
-
-
-class DoAttempt:
- pass
-
-
-class DoSleep(float):
- pass
-
-
-class BaseAction:
- """Base class for representing actions to take by retry object.
-
- Concrete implementations must define:
- - __init__: to initialize all necessary fields
- - REPR_FIELDS: class variable specifying attributes to include in repr(self)
- - NAME: for identification in retry object methods and callbacks
- """
-
- REPR_FIELDS: t.Sequence[str] = ()
- NAME: t.Optional[str] = None
-
- def __repr__(self) -> str:
- state_str = ", ".join(f"{field}={getattr(self, field)!r}" for field in self.REPR_FIELDS)
- return f"{self.__class__.__name__}({state_str})"
-
- def __str__(self) -> str:
- return repr(self)
-
-
-class RetryAction(BaseAction):
- REPR_FIELDS = ("sleep",)
- NAME = "retry"
-
- def __init__(self, sleep: t.SupportsFloat) -> None:
- self.sleep = float(sleep)
-
-
-_unset = object()
-
-
-def _first_set(first: t.Union[t.Any, object], second: t.Any) -> t.Any:
- return second if first is _unset else first
-
-
-class RetryError(Exception):
- """Encapsulates the last attempt instance right before giving up."""
-
- def __init__(self, last_attempt: "Future") -> None:
- self.last_attempt = last_attempt
- super().__init__(last_attempt)
-
- def reraise(self) -> "t.NoReturn":
- if self.last_attempt.failed:
- raise self.last_attempt.result()
- raise self
-
- def __str__(self) -> str:
- return f"{self.__class__.__name__}[{self.last_attempt}]"
-
-
-class AttemptManager:
- """Manage attempt context."""
-
- def __init__(self, retry_state: "RetryCallState"):
- self.retry_state = retry_state
-
- def __enter__(self) -> None:
- pass
-
- def __exit__(
- self,
- exc_type: t.Optional[t.Type[BaseException]],
- exc_value: t.Optional[BaseException],
- traceback: t.Optional["types.TracebackType"],
- ) -> t.Optional[bool]:
- if exc_type is not None and exc_value is not None:
- self.retry_state.set_exception((exc_type, exc_value, traceback))
- return True # Swallow exception.
- else:
- # We don't have the result, actually.
- self.retry_state.set_result(None)
- return None
-
-
-class BaseRetrying(ABC):
- def __init__(
- self,
- sleep: t.Callable[[t.Union[int, float]], None] = sleep,
- stop: "StopBaseT" = stop_never,
- wait: "WaitBaseT" = wait_none(),
- retry: "RetryBaseT" = retry_if_exception_type(),
- before: t.Callable[["RetryCallState"], None] = before_nothing,
- after: t.Callable[["RetryCallState"], None] = after_nothing,
- before_sleep: t.Optional[t.Callable[["RetryCallState"], None]] = None,
- reraise: bool = False,
- retry_error_cls: t.Type[RetryError] = RetryError,
- retry_error_callback: t.Optional[t.Callable[["RetryCallState"], t.Any]] = None,
- ):
- self.sleep = sleep
- self.stop = stop
- self.wait = wait
- self.retry = retry
- self.before = before
- self.after = after
- self.before_sleep = before_sleep
- self.reraise = reraise
- self._local = threading.local()
- self.retry_error_cls = retry_error_cls
- self.retry_error_callback = retry_error_callback
-
- def copy(
- self,
- sleep: t.Union[t.Callable[[t.Union[int, float]], None], object] = _unset,
- stop: t.Union["StopBaseT", object] = _unset,
- wait: t.Union["WaitBaseT", object] = _unset,
- retry: t.Union[retry_base, object] = _unset,
- before: t.Union[t.Callable[["RetryCallState"], None], object] = _unset,
- after: t.Union[t.Callable[["RetryCallState"], None], object] = _unset,
- before_sleep: t.Union[t.Optional[t.Callable[["RetryCallState"], None]], object] = _unset,
- reraise: t.Union[bool, object] = _unset,
- retry_error_cls: t.Union[t.Type[RetryError], object] = _unset,
- retry_error_callback: t.Union[t.Optional[t.Callable[["RetryCallState"], t.Any]], object] = _unset,
- ) -> "BaseRetrying":
- """Copy this object with some parameters changed if needed."""
- return self.__class__(
- sleep=_first_set(sleep, self.sleep),
- stop=_first_set(stop, self.stop),
- wait=_first_set(wait, self.wait),
- retry=_first_set(retry, self.retry),
- before=_first_set(before, self.before),
- after=_first_set(after, self.after),
- before_sleep=_first_set(before_sleep, self.before_sleep),
- reraise=_first_set(reraise, self.reraise),
- retry_error_cls=_first_set(retry_error_cls, self.retry_error_cls),
- retry_error_callback=_first_set(retry_error_callback, self.retry_error_callback),
- )
-
- def __repr__(self) -> str:
- return (
- f"<{self.__class__.__name__} object at 0x{id(self):x} ("
- f"stop={self.stop}, "
- f"wait={self.wait}, "
- f"sleep={self.sleep}, "
- f"retry={self.retry}, "
- f"before={self.before}, "
- f"after={self.after})>"
- )
-
- @property
- def statistics(self) -> t.Dict[str, t.Any]:
- """Return a dictionary of runtime statistics.
-
- This dictionary will be empty when the controller has never been
- ran. When it is running or has ran previously it should have (but
- may not) have useful and/or informational keys and values when
- running is underway and/or completed.
-
- .. warning:: The keys in this dictionary **should** be some what
- stable (not changing), but there existence **may**
- change between major releases as new statistics are
- gathered or removed so before accessing keys ensure that
- they actually exist and handle when they do not.
-
- .. note:: The values in this dictionary are local to the thread
- running call (so if multiple threads share the same retrying
- object - either directly or indirectly) they will each have
- there own view of statistics they have collected (in the
- future we may provide a way to aggregate the various
- statistics from each thread).
- """
- try:
- return self._local.statistics # type: ignore[no-any-return]
- except AttributeError:
- self._local.statistics = t.cast(t.Dict[str, t.Any], {})
- return self._local.statistics
-
- def wraps(self, f: WrappedFn) -> WrappedFn:
- """Wrap a function for retrying.
-
- :param f: A function to wraps for retrying.
- """
-
- @functools.wraps(f)
- def wrapped_f(*args: t.Any, **kw: t.Any) -> t.Any:
- return self(f, *args, **kw)
-
- def retry_with(*args: t.Any, **kwargs: t.Any) -> WrappedFn:
- return self.copy(*args, **kwargs).wraps(f)
-
- wrapped_f.retry = self # type: ignore[attr-defined]
- wrapped_f.retry_with = retry_with # type: ignore[attr-defined]
-
- return wrapped_f # type: ignore[return-value]
-
- def begin(self) -> None:
- self.statistics.clear()
- self.statistics["start_time"] = time.monotonic()
- self.statistics["attempt_number"] = 1
- self.statistics["idle_for"] = 0
-
- def iter(self, retry_state: "RetryCallState") -> t.Union[DoAttempt, DoSleep, t.Any]: # noqa
- fut = retry_state.outcome
- if fut is None:
- if self.before is not None:
- self.before(retry_state)
- return DoAttempt()
-
- is_explicit_retry = fut.failed and isinstance(fut.exception(), TryAgain)
- if not (is_explicit_retry or self.retry(retry_state)):
- return fut.result()
-
- if self.after is not None:
- self.after(retry_state)
-
- self.statistics["delay_since_first_attempt"] = retry_state.seconds_since_start
- if self.stop(retry_state):
- if self.retry_error_callback:
- return self.retry_error_callback(retry_state)
- retry_exc = self.retry_error_cls(fut)
- if self.reraise:
- raise retry_exc.reraise()
- raise retry_exc from fut.exception()
-
- if self.wait:
- sleep = self.wait(retry_state)
- else:
- sleep = 0.0
- retry_state.next_action = RetryAction(sleep)
- retry_state.idle_for += sleep
- self.statistics["idle_for"] += sleep
- self.statistics["attempt_number"] += 1
-
- if self.before_sleep is not None:
- self.before_sleep(retry_state)
-
- return DoSleep(sleep)
-
- def __iter__(self) -> t.Generator[AttemptManager, None, None]:
- self.begin()
-
- retry_state = RetryCallState(self, fn=None, args=(), kwargs={})
- while True:
- do = self.iter(retry_state=retry_state)
- if isinstance(do, DoAttempt):
- yield AttemptManager(retry_state=retry_state)
- elif isinstance(do, DoSleep):
- retry_state.prepare_for_next_attempt()
- self.sleep(do)
- else:
- break
-
- @abstractmethod
- def __call__(
- self,
- fn: t.Callable[..., WrappedFnReturnT],
- *args: t.Any,
- **kwargs: t.Any,
- ) -> WrappedFnReturnT:
- pass
-
-
-class Retrying(BaseRetrying):
- """Retrying controller."""
-
- def __call__(
- self,
- fn: t.Callable[..., WrappedFnReturnT],
- *args: t.Any,
- **kwargs: t.Any,
- ) -> WrappedFnReturnT:
- self.begin()
-
- retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs)
- while True:
- do = self.iter(retry_state=retry_state)
- if isinstance(do, DoAttempt):
- try:
- result = fn(*args, **kwargs)
- except BaseException: # noqa: B902
- retry_state.set_exception(sys.exc_info()) # type: ignore[arg-type]
- else:
- retry_state.set_result(result)
- elif isinstance(do, DoSleep):
- retry_state.prepare_for_next_attempt()
- self.sleep(do)
- else:
- return do # type: ignore[no-any-return]
-
-
-if sys.version_info[1] >= 9:
- FutureGenericT = futures.Future[t.Any]
-else:
- FutureGenericT = futures.Future
-
-
-class Future(FutureGenericT):
- """Encapsulates a (future or past) attempted call to a target function."""
-
- def __init__(self, attempt_number: int) -> None:
- super().__init__()
- self.attempt_number = attempt_number
-
- @property
- def failed(self) -> bool:
- """Return whether a exception is being held in this future."""
- return self.exception() is not None
-
- @classmethod
- def construct(cls, attempt_number: int, value: t.Any, has_exception: bool) -> "Future":
- """Construct a new Future object."""
- fut = cls(attempt_number)
- if has_exception:
- fut.set_exception(value)
- else:
- fut.set_result(value)
- return fut
-
-
-class RetryCallState:
- """State related to a single call wrapped with Retrying."""
-
- def __init__(
- self,
- retry_object: BaseRetrying,
- fn: t.Optional[WrappedFn],
- args: t.Any,
- kwargs: t.Any,
- ) -> None:
- #: Retry call start timestamp
- self.start_time = time.monotonic()
- #: Retry manager object
- self.retry_object = retry_object
- #: Function wrapped by this retry call
- self.fn = fn
- #: Arguments of the function wrapped by this retry call
- self.args = args
- #: Keyword arguments of the function wrapped by this retry call
- self.kwargs = kwargs
-
- #: The number of the current attempt
- self.attempt_number: int = 1
- #: Last outcome (result or exception) produced by the function
- self.outcome: t.Optional[Future] = None
- #: Timestamp of the last outcome
- self.outcome_timestamp: t.Optional[float] = None
- #: Time spent sleeping in retries
- self.idle_for: float = 0.0
- #: Next action as decided by the retry manager
- self.next_action: t.Optional[RetryAction] = None
-
- @property
- def seconds_since_start(self) -> t.Optional[float]:
- if self.outcome_timestamp is None:
- return None
- return self.outcome_timestamp - self.start_time
-
- def prepare_for_next_attempt(self) -> None:
- self.outcome = None
- self.outcome_timestamp = None
- self.attempt_number += 1
- self.next_action = None
-
- def set_result(self, val: t.Any) -> None:
- ts = time.monotonic()
- fut = Future(self.attempt_number)
- fut.set_result(val)
- self.outcome, self.outcome_timestamp = fut, ts
-
- def set_exception(
- self, exc_info: t.Tuple[t.Type[BaseException], BaseException, "types.TracebackType| None"]
- ) -> None:
- ts = time.monotonic()
- fut = Future(self.attempt_number)
- fut.set_exception(exc_info[1])
- self.outcome, self.outcome_timestamp = fut, ts
-
- def __repr__(self) -> str:
- if self.outcome is None:
- result = "none yet"
- elif self.outcome.failed:
- exception = self.outcome.exception()
- result = f"failed ({exception.__class__.__name__} {exception})"
- else:
- result = f"returned {self.outcome.result()}"
-
- slept = float(round(self.idle_for, 2))
- clsname = self.__class__.__name__
- return f"<{clsname} {id(self)}: attempt #{self.attempt_number}; slept for {slept}; last result: {result}>"
-
-
-@t.overload
-def retry(func: WrappedFn) -> WrappedFn:
- ...
-
-
-@t.overload
-def retry(
- sleep: t.Callable[[t.Union[int, float]], t.Optional[t.Awaitable[None]]] = sleep,
- stop: "StopBaseT" = stop_never,
- wait: "WaitBaseT" = wait_none(),
- retry: "RetryBaseT" = retry_if_exception_type(),
- before: t.Callable[["RetryCallState"], None] = before_nothing,
- after: t.Callable[["RetryCallState"], None] = after_nothing,
- before_sleep: t.Optional[t.Callable[["RetryCallState"], None]] = None,
- reraise: bool = False,
- retry_error_cls: t.Type["RetryError"] = RetryError,
- retry_error_callback: t.Optional[t.Callable[["RetryCallState"], t.Any]] = None,
-) -> t.Callable[[WrappedFn], WrappedFn]:
- ...
-
-
-def retry(*dargs: t.Any, **dkw: t.Any) -> t.Any:
- """Wrap a function with a new `Retrying` object.
-
- :param dargs: positional arguments passed to Retrying object
- :param dkw: keyword arguments passed to the Retrying object
- """
- # support both @retry and @retry() as valid syntax
- if len(dargs) == 1 and callable(dargs[0]):
- return retry()(dargs[0])
- else:
-
- def wrap(f: WrappedFn) -> WrappedFn:
- if isinstance(f, retry_base):
- warnings.warn(
- f"Got retry_base instance ({f.__class__.__name__}) as callable argument, "
- f"this will probably hang indefinitely (did you mean retry={f.__class__.__name__}(...)?)"
- )
- r: "BaseRetrying"
- if iscoroutinefunction(f):
- r = AsyncRetrying(*dargs, **dkw)
- elif tornado and hasattr(tornado.gen, "is_coroutine_function") and tornado.gen.is_coroutine_function(f):
- r = TornadoRetrying(*dargs, **dkw)
- else:
- r = Retrying(*dargs, **dkw)
-
- return r.wraps(f)
-
- return wrap
-
-
-from pip._vendor.tenacity._asyncio import AsyncRetrying # noqa:E402,I100
-
-if tornado:
- from pip._vendor.tenacity.tornadoweb import TornadoRetrying
-
-
-__all__ = [
- "retry_base",
- "retry_all",
- "retry_always",
- "retry_any",
- "retry_if_exception",
- "retry_if_exception_type",
- "retry_if_exception_cause_type",
- "retry_if_not_exception_type",
- "retry_if_not_result",
- "retry_if_result",
- "retry_never",
- "retry_unless_exception_type",
- "retry_if_exception_message",
- "retry_if_not_exception_message",
- "sleep",
- "sleep_using_event",
- "stop_after_attempt",
- "stop_after_delay",
- "stop_all",
- "stop_any",
- "stop_never",
- "stop_when_event_set",
- "wait_chain",
- "wait_combine",
- "wait_exponential",
- "wait_fixed",
- "wait_incrementing",
- "wait_none",
- "wait_random",
- "wait_random_exponential",
- "wait_full_jitter",
- "wait_exponential_jitter",
- "before_log",
- "before_nothing",
- "after_log",
- "after_nothing",
- "before_sleep_log",
- "before_sleep_nothing",
- "retry",
- "WrappedFn",
- "TryAgain",
- "NO_RESULT",
- "DoAttempt",
- "DoSleep",
- "BaseAction",
- "RetryAction",
- "RetryError",
- "AttemptManager",
- "BaseRetrying",
- "Retrying",
- "Future",
- "RetryCallState",
- "AsyncRetrying",
-]
diff --git a/contrib/python/pip/pip/_vendor/tenacity/_asyncio.py b/contrib/python/pip/pip/_vendor/tenacity/_asyncio.py
deleted file mode 100644
index 2e50cd7b40..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/_asyncio.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright 2016 Étienne Bersac
-# Copyright 2016 Julien Danjou
-# Copyright 2016 Joshua Harlow
-# Copyright 2013-2014 Ray Holder
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import functools
-import sys
-import typing as t
-from asyncio import sleep
-
-from pip._vendor.tenacity import AttemptManager
-from pip._vendor.tenacity import BaseRetrying
-from pip._vendor.tenacity import DoAttempt
-from pip._vendor.tenacity import DoSleep
-from pip._vendor.tenacity import RetryCallState
-
-WrappedFnReturnT = t.TypeVar("WrappedFnReturnT")
-WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable[..., t.Awaitable[t.Any]])
-
-
-class AsyncRetrying(BaseRetrying):
- sleep: t.Callable[[float], t.Awaitable[t.Any]]
-
- def __init__(self, sleep: t.Callable[[float], t.Awaitable[t.Any]] = sleep, **kwargs: t.Any) -> None:
- super().__init__(**kwargs)
- self.sleep = sleep
-
- async def __call__( # type: ignore[override]
- self, fn: WrappedFn, *args: t.Any, **kwargs: t.Any
- ) -> WrappedFnReturnT:
- self.begin()
-
- retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs)
- while True:
- do = self.iter(retry_state=retry_state)
- if isinstance(do, DoAttempt):
- try:
- result = await fn(*args, **kwargs)
- except BaseException: # noqa: B902
- retry_state.set_exception(sys.exc_info()) # type: ignore[arg-type]
- else:
- retry_state.set_result(result)
- elif isinstance(do, DoSleep):
- retry_state.prepare_for_next_attempt()
- await self.sleep(do)
- else:
- return do # type: ignore[no-any-return]
-
- def __iter__(self) -> t.Generator[AttemptManager, None, None]:
- raise TypeError("AsyncRetrying object is not iterable")
-
- def __aiter__(self) -> "AsyncRetrying":
- self.begin()
- self._retry_state = RetryCallState(self, fn=None, args=(), kwargs={})
- return self
-
- async def __anext__(self) -> AttemptManager:
- while True:
- do = self.iter(retry_state=self._retry_state)
- if do is None:
- raise StopAsyncIteration
- elif isinstance(do, DoAttempt):
- return AttemptManager(retry_state=self._retry_state)
- elif isinstance(do, DoSleep):
- self._retry_state.prepare_for_next_attempt()
- await self.sleep(do)
- else:
- raise StopAsyncIteration
-
- def wraps(self, fn: WrappedFn) -> WrappedFn:
- fn = super().wraps(fn)
- # Ensure wrapper is recognized as a coroutine function.
-
- @functools.wraps(fn)
- async def async_wrapped(*args: t.Any, **kwargs: t.Any) -> t.Any:
- return await fn(*args, **kwargs)
-
- # Preserve attributes
- async_wrapped.retry = fn.retry # type: ignore[attr-defined]
- async_wrapped.retry_with = fn.retry_with # type: ignore[attr-defined]
-
- return async_wrapped # type: ignore[return-value]
diff --git a/contrib/python/pip/pip/_vendor/tenacity/_utils.py b/contrib/python/pip/pip/_vendor/tenacity/_utils.py
deleted file mode 100644
index f14ff32096..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/_utils.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# Copyright 2016 Julien Danjou
-# Copyright 2016 Joshua Harlow
-# Copyright 2013-2014 Ray Holder
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import typing
-from datetime import timedelta
-
-
-# sys.maxsize:
-# An integer giving the maximum value a variable of type Py_ssize_t can take.
-MAX_WAIT = sys.maxsize / 2
-
-
-def find_ordinal(pos_num: int) -> str:
- # See: https://en.wikipedia.org/wiki/English_numerals#Ordinal_numbers
- if pos_num == 0:
- return "th"
- elif pos_num == 1:
- return "st"
- elif pos_num == 2:
- return "nd"
- elif pos_num == 3:
- return "rd"
- elif 4 <= pos_num <= 20:
- return "th"
- else:
- return find_ordinal(pos_num % 10)
-
-
-def to_ordinal(pos_num: int) -> str:
- return f"{pos_num}{find_ordinal(pos_num)}"
-
-
-def get_callback_name(cb: typing.Callable[..., typing.Any]) -> str:
- """Get a callback fully-qualified name.
-
- If no name can be produced ``repr(cb)`` is called and returned.
- """
- segments = []
- try:
- segments.append(cb.__qualname__)
- except AttributeError:
- try:
- segments.append(cb.__name__)
- except AttributeError:
- pass
- if not segments:
- return repr(cb)
- else:
- try:
- # When running under sphinx it appears this can be none?
- if cb.__module__:
- segments.insert(0, cb.__module__)
- except AttributeError:
- pass
- return ".".join(segments)
-
-
-time_unit_type = typing.Union[int, float, timedelta]
-
-
-def to_seconds(time_unit: time_unit_type) -> float:
- return float(time_unit.total_seconds() if isinstance(time_unit, timedelta) else time_unit)
diff --git a/contrib/python/pip/pip/_vendor/tenacity/after.py b/contrib/python/pip/pip/_vendor/tenacity/after.py
deleted file mode 100644
index 574c9bcea6..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/after.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2016 Julien Danjou
-# Copyright 2016 Joshua Harlow
-# Copyright 2013-2014 Ray Holder
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import typing
-
-from pip._vendor.tenacity import _utils
-
-if typing.TYPE_CHECKING:
- import logging
-
- from pip._vendor.tenacity import RetryCallState
-
-
-def after_nothing(retry_state: "RetryCallState") -> None:
- """After call strategy that does nothing."""
-
-
-def after_log(
- logger: "logging.Logger",
- log_level: int,
- sec_format: str = "%0.3f",
-) -> typing.Callable[["RetryCallState"], None]:
- """After call strategy that logs to some logger the finished attempt."""
-
- def log_it(retry_state: "RetryCallState") -> None:
- if retry_state.fn is None:
- # NOTE(sileht): can't really happen, but we must please mypy
- fn_name = "<unknown>"
- else:
- fn_name = _utils.get_callback_name(retry_state.fn)
- logger.log(
- log_level,
- f"Finished call to '{fn_name}' "
- f"after {sec_format % retry_state.seconds_since_start}(s), "
- f"this was the {_utils.to_ordinal(retry_state.attempt_number)} time calling it.",
- )
-
- return log_it
diff --git a/contrib/python/pip/pip/_vendor/tenacity/before.py b/contrib/python/pip/pip/_vendor/tenacity/before.py
deleted file mode 100644
index cfd7dc72ee..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/before.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2016 Julien Danjou
-# Copyright 2016 Joshua Harlow
-# Copyright 2013-2014 Ray Holder
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import typing
-
-from pip._vendor.tenacity import _utils
-
-if typing.TYPE_CHECKING:
- import logging
-
- from pip._vendor.tenacity import RetryCallState
-
-
-def before_nothing(retry_state: "RetryCallState") -> None:
- """Before call strategy that does nothing."""
-
-
-def before_log(logger: "logging.Logger", log_level: int) -> typing.Callable[["RetryCallState"], None]:
- """Before call strategy that logs to some logger the attempt."""
-
- def log_it(retry_state: "RetryCallState") -> None:
- if retry_state.fn is None:
- # NOTE(sileht): can't really happen, but we must please mypy
- fn_name = "<unknown>"
- else:
- fn_name = _utils.get_callback_name(retry_state.fn)
- logger.log(
- log_level,
- f"Starting call to '{fn_name}', "
- f"this is the {_utils.to_ordinal(retry_state.attempt_number)} time calling it.",
- )
-
- return log_it
diff --git a/contrib/python/pip/pip/_vendor/tenacity/before_sleep.py b/contrib/python/pip/pip/_vendor/tenacity/before_sleep.py
deleted file mode 100644
index 8c6167fb3a..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/before_sleep.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright 2016 Julien Danjou
-# Copyright 2016 Joshua Harlow
-# Copyright 2013-2014 Ray Holder
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import typing
-
-from pip._vendor.tenacity import _utils
-
-if typing.TYPE_CHECKING:
- import logging
-
- from pip._vendor.tenacity import RetryCallState
-
-
-def before_sleep_nothing(retry_state: "RetryCallState") -> None:
- """Before call strategy that does nothing."""
-
-
-def before_sleep_log(
- logger: "logging.Logger",
- log_level: int,
- exc_info: bool = False,
-) -> typing.Callable[["RetryCallState"], None]:
- """Before call strategy that logs to some logger the attempt."""
-
- def log_it(retry_state: "RetryCallState") -> None:
- local_exc_info: BaseException | bool | None
-
- if retry_state.outcome is None:
- raise RuntimeError("log_it() called before outcome was set")
-
- if retry_state.next_action is None:
- raise RuntimeError("log_it() called before next_action was set")
-
- if retry_state.outcome.failed:
- ex = retry_state.outcome.exception()
- verb, value = "raised", f"{ex.__class__.__name__}: {ex}"
-
- if exc_info:
- local_exc_info = retry_state.outcome.exception()
- else:
- local_exc_info = False
- else:
- verb, value = "returned", retry_state.outcome.result()
- local_exc_info = False # exc_info does not apply when no exception
-
- if retry_state.fn is None:
- # NOTE(sileht): can't really happen, but we must please mypy
- fn_name = "<unknown>"
- else:
- fn_name = _utils.get_callback_name(retry_state.fn)
-
- logger.log(
- log_level,
- f"Retrying {fn_name} " f"in {retry_state.next_action.sleep} seconds as it {verb} {value}.",
- exc_info=local_exc_info,
- )
-
- return log_it
diff --git a/contrib/python/pip/pip/_vendor/tenacity/nap.py b/contrib/python/pip/pip/_vendor/tenacity/nap.py
deleted file mode 100644
index 72aa5bfd4b..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/nap.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2016 Étienne Bersac
-# Copyright 2016 Julien Danjou
-# Copyright 2016 Joshua Harlow
-# Copyright 2013-2014 Ray Holder
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-import typing
-
-if typing.TYPE_CHECKING:
- import threading
-
-
-def sleep(seconds: float) -> None:
- """
- Sleep strategy that delays execution for a given number of seconds.
-
- This is the default strategy, and may be mocked out for unit testing.
- """
- time.sleep(seconds)
-
-
-class sleep_using_event:
- """Sleep strategy that waits on an event to be set."""
-
- def __init__(self, event: "threading.Event") -> None:
- self.event = event
-
- def __call__(self, timeout: typing.Optional[float]) -> None:
- # NOTE(harlowja): this may *not* actually wait for timeout
- # seconds if the event is set (ie this may eject out early).
- self.event.wait(timeout=timeout)
diff --git a/contrib/python/pip/pip/_vendor/tenacity/py.typed b/contrib/python/pip/pip/_vendor/tenacity/py.typed
deleted file mode 100644
index e69de29bb2..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/py.typed
+++ /dev/null
diff --git a/contrib/python/pip/pip/_vendor/tenacity/retry.py b/contrib/python/pip/pip/_vendor/tenacity/retry.py
deleted file mode 100644
index 38988739d6..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/retry.py
+++ /dev/null
@@ -1,272 +0,0 @@
-# Copyright 2016–2021 Julien Danjou
-# Copyright 2016 Joshua Harlow
-# Copyright 2013-2014 Ray Holder
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import abc
-import re
-import typing
-
-if typing.TYPE_CHECKING:
- from pip._vendor.tenacity import RetryCallState
-
-
-class retry_base(abc.ABC):
- """Abstract base class for retry strategies."""
-
- @abc.abstractmethod
- def __call__(self, retry_state: "RetryCallState") -> bool:
- pass
-
- def __and__(self, other: "retry_base") -> "retry_all":
- return retry_all(self, other)
-
- def __or__(self, other: "retry_base") -> "retry_any":
- return retry_any(self, other)
-
-
-RetryBaseT = typing.Union[retry_base, typing.Callable[["RetryCallState"], bool]]
-
-
-class _retry_never(retry_base):
- """Retry strategy that never rejects any result."""
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- return False
-
-
-retry_never = _retry_never()
-
-
-class _retry_always(retry_base):
- """Retry strategy that always rejects any result."""
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- return True
-
-
-retry_always = _retry_always()
-
-
-class retry_if_exception(retry_base):
- """Retry strategy that retries if an exception verifies a predicate."""
-
- def __init__(self, predicate: typing.Callable[[BaseException], bool]) -> None:
- self.predicate = predicate
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- if retry_state.outcome is None:
- raise RuntimeError("__call__() called before outcome was set")
-
- if retry_state.outcome.failed:
- exception = retry_state.outcome.exception()
- if exception is None:
- raise RuntimeError("outcome failed but the exception is None")
- return self.predicate(exception)
- else:
- return False
-
-
-class retry_if_exception_type(retry_if_exception):
- """Retries if an exception has been raised of one or more types."""
-
- def __init__(
- self,
- exception_types: typing.Union[
- typing.Type[BaseException],
- typing.Tuple[typing.Type[BaseException], ...],
- ] = Exception,
- ) -> None:
- self.exception_types = exception_types
- super().__init__(lambda e: isinstance(e, exception_types))
-
-
-class retry_if_not_exception_type(retry_if_exception):
- """Retries except an exception has been raised of one or more types."""
-
- def __init__(
- self,
- exception_types: typing.Union[
- typing.Type[BaseException],
- typing.Tuple[typing.Type[BaseException], ...],
- ] = Exception,
- ) -> None:
- self.exception_types = exception_types
- super().__init__(lambda e: not isinstance(e, exception_types))
-
-
-class retry_unless_exception_type(retry_if_exception):
- """Retries until an exception is raised of one or more types."""
-
- def __init__(
- self,
- exception_types: typing.Union[
- typing.Type[BaseException],
- typing.Tuple[typing.Type[BaseException], ...],
- ] = Exception,
- ) -> None:
- self.exception_types = exception_types
- super().__init__(lambda e: not isinstance(e, exception_types))
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- if retry_state.outcome is None:
- raise RuntimeError("__call__() called before outcome was set")
-
- # always retry if no exception was raised
- if not retry_state.outcome.failed:
- return True
-
- exception = retry_state.outcome.exception()
- if exception is None:
- raise RuntimeError("outcome failed but the exception is None")
- return self.predicate(exception)
-
-
-class retry_if_exception_cause_type(retry_base):
- """Retries if any of the causes of the raised exception is of one or more types.
-
- The check on the type of the cause of the exception is done recursively (until finding
- an exception in the chain that has no `__cause__`)
- """
-
- def __init__(
- self,
- exception_types: typing.Union[
- typing.Type[BaseException],
- typing.Tuple[typing.Type[BaseException], ...],
- ] = Exception,
- ) -> None:
- self.exception_cause_types = exception_types
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- if retry_state.outcome is None:
- raise RuntimeError("__call__ called before outcome was set")
-
- if retry_state.outcome.failed:
- exc = retry_state.outcome.exception()
- while exc is not None:
- if isinstance(exc.__cause__, self.exception_cause_types):
- return True
- exc = exc.__cause__
-
- return False
-
-
-class retry_if_result(retry_base):
- """Retries if the result verifies a predicate."""
-
- def __init__(self, predicate: typing.Callable[[typing.Any], bool]) -> None:
- self.predicate = predicate
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- if retry_state.outcome is None:
- raise RuntimeError("__call__() called before outcome was set")
-
- if not retry_state.outcome.failed:
- return self.predicate(retry_state.outcome.result())
- else:
- return False
-
-
-class retry_if_not_result(retry_base):
- """Retries if the result refutes a predicate."""
-
- def __init__(self, predicate: typing.Callable[[typing.Any], bool]) -> None:
- self.predicate = predicate
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- if retry_state.outcome is None:
- raise RuntimeError("__call__() called before outcome was set")
-
- if not retry_state.outcome.failed:
- return not self.predicate(retry_state.outcome.result())
- else:
- return False
-
-
-class retry_if_exception_message(retry_if_exception):
- """Retries if an exception message equals or matches."""
-
- def __init__(
- self,
- message: typing.Optional[str] = None,
- match: typing.Optional[str] = None,
- ) -> None:
- if message and match:
- raise TypeError(f"{self.__class__.__name__}() takes either 'message' or 'match', not both")
-
- # set predicate
- if message:
-
- def message_fnc(exception: BaseException) -> bool:
- return message == str(exception)
-
- predicate = message_fnc
- elif match:
- prog = re.compile(match)
-
- def match_fnc(exception: BaseException) -> bool:
- return bool(prog.match(str(exception)))
-
- predicate = match_fnc
- else:
- raise TypeError(f"{self.__class__.__name__}() missing 1 required argument 'message' or 'match'")
-
- super().__init__(predicate)
-
-
-class retry_if_not_exception_message(retry_if_exception_message):
- """Retries until an exception message equals or matches."""
-
- def __init__(
- self,
- message: typing.Optional[str] = None,
- match: typing.Optional[str] = None,
- ) -> None:
- super().__init__(message, match)
- # invert predicate
- if_predicate = self.predicate
- self.predicate = lambda *args_, **kwargs_: not if_predicate(*args_, **kwargs_)
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- if retry_state.outcome is None:
- raise RuntimeError("__call__() called before outcome was set")
-
- if not retry_state.outcome.failed:
- return True
-
- exception = retry_state.outcome.exception()
- if exception is None:
- raise RuntimeError("outcome failed but the exception is None")
- return self.predicate(exception)
-
-
-class retry_any(retry_base):
- """Retries if any of the retries condition is valid."""
-
- def __init__(self, *retries: retry_base) -> None:
- self.retries = retries
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- return any(r(retry_state) for r in self.retries)
-
-
-class retry_all(retry_base):
- """Retries if all the retries condition are valid."""
-
- def __init__(self, *retries: retry_base) -> None:
- self.retries = retries
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- return all(r(retry_state) for r in self.retries)
diff --git a/contrib/python/pip/pip/_vendor/tenacity/stop.py b/contrib/python/pip/pip/_vendor/tenacity/stop.py
deleted file mode 100644
index bb23effdf8..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/stop.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2016–2021 Julien Danjou
-# Copyright 2016 Joshua Harlow
-# Copyright 2013-2014 Ray Holder
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import abc
-import typing
-
-from pip._vendor.tenacity import _utils
-
-if typing.TYPE_CHECKING:
- import threading
-
- from pip._vendor.tenacity import RetryCallState
-
-
-class stop_base(abc.ABC):
- """Abstract base class for stop strategies."""
-
- @abc.abstractmethod
- def __call__(self, retry_state: "RetryCallState") -> bool:
- pass
-
- def __and__(self, other: "stop_base") -> "stop_all":
- return stop_all(self, other)
-
- def __or__(self, other: "stop_base") -> "stop_any":
- return stop_any(self, other)
-
-
-StopBaseT = typing.Union[stop_base, typing.Callable[["RetryCallState"], bool]]
-
-
-class stop_any(stop_base):
- """Stop if any of the stop condition is valid."""
-
- def __init__(self, *stops: stop_base) -> None:
- self.stops = stops
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- return any(x(retry_state) for x in self.stops)
-
-
-class stop_all(stop_base):
- """Stop if all the stop conditions are valid."""
-
- def __init__(self, *stops: stop_base) -> None:
- self.stops = stops
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- return all(x(retry_state) for x in self.stops)
-
-
-class _stop_never(stop_base):
- """Never stop."""
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- return False
-
-
-stop_never = _stop_never()
-
-
-class stop_when_event_set(stop_base):
- """Stop when the given event is set."""
-
- def __init__(self, event: "threading.Event") -> None:
- self.event = event
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- return self.event.is_set()
-
-
-class stop_after_attempt(stop_base):
- """Stop when the previous attempt >= max_attempt."""
-
- def __init__(self, max_attempt_number: int) -> None:
- self.max_attempt_number = max_attempt_number
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- return retry_state.attempt_number >= self.max_attempt_number
-
-
-class stop_after_delay(stop_base):
- """Stop when the time from the first attempt >= limit."""
-
- def __init__(self, max_delay: _utils.time_unit_type) -> None:
- self.max_delay = _utils.to_seconds(max_delay)
-
- def __call__(self, retry_state: "RetryCallState") -> bool:
- if retry_state.seconds_since_start is None:
- raise RuntimeError("__call__() called but seconds_since_start is not set")
- return retry_state.seconds_since_start >= self.max_delay
diff --git a/contrib/python/pip/pip/_vendor/tenacity/tornadoweb.py b/contrib/python/pip/pip/_vendor/tenacity/tornadoweb.py
deleted file mode 100644
index e19c30b189..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/tornadoweb.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2017 Elisey Zanko
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import typing
-
-from pip._vendor.tenacity import BaseRetrying
-from pip._vendor.tenacity import DoAttempt
-from pip._vendor.tenacity import DoSleep
-from pip._vendor.tenacity import RetryCallState
-
-from tornado import gen
-
-if typing.TYPE_CHECKING:
- from tornado.concurrent import Future
-
-_RetValT = typing.TypeVar("_RetValT")
-
-
-class TornadoRetrying(BaseRetrying):
- def __init__(self, sleep: "typing.Callable[[float], Future[None]]" = gen.sleep, **kwargs: typing.Any) -> None:
- super().__init__(**kwargs)
- self.sleep = sleep
-
- @gen.coroutine # type: ignore[misc]
- def __call__(
- self,
- fn: "typing.Callable[..., typing.Union[typing.Generator[typing.Any, typing.Any, _RetValT], Future[_RetValT]]]",
- *args: typing.Any,
- **kwargs: typing.Any,
- ) -> "typing.Generator[typing.Any, typing.Any, _RetValT]":
- self.begin()
-
- retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs)
- while True:
- do = self.iter(retry_state=retry_state)
- if isinstance(do, DoAttempt):
- try:
- result = yield fn(*args, **kwargs)
- except BaseException: # noqa: B902
- retry_state.set_exception(sys.exc_info()) # type: ignore[arg-type]
- else:
- retry_state.set_result(result)
- elif isinstance(do, DoSleep):
- retry_state.prepare_for_next_attempt()
- yield self.sleep(do)
- else:
- raise gen.Return(do)
diff --git a/contrib/python/pip/pip/_vendor/tenacity/wait.py b/contrib/python/pip/pip/_vendor/tenacity/wait.py
deleted file mode 100644
index f9349c0283..0000000000
--- a/contrib/python/pip/pip/_vendor/tenacity/wait.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# Copyright 2016–2021 Julien Danjou
-# Copyright 2016 Joshua Harlow
-# Copyright 2013-2014 Ray Holder
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import abc
-import random
-import typing
-
-from pip._vendor.tenacity import _utils
-
-if typing.TYPE_CHECKING:
- from pip._vendor.tenacity import RetryCallState
-
-
-class wait_base(abc.ABC):
- """Abstract base class for wait strategies."""
-
- @abc.abstractmethod
- def __call__(self, retry_state: "RetryCallState") -> float:
- pass
-
- def __add__(self, other: "wait_base") -> "wait_combine":
- return wait_combine(self, other)
-
- def __radd__(self, other: "wait_base") -> typing.Union["wait_combine", "wait_base"]:
- # make it possible to use multiple waits with the built-in sum function
- if other == 0: # type: ignore[comparison-overlap]
- return self
- return self.__add__(other)
-
-
-WaitBaseT = typing.Union[wait_base, typing.Callable[["RetryCallState"], typing.Union[float, int]]]
-
-
-class wait_fixed(wait_base):
- """Wait strategy that waits a fixed amount of time between each retry."""
-
- def __init__(self, wait: _utils.time_unit_type) -> None:
- self.wait_fixed = _utils.to_seconds(wait)
-
- def __call__(self, retry_state: "RetryCallState") -> float:
- return self.wait_fixed
-
-
-class wait_none(wait_fixed):
- """Wait strategy that doesn't wait at all before retrying."""
-
- def __init__(self) -> None:
- super().__init__(0)
-
-
-class wait_random(wait_base):
- """Wait strategy that waits a random amount of time between min/max."""
-
- def __init__(self, min: _utils.time_unit_type = 0, max: _utils.time_unit_type = 1) -> None: # noqa
- self.wait_random_min = _utils.to_seconds(min)
- self.wait_random_max = _utils.to_seconds(max)
-
- def __call__(self, retry_state: "RetryCallState") -> float:
- return self.wait_random_min + (random.random() * (self.wait_random_max - self.wait_random_min))
-
-
-class wait_combine(wait_base):
- """Combine several waiting strategies."""
-
- def __init__(self, *strategies: wait_base) -> None:
- self.wait_funcs = strategies
-
- def __call__(self, retry_state: "RetryCallState") -> float:
- return sum(x(retry_state=retry_state) for x in self.wait_funcs)
-
-
-class wait_chain(wait_base):
- """Chain two or more waiting strategies.
-
- If all strategies are exhausted, the very last strategy is used
- thereafter.
-
- For example::
-
- @retry(wait=wait_chain(*[wait_fixed(1) for i in range(3)] +
- [wait_fixed(2) for j in range(5)] +
- [wait_fixed(5) for k in range(4)))
- def wait_chained():
- print("Wait 1s for 3 attempts, 2s for 5 attempts and 5s
- thereafter.")
- """
-
- def __init__(self, *strategies: wait_base) -> None:
- self.strategies = strategies
-
- def __call__(self, retry_state: "RetryCallState") -> float:
- wait_func_no = min(max(retry_state.attempt_number, 1), len(self.strategies))
- wait_func = self.strategies[wait_func_no - 1]
- return wait_func(retry_state=retry_state)
-
-
-class wait_incrementing(wait_base):
- """Wait an incremental amount of time after each attempt.
-
- Starting at a starting value and incrementing by a value for each attempt
- (and restricting the upper limit to some maximum value).
- """
-
- def __init__(
- self,
- start: _utils.time_unit_type = 0,
- increment: _utils.time_unit_type = 100,
- max: _utils.time_unit_type = _utils.MAX_WAIT, # noqa
- ) -> None:
- self.start = _utils.to_seconds(start)
- self.increment = _utils.to_seconds(increment)
- self.max = _utils.to_seconds(max)
-
- def __call__(self, retry_state: "RetryCallState") -> float:
- result = self.start + (self.increment * (retry_state.attempt_number - 1))
- return max(0, min(result, self.max))
-
-
-class wait_exponential(wait_base):
- """Wait strategy that applies exponential backoff.
-
- It allows for a customized multiplier and an ability to restrict the
- upper and lower limits to some maximum and minimum value.
-
- The intervals are fixed (i.e. there is no jitter), so this strategy is
- suitable for balancing retries against latency when a required resource is
- unavailable for an unknown duration, but *not* suitable for resolving
- contention between multiple processes for a shared resource. Use
- wait_random_exponential for the latter case.
- """
-
- def __init__(
- self,
- multiplier: typing.Union[int, float] = 1,
- max: _utils.time_unit_type = _utils.MAX_WAIT, # noqa
- exp_base: typing.Union[int, float] = 2,
- min: _utils.time_unit_type = 0, # noqa
- ) -> None:
- self.multiplier = multiplier
- self.min = _utils.to_seconds(min)
- self.max = _utils.to_seconds(max)
- self.exp_base = exp_base
-
- def __call__(self, retry_state: "RetryCallState") -> float:
- try:
- exp = self.exp_base ** (retry_state.attempt_number - 1)
- result = self.multiplier * exp
- except OverflowError:
- return self.max
- return max(max(0, self.min), min(result, self.max))
-
-
-class wait_random_exponential(wait_exponential):
- """Random wait with exponentially widening window.
-
- An exponential backoff strategy used to mediate contention between multiple
- uncoordinated processes for a shared resource in distributed systems. This
- is the sense in which "exponential backoff" is meant in e.g. Ethernet
- networking, and corresponds to the "Full Jitter" algorithm described in
- this blog post:
-
- https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/
-
- Each retry occurs at a random time in a geometrically expanding interval.
- It allows for a custom multiplier and an ability to restrict the upper
- limit of the random interval to some maximum value.
-
- Example::
-
- wait_random_exponential(multiplier=0.5, # initial window 0.5s
- max=60) # max 60s timeout
-
- When waiting for an unavailable resource to become available again, as
- opposed to trying to resolve contention for a shared resource, the
- wait_exponential strategy (which uses a fixed interval) may be preferable.
-
- """
-
- def __call__(self, retry_state: "RetryCallState") -> float:
- high = super().__call__(retry_state=retry_state)
- return random.uniform(0, high)
-
-
-class wait_exponential_jitter(wait_base):
- """Wait strategy that applies exponential backoff and jitter.
-
- It allows for a customized initial wait, maximum wait and jitter.
-
- This implements the strategy described here:
- https://cloud.google.com/storage/docs/retry-strategy
-
- The wait time is min(initial * 2**n + random.uniform(0, jitter), maximum)
- where n is the retry count.
- """
-
- def __init__(
- self,
- initial: float = 1,
- max: float = _utils.MAX_WAIT, # noqa
- exp_base: float = 2,
- jitter: float = 1,
- ) -> None:
- self.initial = initial
- self.max = max
- self.exp_base = exp_base
- self.jitter = jitter
-
- def __call__(self, retry_state: "RetryCallState") -> float:
- jitter = random.uniform(0, self.jitter)
- try:
- exp = self.exp_base ** (retry_state.attempt_number - 1)
- result = self.initial * exp + jitter
- except OverflowError:
- result = self.max
- return max(0, min(result, self.max))
diff --git a/contrib/python/pip/pip/_vendor/typing_extensions.py b/contrib/python/pip/pip/_vendor/typing_extensions.py
index d60315a6ad..e429384e76 100644
--- a/contrib/python/pip/pip/_vendor/typing_extensions.py
+++ b/contrib/python/pip/pip/_vendor/typing_extensions.py
@@ -1,6 +1,7 @@
import abc
import collections
import collections.abc
+import contextlib
import functools
import inspect
import operator
@@ -116,6 +117,7 @@ __all__ = [
'MutableMapping',
'MutableSequence',
'MutableSet',
+ 'NoDefault',
'Optional',
'Pattern',
'Reversible',
@@ -134,6 +136,7 @@ __all__ = [
# for backward compatibility
PEP_560 = True
GenericMeta = type
+_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
# The functions below are modified copies of typing internal helpers.
# They are needed by _ProtocolMeta and they provide support for PEP 646.
@@ -406,17 +409,96 @@ Coroutine = typing.Coroutine
AsyncIterable = typing.AsyncIterable
AsyncIterator = typing.AsyncIterator
Deque = typing.Deque
-ContextManager = typing.ContextManager
-AsyncContextManager = typing.AsyncContextManager
DefaultDict = typing.DefaultDict
OrderedDict = typing.OrderedDict
Counter = typing.Counter
ChainMap = typing.ChainMap
-AsyncGenerator = typing.AsyncGenerator
Text = typing.Text
TYPE_CHECKING = typing.TYPE_CHECKING
+if sys.version_info >= (3, 13, 0, "beta"):
+ from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
+else:
+ def _is_dunder(attr):
+ return attr.startswith('__') and attr.endswith('__')
+
+ # Python <3.9 doesn't have typing._SpecialGenericAlias
+ _special_generic_alias_base = getattr(
+ typing, "_SpecialGenericAlias", typing._GenericAlias
+ )
+
+ class _SpecialGenericAlias(_special_generic_alias_base, _root=True):
+ def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
+ if _special_generic_alias_base is typing._GenericAlias:
+ # Python <3.9
+ self.__origin__ = origin
+ self._nparams = nparams
+ super().__init__(origin, nparams, special=True, inst=inst, name=name)
+ else:
+ # Python >= 3.9
+ super().__init__(origin, nparams, inst=inst, name=name)
+ self._defaults = defaults
+
+ def __setattr__(self, attr, val):
+ allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
+ if _special_generic_alias_base is typing._GenericAlias:
+ # Python <3.9
+ allowed_attrs.add("__origin__")
+ if _is_dunder(attr) or attr in allowed_attrs:
+ object.__setattr__(self, attr, val)
+ else:
+ setattr(self.__origin__, attr, val)
+
+ @typing._tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ msg = "Parameters to generic types must be types."
+ params = tuple(typing._type_check(p, msg) for p in params)
+ if (
+ self._defaults
+ and len(params) < self._nparams
+ and len(params) + len(self._defaults) >= self._nparams
+ ):
+ params = (*params, *self._defaults[len(params) - self._nparams:])
+ actual_len = len(params)
+
+ if actual_len != self._nparams:
+ if self._defaults:
+ expected = f"at least {self._nparams - len(self._defaults)}"
+ else:
+ expected = str(self._nparams)
+ if not self._nparams:
+ raise TypeError(f"{self} is not a generic class")
+ raise TypeError(
+ f"Too {'many' if actual_len > self._nparams else 'few'}"
+ f" arguments for {self};"
+ f" actual {actual_len}, expected {expected}"
+ )
+ return self.copy_with(params)
+
+ _NoneType = type(None)
+ Generator = _SpecialGenericAlias(
+ collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
+ )
+ AsyncGenerator = _SpecialGenericAlias(
+ collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
+ )
+ ContextManager = _SpecialGenericAlias(
+ contextlib.AbstractContextManager,
+ 2,
+ name="ContextManager",
+ defaults=(typing.Optional[bool],)
+ )
+ AsyncContextManager = _SpecialGenericAlias(
+ contextlib.AbstractAsyncContextManager,
+ 2,
+ name="AsyncContextManager",
+ defaults=(typing.Optional[bool],)
+ )
+
+
_PROTO_ALLOWLIST = {
'collections.abc': [
'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
@@ -427,23 +509,11 @@ _PROTO_ALLOWLIST = {
}
-_EXCLUDED_ATTRS = {
- "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol",
- "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__",
- "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__",
- "__subclasshook__", "__orig_class__", "__init__", "__new__",
- "__protocol_attrs__", "__non_callable_proto_members__",
- "__match_args__",
+_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
+ "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
+ "__final__",
}
-if sys.version_info >= (3, 9):
- _EXCLUDED_ATTRS.add("__class_getitem__")
-
-if sys.version_info >= (3, 12):
- _EXCLUDED_ATTRS.add("__type_params__")
-
-_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS)
-
def _get_protocol_attrs(cls):
attrs = set()
@@ -669,13 +739,18 @@ else:
not their type signatures!
"""
if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
- raise TypeError('@runtime_checkable can be only applied to protocol classes,'
- ' got %r' % cls)
+ raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
+ f' got {cls!r}')
cls._is_runtime_protocol = True
- # Only execute the following block if it's a typing_extensions.Protocol class.
- # typing.Protocol classes don't need it.
- if isinstance(cls, _ProtocolMeta):
+ # typing.Protocol classes on <=3.11 break if we execute this block,
+ # because typing.Protocol classes on <=3.11 don't have a
+ # `__protocol_attrs__` attribute, and this block relies on the
+ # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
+ # break if we *don't* execute this block, because *they* assume that all
+ # protocol classes have a `__non_callable_proto_members__` attribute
+ # (which this block sets)
+ if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
# PEP 544 prohibits using issubclass()
# with protocols that have non-method members.
# See gh-113320 for why we compute this attribute here,
@@ -867,7 +942,13 @@ else:
tp_dict.__orig_bases__ = bases
annotations = {}
- own_annotations = ns.get('__annotations__', {})
+ if "__annotations__" in ns:
+ own_annotations = ns["__annotations__"]
+ elif "__annotate__" in ns:
+ # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+ own_annotations = ns["__annotate__"](1)
+ else:
+ own_annotations = {}
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
if _TAKES_MODULE:
own_annotations = {
@@ -1190,7 +1271,7 @@ else:
def __reduce__(self):
return operator.getitem, (
- Annotated, (self.__origin__,) + self.__metadata__
+ Annotated, (self.__origin__, *self.__metadata__)
)
def __eq__(self, other):
@@ -1316,7 +1397,7 @@ else:
get_args(Callable[[], T][int]) == ([], int)
"""
if isinstance(tp, _AnnotatedAlias):
- return (tp.__origin__,) + tp.__metadata__
+ return (tp.__origin__, *tp.__metadata__)
if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
if getattr(tp, "_special", False):
return ()
@@ -1362,17 +1443,37 @@ else:
)
+if hasattr(typing, "NoDefault"):
+ NoDefault = typing.NoDefault
+else:
+ class NoDefaultTypeMeta(type):
+ def __setattr__(cls, attr, value):
+ # TypeError is consistent with the behavior of NoneType
+ raise TypeError(
+ f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
+ )
+
+ class NoDefaultType(metaclass=NoDefaultTypeMeta):
+ """The type of the NoDefault singleton."""
+
+ __slots__ = ()
+
+ def __new__(cls):
+ return globals().get("NoDefault") or object.__new__(cls)
+
+ def __repr__(self):
+ return "typing_extensions.NoDefault"
+
+ def __reduce__(self):
+ return "NoDefault"
+
+ NoDefault = NoDefaultType()
+ del NoDefaultType, NoDefaultTypeMeta
+
+
def _set_default(type_param, default):
- if isinstance(default, (tuple, list)):
- type_param.__default__ = tuple((typing._type_check(d, "Default must be a type")
- for d in default))
- elif default != _marker:
- if isinstance(type_param, ParamSpec) and default is ...: # ... not valid <3.11
- type_param.__default__ = default
- else:
- type_param.__default__ = typing._type_check(default, "Default must be a type")
- else:
- type_param.__default__ = None
+ type_param.has_default = lambda: default is not NoDefault
+ type_param.__default__ = default
def _set_module(typevarlike):
@@ -1395,32 +1496,46 @@ class _TypeVarLikeMeta(type):
return isinstance(__instance, cls._backported_typevarlike)
-# Add default and infer_variance parameters from PEP 696 and 695
-class TypeVar(metaclass=_TypeVarLikeMeta):
- """Type variable."""
+if _PEP_696_IMPLEMENTED:
+ from typing import TypeVar
+else:
+ # Add default and infer_variance parameters from PEP 696 and 695
+ class TypeVar(metaclass=_TypeVarLikeMeta):
+ """Type variable."""
- _backported_typevarlike = typing.TypeVar
+ _backported_typevarlike = typing.TypeVar
- def __new__(cls, name, *constraints, bound=None,
- covariant=False, contravariant=False,
- default=_marker, infer_variance=False):
- if hasattr(typing, "TypeAliasType"):
- # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
- typevar = typing.TypeVar(name, *constraints, bound=bound,
- covariant=covariant, contravariant=contravariant,
- infer_variance=infer_variance)
- else:
- typevar = typing.TypeVar(name, *constraints, bound=bound,
- covariant=covariant, contravariant=contravariant)
- if infer_variance and (covariant or contravariant):
- raise ValueError("Variance cannot be specified with infer_variance.")
- typevar.__infer_variance__ = infer_variance
- _set_default(typevar, default)
- _set_module(typevar)
- return typevar
+ def __new__(cls, name, *constraints, bound=None,
+ covariant=False, contravariant=False,
+ default=NoDefault, infer_variance=False):
+ if hasattr(typing, "TypeAliasType"):
+ # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
+ typevar = typing.TypeVar(name, *constraints, bound=bound,
+ covariant=covariant, contravariant=contravariant,
+ infer_variance=infer_variance)
+ else:
+ typevar = typing.TypeVar(name, *constraints, bound=bound,
+ covariant=covariant, contravariant=contravariant)
+ if infer_variance and (covariant or contravariant):
+ raise ValueError("Variance cannot be specified with infer_variance.")
+ typevar.__infer_variance__ = infer_variance
+
+ _set_default(typevar, default)
+ _set_module(typevar)
+
+ def _tvar_prepare_subst(alias, args):
+ if (
+ typevar.has_default()
+ and alias.__parameters__.index(typevar) == len(args)
+ ):
+ args += (typevar.__default__,)
+ return args
- def __init_subclass__(cls) -> None:
- raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
+ typevar.__typing_prepare_subst__ = _tvar_prepare_subst
+ return typevar
+
+ def __init_subclass__(cls) -> None:
+ raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
# Python 3.10+ has PEP 612
@@ -1485,8 +1600,12 @@ else:
return NotImplemented
return self.__origin__ == other.__origin__
+
+if _PEP_696_IMPLEMENTED:
+ from typing import ParamSpec
+
# 3.10+
-if hasattr(typing, 'ParamSpec'):
+elif hasattr(typing, 'ParamSpec'):
# Add default parameter - PEP 696
class ParamSpec(metaclass=_TypeVarLikeMeta):
@@ -1496,7 +1615,7 @@ if hasattr(typing, 'ParamSpec'):
def __new__(cls, name, *, bound=None,
covariant=False, contravariant=False,
- infer_variance=False, default=_marker):
+ infer_variance=False, default=NoDefault):
if hasattr(typing, "TypeAliasType"):
# PEP 695 implemented, can pass infer_variance to typing.TypeVar
paramspec = typing.ParamSpec(name, bound=bound,
@@ -1511,6 +1630,24 @@ if hasattr(typing, 'ParamSpec'):
_set_default(paramspec, default)
_set_module(paramspec)
+
+ def _paramspec_prepare_subst(alias, args):
+ params = alias.__parameters__
+ i = params.index(paramspec)
+ if i == len(args) and paramspec.has_default():
+ args = [*args, paramspec.__default__]
+ if i >= len(args):
+ raise TypeError(f"Too few arguments for {alias}")
+ # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
+ if len(params) == 1 and not typing._is_param_expr(args[0]):
+ assert i == 0
+ args = (args,)
+ # Convert lists to tuples to help other libraries cache the results.
+ elif isinstance(args[i], list):
+ args = (*args[:i], tuple(args[i]), *args[i + 1:])
+ return args
+
+ paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
return paramspec
def __init_subclass__(cls) -> None:
@@ -1579,8 +1716,8 @@ else:
return ParamSpecKwargs(self)
def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
- infer_variance=False, default=_marker):
- super().__init__([self])
+ infer_variance=False, default=NoDefault):
+ list.__init__(self, [self])
self.__name__ = name
self.__covariant__ = bool(covariant)
self.__contravariant__ = bool(contravariant)
@@ -1674,7 +1811,7 @@ def _concatenate_getitem(self, parameters):
# 3.10+
if hasattr(typing, 'Concatenate'):
Concatenate = typing.Concatenate
- _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811
+ _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
# 3.9
elif sys.version_info[:2] >= (3, 9):
@_ExtensionsSpecialForm
@@ -2209,6 +2346,17 @@ elif sys.version_info[:2] >= (3, 9): # 3.9+
class _UnpackAlias(typing._GenericAlias, _root=True):
__class__ = typing.TypeVar
+ @property
+ def __typing_unpacked_tuple_args__(self):
+ assert self.__origin__ is Unpack
+ assert len(self.__args__) == 1
+ arg, = self.__args__
+ if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
+ if arg.__origin__ is not tuple:
+ raise TypeError("Unpack[...] must be used with a tuple type")
+ return arg.__args__
+ return None
+
@_UnpackSpecialForm
def Unpack(self, parameters):
item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
@@ -2233,7 +2381,20 @@ else: # 3.8
return isinstance(obj, _UnpackAlias)
-if hasattr(typing, "TypeVarTuple"): # 3.11+
+if _PEP_696_IMPLEMENTED:
+ from typing import TypeVarTuple
+
+elif hasattr(typing, "TypeVarTuple"): # 3.11+
+
+ def _unpack_args(*args):
+ newargs = []
+ for arg in args:
+ subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+ if subargs is not None and not (subargs and subargs[-1] is ...):
+ newargs.extend(subargs)
+ else:
+ newargs.append(arg)
+ return newargs
# Add default parameter - PEP 696
class TypeVarTuple(metaclass=_TypeVarLikeMeta):
@@ -2241,10 +2402,57 @@ if hasattr(typing, "TypeVarTuple"): # 3.11+
_backported_typevarlike = typing.TypeVarTuple
- def __new__(cls, name, *, default=_marker):
+ def __new__(cls, name, *, default=NoDefault):
tvt = typing.TypeVarTuple(name)
_set_default(tvt, default)
_set_module(tvt)
+
+ def _typevartuple_prepare_subst(alias, args):
+ params = alias.__parameters__
+ typevartuple_index = params.index(tvt)
+ for param in params[typevartuple_index + 1:]:
+ if isinstance(param, TypeVarTuple):
+ raise TypeError(
+ f"More than one TypeVarTuple parameter in {alias}"
+ )
+
+ alen = len(args)
+ plen = len(params)
+ left = typevartuple_index
+ right = plen - typevartuple_index - 1
+ var_tuple_index = None
+ fillarg = None
+ for k, arg in enumerate(args):
+ if not isinstance(arg, type):
+ subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+ if subargs and len(subargs) == 2 and subargs[-1] is ...:
+ if var_tuple_index is not None:
+ raise TypeError(
+ "More than one unpacked "
+ "arbitrary-length tuple argument"
+ )
+ var_tuple_index = k
+ fillarg = subargs[0]
+ if var_tuple_index is not None:
+ left = min(left, var_tuple_index)
+ right = min(right, alen - var_tuple_index - 1)
+ elif left + right > alen:
+ raise TypeError(f"Too few arguments for {alias};"
+ f" actual {alen}, expected at least {plen - 1}")
+ if left == alen - right and tvt.has_default():
+ replacement = _unpack_args(tvt.__default__)
+ else:
+ replacement = args[left: alen - right]
+
+ return (
+ *args[:left],
+ *([fillarg] * (typevartuple_index - left)),
+ replacement,
+ *([fillarg] * (plen - right - left - typevartuple_index - 1)),
+ *args[alen - right:],
+ )
+
+ tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
return tvt
def __init_subclass__(self, *args, **kwds):
@@ -2301,7 +2509,7 @@ else: # <=3.10
def __iter__(self):
yield self.__unpacked__
- def __init__(self, name, *, default=_marker):
+ def __init__(self, name, *, default=NoDefault):
self.__name__ = name
_DefaultMixin.__init__(self, default)
@@ -2352,6 +2560,12 @@ else: # <=3.10
return obj
+if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+
+ _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
+else: # <=3.10
+ _ASSERT_NEVER_REPR_MAX_LENGTH = 100
+
+
if hasattr(typing, "assert_never"): # 3.11+
assert_never = typing.assert_never
else: # <=3.10
@@ -2375,7 +2589,10 @@ else: # <=3.10
At runtime, this throws an exception when called.
"""
- raise AssertionError("Expected code to be unreachable")
+ value = repr(arg)
+ if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
+ value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
+ raise AssertionError(f"Expected code to be unreachable, but got: {value}")
if sys.version_info >= (3, 12): # 3.12+
@@ -2677,11 +2894,14 @@ if not hasattr(typing, "TypeVarTuple"):
if alen < elen:
# since we validate TypeVarLike default in _collect_type_vars
# or _collect_parameters we can safely check parameters[alen]
- if getattr(parameters[alen], '__default__', None) is not None:
+ if (
+ getattr(parameters[alen], '__default__', NoDefault)
+ is not NoDefault
+ ):
return
- num_default_tv = sum(getattr(p, '__default__', None)
- is not None for p in parameters)
+ num_default_tv = sum(getattr(p, '__default__', NoDefault)
+ is not NoDefault for p in parameters)
elen -= num_default_tv
@@ -2711,11 +2931,14 @@ else:
if alen < elen:
# since we validate TypeVarLike default in _collect_type_vars
# or _collect_parameters we can safely check parameters[alen]
- if getattr(parameters[alen], '__default__', None) is not None:
+ if (
+ getattr(parameters[alen], '__default__', NoDefault)
+ is not NoDefault
+ ):
return
- num_default_tv = sum(getattr(p, '__default__', None)
- is not None for p in parameters)
+ num_default_tv = sum(getattr(p, '__default__', NoDefault)
+ is not NoDefault for p in parameters)
elen -= num_default_tv
@@ -2724,7 +2947,42 @@ else:
raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
f" for {cls}; actual {alen}, expected {expect_val}")
-typing._check_generic = _check_generic
+if not _PEP_696_IMPLEMENTED:
+ typing._check_generic = _check_generic
+
+
+def _has_generic_or_protocol_as_origin() -> bool:
+ try:
+ frame = sys._getframe(2)
+ # - Catch AttributeError: not all Python implementations have sys._getframe()
+ # - Catch ValueError: maybe we're called from an unexpected module
+ # and the call stack isn't deep enough
+ except (AttributeError, ValueError):
+ return False # err on the side of leniency
+ else:
+ # If we somehow get invoked from outside typing.py,
+ # also err on the side of leniency
+ if frame.f_globals.get("__name__") != "typing":
+ return False
+ origin = frame.f_locals.get("origin")
+ # Cannot use "in" because origin may be an object with a buggy __eq__ that
+ # throws an error.
+ return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
+
+
+_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
+
+
+def _is_unpacked_typevartuple(x) -> bool:
+ if get_origin(x) is not Unpack:
+ return False
+ args = get_args(x)
+ return (
+ bool(args)
+ and len(args) == 1
+ and type(args[0]) in _TYPEVARTUPLE_TYPES
+ )
+
# Python 3.11+ _collect_type_vars was renamed to _collect_parameters
if hasattr(typing, '_collect_type_vars'):
@@ -2737,19 +2995,29 @@ if hasattr(typing, '_collect_type_vars'):
if typevar_types is None:
typevar_types = typing.TypeVar
tvars = []
- # required TypeVarLike cannot appear after TypeVarLike with default
+
+ # A required TypeVarLike cannot appear after a TypeVarLike with a default
+ # if it was a direct call to `Generic[]` or `Protocol[]`
+ enforce_default_ordering = _has_generic_or_protocol_as_origin()
default_encountered = False
+
+ # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+ type_var_tuple_encountered = False
+
for t in types:
- if (
- isinstance(t, typevar_types) and
- t not in tvars and
- not _is_unpack(t)
- ):
- if getattr(t, '__default__', None) is not None:
- default_encountered = True
- elif default_encountered:
- raise TypeError(f'Type parameter {t!r} without a default'
- ' follows type parameter with a default')
+ if _is_unpacked_typevartuple(t):
+ type_var_tuple_encountered = True
+ elif isinstance(t, typevar_types) and t not in tvars:
+ if enforce_default_ordering:
+ has_default = getattr(t, '__default__', NoDefault) is not NoDefault
+ if has_default:
+ if type_var_tuple_encountered:
+ raise TypeError('Type parameter with a default'
+ ' follows TypeVarTuple')
+ default_encountered = True
+ elif default_encountered:
+ raise TypeError(f'Type parameter {t!r} without a default'
+ ' follows type parameter with a default')
tvars.append(t)
if _should_collect_from_parameters(t):
@@ -2767,8 +3035,15 @@ else:
assert _collect_parameters((T, Callable[P, T])) == (T, P)
"""
parameters = []
- # required TypeVarLike cannot appear after TypeVarLike with default
+
+ # A required TypeVarLike cannot appear after a TypeVarLike with default
+ # if it was a direct call to `Generic[]` or `Protocol[]`
+ enforce_default_ordering = _has_generic_or_protocol_as_origin()
default_encountered = False
+
+ # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+ type_var_tuple_encountered = False
+
for t in args:
if isinstance(t, type):
# We don't want __parameters__ descriptor of a bare Python class.
@@ -2782,21 +3057,33 @@ else:
parameters.append(collected)
elif hasattr(t, '__typing_subst__'):
if t not in parameters:
- if getattr(t, '__default__', None) is not None:
- default_encountered = True
- elif default_encountered:
- raise TypeError(f'Type parameter {t!r} without a default'
- ' follows type parameter with a default')
+ if enforce_default_ordering:
+ has_default = (
+ getattr(t, '__default__', NoDefault) is not NoDefault
+ )
+
+ if type_var_tuple_encountered and has_default:
+ raise TypeError('Type parameter with a default'
+ ' follows TypeVarTuple')
+
+ if has_default:
+ default_encountered = True
+ elif default_encountered:
+ raise TypeError(f'Type parameter {t!r} without a default'
+ ' follows type parameter with a default')
parameters.append(t)
else:
+ if _is_unpacked_typevartuple(t):
+ type_var_tuple_encountered = True
for x in getattr(t, '__parameters__', ()):
if x not in parameters:
parameters.append(x)
return tuple(parameters)
- typing._collect_parameters = _collect_parameters
+ if not _PEP_696_IMPLEMENTED:
+ typing._collect_parameters = _collect_parameters
# Backport typing.NamedTuple as it exists in Python 3.13.
# In 3.11, the ability to define generic `NamedTuple`s was supported.
@@ -2830,7 +3117,13 @@ else:
raise TypeError(
'can only inherit from a NamedTuple type and Generic')
bases = tuple(tuple if base is _NamedTuple else base for base in bases)
- types = ns.get('__annotations__', {})
+ if "__annotations__" in ns:
+ types = ns["__annotations__"]
+ elif "__annotate__" in ns:
+ # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+ types = ns["__annotate__"](1)
+ else:
+ types = {}
default_names = []
for field_name in types:
if field_name in ns:
@@ -2962,7 +3255,7 @@ else:
if hasattr(collections.abc, "Buffer"):
Buffer = collections.abc.Buffer
else:
- class Buffer(abc.ABC):
+ class Buffer(abc.ABC): # noqa: B024
"""Base class for classes that implement the buffer protocol.
The buffer protocol allows Python objects to expose a low-level
@@ -3289,6 +3582,23 @@ else:
return self.documentation == other.documentation
+_CapsuleType = getattr(_types, "CapsuleType", None)
+
+if _CapsuleType is None:
+ try:
+ import _socket
+ except ImportError:
+ pass
+ else:
+ _CAPI = getattr(_socket, "CAPI", None)
+ if _CAPI is not None:
+ _CapsuleType = type(_CAPI)
+
+if _CapsuleType is not None:
+ CapsuleType = _CapsuleType
+ __all__.append("CapsuleType")
+
+
# Aliases for items that have always been in typing.
# Explicitly assign these (rather than using `from typing import *` at the top),
# so that we get a CI error if one of these is deleted from typing.py
@@ -3302,7 +3612,6 @@ Container = typing.Container
Dict = typing.Dict
ForwardRef = typing.ForwardRef
FrozenSet = typing.FrozenSet
-Generator = typing.Generator
Generic = typing.Generic
Hashable = typing.Hashable
IO = typing.IO
diff --git a/contrib/python/pip/pip/_vendor/vendor.txt b/contrib/python/pip/pip/_vendor/vendor.txt
index e50d946417..fd92690602 100644
--- a/contrib/python/pip/pip/_vendor/vendor.txt
+++ b/contrib/python/pip/pip/_vendor/vendor.txt
@@ -3,17 +3,16 @@ distlib==0.3.8
distro==1.9.0
msgpack==1.0.8
packaging==24.1
-platformdirs==4.2.1
+platformdirs==4.2.2
pyproject-hooks==1.0.0
requests==2.32.3
- certifi==2024.2.2
+ certifi==2024.7.4
idna==3.7
urllib3==1.26.18
rich==13.7.1
- pygments==2.17.2
- typing_extensions==4.11.0
+ pygments==2.18.0
+ typing_extensions==4.12.2
resolvelib==1.0.1
-setuptools==69.5.1
-tenacity==8.2.3
+setuptools==70.3.0
tomli==2.0.1
truststore==0.9.1
diff --git a/contrib/python/pip/ya.make b/contrib/python/pip/ya.make
index 9f0269447b..258dd0af73 100644
--- a/contrib/python/pip/ya.make
+++ b/contrib/python/pip/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(24.1.2)
+VERSION(24.2)
LICENSE(MIT)
@@ -152,6 +152,7 @@ PY_SRCS(
pip/_internal/utils/logging.py
pip/_internal/utils/misc.py
pip/_internal/utils/packaging.py
+ pip/_internal/utils/retry.py
pip/_internal/utils/setuptools_build.py
pip/_internal/utils/subprocess.py
pip/_internal/utils/temp_dir.py
@@ -376,17 +377,6 @@ PY_SRCS(
pip/_vendor/rich/themes.py
pip/_vendor/rich/traceback.py
pip/_vendor/rich/tree.py
- pip/_vendor/tenacity/__init__.py
- pip/_vendor/tenacity/_asyncio.py
- pip/_vendor/tenacity/_utils.py
- pip/_vendor/tenacity/after.py
- pip/_vendor/tenacity/before.py
- pip/_vendor/tenacity/before_sleep.py
- pip/_vendor/tenacity/nap.py
- pip/_vendor/tenacity/retry.py
- pip/_vendor/tenacity/stop.py
- pip/_vendor/tenacity/tornadoweb.py
- pip/_vendor/tenacity/wait.py
pip/_vendor/tomli/__init__.py
pip/_vendor/tomli/_parser.py
pip/_vendor/tomli/_re.py
@@ -453,7 +443,6 @@ RESOURCE_FILES(
pip/_vendor/platformdirs/py.typed
pip/_vendor/resolvelib/py.typed
pip/_vendor/rich/py.typed
- pip/_vendor/tenacity/py.typed
pip/_vendor/tomli/py.typed
pip/_vendor/truststore/py.typed
pip/_vendor/vendor.txt