aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/python
diff options
context:
space:
mode:
authorrobot-piglet <robot-piglet@yandex-team.com>2025-05-12 07:22:37 +0300
committerrobot-piglet <robot-piglet@yandex-team.com>2025-05-12 07:33:36 +0300
commitd7a8efdfadcf97a6db3250a427b22cc0602db822 (patch)
tree2b55764854960f9a67b2ffcd6211dd267e87dee9 /contrib/python
parentfe512fa85146df9204580c7279f09b81bf141953 (diff)
downloadydb-d7a8efdfadcf97a6db3250a427b22cc0602db822.tar.gz
Intermediate changes
commit_hash:91fe1a3c59e708fe728a543863a327ca7d4940e7
Diffstat (limited to 'contrib/python')
-rw-r--r--contrib/python/pip/.dist-info/METADATA8
-rw-r--r--contrib/python/pip/AUTHORS.txt15
-rw-r--r--contrib/python/pip/pip/__init__.py2
-rw-r--r--contrib/python/pip/pip/__pip-runner__.py2
-rw-r--r--contrib/python/pip/pip/_internal/build_env.py7
-rw-r--r--contrib/python/pip/pip/_internal/cache.py3
-rw-r--r--contrib/python/pip/pip/_internal/cli/__init__.py3
-rw-r--r--contrib/python/pip/pip/_internal/cli/autocompletion.py3
-rw-r--r--contrib/python/pip/pip/_internal/cli/base_command.py11
-rw-r--r--contrib/python/pip/pip/_internal/cli/cmdoptions.py70
-rw-r--r--contrib/python/pip/pip/_internal/cli/index_command.py2
-rw-r--r--contrib/python/pip/pip/_internal/cli/main.py3
-rw-r--r--contrib/python/pip/pip/_internal/cli/main_parser.py3
-rw-r--r--contrib/python/pip/pip/_internal/cli/progress_bars.py68
-rw-r--r--contrib/python/pip/pip/_internal/cli/req_command.py20
-rw-r--r--contrib/python/pip/pip/_internal/commands/__init__.py5
-rw-r--r--contrib/python/pip/pip/_internal/commands/completion.py16
-rw-r--r--contrib/python/pip/pip/_internal/commands/freeze.py1
-rw-r--r--contrib/python/pip/pip/_internal/commands/index.py34
-rw-r--r--contrib/python/pip/pip/_internal/commands/install.py9
-rw-r--r--contrib/python/pip/pip/_internal/commands/list.py26
-rw-r--r--contrib/python/pip/pip/_internal/commands/lock.py171
-rw-r--r--contrib/python/pip/pip/_internal/commands/search.py24
-rw-r--r--contrib/python/pip/pip/_internal/commands/show.py14
-rw-r--r--contrib/python/pip/pip/_internal/commands/wheel.py4
-rw-r--r--contrib/python/pip/pip/_internal/exceptions.py53
-rw-r--r--contrib/python/pip/pip/_internal/index/__init__.py3
-rw-r--r--contrib/python/pip/pip/_internal/index/package_finder.py53
-rw-r--r--contrib/python/pip/pip/_internal/locations/__init__.py19
-rw-r--r--contrib/python/pip/pip/_internal/metadata/__init__.py60
-rw-r--r--contrib/python/pip/pip/_internal/metadata/base.py4
-rw-r--r--contrib/python/pip/pip/_internal/metadata/importlib/_envs.py71
-rw-r--r--contrib/python/pip/pip/_internal/models/__init__.py3
-rw-r--r--contrib/python/pip/pip/_internal/models/direct_url.py2
-rw-r--r--contrib/python/pip/pip/_internal/models/link.py10
-rw-r--r--contrib/python/pip/pip/_internal/models/pylock.py183
-rw-r--r--contrib/python/pip/pip/_internal/models/wheel.py107
-rw-r--r--contrib/python/pip/pip/_internal/network/__init__.py3
-rw-r--r--contrib/python/pip/pip/_internal/network/cache.py3
-rw-r--r--contrib/python/pip/pip/_internal/network/download.py213
-rw-r--r--contrib/python/pip/pip/_internal/network/xmlrpc.py3
-rw-r--r--contrib/python/pip/pip/_internal/operations/build/metadata.py3
-rw-r--r--contrib/python/pip/pip/_internal/operations/build/metadata_editable.py3
-rw-r--r--contrib/python/pip/pip/_internal/operations/build/metadata_legacy.py3
-rw-r--r--contrib/python/pip/pip/_internal/operations/build/wheel_legacy.py16
-rw-r--r--contrib/python/pip/pip/_internal/operations/check.py3
-rw-r--r--contrib/python/pip/pip/_internal/operations/install/__init__.py3
-rw-r--r--contrib/python/pip/pip/_internal/operations/install/editable_legacy.py3
-rw-r--r--contrib/python/pip/pip/_internal/operations/install/wheel.py24
-rw-r--r--contrib/python/pip/pip/_internal/operations/prepare.py15
-rw-r--r--contrib/python/pip/pip/_internal/req/__init__.py15
-rw-r--r--contrib/python/pip/pip/_internal/req/req_dependency_group.py74
-rw-r--r--contrib/python/pip/pip/_internal/req/req_install.py4
-rw-r--r--contrib/python/pip/pip/_internal/req/req_uninstall.py11
-rw-r--r--contrib/python/pip/pip/_internal/resolution/resolvelib/candidates.py7
-rw-r--r--contrib/python/pip/pip/_internal/resolution/resolvelib/factory.py2
-rw-r--r--contrib/python/pip/pip/_internal/resolution/resolvelib/found_candidates.py30
-rw-r--r--contrib/python/pip/pip/_internal/resolution/resolvelib/provider.py153
-rw-r--r--contrib/python/pip/pip/_internal/resolution/resolvelib/reporter.py10
-rw-r--r--contrib/python/pip/pip/_internal/resolution/resolvelib/resolver.py7
-rw-r--r--contrib/python/pip/pip/_internal/utils/appdirs.py3
-rw-r--r--contrib/python/pip/pip/_internal/utils/compatibility_tags.py16
-rw-r--r--contrib/python/pip/pip/_internal/utils/datetime.py3
-rw-r--r--contrib/python/pip/pip/_internal/utils/entrypoints.py5
-rw-r--r--contrib/python/pip/pip/_internal/utils/filetypes.py3
-rw-r--r--contrib/python/pip/pip/_internal/utils/logging.py33
-rw-r--r--contrib/python/pip/pip/_internal/utils/misc.py18
-rw-r--r--contrib/python/pip/pip/_internal/utils/packaging.py19
-rw-r--r--contrib/python/pip/pip/_internal/utils/setuptools_build.py9
-rw-r--r--contrib/python/pip/pip/_internal/utils/unpacking.py4
-rw-r--r--contrib/python/pip/pip/_internal/utils/wheel.py3
-rw-r--r--contrib/python/pip/pip/_internal/vcs/git.py11
-rw-r--r--contrib/python/pip/pip/_internal/wheel_builder.py32
-rw-r--r--contrib/python/pip/pip/_vendor/__init__.py1
-rw-r--r--contrib/python/pip/pip/_vendor/cachecontrol/__init__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/cachecontrol/adapter.py19
-rw-r--r--contrib/python/pip/pip/_vendor/cachecontrol/caches/file_cache.py57
-rw-r--r--contrib/python/pip/pip/_vendor/cachecontrol/controller.py13
-rw-r--r--contrib/python/pip/pip/_vendor/certifi/__init__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/certifi/cacert.pem276
-rw-r--r--contrib/python/pip/pip/_vendor/dependency_groups/__init__.py13
-rw-r--r--contrib/python/pip/pip/_vendor/dependency_groups/__main__.py65
-rw-r--r--contrib/python/pip/pip/_vendor/dependency_groups/_implementation.py213
-rw-r--r--contrib/python/pip/pip/_vendor/dependency_groups/_lint_dependency_groups.py59
-rw-r--r--contrib/python/pip/pip/_vendor/dependency_groups/_pip_wrapper.py62
-rw-r--r--contrib/python/pip/pip/_vendor/dependency_groups/_toml_compat.py9
-rw-r--r--contrib/python/pip/pip/_vendor/dependency_groups/py.typed (renamed from contrib/python/pip/pip/_vendor/resolvelib/compat/__init__.py)0
-rw-r--r--contrib/python/pip/pip/_vendor/packaging/__init__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/packaging/_elffile.py3
-rw-r--r--contrib/python/pip/pip/_vendor/packaging/_manylinux.py3
-rw-r--r--contrib/python/pip/pip/_vendor/packaging/_parser.py3
-rw-r--r--contrib/python/pip/pip/_vendor/packaging/_tokenizer.py9
-rw-r--r--contrib/python/pip/pip/_vendor/packaging/licenses/__init__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/packaging/markers.py75
-rw-r--r--contrib/python/pip/pip/_vendor/packaging/metadata.py3
-rw-r--r--contrib/python/pip/pip/_vendor/packaging/specifiers.py3
-rw-r--r--contrib/python/pip/pip/_vendor/packaging/tags.py39
-rw-r--r--contrib/python/pip/pip/_vendor/platformdirs/__init__.py40
-rw-r--r--contrib/python/pip/pip/_vendor/platformdirs/android.py6
-rw-r--r--contrib/python/pip/pip/_vendor/platformdirs/api.py5
-rw-r--r--contrib/python/pip/pip/_vendor/platformdirs/unix.py5
-rw-r--r--contrib/python/pip/pip/_vendor/platformdirs/version.py13
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/__init__.py4
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/__main__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/cmdline.py668
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/console.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/filter.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/filters/__init__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatter.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/__init__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/bbcode.py108
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/groff.py170
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/html.py987
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/img.py685
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/irc.py154
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/latex.py518
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/other.py160
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/pangomarkup.py83
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/rtf.py349
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/svg.py185
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/terminal.py127
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/formatters/terminal256.py338
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/lexer.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/lexers/__init__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/lexers/_mapping.py19
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/lexers/python.py51
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/modeline.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/plugin.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/regexopt.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/scanner.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/sphinxext.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/style.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/styles/__init__.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/token.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/unistring.py2
-rw-r--r--contrib/python/pip/pip/_vendor/pygments/util.py2
-rw-r--r--contrib/python/pip/pip/_vendor/resolvelib/__init__.py5
-rw-r--r--contrib/python/pip/pip/_vendor/resolvelib/compat/collections_abc.py6
-rw-r--r--contrib/python/pip/pip/_vendor/resolvelib/providers.py143
-rw-r--r--contrib/python/pip/pip/_vendor/resolvelib/reporters.py30
-rw-r--r--contrib/python/pip/pip/_vendor/resolvelib/resolvers/__init__.py27
-rw-r--r--contrib/python/pip/pip/_vendor/resolvelib/resolvers/abstract.py47
-rw-r--r--contrib/python/pip/pip/_vendor/resolvelib/resolvers/criterion.py48
-rw-r--r--contrib/python/pip/pip/_vendor/resolvelib/resolvers/exceptions.py57
-rw-r--r--contrib/python/pip/pip/_vendor/resolvelib/resolvers/resolution.py (renamed from contrib/python/pip/pip/_vendor/resolvelib/resolvers.py)428
-rw-r--r--contrib/python/pip/pip/_vendor/resolvelib/structs.py147
-rw-r--r--contrib/python/pip/pip/_vendor/rich/console.py28
-rw-r--r--contrib/python/pip/pip/_vendor/rich/default_styles.py4
-rw-r--r--contrib/python/pip/pip/_vendor/rich/diagnose.py13
-rw-r--r--contrib/python/pip/pip/_vendor/rich/panel.py2
-rw-r--r--contrib/python/pip/pip/_vendor/rich/style.py2
-rw-r--r--contrib/python/pip/pip/_vendor/rich/table.py1
-rw-r--r--contrib/python/pip/pip/_vendor/rich/traceback.py171
-rw-r--r--contrib/python/pip/pip/_vendor/tomli_w/__init__.py4
-rw-r--r--contrib/python/pip/pip/_vendor/tomli_w/_writer.py229
-rw-r--r--contrib/python/pip/pip/_vendor/tomli_w/py.typed1
-rw-r--r--contrib/python/pip/pip/_vendor/truststore/__init__.py4
-rw-r--r--contrib/python/pip/pip/_vendor/truststore/_api.py19
-rw-r--r--contrib/python/pip/pip/_vendor/typing_extensions.py1111
-rw-r--r--contrib/python/pip/pip/_vendor/vendor.txt20
-rw-r--r--contrib/python/pip/ya.make37
161 files changed, 4217 insertions, 5870 deletions
diff --git a/contrib/python/pip/.dist-info/METADATA b/contrib/python/pip/.dist-info/METADATA
index 3315c063574..0b953fccda6 100644
--- a/contrib/python/pip/.dist-info/METADATA
+++ b/contrib/python/pip/.dist-info/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
Name: pip
-Version: 25.0.1
+Version: 25.1
Summary: The PyPA recommended tool for installing Python packages.
Author-email: The pip developers <distutils-sig@python.org>
License: MIT
@@ -15,7 +15,6 @@ Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
@@ -23,10 +22,11 @@ Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-Python: >=3.8
+Requires-Python: >=3.9
Description-Content-Type: text/x-rst
License-File: LICENSE.txt
License-File: AUTHORS.txt
+Dynamic: license-file
pip - The Python Package Installer
==================================
diff --git a/contrib/python/pip/AUTHORS.txt b/contrib/python/pip/AUTHORS.txt
index f42daec02e2..08441a0a294 100644
--- a/contrib/python/pip/AUTHORS.txt
+++ b/contrib/python/pip/AUTHORS.txt
@@ -7,6 +7,7 @@ abs51295
AceGentile
Adam Chainz
Adam Tse
+Adam Turner
Adam Wentz
admin
Adolfo Ochagavía
@@ -30,6 +31,7 @@ Alex Hedges
Alex Loosley
Alex Morega
Alex Stachowiak
+Alexander Regueiro
Alexander Shtyrov
Alexandre Conrad
Alexey Popravka
@@ -106,6 +108,7 @@ Bogdan Opanchuk
BorisZZZ
Brad Erickson
Bradley Ayers
+Bradley Reynolds
Branch Vincent
Brandon L. Reiss
Brandt Bucher
@@ -221,6 +224,7 @@ Denise Yu
dependabot[bot]
derwolfe
Desetude
+developer
Devesh Kumar Singh
devsagul
Diego Caraballo
@@ -284,6 +288,7 @@ Florian Rathgeber
Francesco
Francesco Montesano
Fredrik Orderud
+Fredrik Roubert
Frost Ming
Gabriel Curio
Gabriel de Perthuis
@@ -291,6 +296,7 @@ Garry Polley
gavin
gdanielson
Geoffrey Sneddon
+George Margaritis
George Song
Georgi Valkov
Georgy Pchelkin
@@ -342,11 +348,13 @@ Inada Naoki
Ionel Cristian Mărieș
Ionel Maries Cristian
Itamar Turner-Trauring
+iTrooz
Ivan Pozdeev
J. Nick Koston
Jacob Kim
Jacob Walls
Jaime Sanz
+Jake Lishman
jakirkham
Jakub Kuczys
Jakub Stasiak
@@ -383,8 +391,10 @@ Jim Garrison
Jinzhe Zeng
Jiun Bae
Jivan Amara
+Joa
Joe Bylund
Joe Michelini
+Johannes Altmanninger
John Paton
John Sirois
John T. Wodder II
@@ -438,6 +448,7 @@ Klaas van Schelven
KOLANICH
konstin
kpinc
+Krishan Bhasin
Krishna Oza
Kumar McMillan
Kuntal Majumder
@@ -467,6 +478,7 @@ luojiebin
luz.paz
László Kiss Kollár
M00nL1ght
+Malcolm Smith
Marc Abramowitz
Marc Tamlyn
Marcus Smith
@@ -507,6 +519,7 @@ Maxim Kurnikov
Maxime Rouyrre
mayeut
mbaluna
+Md Sujauddin Sekh
mdebi
memoselyk
meowmeowcat
@@ -558,10 +571,12 @@ Noah
Noah Gorny
Nowell Strite
NtaleGrey
+nucccc
nvdv
OBITORASU
Ofek Lev
ofrinevo
+Oleg Burnaev
Oliver Freund
Oliver Jeeves
Oliver Mannion
diff --git a/contrib/python/pip/pip/__init__.py b/contrib/python/pip/pip/__init__.py
index d628f93ee21..52aefb210ac 100644
--- a/contrib/python/pip/pip/__init__.py
+++ b/contrib/python/pip/pip/__init__.py
@@ -1,6 +1,6 @@
from typing import List, Optional
-__version__ = "25.0.1"
+__version__ = "25.1"
def main(args: Optional[List[str]] = None) -> int:
diff --git a/contrib/python/pip/pip/__pip-runner__.py b/contrib/python/pip/pip/__pip-runner__.py
index c633787fced..d6be157831a 100644
--- a/contrib/python/pip/pip/__pip-runner__.py
+++ b/contrib/python/pip/pip/__pip-runner__.py
@@ -9,7 +9,7 @@ an import statement.
import sys
# Copied from pyproject.toml
-PYTHON_REQUIRES = (3, 8)
+PYTHON_REQUIRES = (3, 9)
def version_str(version): # type: ignore
diff --git a/contrib/python/pip/pip/_internal/build_env.py b/contrib/python/pip/pip/_internal/build_env.py
index e8d1aca0d6a..22c7476702b 100644
--- a/contrib/python/pip/pip/_internal/build_env.py
+++ b/contrib/python/pip/pip/_internal/build_env.py
@@ -1,5 +1,4 @@
-"""Build Environment used for isolation during sdist building
-"""
+"""Build Environment used for isolation during sdist building"""
import logging
import os
@@ -241,6 +240,10 @@ class BuildEnvironment:
prefix.path,
"--no-warn-script-location",
"--disable-pip-version-check",
+ # As the build environment is ephemeral, it's wasteful to
+ # pre-compile everything, especially as not every Python
+ # module will be used/compiled in most cases.
+ "--no-compile",
# The prefix specified two lines above, thus
# target from config file or env var should be ignored
"--target",
diff --git a/contrib/python/pip/pip/_internal/cache.py b/contrib/python/pip/pip/_internal/cache.py
index 6b4512672db..97d917193d3 100644
--- a/contrib/python/pip/pip/_internal/cache.py
+++ b/contrib/python/pip/pip/_internal/cache.py
@@ -1,5 +1,4 @@
-"""Cache Management
-"""
+"""Cache Management"""
import hashlib
import json
diff --git a/contrib/python/pip/pip/_internal/cli/__init__.py b/contrib/python/pip/pip/_internal/cli/__init__.py
index e589bb917e2..5fcddf5d81e 100644
--- a/contrib/python/pip/pip/_internal/cli/__init__.py
+++ b/contrib/python/pip/pip/_internal/cli/__init__.py
@@ -1,4 +1,3 @@
-"""Subpackage containing all of pip's command line interface related code
-"""
+"""Subpackage containing all of pip's command line interface related code"""
# This file intentionally does not import submodules
diff --git a/contrib/python/pip/pip/_internal/cli/autocompletion.py b/contrib/python/pip/pip/_internal/cli/autocompletion.py
index f3f70ac8553..4fa461293c7 100644
--- a/contrib/python/pip/pip/_internal/cli/autocompletion.py
+++ b/contrib/python/pip/pip/_internal/cli/autocompletion.py
@@ -1,5 +1,4 @@
-"""Logic that powers autocompletion installed by ``pip completion``.
-"""
+"""Logic that powers autocompletion installed by ``pip completion``."""
import optparse
import os
diff --git a/contrib/python/pip/pip/_internal/cli/base_command.py b/contrib/python/pip/pip/_internal/cli/base_command.py
index 362f84b6b0b..1d71d67e7a4 100644
--- a/contrib/python/pip/pip/_internal/cli/base_command.py
+++ b/contrib/python/pip/pip/_internal/cli/base_command.py
@@ -29,7 +29,6 @@ from pip._internal.exceptions import (
NetworkConnectionError,
PreviousBuildDirError,
)
-from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
from pip._internal.utils.misc import get_prog, normalize_path
@@ -172,6 +171,8 @@ class Command(CommandContextMixIn):
# Set verbosity so that it can be used elsewhere.
self.verbosity = options.verbose - options.quiet
+ if options.debug_mode:
+ self.verbosity = 2
reconfigure(no_color=options.no_color)
level_number = setup_logging(
@@ -229,12 +230,4 @@ class Command(CommandContextMixIn):
)
options.cache_dir = None
- if options.no_python_version_warning:
- deprecated(
- reason="--no-python-version-warning is deprecated.",
- replacement="to remove the flag as it's a no-op",
- gone_in="25.1",
- issue=13154,
- )
-
return self._run_wrapper(level_number, options, args)
diff --git a/contrib/python/pip/pip/_internal/cli/cmdoptions.py b/contrib/python/pip/pip/_internal/cli/cmdoptions.py
index eeb7e651b79..88392836cd9 100644
--- a/contrib/python/pip/pip/_internal/cli/cmdoptions.py
+++ b/contrib/python/pip/pip/_internal/cli/cmdoptions.py
@@ -13,6 +13,7 @@ pass on state. To be consistent, all options will follow this design.
import importlib.util
import logging
import os
+import pathlib
import textwrap
from functools import partial
from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
@@ -280,8 +281,17 @@ retries: Callable[..., Option] = partial(
dest="retries",
type="int",
default=5,
- help="Maximum number of retries each connection should attempt "
- "(default %default times).",
+ help="Maximum attempts to establish a new HTTP connection. (default: %default)",
+)
+
+resume_retries: Callable[..., Option] = partial(
+ Option,
+ "--resume-retries",
+ dest="resume_retries",
+ type="int",
+ default=0,
+ help="Maximum attempts to resume or restart an incomplete download. "
+ "(default: %default)",
)
timeout: Callable[..., Option] = partial(
@@ -733,6 +743,46 @@ no_deps: Callable[..., Option] = partial(
help="Don't install package dependencies.",
)
+
+def _handle_dependency_group(
+ option: Option, opt: str, value: str, parser: OptionParser
+) -> None:
+ """
+ Process a value provided for the --group option.
+
+ Splits on the rightmost ":", and validates that the path (if present) ends
+ in `pyproject.toml`. Defaults the path to `pyproject.toml` when one is not given.
+
+ `:` cannot appear in dependency group names, so this is a safe and simple parse.
+
+ This is an optparse.Option callback for the dependency_groups option.
+ """
+ path, sep, groupname = value.rpartition(":")
+ if not sep:
+ path = "pyproject.toml"
+ else:
+ # check for 'pyproject.toml' filenames using pathlib
+ if pathlib.PurePath(path).name != "pyproject.toml":
+ msg = "group paths use 'pyproject.toml' filenames"
+ raise_option_error(parser, option=option, msg=msg)
+
+ parser.values.dependency_groups.append((path, groupname))
+
+
+dependency_groups: Callable[..., Option] = partial(
+ Option,
+ "--group",
+ dest="dependency_groups",
+ default=[],
+ type=str,
+ action="callback",
+ callback=_handle_dependency_group,
+ metavar="[path:]group",
+ help='Install a named dependency-group from a "pyproject.toml" file. '
+ 'If a path is given, the name of the file must be "pyproject.toml". '
+ 'Defaults to using "pyproject.toml" in the current directory.',
+)
+
ignore_requires_python: Callable[..., Option] = partial(
Option,
"--ignore-requires-python",
@@ -783,9 +833,9 @@ def _handle_no_use_pep517(
"""
raise_option_error(parser, option=option, msg=msg)
- # If user doesn't wish to use pep517, we check if setuptools and wheel are installed
+ # If user doesn't wish to use pep517, we check if setuptools is installed
# and raise error if it is not.
- packages = ("setuptools", "wheel")
+ packages = ("setuptools",)
if not all(importlib.util.find_spec(package) for package in packages):
msg = (
f"It is not possible to use --no-use-pep517 "
@@ -887,6 +937,14 @@ pre: Callable[..., Option] = partial(
"pip only finds stable versions.",
)
+json: Callable[..., Option] = partial(
+ Option,
+ "--json",
+ action="store_true",
+ default=False,
+ help="Output data in a machine-readable JSON format.",
+)
+
disable_pip_version_check: Callable[..., Option] = partial(
Option,
"--disable-pip-version-check",
@@ -990,7 +1048,7 @@ no_python_version_warning: Callable[..., Option] = partial(
dest="no_python_version_warning",
action="store_true",
default=False,
- help="Silence deprecation warnings for upcoming unsupported Pythons.",
+ help=SUPPRESS_HELP, # No-op, a hold-over from the Python 2->3 transition.
)
@@ -1028,7 +1086,6 @@ use_deprecated_feature: Callable[..., Option] = partial(
help=("Enable deprecated functionality, that will be removed in the future."),
)
-
##########
# groups #
##########
@@ -1061,6 +1118,7 @@ general_group: Dict[str, Any] = {
no_python_version_warning,
use_new_feature,
use_deprecated_feature,
+ resume_retries,
],
}
diff --git a/contrib/python/pip/pip/_internal/cli/index_command.py b/contrib/python/pip/pip/_internal/cli/index_command.py
index 295108ed605..87a1e088564 100644
--- a/contrib/python/pip/pip/_internal/cli/index_command.py
+++ b/contrib/python/pip/pip/_internal/cli/index_command.py
@@ -9,6 +9,7 @@ so commands which don't always hit the network (e.g. list w/o --outdated or
import logging
import os
import sys
+from functools import lru_cache
from optparse import Values
from typing import TYPE_CHECKING, List, Optional
@@ -25,6 +26,7 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)
+@lru_cache
def _create_truststore_ssl_context() -> Optional["SSLContext"]:
if sys.version_info < (3, 10):
logger.debug("Disabling truststore because Python version isn't 3.10+")
diff --git a/contrib/python/pip/pip/_internal/cli/main.py b/contrib/python/pip/pip/_internal/cli/main.py
index 563ac79c984..377476c18c3 100644
--- a/contrib/python/pip/pip/_internal/cli/main.py
+++ b/contrib/python/pip/pip/_internal/cli/main.py
@@ -1,5 +1,4 @@
-"""Primary application entrypoint.
-"""
+"""Primary application entrypoint."""
import locale
import logging
diff --git a/contrib/python/pip/pip/_internal/cli/main_parser.py b/contrib/python/pip/pip/_internal/cli/main_parser.py
index 5ade356b9c2..c52684a81fe 100644
--- a/contrib/python/pip/pip/_internal/cli/main_parser.py
+++ b/contrib/python/pip/pip/_internal/cli/main_parser.py
@@ -1,5 +1,4 @@
-"""A single place for constructing and exposing the main parser
-"""
+"""A single place for constructing and exposing the main parser"""
import os
import subprocess
diff --git a/contrib/python/pip/pip/_internal/cli/progress_bars.py b/contrib/python/pip/pip/_internal/cli/progress_bars.py
index 3d9dde8ed88..ab9d76b252e 100644
--- a/contrib/python/pip/pip/_internal/cli/progress_bars.py
+++ b/contrib/python/pip/pip/_internal/cli/progress_bars.py
@@ -1,11 +1,12 @@
import functools
import sys
-from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
+from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple, TypeVar
from pip._vendor.rich.progress import (
BarColumn,
DownloadColumn,
FileSizeColumn,
+ MofNCompleteColumn,
Progress,
ProgressColumn,
SpinnerColumn,
@@ -16,16 +17,19 @@ from pip._vendor.rich.progress import (
)
from pip._internal.cli.spinners import RateLimiter
-from pip._internal.utils.logging import get_indentation
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.logging import get_console, get_indentation
-DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
+T = TypeVar("T")
+ProgressRenderer = Callable[[Iterable[T]], Iterator[T]]
-def _rich_progress_bar(
+def _rich_download_progress_bar(
iterable: Iterable[bytes],
*,
bar_type: str,
size: Optional[int],
+ initial_progress: Optional[int] = None,
) -> Generator[bytes, None, None]:
assert bar_type == "on", "This should only be used in the default mode."
@@ -51,22 +55,47 @@ def _rich_progress_bar(
progress = Progress(*columns, refresh_per_second=5)
task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
+ if initial_progress is not None:
+ progress.update(task_id, advance=initial_progress)
with progress:
for chunk in iterable:
yield chunk
progress.update(task_id, advance=len(chunk))
+def _rich_install_progress_bar(
+ iterable: Iterable[InstallRequirement], *, total: int
+) -> Iterator[InstallRequirement]:
+ columns = (
+ TextColumn("{task.fields[indent]}"),
+ BarColumn(),
+ MofNCompleteColumn(),
+ TextColumn("{task.description}"),
+ )
+ console = get_console()
+
+ bar = Progress(*columns, refresh_per_second=6, console=console, transient=True)
+ # Hiding the progress bar at initialization forces a refresh cycle to occur
+ # until the bar appears, avoiding very short flashes.
+ task = bar.add_task("", total=total, indent=" " * get_indentation(), visible=False)
+ with bar:
+ for req in iterable:
+ bar.update(task, description=rf"\[{req.name}]", visible=True)
+ yield req
+ bar.advance(task)
+
+
def _raw_progress_bar(
iterable: Iterable[bytes],
*,
size: Optional[int],
+ initial_progress: Optional[int] = None,
) -> Generator[bytes, None, None]:
def write_progress(current: int, total: int) -> None:
sys.stdout.write(f"Progress {current} of {total}\n")
sys.stdout.flush()
- current = 0
+ current = initial_progress or 0
total = size or 0
rate_limiter = RateLimiter(0.25)
@@ -80,15 +109,36 @@ def _raw_progress_bar(
def get_download_progress_renderer(
- *, bar_type: str, size: Optional[int] = None
-) -> DownloadProgressRenderer:
+ *, bar_type: str, size: Optional[int] = None, initial_progress: Optional[int] = None
+) -> ProgressRenderer[bytes]:
"""Get an object that can be used to render the download progress.
Returns a callable, that takes an iterable to "wrap".
"""
if bar_type == "on":
- return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
+ return functools.partial(
+ _rich_download_progress_bar,
+ bar_type=bar_type,
+ size=size,
+ initial_progress=initial_progress,
+ )
elif bar_type == "raw":
- return functools.partial(_raw_progress_bar, size=size)
+ return functools.partial(
+ _raw_progress_bar,
+ size=size,
+ initial_progress=initial_progress,
+ )
else:
return iter # no-op, when passed an iterator
+
+
+def get_install_progress_renderer(
+ *, bar_type: str, total: int
+) -> ProgressRenderer[InstallRequirement]:
+ """Get an object that can be used to render the install progress.
+ Returns a callable, that takes an iterable to "wrap".
+ """
+ if bar_type == "on":
+ return functools.partial(_rich_install_progress_bar, total=total)
+ else:
+ return iter
diff --git a/contrib/python/pip/pip/_internal/cli/req_command.py b/contrib/python/pip/pip/_internal/cli/req_command.py
index 92900f94ff4..d9b51427055 100644
--- a/contrib/python/pip/pip/_internal/cli/req_command.py
+++ b/contrib/python/pip/pip/_internal/cli/req_command.py
@@ -28,6 +28,7 @@ from pip._internal.req.constructors import (
install_req_from_parsed_requirement,
install_req_from_req_string,
)
+from pip._internal.req.req_dependency_group import parse_dependency_groups
from pip._internal.req.req_file import parse_requirements
from pip._internal.req.req_install import InstallRequirement
from pip._internal.resolution.base import BaseResolver
@@ -79,6 +80,7 @@ class RequirementCommand(IndexGroupCommand):
def __init__(self, *args: Any, **kw: Any) -> None:
super().__init__(*args, **kw)
+ self.cmd_opts.add_option(cmdoptions.dependency_groups())
self.cmd_opts.add_option(cmdoptions.no_clean())
@staticmethod
@@ -142,6 +144,7 @@ class RequirementCommand(IndexGroupCommand):
lazy_wheel=lazy_wheel,
verbosity=verbosity,
legacy_resolver=legacy_resolver,
+ resume_retries=options.resume_retries,
)
@classmethod
@@ -240,6 +243,16 @@ class RequirementCommand(IndexGroupCommand):
)
requirements.append(req_to_add)
+ if options.dependency_groups:
+ for req in parse_dependency_groups(options.dependency_groups):
+ req_to_add = install_req_from_req_string(
+ req,
+ isolated=options.isolated_mode,
+ use_pep517=options.use_pep517,
+ user_supplied=True,
+ )
+ requirements.append(req_to_add)
+
for req in options.editables:
req_to_add = install_req_from_editable(
req,
@@ -272,7 +285,12 @@ class RequirementCommand(IndexGroupCommand):
if any(req.has_hash_options for req in requirements):
options.require_hashes = True
- if not (args or options.editables or options.requirements):
+ if not (
+ args
+ or options.editables
+ or options.requirements
+ or options.dependency_groups
+ ):
opts = {"name": self.name}
if options.find_links:
raise CommandError(
diff --git a/contrib/python/pip/pip/_internal/commands/__init__.py b/contrib/python/pip/pip/_internal/commands/__init__.py
index 858a4101416..bc4f216a826 100644
--- a/contrib/python/pip/pip/_internal/commands/__init__.py
+++ b/contrib/python/pip/pip/_internal/commands/__init__.py
@@ -23,6 +23,11 @@ commands_dict: Dict[str, CommandInfo] = {
"InstallCommand",
"Install packages.",
),
+ "lock": CommandInfo(
+ "pip._internal.commands.lock",
+ "LockCommand",
+ "Generate a lock file.",
+ ),
"download": CommandInfo(
"pip._internal.commands.download",
"DownloadCommand",
diff --git a/contrib/python/pip/pip/_internal/commands/completion.py b/contrib/python/pip/pip/_internal/commands/completion.py
index 9e89e279883..fe041d2951a 100644
--- a/contrib/python/pip/pip/_internal/commands/completion.py
+++ b/contrib/python/pip/pip/_internal/commands/completion.py
@@ -38,12 +38,18 @@ COMPLETION_SCRIPTS = {
""",
"fish": """
function __fish_complete_pip
- set -lx COMP_WORDS (commandline -o) ""
- set -lx COMP_CWORD ( \\
- math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
- )
+ set -lx COMP_WORDS \\
+ (commandline --current-process --tokenize --cut-at-cursor) \\
+ (commandline --current-token --cut-at-cursor)
+ set -lx COMP_CWORD (math (count $COMP_WORDS) - 1)
set -lx PIP_AUTO_COMPLETE 1
- string split \\ -- (eval $COMP_WORDS[1])
+ set -l completions
+ if string match -q '2.*' $version
+ set completions (eval $COMP_WORDS[1])
+ else
+ set completions ($COMP_WORDS[1])
+ end
+ string split \\ -- $completions
end
complete -fa "(__fish_complete_pip)" -c {prog}
""",
diff --git a/contrib/python/pip/pip/_internal/commands/freeze.py b/contrib/python/pip/pip/_internal/commands/freeze.py
index 885fdfeb83b..f8de335b283 100644
--- a/contrib/python/pip/pip/_internal/commands/freeze.py
+++ b/contrib/python/pip/pip/_internal/commands/freeze.py
@@ -32,7 +32,6 @@ class FreezeCommand(Command):
ignore_require_venv = True
usage = """
%prog [options]"""
- log_streams = ("ext://sys.stderr", "ext://sys.stderr")
def add_options(self) -> None:
self.cmd_opts.add_option(
diff --git a/contrib/python/pip/pip/_internal/commands/index.py b/contrib/python/pip/pip/_internal/commands/index.py
index 2e2661bba71..e8714a7270c 100644
--- a/contrib/python/pip/pip/_internal/commands/index.py
+++ b/contrib/python/pip/pip/_internal/commands/index.py
@@ -1,3 +1,4 @@
+import json
import logging
from optparse import Values
from typing import Any, Iterable, List, Optional
@@ -7,7 +8,10 @@ from pip._vendor.packaging.version import Version
from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import IndexGroupCommand
from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.commands.search import print_dist_installation_info
+from pip._internal.commands.search import (
+ get_installed_distribution,
+ print_dist_installation_info,
+)
from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
@@ -34,6 +38,7 @@ class IndexCommand(IndexGroupCommand):
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
self.cmd_opts.add_option(cmdoptions.pre())
+ self.cmd_opts.add_option(cmdoptions.json())
self.cmd_opts.add_option(cmdoptions.no_binary())
self.cmd_opts.add_option(cmdoptions.only_binary())
@@ -50,12 +55,6 @@ class IndexCommand(IndexGroupCommand):
"versions": self.get_available_package_versions,
}
- logger.warning(
- "pip index is currently an experimental command. "
- "It may be removed/changed in a future release "
- "without prior warning."
- )
-
# Determine action
if not args or args[0] not in handlers:
logger.error(
@@ -134,6 +133,21 @@ class IndexCommand(IndexGroupCommand):
formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
latest = formatted_versions[0]
- write_output(f"{query} ({latest})")
- write_output("Available versions: {}".format(", ".join(formatted_versions)))
- print_dist_installation_info(query, latest)
+ dist = get_installed_distribution(query)
+
+ if options.json:
+ structured_output = {
+ "name": query,
+ "versions": formatted_versions,
+ "latest": latest,
+ }
+
+ if dist is not None:
+ structured_output["installed_version"] = str(dist.version)
+
+ write_output(json.dumps(structured_output))
+
+ else:
+ write_output(f"{query} ({latest})")
+ write_output("Available versions: {}".format(", ".join(formatted_versions)))
+ print_dist_installation_info(latest, dist)
diff --git a/contrib/python/pip/pip/_internal/commands/install.py b/contrib/python/pip/pip/_internal/commands/install.py
index 232a34a6d3e..49b8fd78bef 100644
--- a/contrib/python/pip/pip/_internal/commands/install.py
+++ b/contrib/python/pip/pip/_internal/commands/install.py
@@ -8,6 +8,7 @@ from optparse import SUPPRESS_HELP, Values
from typing import List, Optional
from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.requests.exceptions import InvalidProxyURL
from pip._vendor.rich import print_json
# Eagerly import self_outdated_check to avoid crashes. Otherwise,
@@ -464,6 +465,7 @@ class InstallCommand(RequirementCommand):
warn_script_location=warn_script_location,
use_user_site=options.use_user_site,
pycompile=options.compile,
+ progress_bar=options.progress_bar,
)
lib_locations = get_lib_location_guesses(
@@ -765,6 +767,13 @@ def create_os_error_message(
parts.append(permissions_part)
parts.append(".\n")
+ # Suggest to check "pip config debug" in case of invalid proxy
+ if type(error) is InvalidProxyURL:
+ parts.append(
+ 'Consider checking your local proxy configuration with "pip config debug"'
+ )
+ parts.append(".\n")
+
# Suggest the user to enable Long Paths if path length is
# more than 260
if (
diff --git a/contrib/python/pip/pip/_internal/commands/list.py b/contrib/python/pip/pip/_internal/commands/list.py
index 84943702410..b03085070d2 100644
--- a/contrib/python/pip/pip/_internal/commands/list.py
+++ b/contrib/python/pip/pip/_internal/commands/list.py
@@ -1,5 +1,6 @@
import json
import logging
+from email.parser import Parser
from optparse import Values
from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast
@@ -125,7 +126,7 @@ class ListCommand(IndexGroupCommand):
"--include-editable",
action="store_true",
dest="include_editable",
- help="Include editable package from output.",
+ help="Include editable package in output.",
default=True,
)
self.cmd_opts.add_option(cmdoptions.list_exclude())
@@ -323,17 +324,29 @@ def format_for_columns(
if running_outdated:
header.extend(["Latest", "Type"])
- has_editables = any(x.editable for x in pkgs)
- if has_editables:
- header.append("Editable project location")
+ def wheel_build_tag(dist: BaseDistribution) -> Optional[str]:
+ try:
+ wheel_file = dist.read_text("WHEEL")
+ except FileNotFoundError:
+ return None
+ return Parser().parsestr(wheel_file).get("Build")
+
+ build_tags = [wheel_build_tag(p) for p in pkgs]
+ has_build_tags = any(build_tags)
+ if has_build_tags:
+ header.append("Build")
if options.verbose >= 1:
header.append("Location")
if options.verbose >= 1:
header.append("Installer")
+ has_editables = any(x.editable for x in pkgs)
+ if has_editables:
+ header.append("Editable project location")
+
data = []
- for proj in pkgs:
+ for i, proj in enumerate(pkgs):
# if we're working on the 'outdated' list, separate out the
# latest_version and type
row = [proj.raw_name, proj.raw_version]
@@ -342,6 +355,9 @@ def format_for_columns(
row.append(str(proj.latest_version))
row.append(proj.latest_filetype)
+ if has_build_tags:
+ row.append(build_tags[i] or "")
+
if has_editables:
row.append(proj.editable_project_location or "")
diff --git a/contrib/python/pip/pip/_internal/commands/lock.py b/contrib/python/pip/pip/_internal/commands/lock.py
new file mode 100644
index 00000000000..39f27e8677b
--- /dev/null
+++ b/contrib/python/pip/pip/_internal/commands/lock.py
@@ -0,0 +1,171 @@
+import sys
+from optparse import Values
+from pathlib import Path
+from typing import List
+
+from pip._internal.cache import WheelCache
+from pip._internal.cli import cmdoptions
+from pip._internal.cli.req_command import (
+ RequirementCommand,
+ with_cleanup,
+)
+from pip._internal.cli.status_codes import SUCCESS
+from pip._internal.models.pylock import Pylock, is_valid_pylock_file_name
+from pip._internal.operations.build.build_tracker import get_build_tracker
+from pip._internal.req.req_install import (
+ check_legacy_setup_py_options,
+)
+from pip._internal.utils.logging import getLogger
+from pip._internal.utils.misc import (
+ get_pip_version,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = getLogger(__name__)
+
+
+class LockCommand(RequirementCommand):
+ """
+ EXPERIMENTAL - Lock packages and their dependencies from:
+
+ - PyPI (and other indexes) using requirement specifiers.
+ - VCS project urls.
+ - Local project directories.
+ - Local or remote source archives.
+
+ pip also supports locking from "requirements files", which provide an easy
+ way to specify a whole environment to be installed.
+
+ The generated lock file is only guaranteed to be valid for the current
+ python version and platform.
+ """
+
+ usage = """
+ %prog [options] [-e] <local project path> ...
+ %prog [options] <requirement specifier> [package-index-options] ...
+ %prog [options] -r <requirements file> [package-index-options] ...
+ %prog [options] <archive url/path> ..."""
+
+ def add_options(self) -> None:
+ self.cmd_opts.add_option(
+ cmdoptions.PipOption(
+ "--output",
+ "-o",
+ dest="output_file",
+ metavar="path",
+ type="path",
+ default="pylock.toml",
+ help="Lock file name (default=pylock.toml). Use - for stdout.",
+ )
+ )
+ self.cmd_opts.add_option(cmdoptions.requirements())
+ self.cmd_opts.add_option(cmdoptions.constraints())
+ self.cmd_opts.add_option(cmdoptions.no_deps())
+ self.cmd_opts.add_option(cmdoptions.pre())
+
+ self.cmd_opts.add_option(cmdoptions.editable())
+
+ self.cmd_opts.add_option(cmdoptions.src())
+
+ self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
+ self.cmd_opts.add_option(cmdoptions.no_build_isolation())
+ self.cmd_opts.add_option(cmdoptions.use_pep517())
+ self.cmd_opts.add_option(cmdoptions.no_use_pep517())
+ self.cmd_opts.add_option(cmdoptions.check_build_deps())
+
+ self.cmd_opts.add_option(cmdoptions.config_settings())
+
+ self.cmd_opts.add_option(cmdoptions.no_binary())
+ self.cmd_opts.add_option(cmdoptions.only_binary())
+ self.cmd_opts.add_option(cmdoptions.prefer_binary())
+ self.cmd_opts.add_option(cmdoptions.require_hashes())
+ self.cmd_opts.add_option(cmdoptions.progress_bar())
+
+ index_opts = cmdoptions.make_option_group(
+ cmdoptions.index_group,
+ self.parser,
+ )
+
+ self.parser.insert_option_group(0, index_opts)
+ self.parser.insert_option_group(0, self.cmd_opts)
+
+ @with_cleanup
+ def run(self, options: Values, args: List[str]) -> int:
+ logger.verbose("Using %s", get_pip_version())
+
+ logger.warning(
+ "pip lock is currently an experimental command. "
+ "It may be removed/changed in a future release "
+ "without prior warning."
+ )
+
+ session = self.get_default_session(options)
+
+ finder = self._build_package_finder(
+ options=options,
+ session=session,
+ ignore_requires_python=options.ignore_requires_python,
+ )
+ build_tracker = self.enter_context(get_build_tracker())
+
+ directory = TempDirectory(
+ delete=not options.no_clean,
+ kind="install",
+ globally_managed=True,
+ )
+
+ reqs = self.get_requirements(args, options, finder, session)
+ check_legacy_setup_py_options(options, reqs)
+
+ wheel_cache = WheelCache(options.cache_dir)
+
+ # Only when installing is it permitted to use PEP 660.
+ # In other circumstances (pip wheel, pip download) we generate
+ # regular (i.e. non editable) metadata and wheels.
+ for req in reqs:
+ req.permit_editable_wheels = True
+
+ preparer = self.make_requirement_preparer(
+ temp_build_dir=directory,
+ options=options,
+ build_tracker=build_tracker,
+ session=session,
+ finder=finder,
+ use_user_site=False,
+ verbosity=self.verbosity,
+ )
+ resolver = self.make_resolver(
+ preparer=preparer,
+ finder=finder,
+ options=options,
+ wheel_cache=wheel_cache,
+ use_user_site=False,
+ ignore_installed=True,
+ ignore_requires_python=options.ignore_requires_python,
+ upgrade_strategy="to-satisfy-only",
+ use_pep517=options.use_pep517,
+ )
+
+ self.trace_basic_info(finder)
+
+ requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
+
+ if options.output_file == "-":
+ base_dir = Path.cwd()
+ else:
+ output_file_path = Path(options.output_file)
+ if not is_valid_pylock_file_name(output_file_path):
+ logger.warning(
+ "%s is not a valid lock file name.",
+ output_file_path,
+ )
+ base_dir = output_file_path.parent
+ pylock_toml = Pylock.from_install_requirements(
+ requirement_set.requirements.values(), base_dir=base_dir
+ ).as_toml()
+ if options.output_file == "-":
+ sys.stdout.write(pylock_toml)
+ else:
+ output_file_path.write_text(pylock_toml, encoding="utf-8")
+
+ return SUCCESS
diff --git a/contrib/python/pip/pip/_internal/commands/search.py b/contrib/python/pip/pip/_internal/commands/search.py
index 74b8d656b47..c58c2b3b11e 100644
--- a/contrib/python/pip/pip/_internal/commands/search.py
+++ b/contrib/python/pip/pip/_internal/commands/search.py
@@ -5,7 +5,7 @@ import textwrap
import xmlrpc.client
from collections import OrderedDict
from optparse import Values
-from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict
+from typing import Dict, List, Optional, TypedDict
from pip._vendor.packaging.version import parse as parse_version
@@ -14,17 +14,17 @@ from pip._internal.cli.req_command import SessionCommandMixin
from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
from pip._internal.exceptions import CommandError
from pip._internal.metadata import get_default_environment
+from pip._internal.metadata.base import BaseDistribution
from pip._internal.models.index import PyPI
from pip._internal.network.xmlrpc import PipXmlrpcTransport
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import write_output
-if TYPE_CHECKING:
- class TransformedHit(TypedDict):
- name: str
- summary: str
- versions: List[str]
+class TransformedHit(TypedDict):
+ name: str
+ summary: str
+ versions: List[str]
logger = logging.getLogger(__name__)
@@ -111,9 +111,7 @@ def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
return list(packages.values())
-def print_dist_installation_info(name: str, latest: str) -> None:
- env = get_default_environment()
- dist = env.get_distribution(name)
+def print_dist_installation_info(latest: str, dist: Optional[BaseDistribution]) -> None:
if dist is not None:
with indent_log():
if dist.version == latest:
@@ -130,6 +128,11 @@ def print_dist_installation_info(name: str, latest: str) -> None:
write_output("LATEST: %s", latest)
+def get_installed_distribution(name: str) -> Optional[BaseDistribution]:
+ env = get_default_environment()
+ return env.get_distribution(name)
+
+
def print_results(
hits: List["TransformedHit"],
name_column_width: Optional[int] = None,
@@ -163,7 +166,8 @@ def print_results(
line = f"{name_latest:{name_column_width}} - {summary}"
try:
write_output(line)
- print_dist_installation_info(name, latest)
+ dist = get_installed_distribution(name)
+ print_dist_installation_info(latest, dist)
except UnicodeEncodeError:
pass
diff --git a/contrib/python/pip/pip/_internal/commands/show.py b/contrib/python/pip/pip/_internal/commands/show.py
index b47500cf8b4..7aaf6f4b6ca 100644
--- a/contrib/python/pip/pip/_internal/commands/show.py
+++ b/contrib/python/pip/pip/_internal/commands/show.py
@@ -1,4 +1,5 @@
import logging
+import string
from optparse import Values
from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
@@ -13,6 +14,13 @@ from pip._internal.utils.misc import write_output
logger = logging.getLogger(__name__)
+def normalize_project_url_label(label: str) -> str:
+ # This logic is from PEP 753 (Well-known Project URLs in Metadata).
+ chars_to_remove = string.punctuation + string.whitespace
+ removal_map = str.maketrans("", "", chars_to_remove)
+ return label.translate(removal_map).lower()
+
+
class ShowCommand(Command):
"""
Show information about one or more installed packages.
@@ -135,13 +143,9 @@ def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None
if not homepage:
# It's common that there is a "homepage" Project-URL, but Home-page
# remains unset (especially as PEP 621 doesn't surface the field).
- #
- # This logic was taken from PyPI's codebase.
for url in project_urls:
url_label, url = url.split(",", maxsplit=1)
- normalized_label = (
- url_label.casefold().replace("-", "").replace("_", "").strip()
- )
+ normalized_label = normalize_project_url_label(url_label)
if normalized_label == "homepage":
homepage = url.strip()
break
diff --git a/contrib/python/pip/pip/_internal/commands/wheel.py b/contrib/python/pip/pip/_internal/commands/wheel.py
index 278719f4e0c..b380754bc89 100644
--- a/contrib/python/pip/pip/_internal/commands/wheel.py
+++ b/contrib/python/pip/pip/_internal/commands/wheel.py
@@ -16,7 +16,7 @@ from pip._internal.req.req_install import (
)
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.wheel_builder import build, should_build_for_wheel_command
+from pip._internal.wheel_builder import build
logger = logging.getLogger(__name__)
@@ -150,7 +150,7 @@ class WheelCommand(RequirementCommand):
for req in requirement_set.requirements.values():
if req.is_wheel:
preparer.save_linked_requirement(req)
- elif should_build_for_wheel_command(req):
+ else:
reqs_to_build.append(req)
preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
diff --git a/contrib/python/pip/pip/_internal/exceptions.py b/contrib/python/pip/pip/_internal/exceptions.py
index 45a876a850d..4fe4aadef2f 100644
--- a/contrib/python/pip/pip/_internal/exceptions.py
+++ b/contrib/python/pip/pip/_internal/exceptions.py
@@ -27,6 +27,7 @@ if TYPE_CHECKING:
from pip._vendor.requests.models import Request, Response
from pip._internal.metadata import BaseDistribution
+ from pip._internal.models.link import Link
from pip._internal.req.req_install import InstallRequirement
logger = logging.getLogger(__name__)
@@ -807,3 +808,55 @@ class InvalidInstalledPackage(DiagnosticPipError):
),
hint_stmt="To proceed this package must be uninstalled.",
)
+
+
+class IncompleteDownloadError(DiagnosticPipError):
+ """Raised when the downloader receives fewer bytes than advertised
+ in the Content-Length header."""
+
+ reference = "incomplete-download"
+
+ def __init__(
+ self, link: "Link", received: int, expected: int, *, retries: int
+ ) -> None:
+ # Dodge circular import.
+ from pip._internal.utils.misc import format_size
+
+ download_status = f"{format_size(received)}/{format_size(expected)}"
+ if retries:
+ retry_status = f"after {retries} attempts "
+ hint = "Use --resume-retries to configure resume attempt limit."
+ else:
+ retry_status = ""
+ hint = "Consider using --resume-retries to enable download resumption."
+ message = Text(
+ f"Download failed {retry_status}because not enough bytes "
+ f"were received ({download_status})"
+ )
+
+ super().__init__(
+ message=message,
+ context=f"URL: {link.redacted_url}",
+ hint_stmt=hint,
+ note_stmt="This is an issue with network connectivity, not pip.",
+ )
+
+
+class ResolutionTooDeepError(DiagnosticPipError):
+ """Raised when the dependency resolver exceeds the maximum recursion depth."""
+
+ reference = "resolution-too-deep"
+
+ def __init__(self) -> None:
+ super().__init__(
+ message="Dependency resolution exceeded maximum depth",
+ context=(
+ "Pip cannot resolve the current dependencies as the dependency graph "
+ "is too complex for pip to solve efficiently."
+ ),
+ hint_stmt=(
+ "Try adding lower bounds to constrain your dependencies, "
+ "for example: 'package>=2.0.0' instead of just 'package'. "
+ ),
+ link="https://pip.pypa.io/en/stable/topics/dependency-resolution/#handling-resolution-too-deep-errors",
+ )
diff --git a/contrib/python/pip/pip/_internal/index/__init__.py b/contrib/python/pip/pip/_internal/index/__init__.py
index 7a17b7b3b6a..197dd757de9 100644
--- a/contrib/python/pip/pip/_internal/index/__init__.py
+++ b/contrib/python/pip/pip/_internal/index/__init__.py
@@ -1,2 +1 @@
-"""Index interaction code
-"""
+"""Index interaction code"""
diff --git a/contrib/python/pip/pip/_internal/index/package_finder.py b/contrib/python/pip/pip/_internal/index/package_finder.py
index 85628ee5d7a..6971e959d32 100644
--- a/contrib/python/pip/pip/_internal/index/package_finder.py
+++ b/contrib/python/pip/pip/_internal/index/package_finder.py
@@ -6,7 +6,17 @@ import itertools
import logging
import re
from dataclasses import dataclass
-from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union
+from typing import (
+ TYPE_CHECKING,
+ Dict,
+ FrozenSet,
+ Iterable,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ Union,
+)
from pip._vendor.packaging import specifiers
from pip._vendor.packaging.tags import Tag
@@ -514,11 +524,7 @@ class CandidateEvaluator:
)
if self._prefer_binary:
binary_preference = 1
- if wheel.build_tag is not None:
- match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
- assert match is not None, "guaranteed by filename validation"
- build_tag_groups = match.groups()
- build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
+ build_tag = wheel.build_tag
else: # sdist
pri = -(support_num)
has_allowed_hash = int(link.is_hash_allowed(self._hashes))
@@ -605,6 +611,13 @@ class PackageFinder:
# These are boring links that have already been logged somehow.
self._logged_links: Set[Tuple[Link, LinkType, str]] = set()
+ # Cache of the result of finding candidates
+ self._all_candidates: Dict[str, List[InstallationCandidate]] = {}
+ self._best_candidates: Dict[
+ Tuple[str, Optional[specifiers.BaseSpecifier], Optional[Hashes]],
+ BestCandidateResult,
+ ] = {}
+
# Don't include an allow_yanked default value to make sure each call
# site considers whether yanked releases are allowed. This also causes
# that decision to be made explicit in the calling code, which helps
@@ -736,11 +749,6 @@ class PackageFinder:
return no_eggs + eggs
def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
- # This is a hot method so don't waste time hashing links unless we're
- # actually going to log 'em.
- if not logger.isEnabledFor(logging.DEBUG):
- return
-
entry = (link, result, detail)
if entry not in self._logged_links:
# Put the link at the end so the reason is more visible and because
@@ -804,7 +812,6 @@ class PackageFinder:
return package_links
- @functools.lru_cache(maxsize=None)
def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
"""Find all available InstallationCandidate for project_name
@@ -814,6 +821,9 @@ class PackageFinder:
See LinkEvaluator.evaluate_link() for details on which files
are accepted.
"""
+ if project_name in self._all_candidates:
+ return self._all_candidates[project_name]
+
link_evaluator = self.make_link_evaluator(project_name)
collected_sources = self._link_collector.collect_sources(
@@ -855,7 +865,9 @@ class PackageFinder:
logger.debug("Local files found: %s", ", ".join(paths))
# This is an intentional priority ordering
- return file_candidates + page_candidates
+ self._all_candidates[project_name] = file_candidates + page_candidates
+
+ return self._all_candidates[project_name]
def make_candidate_evaluator(
self,
@@ -874,7 +886,6 @@ class PackageFinder:
hashes=hashes,
)
- @functools.lru_cache(maxsize=None)
def find_best_candidate(
self,
project_name: str,
@@ -889,13 +900,20 @@ class PackageFinder:
:return: A `BestCandidateResult` instance.
"""
+ if (project_name, specifier, hashes) in self._best_candidates:
+ return self._best_candidates[project_name, specifier, hashes]
+
candidates = self.find_all_candidates(project_name)
candidate_evaluator = self.make_candidate_evaluator(
project_name=project_name,
specifier=specifier,
hashes=hashes,
)
- return candidate_evaluator.compute_best_candidate(candidates)
+ self._best_candidates[project_name, specifier, hashes] = (
+ candidate_evaluator.compute_best_candidate(candidates)
+ )
+
+ return self._best_candidates[project_name, specifier, hashes]
def find_requirement(
self, req: InstallRequirement, upgrade: bool
@@ -906,9 +924,12 @@ class PackageFinder:
Returns a InstallationCandidate if found,
Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
"""
+ name = req.name
+ assert name is not None, "find_requirement() called with no name"
+
hashes = req.hashes(trust_internet=False)
best_candidate_result = self.find_best_candidate(
- req.name,
+ name,
specifier=req.specifier,
hashes=hashes,
)
diff --git a/contrib/python/pip/pip/_internal/locations/__init__.py b/contrib/python/pip/pip/_internal/locations/__init__.py
index 32382be7fe5..dfb5dd36066 100644
--- a/contrib/python/pip/pip/_internal/locations/__init__.py
+++ b/contrib/python/pip/pip/_internal/locations/__init__.py
@@ -4,7 +4,7 @@ import os
import pathlib
import sys
import sysconfig
-from typing import Any, Dict, Generator, Optional, Tuple
+from typing import Any, Dict, Optional
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
from pip._internal.utils.compat import WINDOWS
@@ -174,22 +174,6 @@ def _looks_like_msys2_mingw_scheme() -> bool:
)
-def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
- ldversion = sysconfig.get_config_var("LDVERSION")
- abiflags = getattr(sys, "abiflags", None)
-
- # LDVERSION does not end with sys.abiflags. Just return the path unchanged.
- if not ldversion or not abiflags or not ldversion.endswith(abiflags):
- yield from parts
- return
-
- # Strip sys.abiflags from LDVERSION-based path components.
- for part in parts:
- if part.endswith(ldversion):
- part = part[: (0 - len(abiflags))]
- yield part
-
-
@functools.lru_cache(maxsize=None)
def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
issue_url = "https://github.com/pypa/pip/issues/10151"
@@ -304,7 +288,6 @@ def get_scheme(
user
and k == "platlib"
and not WINDOWS
- and sys.version_info >= (3, 9)
and _PLATLIBDIR != "lib"
and _looks_like_bpo_44860()
)
diff --git a/contrib/python/pip/pip/_internal/metadata/__init__.py b/contrib/python/pip/pip/_internal/metadata/__init__.py
index 1ea1e7fd2e5..60b62b956bd 100644
--- a/contrib/python/pip/pip/_internal/metadata/__init__.py
+++ b/contrib/python/pip/pip/_internal/metadata/__init__.py
@@ -2,17 +2,13 @@ import contextlib
import functools
import os
import sys
-from typing import TYPE_CHECKING, List, Optional, Type, cast
+from typing import List, Literal, Optional, Protocol, Type, cast
+from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.misc import strtobool
from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
-if TYPE_CHECKING:
- from typing import Literal, Protocol
-else:
- Protocol = object
-
__all__ = [
"BaseDistribution",
"BaseEnvironment",
@@ -30,25 +26,60 @@ def _should_use_importlib_metadata() -> bool:
"""Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.
By default, pip uses ``importlib.metadata`` on Python 3.11+, and
- ``pkg_resources`` otherwise. This can be overridden by a couple of ways:
+ ``pkg_resources`` otherwise. Up to Python 3.13, This can be
+ overridden by a couple of ways:
* If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it
- dictates whether ``importlib.metadata`` is used, regardless of Python
- version.
- * On Python 3.11+, Python distributors can patch ``importlib.metadata``
- to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This
- makes pip use ``pkg_resources`` (unless the user set the aforementioned
- environment variable to *True*).
+ dictates whether ``importlib.metadata`` is used, for Python <3.14.
+ * On Python 3.11, 3.12 and 3.13, Python distributors can patch
+ ``importlib.metadata`` to add a global constant
+ ``_PIP_USE_IMPORTLIB_METADATA = False``. This makes pip use
+ ``pkg_resources`` (unless the user set the aforementioned environment
+ variable to *True*).
+
+ On Python 3.14+, the ``pkg_resources`` backend cannot be used.
"""
+ if sys.version_info >= (3, 14):
+ # On Python >=3.14 we only support importlib.metadata.
+ return True
with contextlib.suppress(KeyError, ValueError):
+ # On Python <3.14, if the environment variable is set, we obey what it says.
return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
if sys.version_info < (3, 11):
+ # On Python <3.11, we always use pkg_resources, unless the environment
+ # variable was set.
return False
+ # On Python 3.11, 3.12 and 3.13, we check if the global constant is set.
import importlib.metadata
return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))
+def _emit_pkg_resources_deprecation_if_needed() -> None:
+ if sys.version_info < (3, 11):
+ # All pip versions supporting Python<=3.11 will support pkg_resources,
+ # and pkg_resources is the default for these, so let's not bother users.
+ return
+
+ import importlib.metadata
+
+ if hasattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA"):
+ # The Python distributor has set the global constant, so we don't
+ # warn, since it is not a user decision.
+ return
+
+ # The user has decided to use pkg_resources, so we warn.
+ deprecated(
+ reason="Using the pkg_resources metadata backend is deprecated.",
+ replacement=(
+ "to use the default importlib.metadata backend, "
+ "by unsetting the _PIP_USE_IMPORTLIB_METADATA environment variable"
+ ),
+ gone_in="26.3",
+ issue=13317,
+ )
+
+
class Backend(Protocol):
NAME: 'Literal["importlib", "pkg_resources"]'
Distribution: Type[BaseDistribution]
@@ -61,6 +92,9 @@ def select_backend() -> Backend:
from . import importlib
return cast(Backend, importlib)
+
+ _emit_pkg_resources_deprecation_if_needed()
+
from . import pkg_resources
return cast(Backend, pkg_resources)
diff --git a/contrib/python/pip/pip/_internal/metadata/base.py b/contrib/python/pip/pip/_internal/metadata/base.py
index 9eabcdb278b..ea5a0756dd7 100644
--- a/contrib/python/pip/pip/_internal/metadata/base.py
+++ b/contrib/python/pip/pip/_internal/metadata/base.py
@@ -231,7 +231,9 @@ class BaseDistribution(Protocol):
location = self.location
if not location:
return False
- return location.endswith(".egg")
+ # XXX if the distribution is a zipped egg, location has a trailing /
+ # so we resort to pathlib.Path to check the suffix in a reliable way.
+ return pathlib.Path(location).suffix == ".egg"
@property
def installed_with_setuptools_egg_info(self) -> bool:
diff --git a/contrib/python/pip/pip/_internal/metadata/importlib/_envs.py b/contrib/python/pip/pip/_internal/metadata/importlib/_envs.py
index 4d906fd3149..314e75e6731 100644
--- a/contrib/python/pip/pip/_internal/metadata/importlib/_envs.py
+++ b/contrib/python/pip/pip/_internal/metadata/importlib/_envs.py
@@ -1,18 +1,19 @@
-import functools
import importlib.metadata
import logging
import os
import pathlib
import sys
import zipfile
-import zipimport
from typing import Iterator, List, Optional, Sequence, Set, Tuple
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.utils import (
+ InvalidWheelFilename,
+ NormalizedName,
+ canonicalize_name,
+ parse_wheel_filename,
+)
from pip._internal.metadata.base import BaseDistribution, BaseEnvironment
-from pip._internal.models.wheel import Wheel
-from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filetypes import WHEEL_EXTENSION
from ._compat import BadMetadata, BasePath, get_dist_canonical_name, get_info_location
@@ -26,7 +27,9 @@ def _looks_like_wheel(location: str) -> bool:
return False
if not os.path.isfile(location):
return False
- if not Wheel.wheel_file_re.match(os.path.basename(location)):
+ try:
+ parse_wheel_filename(os.path.basename(location))
+ except InvalidWheelFilename:
return False
return zipfile.is_zipfile(location)
@@ -82,7 +85,7 @@ class _DistributionFinder:
installed_location = info_location.parent
yield Distribution(dist, info_location, installed_location)
- def find_linked(self, location: str) -> Iterator[BaseDistribution]:
+ def find_legacy_editables(self, location: str) -> Iterator[BaseDistribution]:
"""Read location in egg-link files and return distributions in there.
The path should be a directory; otherwise this returns nothing. This
@@ -106,54 +109,6 @@ class _DistributionFinder:
for dist, info_location in self._find_impl(target_location):
yield Distribution(dist, info_location, path)
- def _find_eggs_in_dir(self, location: str) -> Iterator[BaseDistribution]:
- from pip._vendor.pkg_resources import find_distributions
-
- from pip._internal.metadata import pkg_resources as legacy
-
- with os.scandir(location) as it:
- for entry in it:
- if not entry.name.endswith(".egg"):
- continue
- for dist in find_distributions(entry.path):
- yield legacy.Distribution(dist)
-
- def _find_eggs_in_zip(self, location: str) -> Iterator[BaseDistribution]:
- from pip._vendor.pkg_resources import find_eggs_in_zip
-
- from pip._internal.metadata import pkg_resources as legacy
-
- try:
- importer = zipimport.zipimporter(location)
- except zipimport.ZipImportError:
- return
- for dist in find_eggs_in_zip(importer, location):
- yield legacy.Distribution(dist)
-
- def find_eggs(self, location: str) -> Iterator[BaseDistribution]:
- """Find eggs in a location.
-
- This actually uses the old *pkg_resources* backend. We likely want to
- deprecate this so we can eventually remove the *pkg_resources*
- dependency entirely. Before that, this should first emit a deprecation
- warning for some versions when using the fallback since importing
- *pkg_resources* is slow for those who don't need it.
- """
- if os.path.isdir(location):
- yield from self._find_eggs_in_dir(location)
- if zipfile.is_zipfile(location):
- yield from self._find_eggs_in_zip(location)
-
-
-@functools.lru_cache(maxsize=None) # Warn a distribution exactly once.
-def _emit_egg_deprecation(location: Optional[str]) -> None:
- deprecated(
- reason=f"Loading egg at {location} is deprecated.",
- replacement="to use pip for package installation",
- gone_in="25.1",
- issue=12330,
- )
-
class Environment(BaseEnvironment):
def __init__(self, paths: Sequence[str]) -> None:
@@ -173,11 +128,7 @@ class Environment(BaseEnvironment):
finder = _DistributionFinder()
for location in self._paths:
yield from finder.find(location)
- for dist in finder.find_eggs(location):
- _emit_egg_deprecation(dist.location)
- yield dist
- # This must go last because that's how pkg_resources tie-breaks.
- yield from finder.find_linked(location)
+ yield from finder.find_legacy_editables(location)
def get_distribution(self, name: str) -> Optional[BaseDistribution]:
canonical_name = canonicalize_name(name)
diff --git a/contrib/python/pip/pip/_internal/models/__init__.py b/contrib/python/pip/pip/_internal/models/__init__.py
index 7855226e4b5..7b1fc295032 100644
--- a/contrib/python/pip/pip/_internal/models/__init__.py
+++ b/contrib/python/pip/pip/_internal/models/__init__.py
@@ -1,2 +1 @@
-"""A package that contains models that represent entities.
-"""
+"""A package that contains models that represent entities."""
diff --git a/contrib/python/pip/pip/_internal/models/direct_url.py b/contrib/python/pip/pip/_internal/models/direct_url.py
index fc5ec8d4aa9..8f990dd0ca1 100644
--- a/contrib/python/pip/pip/_internal/models/direct_url.py
+++ b/contrib/python/pip/pip/_internal/models/direct_url.py
@@ -1,4 +1,4 @@
-""" PEP 610 """
+"""PEP 610"""
import json
import re
diff --git a/contrib/python/pip/pip/_internal/models/link.py b/contrib/python/pip/pip/_internal/models/link.py
index 27ad016090c..f0560f6ec26 100644
--- a/contrib/python/pip/pip/_internal/models/link.py
+++ b/contrib/python/pip/pip/_internal/models/link.py
@@ -380,9 +380,9 @@ class Link:
else:
rp = ""
if self.comes_from:
- return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
+ return f"{self.redacted_url} (from {self.comes_from}){rp}"
else:
- return redact_auth_from_url(str(self._url))
+ return self.redacted_url
def __repr__(self) -> str:
return f"<Link {self}>"
@@ -405,6 +405,10 @@ class Link:
return self._url
@property
+ def redacted_url(self) -> str:
+ return redact_auth_from_url(self.url)
+
+ @property
def filename(self) -> str:
path = self.path.rstrip("/")
name = posixpath.basename(path)
@@ -468,7 +472,7 @@ class Link:
deprecated(
reason=f"{self} contains an egg fragment with a non-PEP 508 name.",
replacement="to use the req @ url syntax, and remove the egg fragment",
- gone_in="25.1",
+ gone_in="25.2",
issue=13157,
)
diff --git a/contrib/python/pip/pip/_internal/models/pylock.py b/contrib/python/pip/pip/_internal/models/pylock.py
new file mode 100644
index 00000000000..d9decb2964f
--- /dev/null
+++ b/contrib/python/pip/pip/_internal/models/pylock.py
@@ -0,0 +1,183 @@
+import dataclasses
+import re
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any, Dict, Iterable, List, Optional, Tuple
+
+from pip._vendor import tomli_w
+from pip._vendor.typing_extensions import Self
+
+from pip._internal.models.direct_url import ArchiveInfo, DirInfo, VcsInfo
+from pip._internal.models.link import Link
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.urls import url_to_path
+
+PYLOCK_FILE_NAME_RE = re.compile(r"^pylock\.([^.]+)\.toml$")
+
+
+def is_valid_pylock_file_name(path: Path) -> bool:
+ return path.name == "pylock.toml" or bool(re.match(PYLOCK_FILE_NAME_RE, path.name))
+
+
+def _toml_dict_factory(data: List[Tuple[str, Any]]) -> Dict[str, Any]:
+ return {key.replace("_", "-"): value for key, value in data if value is not None}
+
+
+@dataclass
+class PackageVcs:
+ type: str
+ url: Optional[str]
+ # (not supported) path: Optional[str]
+ requested_revision: Optional[str]
+ commit_id: str
+ subdirectory: Optional[str]
+
+
+@dataclass
+class PackageDirectory:
+ path: str
+ editable: Optional[bool]
+ subdirectory: Optional[str]
+
+
+@dataclass
+class PackageArchive:
+ url: Optional[str]
+ # (not supported) path: Optional[str]
+ # (not supported) size: Optional[int]
+ # (not supported) upload_time: Optional[datetime]
+ hashes: Dict[str, str]
+ subdirectory: Optional[str]
+
+
+@dataclass
+class PackageSdist:
+ name: str
+ # (not supported) upload_time: Optional[datetime]
+ url: Optional[str]
+ # (not supported) path: Optional[str]
+ # (not supported) size: Optional[int]
+ hashes: Dict[str, str]
+
+
+@dataclass
+class PackageWheel:
+ name: str
+ # (not supported) upload_time: Optional[datetime]
+ url: Optional[str]
+ # (not supported) path: Optional[str]
+ # (not supported) size: Optional[int]
+ hashes: Dict[str, str]
+
+
+@dataclass
+class Package:
+ name: str
+ version: Optional[str] = None
+ # (not supported) marker: Optional[str]
+ # (not supported) requires_python: Optional[str]
+ # (not supported) dependencies
+ vcs: Optional[PackageVcs] = None
+ directory: Optional[PackageDirectory] = None
+ archive: Optional[PackageArchive] = None
+ # (not supported) index: Optional[str]
+ sdist: Optional[PackageSdist] = None
+ wheels: Optional[List[PackageWheel]] = None
+ # (not supported) attestation_identities: Optional[List[Dict[str, Any]]]
+ # (not supported) tool: Optional[Dict[str, Any]]
+
+ @classmethod
+ def from_install_requirement(cls, ireq: InstallRequirement, base_dir: Path) -> Self:
+ base_dir = base_dir.resolve()
+ dist = ireq.get_dist()
+ download_info = ireq.download_info
+ assert download_info
+ package = cls(name=dist.canonical_name)
+ if ireq.is_direct:
+ if isinstance(download_info.info, VcsInfo):
+ package.vcs = PackageVcs(
+ type=download_info.info.vcs,
+ url=download_info.url,
+ requested_revision=download_info.info.requested_revision,
+ commit_id=download_info.info.commit_id,
+ subdirectory=download_info.subdirectory,
+ )
+ elif isinstance(download_info.info, DirInfo):
+ package.directory = PackageDirectory(
+ path=(
+ Path(url_to_path(download_info.url))
+ .resolve()
+ .relative_to(base_dir)
+ .as_posix()
+ ),
+ editable=(
+ download_info.info.editable
+ if download_info.info.editable
+ else None
+ ),
+ subdirectory=download_info.subdirectory,
+ )
+ elif isinstance(download_info.info, ArchiveInfo):
+ if not download_info.info.hashes:
+ raise NotImplementedError()
+ package.archive = PackageArchive(
+ url=download_info.url,
+ hashes=download_info.info.hashes,
+ subdirectory=download_info.subdirectory,
+ )
+ else:
+ # should never happen
+ raise NotImplementedError()
+ else:
+ package.version = str(dist.version)
+ if isinstance(download_info.info, ArchiveInfo):
+ if not download_info.info.hashes:
+ raise NotImplementedError()
+ link = Link(download_info.url)
+ if link.is_wheel:
+ package.wheels = [
+ PackageWheel(
+ name=link.filename,
+ url=download_info.url,
+ hashes=download_info.info.hashes,
+ )
+ ]
+ else:
+ package.sdist = PackageSdist(
+ name=link.filename,
+ url=download_info.url,
+ hashes=download_info.info.hashes,
+ )
+ else:
+ # should never happen
+ raise NotImplementedError()
+ return package
+
+
+@dataclass
+class Pylock:
+ lock_version: str = "1.0"
+ # (not supported) environments: Optional[List[str]]
+ # (not supported) requires_python: Optional[str]
+ # (not supported) extras: List[str] = []
+ # (not supported) dependency_groups: List[str] = []
+ created_by: str = "pip"
+ packages: List[Package] = dataclasses.field(default_factory=list)
+ # (not supported) tool: Optional[Dict[str, Any]]
+
+ def as_toml(self) -> str:
+ return tomli_w.dumps(dataclasses.asdict(self, dict_factory=_toml_dict_factory))
+
+ @classmethod
+ def from_install_requirements(
+ cls, install_requirements: Iterable[InstallRequirement], base_dir: Path
+ ) -> Self:
+ return cls(
+ packages=sorted(
+ (
+ Package.from_install_requirement(ireq, base_dir)
+ for ireq in install_requirements
+ ),
+ key=lambda p: p.name,
+ )
+ )
diff --git a/contrib/python/pip/pip/_internal/models/wheel.py b/contrib/python/pip/pip/_internal/models/wheel.py
index ea8560089d3..d905d652e3c 100644
--- a/contrib/python/pip/pip/_internal/models/wheel.py
+++ b/contrib/python/pip/pip/_internal/models/wheel.py
@@ -3,13 +3,13 @@ name that have meaning.
"""
import re
-from typing import Dict, Iterable, List
+from typing import Dict, Iterable, List, Optional
from pip._vendor.packaging.tags import Tag
+from pip._vendor.packaging.utils import BuildTag, parse_wheel_filename
from pip._vendor.packaging.utils import (
- InvalidWheelFilename as PackagingInvalidWheelName,
+ InvalidWheelFilename as _PackagingInvalidWheelFilename,
)
-from pip._vendor.packaging.utils import parse_wheel_filename
from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.utils.deprecation import deprecated
@@ -18,7 +18,7 @@ from pip._internal.utils.deprecation import deprecated
class Wheel:
"""A wheel file"""
- wheel_file_re = re.compile(
+ legacy_wheel_file_re = re.compile(
r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
\.whl|\.dist-info)$""",
@@ -26,46 +26,67 @@ class Wheel:
)
def __init__(self, filename: str) -> None:
- """
- :raises InvalidWheelFilename: when the filename is invalid for a wheel
- """
- wheel_info = self.wheel_file_re.match(filename)
- if not wheel_info:
- raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
self.filename = filename
- self.name = wheel_info.group("name").replace("_", "-")
- _version = wheel_info.group("ver")
- if "_" in _version:
- try:
- parse_wheel_filename(filename)
- except PackagingInvalidWheelName as e:
- deprecated(
- reason=(
- f"Wheel filename {filename!r} is not correctly normalised. "
- "Future versions of pip will raise the following error:\n"
- f"{e.args[0]}\n\n"
- ),
- replacement=(
- "to rename the wheel to use a correctly normalised "
- "name (this may require updating the version in "
- "the project metadata)"
- ),
- gone_in="25.1",
- issue=12938,
- )
-
- _version = _version.replace("_", "-")
-
- self.version = _version
- self.build_tag = wheel_info.group("build")
- self.pyversions = wheel_info.group("pyver").split(".")
- self.abis = wheel_info.group("abi").split(".")
- self.plats = wheel_info.group("plat").split(".")
-
- # All the tag combinations from this file
- self.file_tags = {
- Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
- }
+
+ # To make mypy happy specify type hints that can come from either
+ # parse_wheel_filename or the legacy_wheel_file_re match.
+ self.name: str
+ self._build_tag: Optional[BuildTag] = None
+
+ try:
+ wheel_info = parse_wheel_filename(filename)
+ self.name, _version, self._build_tag, self.file_tags = wheel_info
+ self.version = str(_version)
+ except _PackagingInvalidWheelFilename as e:
+ # Check if the wheel filename is in the legacy format
+ legacy_wheel_info = self.legacy_wheel_file_re.match(filename)
+ if not legacy_wheel_info:
+ raise InvalidWheelFilename(e.args[0]) from None
+
+ deprecated(
+ reason=(
+ f"Wheel filename {filename!r} is not correctly normalised. "
+ "Future versions of pip will raise the following error:\n"
+ f"{e.args[0]}\n\n"
+ ),
+ replacement=(
+ "to rename the wheel to use a correctly normalised "
+ "name (this may require updating the version in "
+ "the project metadata)"
+ ),
+ gone_in="25.3",
+ issue=12938,
+ )
+
+ self.name = legacy_wheel_info.group("name").replace("_", "-")
+ self.version = legacy_wheel_info.group("ver").replace("_", "-")
+
+ # Generate the file tags from the legacy wheel filename
+ pyversions = legacy_wheel_info.group("pyver").split(".")
+ abis = legacy_wheel_info.group("abi").split(".")
+ plats = legacy_wheel_info.group("plat").split(".")
+ self.file_tags = frozenset(
+ Tag(interpreter=py, abi=abi, platform=plat)
+ for py in pyversions
+ for abi in abis
+ for plat in plats
+ )
+
+ @property
+ def build_tag(self) -> BuildTag:
+ if self._build_tag is not None:
+ return self._build_tag
+
+ # Parse the build tag from the legacy wheel filename
+ legacy_wheel_info = self.legacy_wheel_file_re.match(self.filename)
+ assert legacy_wheel_info is not None, "guaranteed by filename validation"
+ build_tag = legacy_wheel_info.group("build")
+ match = re.match(r"^(\d+)(.*)$", build_tag)
+ assert match is not None, "guaranteed by filename validation"
+ build_tag_groups = match.groups()
+ self._build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
+
+ return self._build_tag
def get_formatted_file_tags(self) -> List[str]:
"""Return the wheel's tags as a sorted list of strings."""
diff --git a/contrib/python/pip/pip/_internal/network/__init__.py b/contrib/python/pip/pip/_internal/network/__init__.py
index b51bde91b2e..0ae1f5626bc 100644
--- a/contrib/python/pip/pip/_internal/network/__init__.py
+++ b/contrib/python/pip/pip/_internal/network/__init__.py
@@ -1,2 +1 @@
-"""Contains purely network-related utilities.
-"""
+"""Contains purely network-related utilities."""
diff --git a/contrib/python/pip/pip/_internal/network/cache.py b/contrib/python/pip/pip/_internal/network/cache.py
index fca04e6945f..2fe00f40263 100644
--- a/contrib/python/pip/pip/_internal/network/cache.py
+++ b/contrib/python/pip/pip/_internal/network/cache.py
@@ -1,5 +1,4 @@
-"""HTTP cache implementation.
-"""
+"""HTTP cache implementation."""
import os
from contextlib import contextmanager
diff --git a/contrib/python/pip/pip/_internal/network/download.py b/contrib/python/pip/pip/_internal/network/download.py
index 5c3bce3d2fd..15ef58b9c93 100644
--- a/contrib/python/pip/pip/_internal/network/download.py
+++ b/contrib/python/pip/pip/_internal/network/download.py
@@ -1,16 +1,17 @@
-"""Download files with progress indicators.
-"""
+"""Download files with progress indicators."""
import email.message
import logging
import mimetypes
import os
-from typing import Iterable, Optional, Tuple
+from http import HTTPStatus
+from typing import BinaryIO, Iterable, Optional, Tuple
from pip._vendor.requests.models import Response
+from pip._vendor.urllib3.exceptions import ReadTimeoutError
from pip._internal.cli.progress_bars import get_download_progress_renderer
-from pip._internal.exceptions import NetworkConnectionError
+from pip._internal.exceptions import IncompleteDownloadError, NetworkConnectionError
from pip._internal.models.index import PyPI
from pip._internal.models.link import Link
from pip._internal.network.cache import is_from_cache
@@ -28,13 +29,21 @@ def _get_http_response_size(resp: Response) -> Optional[int]:
return None
+def _get_http_response_etag_or_last_modified(resp: Response) -> Optional[str]:
+ """
+ Return either the ETag or Last-Modified header (or None if neither exists).
+ The return value can be used in an If-Range header.
+ """
+ return resp.headers.get("etag", resp.headers.get("last-modified"))
+
+
def _prepare_download(
resp: Response,
link: Link,
progress_bar: str,
+ total_length: Optional[int],
+ range_start: Optional[int] = 0,
) -> Iterable[bytes]:
- total_length = _get_http_response_size(resp)
-
if link.netloc == PyPI.file_storage_domain:
url = link.show_url
else:
@@ -43,10 +52,17 @@ def _prepare_download(
logged_url = redact_auth_from_url(url)
if total_length:
- logged_url = f"{logged_url} ({format_size(total_length)})"
+ if range_start:
+ logged_url = (
+ f"{logged_url} ({format_size(range_start)}/{format_size(total_length)})"
+ )
+ else:
+ logged_url = f"{logged_url} ({format_size(total_length)})"
if is_from_cache(resp):
logger.info("Using cached %s", logged_url)
+ elif range_start:
+ logger.info("Resuming download %s", logged_url)
else:
logger.info("Downloading %s", logged_url)
@@ -66,7 +82,9 @@ def _prepare_download(
if not show_progress:
return chunks
- renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length)
+ renderer = get_download_progress_renderer(
+ bar_type=progress_bar, size=total_length, initial_progress=range_start
+ )
return renderer(chunks)
@@ -113,10 +131,27 @@ def _get_http_response_filename(resp: Response, link: Link) -> str:
return filename
-def _http_get_download(session: PipSession, link: Link) -> Response:
+def _http_get_download(
+ session: PipSession,
+ link: Link,
+ range_start: Optional[int] = 0,
+ if_range: Optional[str] = None,
+) -> Response:
target_url = link.url.split("#", 1)[0]
- resp = session.get(target_url, headers=HEADERS, stream=True)
- raise_for_status(resp)
+ headers = HEADERS.copy()
+ # request a partial download
+ if range_start:
+ headers["Range"] = f"bytes={range_start}-"
+ # make sure the file hasn't changed
+ if if_range:
+ headers["If-Range"] = if_range
+ try:
+ resp = session.get(target_url, headers=headers, stream=True)
+ raise_for_status(resp)
+ except NetworkConnectionError as e:
+ assert e.response is not None
+ logger.critical("HTTP error %s while getting %s", e.response.status_code, link)
+ raise
return resp
@@ -125,30 +160,140 @@ class Downloader:
self,
session: PipSession,
progress_bar: str,
+ resume_retries: int,
) -> None:
+ assert (
+ resume_retries >= 0
+ ), "Number of max resume retries must be bigger or equal to zero"
self._session = session
self._progress_bar = progress_bar
+ self._resume_retries = resume_retries
def __call__(self, link: Link, location: str) -> Tuple[str, str]:
"""Download the file given by link into location."""
- try:
- resp = _http_get_download(self._session, link)
- except NetworkConnectionError as e:
- assert e.response is not None
- logger.critical(
- "HTTP error %s while getting %s", e.response.status_code, link
- )
- raise
+ resp = _http_get_download(self._session, link)
+ # NOTE: The original download size needs to be passed down everywhere
+ # so if the download is resumed (with a HTTP Range request) the progress
+ # bar will report the right size.
+ total_length = _get_http_response_size(resp)
+ content_type = resp.headers.get("Content-Type", "")
filename = _get_http_response_filename(resp, link)
filepath = os.path.join(location, filename)
- chunks = _prepare_download(resp, link, self._progress_bar)
with open(filepath, "wb") as content_file:
+ bytes_received = self._process_response(
+ resp, link, content_file, 0, total_length
+ )
+ # If possible, check for an incomplete download and attempt resuming.
+ if total_length and bytes_received < total_length:
+ self._attempt_resume(
+ resp, link, content_file, total_length, bytes_received
+ )
+
+ return filepath, content_type
+
+ def _process_response(
+ self,
+ resp: Response,
+ link: Link,
+ content_file: BinaryIO,
+ bytes_received: int,
+ total_length: Optional[int],
+ ) -> int:
+ """Process the response and write the chunks to the file."""
+ chunks = _prepare_download(
+ resp, link, self._progress_bar, total_length, range_start=bytes_received
+ )
+ return self._write_chunks_to_file(
+ chunks, content_file, allow_partial=bool(total_length)
+ )
+
+ def _write_chunks_to_file(
+ self, chunks: Iterable[bytes], content_file: BinaryIO, *, allow_partial: bool
+ ) -> int:
+ """Write the chunks to the file and return the number of bytes received."""
+ bytes_received = 0
+ try:
for chunk in chunks:
+ bytes_received += len(chunk)
content_file.write(chunk)
- content_type = resp.headers.get("Content-Type", "")
- return filepath, content_type
+ except ReadTimeoutError as e:
+ # If partial downloads are OK (the download will be retried), don't bail.
+ if not allow_partial:
+ raise e
+
+ # Ensuring bytes_received is returned to attempt resume
+ logger.warning("Connection timed out while downloading.")
+
+ return bytes_received
+
+ def _attempt_resume(
+ self,
+ resp: Response,
+ link: Link,
+ content_file: BinaryIO,
+ total_length: Optional[int],
+ bytes_received: int,
+ ) -> None:
+ """Attempt to resume the download if connection was dropped."""
+ etag_or_last_modified = _get_http_response_etag_or_last_modified(resp)
+
+ attempts_left = self._resume_retries
+ while total_length and attempts_left and bytes_received < total_length:
+ attempts_left -= 1
+
+ logger.warning(
+ "Attempting to resume incomplete download (%s/%s, attempt %d)",
+ format_size(bytes_received),
+ format_size(total_length),
+ (self._resume_retries - attempts_left),
+ )
+
+ try:
+ # Try to resume the download using a HTTP range request.
+ resume_resp = _http_get_download(
+ self._session,
+ link,
+ range_start=bytes_received,
+ if_range=etag_or_last_modified,
+ )
+
+ # Fallback: if the server responded with 200 (i.e., the file has
+ # since been modified or range requests are unsupported) or any
+ # other unexpected status, restart the download from the beginning.
+ must_restart = resume_resp.status_code != HTTPStatus.PARTIAL_CONTENT
+ if must_restart:
+ bytes_received, total_length, etag_or_last_modified = (
+ self._reset_download_state(resume_resp, content_file)
+ )
+
+ bytes_received += self._process_response(
+ resume_resp, link, content_file, bytes_received, total_length
+ )
+ except (ConnectionError, ReadTimeoutError, OSError):
+ continue
+
+ # No more resume attempts. Raise an error if the download is still incomplete.
+ if total_length and bytes_received < total_length:
+ os.remove(content_file.name)
+ raise IncompleteDownloadError(
+ link, bytes_received, total_length, retries=self._resume_retries
+ )
+
+ def _reset_download_state(
+ self,
+ resp: Response,
+ content_file: BinaryIO,
+ ) -> Tuple[int, Optional[int], Optional[str]]:
+ """Reset the download state to restart downloading from the beginning."""
+ content_file.seek(0)
+ content_file.truncate()
+ bytes_received = 0
+ total_length = _get_http_response_size(resp)
+ etag_or_last_modified = _get_http_response_etag_or_last_modified(resp)
+
+ return bytes_received, total_length, etag_or_last_modified
class BatchDownloader:
@@ -156,32 +301,14 @@ class BatchDownloader:
self,
session: PipSession,
progress_bar: str,
+ resume_retries: int,
) -> None:
- self._session = session
- self._progress_bar = progress_bar
+ self._downloader = Downloader(session, progress_bar, resume_retries)
def __call__(
self, links: Iterable[Link], location: str
) -> Iterable[Tuple[Link, Tuple[str, str]]]:
"""Download the files given by links into location."""
for link in links:
- try:
- resp = _http_get_download(self._session, link)
- except NetworkConnectionError as e:
- assert e.response is not None
- logger.critical(
- "HTTP error %s while getting %s",
- e.response.status_code,
- link,
- )
- raise
-
- filename = _get_http_response_filename(resp, link)
- filepath = os.path.join(location, filename)
-
- chunks = _prepare_download(resp, link, self._progress_bar)
- with open(filepath, "wb") as content_file:
- for chunk in chunks:
- content_file.write(chunk)
- content_type = resp.headers.get("Content-Type", "")
+ filepath, content_type = self._downloader(link, location)
yield link, (filepath, content_type)
diff --git a/contrib/python/pip/pip/_internal/network/xmlrpc.py b/contrib/python/pip/pip/_internal/network/xmlrpc.py
index 22ec8d2f4a6..ba5caf337e2 100644
--- a/contrib/python/pip/pip/_internal/network/xmlrpc.py
+++ b/contrib/python/pip/pip/_internal/network/xmlrpc.py
@@ -1,5 +1,4 @@
-"""xmlrpclib.Transport implementation
-"""
+"""xmlrpclib.Transport implementation"""
import logging
import urllib.parse
diff --git a/contrib/python/pip/pip/_internal/operations/build/metadata.py b/contrib/python/pip/pip/_internal/operations/build/metadata.py
index c66ac354deb..a546809ecd5 100644
--- a/contrib/python/pip/pip/_internal/operations/build/metadata.py
+++ b/contrib/python/pip/pip/_internal/operations/build/metadata.py
@@ -1,5 +1,4 @@
-"""Metadata generation logic for source distributions.
-"""
+"""Metadata generation logic for source distributions."""
import os
diff --git a/contrib/python/pip/pip/_internal/operations/build/metadata_editable.py b/contrib/python/pip/pip/_internal/operations/build/metadata_editable.py
index 3397ccf0f92..27ecd7d3d80 100644
--- a/contrib/python/pip/pip/_internal/operations/build/metadata_editable.py
+++ b/contrib/python/pip/pip/_internal/operations/build/metadata_editable.py
@@ -1,5 +1,4 @@
-"""Metadata generation logic for source distributions.
-"""
+"""Metadata generation logic for source distributions."""
import os
diff --git a/contrib/python/pip/pip/_internal/operations/build/metadata_legacy.py b/contrib/python/pip/pip/_internal/operations/build/metadata_legacy.py
index c01dd1c678a..e385b5ddf76 100644
--- a/contrib/python/pip/pip/_internal/operations/build/metadata_legacy.py
+++ b/contrib/python/pip/pip/_internal/operations/build/metadata_legacy.py
@@ -1,5 +1,4 @@
-"""Metadata generation logic for legacy source distributions.
-"""
+"""Metadata generation logic for legacy source distributions."""
import logging
import os
diff --git a/contrib/python/pip/pip/_internal/operations/build/wheel_legacy.py b/contrib/python/pip/pip/_internal/operations/build/wheel_legacy.py
index 3ee2a7058d3..473018173f5 100644
--- a/contrib/python/pip/pip/_internal/operations/build/wheel_legacy.py
+++ b/contrib/python/pip/pip/_internal/operations/build/wheel_legacy.py
@@ -3,6 +3,7 @@ import os.path
from typing import List, Optional
from pip._internal.cli.spinners import open_spinner
+from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
from pip._internal.utils.subprocess import call_subprocess, format_command_args
@@ -68,6 +69,21 @@ def build_wheel_legacy(
Returns path to wheel if successfully built. Otherwise, returns None.
"""
+ deprecated(
+ reason=(
+ f"Building {name!r} using the legacy setup.py bdist_wheel mechanism, "
+ "which will be removed in a future version."
+ ),
+ replacement=(
+ "to use the standardized build interface by "
+ "setting the `--use-pep517` option, "
+ "(possibly combined with `--no-build-isolation`), "
+ f"or adding a `pyproject.toml` file to the source tree of {name!r}"
+ ),
+ gone_in="25.3",
+ issue=6334,
+ )
+
wheel_args = make_setuptools_bdist_wheel_args(
setup_py_path,
global_options=global_options,
diff --git a/contrib/python/pip/pip/_internal/operations/check.py b/contrib/python/pip/pip/_internal/operations/check.py
index 4b6fbc4c375..c6d676d6e61 100644
--- a/contrib/python/pip/pip/_internal/operations/check.py
+++ b/contrib/python/pip/pip/_internal/operations/check.py
@@ -1,5 +1,4 @@
-"""Validation of dependencies of packages
-"""
+"""Validation of dependencies of packages"""
import logging
from contextlib import suppress
diff --git a/contrib/python/pip/pip/_internal/operations/install/__init__.py b/contrib/python/pip/pip/_internal/operations/install/__init__.py
index 24d6a5dd31f..2645a4acad0 100644
--- a/contrib/python/pip/pip/_internal/operations/install/__init__.py
+++ b/contrib/python/pip/pip/_internal/operations/install/__init__.py
@@ -1,2 +1 @@
-"""For modules related to installing packages.
-"""
+"""For modules related to installing packages."""
diff --git a/contrib/python/pip/pip/_internal/operations/install/editable_legacy.py b/contrib/python/pip/pip/_internal/operations/install/editable_legacy.py
index 9aaa699a645..644bcec111f 100644
--- a/contrib/python/pip/pip/_internal/operations/install/editable_legacy.py
+++ b/contrib/python/pip/pip/_internal/operations/install/editable_legacy.py
@@ -1,5 +1,4 @@
-"""Legacy editable installation process, i.e. `setup.py develop`.
-"""
+"""Legacy editable installation process, i.e. `setup.py develop`."""
import logging
from typing import Optional, Sequence
diff --git a/contrib/python/pip/pip/_internal/operations/install/wheel.py b/contrib/python/pip/pip/_internal/operations/install/wheel.py
index aef42aa9eef..73e4bfc7c00 100644
--- a/contrib/python/pip/pip/_internal/operations/install/wheel.py
+++ b/contrib/python/pip/pip/_internal/operations/install/wheel.py
@@ -1,5 +1,4 @@
-"""Support for installing and building the "wheel" binary package format.
-"""
+"""Support for installing and building the "wheel" binary package format."""
import collections
import compileall
@@ -14,10 +13,10 @@ import sys
import warnings
from base64 import urlsafe_b64encode
from email.message import Message
+from io import StringIO
from itertools import chain, filterfalse, starmap
from typing import (
IO,
- TYPE_CHECKING,
Any,
BinaryIO,
Callable,
@@ -51,7 +50,7 @@ from pip._internal.metadata import (
from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
-from pip._internal.utils.misc import StreamWrapper, ensure_dir, hash_file, partition
+from pip._internal.utils.misc import ensure_dir, hash_file, partition
from pip._internal.utils.unpacking import (
current_umask,
is_within_directory,
@@ -60,15 +59,14 @@ from pip._internal.utils.unpacking import (
)
from pip._internal.utils.wheel import parse_wheel
-if TYPE_CHECKING:
- class File(Protocol):
- src_record_path: "RecordPath"
- dest_path: str
- changed: bool
+class File(Protocol):
+ src_record_path: "RecordPath"
+ dest_path: str
+ changed: bool
- def save(self) -> None:
- pass
+ def save(self) -> None:
+ pass
logger = logging.getLogger(__name__)
@@ -609,9 +607,7 @@ def _install_wheel( # noqa: C901, PLR0915 function is too long
# Compile all of the pyc files for the installed files
if pycompile:
- with contextlib.redirect_stdout(
- StreamWrapper.from_stream(sys.stdout)
- ) as stdout:
+ with contextlib.redirect_stdout(StringIO()) as stdout:
with warnings.catch_warnings():
warnings.filterwarnings("ignore")
for path in pyc_source_file_paths():
diff --git a/contrib/python/pip/pip/_internal/operations/prepare.py b/contrib/python/pip/pip/_internal/operations/prepare.py
index e6aa3447200..531070a03ad 100644
--- a/contrib/python/pip/pip/_internal/operations/prepare.py
+++ b/contrib/python/pip/pip/_internal/operations/prepare.py
@@ -1,5 +1,4 @@
-"""Prepares a distribution for installation
-"""
+"""Prepares a distribution for installation"""
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
@@ -88,7 +87,12 @@ class File:
def __post_init__(self) -> None:
if self.content_type is None:
- self.content_type = mimetypes.guess_type(self.path)[0]
+ # Try to guess the file's MIME type. If the system MIME tables
+ # can't be loaded, give up.
+ try:
+ self.content_type = mimetypes.guess_type(self.path)[0]
+ except OSError:
+ pass
def get_http_url(
@@ -231,6 +235,7 @@ class RequirementPreparer:
lazy_wheel: bool,
verbosity: int,
legacy_resolver: bool,
+ resume_retries: int,
) -> None:
super().__init__()
@@ -238,8 +243,8 @@ class RequirementPreparer:
self.build_dir = build_dir
self.build_tracker = build_tracker
self._session = session
- self._download = Downloader(session, progress_bar)
- self._batch_download = BatchDownloader(session, progress_bar)
+ self._download = Downloader(session, progress_bar, resume_retries)
+ self._batch_download = BatchDownloader(session, progress_bar, resume_retries)
self.finder = finder
# Where still-packed archives should be written to. If None, they are
diff --git a/contrib/python/pip/pip/_internal/req/__init__.py b/contrib/python/pip/pip/_internal/req/__init__.py
index 422d851d729..bf282dab8bc 100644
--- a/contrib/python/pip/pip/_internal/req/__init__.py
+++ b/contrib/python/pip/pip/_internal/req/__init__.py
@@ -3,6 +3,7 @@ import logging
from dataclasses import dataclass
from typing import Generator, List, Optional, Sequence, Tuple
+from pip._internal.cli.progress_bars import get_install_progress_renderer
from pip._internal.utils.logging import indent_log
from .req_file import parse_requirements
@@ -41,6 +42,7 @@ def install_given_reqs(
warn_script_location: bool,
use_user_site: bool,
pycompile: bool,
+ progress_bar: str,
) -> List[InstallationResult]:
"""
Install everything in the given list.
@@ -57,8 +59,19 @@ def install_given_reqs(
installed = []
+ show_progress = logger.isEnabledFor(logging.INFO) and len(to_install) > 1
+
+ items = iter(to_install.values())
+ if show_progress:
+ renderer = get_install_progress_renderer(
+ bar_type=progress_bar, total=len(to_install)
+ )
+ items = renderer(items)
+
with indent_log():
- for req_name, requirement in to_install.items():
+ for requirement in items:
+ req_name = requirement.name
+ assert req_name is not None
if requirement.should_reinstall:
logger.info("Attempting uninstall: %s", req_name)
with indent_log():
diff --git a/contrib/python/pip/pip/_internal/req/req_dependency_group.py b/contrib/python/pip/pip/_internal/req/req_dependency_group.py
new file mode 100644
index 00000000000..8f124de5b81
--- /dev/null
+++ b/contrib/python/pip/pip/_internal/req/req_dependency_group.py
@@ -0,0 +1,74 @@
+from typing import Any, Dict, Iterable, Iterator, List, Tuple
+
+from pip._vendor import tomli
+from pip._vendor.dependency_groups import DependencyGroupResolver
+
+from pip._internal.exceptions import InstallationError
+
+
+def parse_dependency_groups(groups: List[Tuple[str, str]]) -> List[str]:
+ """
+ Parse dependency groups data as provided via the CLI, in a `[path:]group` syntax.
+
+ Raises InstallationErrors if anything goes wrong.
+ """
+ resolvers = _build_resolvers(path for (path, _) in groups)
+ return list(_resolve_all_groups(resolvers, groups))
+
+
+def _resolve_all_groups(
+ resolvers: Dict[str, DependencyGroupResolver], groups: List[Tuple[str, str]]
+) -> Iterator[str]:
+ """
+ Run all resolution, converting any error from `DependencyGroupResolver` into
+ an InstallationError.
+ """
+ for path, groupname in groups:
+ resolver = resolvers[path]
+ try:
+ yield from (str(req) for req in resolver.resolve(groupname))
+ except (ValueError, TypeError, LookupError) as e:
+ raise InstallationError(
+ f"[dependency-groups] resolution failed for '{groupname}' "
+ f"from '{path}': {e}"
+ ) from e
+
+
+def _build_resolvers(paths: Iterable[str]) -> Dict[str, Any]:
+ resolvers = {}
+ for path in paths:
+ if path in resolvers:
+ continue
+
+ pyproject = _load_pyproject(path)
+ if "dependency-groups" not in pyproject:
+ raise InstallationError(
+ f"[dependency-groups] table was missing from '{path}'. "
+ "Cannot resolve '--group' option."
+ )
+ raw_dependency_groups = pyproject["dependency-groups"]
+ if not isinstance(raw_dependency_groups, dict):
+ raise InstallationError(
+ f"[dependency-groups] table was malformed in {path}. "
+ "Cannot resolve '--group' option."
+ )
+
+ resolvers[path] = DependencyGroupResolver(raw_dependency_groups)
+ return resolvers
+
+
+def _load_pyproject(path: str) -> Dict[str, Any]:
+ """
+ This helper loads a pyproject.toml as TOML.
+
+ It raises an InstallationError if the operation fails.
+ """
+ try:
+ with open(path, "rb") as fp:
+ return tomli.load(fp)
+ except FileNotFoundError:
+ raise InstallationError(f"{path} not found. Cannot resolve '--group' option.")
+ except tomli.TOMLDecodeError as e:
+ raise InstallationError(f"Error parsing {path}: {e}") from e
+ except OSError as e:
+ raise InstallationError(f"Error reading {path}: {e}") from e
diff --git a/contrib/python/pip/pip/_internal/req/req_install.py b/contrib/python/pip/pip/_internal/req/req_install.py
index 3262d82658e..99d6936b735 100644
--- a/contrib/python/pip/pip/_internal/req/req_install.py
+++ b/contrib/python/pip/pip/_internal/req/req_install.py
@@ -837,7 +837,7 @@ class InstallRequirement:
"try using --config-settings editable_mode=compat. "
"Please consult the setuptools documentation for more information"
),
- gone_in="25.1",
+ gone_in="25.3",
issue=11457,
)
if self.config_settings:
@@ -925,7 +925,7 @@ def check_legacy_setup_py_options(
reason="--build-option and --global-option are deprecated.",
issue=11859,
replacement="to use --config-settings",
- gone_in=None,
+ gone_in="25.3",
)
logger.warning(
"Implying --no-binary=:all: due to the presence of "
diff --git a/contrib/python/pip/pip/_internal/req/req_uninstall.py b/contrib/python/pip/pip/_internal/req/req_uninstall.py
index 26df20844b3..a41082317d1 100644
--- a/contrib/python/pip/pip/_internal/req/req_uninstall.py
+++ b/contrib/python/pip/pip/_internal/req/req_uninstall.py
@@ -38,7 +38,7 @@ def _script_names(
def _unique(
- fn: Callable[..., Generator[Any, None, None]]
+ fn: Callable[..., Generator[Any, None, None]],
) -> Callable[..., Generator[Any, None, None]]:
@functools.wraps(fn)
def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]:
@@ -505,10 +505,13 @@ class UninstallPathSet:
# package installed by easy_install
# We cannot match on dist.egg_name because it can slightly vary
# i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
- paths_to_remove.add(dist_location)
- easy_install_egg = os.path.split(dist_location)[1]
+ # XXX We use normalized_dist_location because dist_location may contain
+ # a trailing / if the distribution is a zipped egg
+ # (which is not a directory).
+ paths_to_remove.add(normalized_dist_location)
+ easy_install_egg = os.path.split(normalized_dist_location)[1]
easy_install_pth = os.path.join(
- os.path.dirname(dist_location),
+ os.path.dirname(normalized_dist_location),
"easy-install.pth",
)
paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
diff --git a/contrib/python/pip/pip/_internal/resolution/resolvelib/candidates.py b/contrib/python/pip/pip/_internal/resolution/resolvelib/candidates.py
index 6617644fe53..d976026ac18 100644
--- a/contrib/python/pip/pip/_internal/resolution/resolvelib/candidates.py
+++ b/contrib/python/pip/pip/_internal/resolution/resolvelib/candidates.py
@@ -249,10 +249,12 @@ class _InstallRequirementBackedCandidate(Candidate):
return dist
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
+ # Emit the Requires-Python requirement first to fail fast on
+ # unsupported candidates and avoid pointless downloads/preparation.
+ yield self._factory.make_requires_python_requirement(self.dist.requires_python)
requires = self.dist.iter_dependencies() if with_requires else ()
for r in requires:
yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
- yield self._factory.make_requires_python_requirement(self.dist.requires_python)
def get_install_requirement(self) -> Optional[InstallRequirement]:
return self._ireq
@@ -552,6 +554,9 @@ class RequiresPythonCandidate(Candidate):
def __str__(self) -> str:
return f"Python {self._version}"
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}({self._version!r})"
+
@property
def project_name(self) -> NormalizedName:
return REQUIRES_PYTHON_IDENTIFIER
diff --git a/contrib/python/pip/pip/_internal/resolution/resolvelib/factory.py b/contrib/python/pip/pip/_internal/resolution/resolvelib/factory.py
index 6c273eb88db..55c11b29158 100644
--- a/contrib/python/pip/pip/_internal/resolution/resolvelib/factory.py
+++ b/contrib/python/pip/pip/_internal/resolution/resolvelib/factory.py
@@ -748,7 +748,7 @@ class Factory:
# The simplest case is when we have *one* cause that can't be
# satisfied. We just report that case.
if len(e.causes) == 1:
- req, parent = e.causes[0]
+ req, parent = next(iter(e.causes))
if req.name not in constraints:
return self._report_single_requirement_conflict(req, parent)
diff --git a/contrib/python/pip/pip/_internal/resolution/resolvelib/found_candidates.py b/contrib/python/pip/pip/_internal/resolution/resolvelib/found_candidates.py
index a1d57e0f4b2..3a9c2ed723d 100644
--- a/contrib/python/pip/pip/_internal/resolution/resolvelib/found_candidates.py
+++ b/contrib/python/pip/pip/_internal/resolution/resolvelib/found_candidates.py
@@ -8,10 +8,9 @@ absolutely need, and not "download the world" when we only need one version of
something.
"""
-import functools
import logging
from collections.abc import Sequence
-from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple
+from typing import Any, Callable, Iterator, Optional, Set, Tuple
from pip._vendor.packaging.version import _BaseVersion
@@ -23,21 +22,6 @@ logger = logging.getLogger(__name__)
IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]
-if TYPE_CHECKING:
- SequenceCandidate = Sequence[Candidate]
-else:
- # For compatibility: Python before 3.9 does not support using [] on the
- # Sequence class.
- #
- # >>> from collections.abc import Sequence
- # >>> Sequence[str]
- # Traceback (most recent call last):
- # File "<stdin>", line 1, in <module>
- # TypeError: 'ABCMeta' object is not subscriptable
- #
- # TODO: Remove this block after dropping Python 3.8 support.
- SequenceCandidate = Sequence
-
def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
"""Iterator for ``FoundCandidates``.
@@ -124,7 +108,7 @@ def _iter_built_with_inserted(
yield installed
-class FoundCandidates(SequenceCandidate):
+class FoundCandidates(Sequence[Candidate]):
"""A lazy sequence to provide candidates to the resolver.
The intended usage is to return this from `find_matches()` so the resolver
@@ -144,6 +128,7 @@ class FoundCandidates(SequenceCandidate):
self._installed = installed
self._prefers_installed = prefers_installed
self._incompatible_ids = incompatible_ids
+ self._bool: Optional[bool] = None
def __getitem__(self, index: Any) -> Any:
# Implemented to satisfy the ABC check. This is not needed by the
@@ -167,8 +152,13 @@ class FoundCandidates(SequenceCandidate):
# performance reasons).
raise NotImplementedError("don't do this")
- @functools.lru_cache(maxsize=1)
def __bool__(self) -> bool:
+ if self._bool is not None:
+ return self._bool
+
if self._prefers_installed and self._installed:
+ self._bool = True
return True
- return any(self)
+
+ self._bool = any(self)
+ return self._bool
diff --git a/contrib/python/pip/pip/_internal/resolution/resolvelib/provider.py b/contrib/python/pip/pip/_internal/resolution/resolvelib/provider.py
index fb0dd85f112..ba4f03b34ee 100644
--- a/contrib/python/pip/pip/_internal/resolution/resolvelib/provider.py
+++ b/contrib/python/pip/pip/_internal/resolution/resolvelib/provider.py
@@ -1,4 +1,3 @@
-import collections
import math
from functools import lru_cache
from typing import (
@@ -7,16 +6,21 @@ from typing import (
Iterable,
Iterator,
Mapping,
+ Optional,
Sequence,
+ Tuple,
TypeVar,
Union,
)
from pip._vendor.resolvelib.providers import AbstractProvider
+from pip._internal.req.req_install import InstallRequirement
+
from .base import Candidate, Constraint, Requirement
from .candidates import REQUIRES_PYTHON_IDENTIFIER
from .factory import Factory
+from .requirements import ExplicitRequirement
if TYPE_CHECKING:
from pip._vendor.resolvelib.providers import Preference
@@ -100,11 +104,53 @@ class PipProvider(_ProviderBase):
self._ignore_dependencies = ignore_dependencies
self._upgrade_strategy = upgrade_strategy
self._user_requested = user_requested
- self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf)
def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
return requirement_or_candidate.name
+ def narrow_requirement_selection(
+ self,
+ identifiers: Iterable[str],
+ resolutions: Mapping[str, Candidate],
+ candidates: Mapping[str, Iterator[Candidate]],
+ information: Mapping[str, Iterator["PreferenceInformation"]],
+ backtrack_causes: Sequence["PreferenceInformation"],
+ ) -> Iterable[str]:
+ """Produce a subset of identifiers that should be considered before others.
+
+ Currently pip narrows the following selection:
+ * Requires-Python, if present is always returned by itself
+ * Backtrack causes are considered next because they can be identified
+ in linear time here, whereas because get_preference() is called
+ for each identifier, it would be quadratic to check for them there.
+ Further, the current backtrack causes likely need to be resolved
+ before other requirements as a resolution can't be found while
+ there is a conflict.
+ """
+ backtrack_identifiers = set()
+ for info in backtrack_causes:
+ backtrack_identifiers.add(info.requirement.name)
+ if info.parent is not None:
+ backtrack_identifiers.add(info.parent.name)
+
+ current_backtrack_causes = []
+ for identifier in identifiers:
+ # Requires-Python has only one candidate and the check is basically
+ # free, so we always do it first to avoid needless work if it fails.
+ # This skips calling get_preference() for all other identifiers.
+ if identifier == REQUIRES_PYTHON_IDENTIFIER:
+ return [identifier]
+
+ # Check if this identifier is a backtrack cause
+ if identifier in backtrack_identifiers:
+ current_backtrack_causes.append(identifier)
+ continue
+
+ if current_backtrack_causes:
+ return current_backtrack_causes
+
+ return identifiers
+
def get_preference(
self,
identifier: str,
@@ -120,18 +166,20 @@ class PipProvider(_ProviderBase):
Currently pip considers the following in order:
- * Prefer if any of the known requirements is "direct", e.g. points to an
- explicit URL.
- * If equal, prefer if any requirement is "pinned", i.e. contains
- operator ``===`` or ``==``.
- * If equal, calculate an approximate "depth" and resolve requirements
- closer to the user-specified requirements first. If the depth cannot
- by determined (eg: due to no matching parents), it is considered
- infinite.
- * Order user-specified requirements by the order they are specified.
- * If equal, prefers "non-free" requirements, i.e. contains at least one
- operator, such as ``>=`` or ``<``.
- * If equal, order alphabetically for consistency (helps debuggability).
+ * Any requirement that is "direct", e.g., points to an explicit URL.
+ * Any requirement that is "pinned", i.e., contains the operator ``===``
+ or ``==`` without a wildcard.
+ * Any requirement that imposes an upper version limit, i.e., contains the
+ operator ``<``, ``<=``, ``~=``, or ``==`` with a wildcard. Because
+ pip prioritizes the latest version, preferring explicit upper bounds
+ can rule out infeasible candidates sooner. This does not imply that
+ upper bounds are good practice; they can make dependency management
+ and resolution harder.
+ * Order user-specified requirements as they are specified, placing
+ other requirements afterward.
+ * Any "non-free" requirement, i.e., one that contains at least one
+ operator, such as ``>=`` or ``!=``.
+ * Alphabetical order for consistency (aids debuggability).
"""
try:
next(iter(information[identifier]))
@@ -142,55 +190,39 @@ class PipProvider(_ProviderBase):
else:
has_information = True
- if has_information:
- lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
- candidate, ireqs = zip(*lookups)
+ if not has_information:
+ direct = False
+ ireqs: Tuple[Optional[InstallRequirement], ...] = ()
else:
- candidate, ireqs = None, ()
+ # Go through the information and for each requirement,
+ # check if it's explicit (e.g., a direct link) and get the
+ # InstallRequirement (the second element) from get_candidate_lookup()
+ directs, ireqs = zip(
+ *(
+ (isinstance(r, ExplicitRequirement), r.get_candidate_lookup()[1])
+ for r, _ in information[identifier]
+ )
+ )
+ direct = any(directs)
- operators = [
- specifier.operator
+ operators: list[tuple[str, str]] = [
+ (specifier.operator, specifier.version)
for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
for specifier in specifier_set
]
- direct = candidate is not None
- pinned = any(op[:2] == "==" for op in operators)
+ pinned = any(((op[:2] == "==") and ("*" not in ver)) for op, ver in operators)
+ upper_bounded = any(
+ ((op in ("<", "<=", "~=")) or (op == "==" and "*" in ver))
+ for op, ver in operators
+ )
unfree = bool(operators)
-
- try:
- requested_order: Union[int, float] = self._user_requested[identifier]
- except KeyError:
- requested_order = math.inf
- if has_information:
- parent_depths = (
- self._known_depths[parent.name] if parent is not None else 0.0
- for _, parent in information[identifier]
- )
- inferred_depth = min(d for d in parent_depths) + 1.0
- else:
- inferred_depth = math.inf
- else:
- inferred_depth = 1.0
- self._known_depths[identifier] = inferred_depth
-
requested_order = self._user_requested.get(identifier, math.inf)
- # Requires-Python has only one candidate and the check is basically
- # free, so we always do it first to avoid needless work if it fails.
- requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER
-
- # Prefer the causes of backtracking on the assumption that the problem
- # resolving the dependency tree is related to the failures that caused
- # the backtracking
- backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)
-
return (
- not requires_python,
not direct,
not pinned,
- not backtrack_cause,
- inferred_depth,
+ not upper_bounded,
requested_order,
not unfree,
identifier,
@@ -238,21 +270,12 @@ class PipProvider(_ProviderBase):
is_satisfied_by=self.is_satisfied_by,
)
+ @staticmethod
@lru_cache(maxsize=None)
- def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool:
+ def is_satisfied_by(requirement: Requirement, candidate: Candidate) -> bool:
return requirement.is_satisfied_by(candidate)
- def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
+ def get_dependencies(self, candidate: Candidate) -> Iterable[Requirement]:
with_requires = not self._ignore_dependencies
- return [r for r in candidate.iter_dependencies(with_requires) if r is not None]
-
- @staticmethod
- def is_backtrack_cause(
- identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
- ) -> bool:
- for backtrack_cause in backtrack_causes:
- if identifier == backtrack_cause.requirement.name:
- return True
- if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
- return True
- return False
+ # iter_dependencies() can perform nontrivial work so delay until needed.
+ return (r for r in candidate.iter_dependencies(with_requires) if r is not None)
diff --git a/contrib/python/pip/pip/_internal/resolution/resolvelib/reporter.py b/contrib/python/pip/pip/_internal/resolution/resolvelib/reporter.py
index 0594569d850..f8ad815fe9f 100644
--- a/contrib/python/pip/pip/_internal/resolution/resolvelib/reporter.py
+++ b/contrib/python/pip/pip/_internal/resolution/resolvelib/reporter.py
@@ -1,6 +1,6 @@
from collections import defaultdict
from logging import getLogger
-from typing import Any, DefaultDict
+from typing import Any, DefaultDict, Optional
from pip._vendor.resolvelib.reporters import BaseReporter
@@ -9,7 +9,7 @@ from .base import Candidate, Requirement
logger = getLogger(__name__)
-class PipReporter(BaseReporter):
+class PipReporter(BaseReporter[Requirement, Candidate, str]):
def __init__(self) -> None:
self.reject_count_by_package: DefaultDict[str, int] = defaultdict(int)
@@ -55,7 +55,7 @@ class PipReporter(BaseReporter):
logger.debug(msg)
-class PipDebuggingReporter(BaseReporter):
+class PipDebuggingReporter(BaseReporter[Requirement, Candidate, str]):
"""A reporter that does an info log for every event it sees."""
def starting(self) -> None:
@@ -71,7 +71,9 @@ class PipDebuggingReporter(BaseReporter):
def ending(self, state: Any) -> None:
logger.info("Reporter.ending(%r)", state)
- def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
+ def adding_requirement(
+ self, requirement: Requirement, parent: Optional[Candidate]
+ ) -> None:
logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)
def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
diff --git a/contrib/python/pip/pip/_internal/resolution/resolvelib/resolver.py b/contrib/python/pip/pip/_internal/resolution/resolvelib/resolver.py
index c12beef0b2a..24c9b16996a 100644
--- a/contrib/python/pip/pip/_internal/resolution/resolvelib/resolver.py
+++ b/contrib/python/pip/pip/_internal/resolution/resolvelib/resolver.py
@@ -5,11 +5,12 @@ import os
from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast
from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
+from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible, ResolutionTooDeep
from pip._vendor.resolvelib import Resolver as RLResolver
from pip._vendor.resolvelib.structs import DirectedGraph
from pip._internal.cache import WheelCache
+from pip._internal.exceptions import ResolutionTooDeepError
from pip._internal.index.package_finder import PackageFinder
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.constructors import install_req_extend_extras
@@ -82,7 +83,7 @@ class Resolver(BaseResolver):
user_requested=collected.user_requested,
)
if "PIP_RESOLVER_DEBUG" in os.environ:
- reporter: BaseReporter = PipDebuggingReporter()
+ reporter: BaseReporter[Requirement, Candidate, str] = PipDebuggingReporter()
else:
reporter = PipReporter()
resolver: RLResolver[Requirement, Candidate, str] = RLResolver(
@@ -102,6 +103,8 @@ class Resolver(BaseResolver):
collected.constraints,
)
raise error from e
+ except ResolutionTooDeep:
+ raise ResolutionTooDeepError from None
req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
# process candidates with extras last to ensure their base equivalent is
diff --git a/contrib/python/pip/pip/_internal/utils/appdirs.py b/contrib/python/pip/pip/_internal/utils/appdirs.py
index 16933bf8afe..42b6e548bb2 100644
--- a/contrib/python/pip/pip/_internal/utils/appdirs.py
+++ b/contrib/python/pip/pip/_internal/utils/appdirs.py
@@ -42,7 +42,8 @@ def user_config_dir(appname: str, roaming: bool = True) -> str:
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname: str) -> List[str]:
if sys.platform == "darwin":
- return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)]
+ dirval = _appdirs.site_data_dir(appname, appauthor=False, multipath=True)
+ return dirval.split(os.pathsep)
dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
if sys.platform == "win32":
diff --git a/contrib/python/pip/pip/_internal/utils/compatibility_tags.py b/contrib/python/pip/pip/_internal/utils/compatibility_tags.py
index 2e7b7450dce..edbc7c37a7d 100644
--- a/contrib/python/pip/pip/_internal/utils/compatibility_tags.py
+++ b/contrib/python/pip/pip/_internal/utils/compatibility_tags.py
@@ -1,5 +1,4 @@
-"""Generate and work with PEP 425 Compatibility Tags.
-"""
+"""Generate and work with PEP 425 Compatibility Tags."""
import re
from typing import List, Optional, Tuple
@@ -7,6 +6,7 @@ from typing import List, Optional, Tuple
from pip._vendor.packaging.tags import (
PythonVersion,
Tag,
+ android_platforms,
compatible_tags,
cpython_tags,
generic_tags,
@@ -64,6 +64,16 @@ def _ios_platforms(arch: str) -> List[str]:
return arches
+def _android_platforms(arch: str) -> List[str]:
+ match = re.fullmatch(r"android_(\d+)_(.+)", arch)
+ if match:
+ api_level, abi = match.groups()
+ return list(android_platforms(int(api_level), abi))
+ else:
+ # arch pattern didn't match (?!)
+ return [arch]
+
+
def _custom_manylinux_platforms(arch: str) -> List[str]:
arches = [arch]
arch_prefix, arch_sep, arch_suffix = arch.partition("_")
@@ -91,6 +101,8 @@ def _get_custom_platforms(arch: str) -> List[str]:
arches = _mac_platforms(arch)
elif arch.startswith("ios"):
arches = _ios_platforms(arch)
+ elif arch_prefix == "android":
+ arches = _android_platforms(arch)
elif arch_prefix in ["manylinux2014", "manylinux2010"]:
arches = _custom_manylinux_platforms(arch)
else:
diff --git a/contrib/python/pip/pip/_internal/utils/datetime.py b/contrib/python/pip/pip/_internal/utils/datetime.py
index 8668b3b0ec1..776e49898f7 100644
--- a/contrib/python/pip/pip/_internal/utils/datetime.py
+++ b/contrib/python/pip/pip/_internal/utils/datetime.py
@@ -1,5 +1,4 @@
-"""For when pip wants to check the date or time.
-"""
+"""For when pip wants to check the date or time."""
import datetime
diff --git a/contrib/python/pip/pip/_internal/utils/entrypoints.py b/contrib/python/pip/pip/_internal/utils/entrypoints.py
index 15013693854..696148c5097 100644
--- a/contrib/python/pip/pip/_internal/utils/entrypoints.py
+++ b/contrib/python/pip/pip/_internal/utils/entrypoints.py
@@ -77,7 +77,10 @@ def get_best_invocation_for_this_python() -> str:
# Try to use the basename, if it's the first executable.
found_executable = shutil.which(exe_name)
- if found_executable and os.path.samefile(found_executable, exe):
+ # Virtual environments often symlink to their parent Python binaries, but we don't
+ # want to treat the Python binaries as equivalent when the environment's Python is
+ # not on PATH (not activated). Thus, we don't follow symlinks.
+ if found_executable and os.path.samestat(os.lstat(found_executable), os.lstat(exe)):
return exe_name
# Use the full executable name, because we couldn't find something simpler.
diff --git a/contrib/python/pip/pip/_internal/utils/filetypes.py b/contrib/python/pip/pip/_internal/utils/filetypes.py
index 5948570178f..5644638222c 100644
--- a/contrib/python/pip/pip/_internal/utils/filetypes.py
+++ b/contrib/python/pip/pip/_internal/utils/filetypes.py
@@ -1,5 +1,4 @@
-"""Filetype information.
-"""
+"""Filetype information."""
from typing import Tuple
diff --git a/contrib/python/pip/pip/_internal/utils/logging.py b/contrib/python/pip/pip/_internal/utils/logging.py
index 62035fc40ec..099a92c496d 100644
--- a/contrib/python/pip/pip/_internal/utils/logging.py
+++ b/contrib/python/pip/pip/_internal/utils/logging.py
@@ -8,7 +8,7 @@ import threading
from dataclasses import dataclass
from io import TextIOWrapper
from logging import Filter
-from typing import Any, ClassVar, Generator, List, Optional, TextIO, Type
+from typing import Any, ClassVar, Generator, List, Optional, Type
from pip._vendor.rich.console import (
Console,
@@ -29,6 +29,8 @@ from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
from pip._internal.utils.misc import ensure_dir
_log_state = threading.local()
+_stdout_console = None
+_stderr_console = None
subprocess_logger = getLogger("pip.subprocessor")
@@ -144,12 +146,21 @@ class PipConsole(Console):
raise BrokenPipeError() from None
+def get_console(*, stderr: bool = False) -> Console:
+ if stderr:
+ assert _stderr_console is not None, "stderr rich console is missing!"
+ return _stderr_console
+ else:
+ assert _stdout_console is not None, "stdout rich console is missing!"
+ return _stdout_console
+
+
class RichPipStreamHandler(RichHandler):
KEYWORDS: ClassVar[Optional[List[str]]] = []
- def __init__(self, stream: Optional[TextIO], no_color: bool) -> None:
+ def __init__(self, console: Console) -> None:
super().__init__(
- console=PipConsole(file=stream, no_color=no_color, soft_wrap=True),
+ console=console,
show_time=False,
show_level=False,
show_path=False,
@@ -266,10 +277,6 @@ def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str])
vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
# Shorthands for clarity
- log_streams = {
- "stdout": "ext://sys.stdout",
- "stderr": "ext://sys.stderr",
- }
handler_classes = {
"stream": "pip._internal.utils.logging.RichPipStreamHandler",
"file": "pip._internal.utils.logging.BetterRotatingFileHandler",
@@ -277,6 +284,9 @@ def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str])
handlers = ["console", "console_errors", "console_subprocess"] + (
["user_log"] if include_user_log else []
)
+ global _stdout_console, _stderr_console
+ _stdout_console = PipConsole(file=sys.stdout, no_color=no_color, soft_wrap=True)
+ _stderr_console = PipConsole(file=sys.stderr, no_color=no_color, soft_wrap=True)
logging.config.dictConfig(
{
@@ -311,16 +321,14 @@ def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str])
"console": {
"level": level,
"class": handler_classes["stream"],
- "no_color": no_color,
- "stream": log_streams["stdout"],
+ "console": _stdout_console,
"filters": ["exclude_subprocess", "exclude_warnings"],
"formatter": "indent",
},
"console_errors": {
"level": "WARNING",
"class": handler_classes["stream"],
- "no_color": no_color,
- "stream": log_streams["stderr"],
+ "console": _stderr_console,
"filters": ["exclude_subprocess"],
"formatter": "indent",
},
@@ -329,8 +337,7 @@ def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str])
"console_subprocess": {
"level": level,
"class": handler_classes["stream"],
- "stream": log_streams["stderr"],
- "no_color": no_color,
+ "console": _stderr_console,
"filters": ["restrict_to_subprocess"],
"formatter": "indent",
},
diff --git a/contrib/python/pip/pip/_internal/utils/misc.py b/contrib/python/pip/pip/_internal/utils/misc.py
index 44f6a05fbdd..156accda1bd 100644
--- a/contrib/python/pip/pip/_internal/utils/misc.py
+++ b/contrib/python/pip/pip/_internal/utils/misc.py
@@ -11,7 +11,6 @@ import sysconfig
import urllib.parse
from dataclasses import dataclass
from functools import partial
-from io import StringIO
from itertools import filterfalse, tee, zip_longest
from pathlib import Path
from types import FunctionType, TracebackType
@@ -26,7 +25,6 @@ from typing import (
Mapping,
Optional,
Sequence,
- TextIO,
Tuple,
Type,
TypeVar,
@@ -375,22 +373,6 @@ def write_output(msg: Any, *args: Any) -> None:
logger.info(msg, *args)
-class StreamWrapper(StringIO):
- orig_stream: TextIO
-
- @classmethod
- def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
- ret = cls()
- ret.orig_stream = orig_stream
- return ret
-
- # compileall.compile_dir() needs stdout.encoding to print to stdout
- # type ignore is because TextIOBase.encoding is writeable
- @property
- def encoding(self) -> str: # type: ignore
- return self.orig_stream.encoding
-
-
# Simulates an enum
def enum(*sequential: Any, **named: Any) -> Type[Any]:
enums = dict(zip(sequential, range(len(sequential))), **named)
diff --git a/contrib/python/pip/pip/_internal/utils/packaging.py b/contrib/python/pip/pip/_internal/utils/packaging.py
index caad70f7fd1..1295b7ffe20 100644
--- a/contrib/python/pip/pip/_internal/utils/packaging.py
+++ b/contrib/python/pip/pip/_internal/utils/packaging.py
@@ -1,13 +1,10 @@
import functools
import logging
-import re
-from typing import NewType, Optional, Tuple, cast
+from typing import Optional, Tuple
from pip._vendor.packaging import specifiers, version
from pip._vendor.packaging.requirements import Requirement
-NormalizedExtra = NewType("NormalizedExtra", str)
-
logger = logging.getLogger(__name__)
@@ -35,7 +32,7 @@ def check_requires_python(
return python_version in requires_python_specifier
-@functools.lru_cache(maxsize=2048)
+@functools.lru_cache(maxsize=10000)
def get_requirement(req_string: str) -> Requirement:
"""Construct a packaging.Requirement object with caching"""
# Parsing requirement strings is expensive, and is also expected to happen
@@ -44,15 +41,3 @@ def get_requirement(req_string: str) -> Requirement:
# minimize repeated parsing of the same string to construct equivalent
# Requirement objects.
return Requirement(req_string)
-
-
-def safe_extra(extra: str) -> NormalizedExtra:
- """Convert an arbitrary string to a standard 'extra' name
-
- Any runs of non-alphanumeric characters are replaced with a single '_',
- and the result is always lowercased.
-
- This function is duplicated from ``pkg_resources``. Note that this is not
- the same to either ``canonicalize_name`` or ``_egg_link_name``.
- """
- return cast(NormalizedExtra, re.sub("[^A-Za-z0-9.-]+", "_", extra).lower())
diff --git a/contrib/python/pip/pip/_internal/utils/setuptools_build.py b/contrib/python/pip/pip/_internal/utils/setuptools_build.py
index 96d1b246067..f178f4b3d99 100644
--- a/contrib/python/pip/pip/_internal/utils/setuptools_build.py
+++ b/contrib/python/pip/pip/_internal/utils/setuptools_build.py
@@ -17,16 +17,17 @@ _SETUPTOOLS_SHIM = textwrap.dedent(
# setuptools doesn't think the script is `-c`. This avoids the following warning:
# manifest_maker: standard file '-c' not found".
# - It generates a shim setup.py, for handling setup.cfg-only projects.
- import os, sys, tokenize
+ import os, sys, tokenize, traceback
try:
import setuptools
- except ImportError as error:
+ except ImportError:
print(
- "ERROR: Can not execute `setup.py` since setuptools is not available in "
- "the build environment.",
+ "ERROR: Can not execute `setup.py` since setuptools failed to import in "
+ "the build environment with exception:",
file=sys.stderr,
)
+ traceback.print_exc()
sys.exit(1)
__file__ = %r
diff --git a/contrib/python/pip/pip/_internal/utils/unpacking.py b/contrib/python/pip/pip/_internal/utils/unpacking.py
index 87a6d19ab5a..feb40f8289b 100644
--- a/contrib/python/pip/pip/_internal/utils/unpacking.py
+++ b/contrib/python/pip/pip/_internal/utils/unpacking.py
@@ -1,5 +1,4 @@
-"""Utilities related archives.
-"""
+"""Utilities related archives."""
import logging
import os
@@ -209,7 +208,6 @@ def untar_file(filename: str, location: str) -> None:
member = data_filter(member, location)
except tarfile.LinkOutsideDestinationError:
if sys.version_info[:3] in {
- (3, 8, 17),
(3, 9, 17),
(3, 10, 12),
(3, 11, 4),
diff --git a/contrib/python/pip/pip/_internal/utils/wheel.py b/contrib/python/pip/pip/_internal/utils/wheel.py
index f85aee8a3f9..70e186cdfd1 100644
--- a/contrib/python/pip/pip/_internal/utils/wheel.py
+++ b/contrib/python/pip/pip/_internal/utils/wheel.py
@@ -1,5 +1,4 @@
-"""Support functions for working with wheel files.
-"""
+"""Support functions for working with wheel files."""
import logging
from email.message import Message
diff --git a/contrib/python/pip/pip/_internal/vcs/git.py b/contrib/python/pip/pip/_internal/vcs/git.py
index 0425debb3ae..9c926e969f1 100644
--- a/contrib/python/pip/pip/_internal/vcs/git.py
+++ b/contrib/python/pip/pip/_internal/vcs/git.py
@@ -5,7 +5,7 @@ import re
import urllib.parse
import urllib.request
from dataclasses import replace
-from typing import List, Optional, Tuple
+from typing import Any, List, Optional, Tuple
from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.utils.misc import HiddenText, display_path, hide_url
@@ -77,6 +77,15 @@ class Git(VersionControl):
def get_base_rev_args(rev: str) -> List[str]:
return [rev]
+ @classmethod
+ def run_command(cls, *args: Any, **kwargs: Any) -> str:
+ if os.environ.get("PIP_NO_INPUT"):
+ extra_environ = kwargs.get("extra_environ", {})
+ extra_environ["GIT_TERMINAL_PROMPT"] = "0"
+ extra_environ["GIT_SSH_COMMAND"] = "ssh -oBatchMode=yes"
+ kwargs["extra_environ"] = extra_environ
+ return super().run_command(*args, **kwargs)
+
def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
_, rev_options = self.get_url_rev_options(hide_url(url))
if not rev_options.rev:
diff --git a/contrib/python/pip/pip/_internal/wheel_builder.py b/contrib/python/pip/pip/_internal/wheel_builder.py
index 93f8e1f5b2f..3cf02e01d98 100644
--- a/contrib/python/pip/pip/_internal/wheel_builder.py
+++ b/contrib/python/pip/pip/_internal/wheel_builder.py
@@ -1,5 +1,4 @@
-"""Orchestrator for building wheels from InstallRequirements.
-"""
+"""Orchestrator for building wheels from InstallRequirements."""
import logging
import os.path
@@ -44,29 +43,14 @@ def _contains_egg_info(s: str) -> bool:
def _should_build(
req: InstallRequirement,
- need_wheel: bool,
) -> bool:
"""Return whether an InstallRequirement should be built into a wheel."""
- if req.constraint:
- # never build requirements that are merely constraints
- return False
+ assert not req.constraint
+
if req.is_wheel:
- if need_wheel:
- logger.info(
- "Skipping %s, due to already being wheel.",
- req.name,
- )
return False
- if need_wheel:
- # i.e. pip wheel, not pip install
- return True
-
- # From this point, this concerns the pip install command only
- # (need_wheel=False).
-
- if not req.source_dir:
- return False
+ assert req.source_dir
if req.editable:
# we only build PEP 660 editable requirements
@@ -75,16 +59,10 @@ def _should_build(
return True
-def should_build_for_wheel_command(
- req: InstallRequirement,
-) -> bool:
- return _should_build(req, need_wheel=True)
-
-
def should_build_for_install_command(
req: InstallRequirement,
) -> bool:
- return _should_build(req, need_wheel=False)
+ return _should_build(req)
def _should_cache(
diff --git a/contrib/python/pip/pip/_vendor/__init__.py b/contrib/python/pip/pip/_vendor/__init__.py
index 561089ccc0c..34ccb990791 100644
--- a/contrib/python/pip/pip/_vendor/__init__.py
+++ b/contrib/python/pip/pip/_vendor/__init__.py
@@ -60,6 +60,7 @@ if DEBUNDLED:
# Actually alias all of our vendored dependencies.
vendored("cachecontrol")
vendored("certifi")
+ vendored("dependency-groups")
vendored("distlib")
vendored("distro")
vendored("packaging")
diff --git a/contrib/python/pip/pip/_vendor/cachecontrol/__init__.py b/contrib/python/pip/pip/_vendor/cachecontrol/__init__.py
index 21916243c56..7be1e04f526 100644
--- a/contrib/python/pip/pip/_vendor/cachecontrol/__init__.py
+++ b/contrib/python/pip/pip/_vendor/cachecontrol/__init__.py
@@ -9,7 +9,7 @@ Make it easy to import from cachecontrol without long namespaces.
__author__ = "Eric Larson"
__email__ = "eric@ionrock.org"
-__version__ = "0.14.1"
+__version__ = "0.14.2"
from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.controller import CacheController
diff --git a/contrib/python/pip/pip/_vendor/cachecontrol/adapter.py b/contrib/python/pip/pip/_vendor/cachecontrol/adapter.py
index 34a9eb82798..18084d12fae 100644
--- a/contrib/python/pip/pip/_vendor/cachecontrol/adapter.py
+++ b/contrib/python/pip/pip/_vendor/cachecontrol/adapter.py
@@ -5,6 +5,7 @@ from __future__ import annotations
import functools
import types
+import weakref
import zlib
from typing import TYPE_CHECKING, Any, Collection, Mapping
@@ -128,19 +129,25 @@ class CacheControlAdapter(HTTPAdapter):
response._fp = CallbackFileWrapper( # type: ignore[assignment]
response._fp, # type: ignore[arg-type]
functools.partial(
- self.controller.cache_response, request, response
+ self.controller.cache_response, request, weakref.ref(response)
),
)
if response.chunked:
- super_update_chunk_length = response._update_chunk_length
+ super_update_chunk_length = response.__class__._update_chunk_length
- def _update_chunk_length(self: HTTPResponse) -> None:
- super_update_chunk_length()
+ def _update_chunk_length(
+ weak_self: weakref.ReferenceType[HTTPResponse],
+ ) -> None:
+ self = weak_self()
+ if self is None:
+ return
+
+ super_update_chunk_length(self)
if self.chunk_left == 0:
self._fp._close() # type: ignore[union-attr]
- response._update_chunk_length = types.MethodType( # type: ignore[method-assign]
- _update_chunk_length, response
+ response._update_chunk_length = functools.partial( # type: ignore[method-assign]
+ _update_chunk_length, weakref.ref(response)
)
resp: Response = super().build_response(request, response)
diff --git a/contrib/python/pip/pip/_vendor/cachecontrol/caches/file_cache.py b/contrib/python/pip/pip/_vendor/cachecontrol/caches/file_cache.py
index 81d2ef46cf8..45c632c7f72 100644
--- a/contrib/python/pip/pip/_vendor/cachecontrol/caches/file_cache.py
+++ b/contrib/python/pip/pip/_vendor/cachecontrol/caches/file_cache.py
@@ -5,6 +5,7 @@ from __future__ import annotations
import hashlib
import os
+import tempfile
from textwrap import dedent
from typing import IO, TYPE_CHECKING
from pathlib import Path
@@ -18,47 +19,6 @@ if TYPE_CHECKING:
from filelock import BaseFileLock
-def _secure_open_write(filename: str, fmode: int) -> IO[bytes]:
- # We only want to write to this file, so open it in write only mode
- flags = os.O_WRONLY
-
- # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
- # will open *new* files.
- # We specify this because we want to ensure that the mode we pass is the
- # mode of the file.
- flags |= os.O_CREAT | os.O_EXCL
-
- # Do not follow symlinks to prevent someone from making a symlink that
- # we follow and insecurely open a cache file.
- if hasattr(os, "O_NOFOLLOW"):
- flags |= os.O_NOFOLLOW
-
- # On Windows we'll mark this file as binary
- if hasattr(os, "O_BINARY"):
- flags |= os.O_BINARY
-
- # Before we open our file, we want to delete any existing file that is
- # there
- try:
- os.remove(filename)
- except OSError:
- # The file must not exist already, so we can just skip ahead to opening
- pass
-
- # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
- # race condition happens between the os.remove and this line, that an
- # error will be raised. Because we utilize a lockfile this should only
- # happen if someone is attempting to attack us.
- fd = os.open(filename, flags, fmode)
- try:
- return os.fdopen(fd, "wb")
-
- except:
- # An error occurred wrapping our FD in a file object
- os.close(fd)
- raise
-
-
class _FileCacheMixin:
"""Shared implementation for both FileCache variants."""
@@ -122,15 +82,18 @@ class _FileCacheMixin:
Safely write the data to the given path.
"""
# Make sure the directory exists
- try:
- os.makedirs(os.path.dirname(path), self.dirmode)
- except OSError:
- pass
+ dirname = os.path.dirname(path)
+ os.makedirs(dirname, self.dirmode, exist_ok=True)
with self.lock_class(path + ".lock"):
# Write our actual file
- with _secure_open_write(path, self.filemode) as fh:
- fh.write(data)
+ (fd, name) = tempfile.mkstemp(dir=dirname)
+ try:
+ os.write(fd, data)
+ finally:
+ os.close(fd)
+ os.chmod(name, self.filemode)
+ os.replace(name, path)
def _delete(self, key: str, suffix: str) -> None:
name = self._fn(key) + suffix
diff --git a/contrib/python/pip/pip/_vendor/cachecontrol/controller.py b/contrib/python/pip/pip/_vendor/cachecontrol/controller.py
index f0ff6e1bedc..d92d991c001 100644
--- a/contrib/python/pip/pip/_vendor/cachecontrol/controller.py
+++ b/contrib/python/pip/pip/_vendor/cachecontrol/controller.py
@@ -12,6 +12,7 @@ import calendar
import logging
import re
import time
+import weakref
from email.utils import parsedate_tz
from typing import TYPE_CHECKING, Collection, Mapping
@@ -323,7 +324,7 @@ class CacheController:
def cache_response(
self,
request: PreparedRequest,
- response: HTTPResponse,
+ response_or_ref: HTTPResponse | weakref.ReferenceType[HTTPResponse],
body: bytes | None = None,
status_codes: Collection[int] | None = None,
) -> None:
@@ -332,6 +333,16 @@ class CacheController:
This assumes a requests Response object.
"""
+ if isinstance(response_or_ref, weakref.ReferenceType):
+ response = response_or_ref()
+ if response is None:
+ # The weakref can be None only in case the user used streamed request
+ # and did not consume or close it, and holds no reference to requests.Response.
+ # In such case, we don't want to cache the response.
+ return
+ else:
+ response = response_or_ref
+
# From httplib2: Don't cache 206's since we aren't going to
# handle byte range requests
cacheable_status_codes = status_codes or self.cacheable_status_codes
diff --git a/contrib/python/pip/pip/_vendor/certifi/__init__.py b/contrib/python/pip/pip/_vendor/certifi/__init__.py
index f61d77fa382..177082e0fb1 100644
--- a/contrib/python/pip/pip/_vendor/certifi/__init__.py
+++ b/contrib/python/pip/pip/_vendor/certifi/__init__.py
@@ -1,4 +1,4 @@
from .core import contents, where
__all__ = ["contents", "where"]
-__version__ = "2024.08.30"
+__version__ = "2025.01.31"
diff --git a/contrib/python/pip/pip/_vendor/certifi/cacert.pem b/contrib/python/pip/pip/_vendor/certifi/cacert.pem
index 3c165a1b85e..860f259bd7e 100644
--- a/contrib/python/pip/pip/_vendor/certifi/cacert.pem
+++ b/contrib/python/pip/pip/_vendor/certifi/cacert.pem
@@ -474,47 +474,6 @@ ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
-----END CERTIFICATE-----
-# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
-# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
-# Label: "SwissSign Silver CA - G2"
-# Serial: 5700383053117599563
-# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
-# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
-# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
------BEGIN CERTIFICATE-----
-MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
-BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
-IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
-RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
-U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
-MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
-Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
-YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
-nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
-6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
-eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
-c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
-MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
-HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
-jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
-5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
-rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
-F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
-wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
-cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
-AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
-WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
-xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
-2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
-IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
-aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
-em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
-dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
-OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
-hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
-tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
------END CERTIFICATE-----
-
# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
# Subject: CN=SecureTrust CA O=SecureTrust Corporation
# Label: "SecureTrust CA"
@@ -763,35 +722,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
-----END CERTIFICATE-----
-# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
-# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
-# Label: "SecureSign RootCA11"
-# Serial: 1
-# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
-# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
-# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
------BEGIN CERTIFICATE-----
-MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
-MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
-A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
-MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
-Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
-QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
-i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
-h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
-MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
-UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
-8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
-h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
-VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
-AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
-KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
-X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
-QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
-pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
-QSdJQO7e5iNEOdyhIta6A/I=
------END CERTIFICATE-----
-
# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
# Label: "Microsec e-Szigno Root CA 2009"
@@ -3100,50 +3030,6 @@ LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
mpv0
-----END CERTIFICATE-----
-# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
-# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
-# Label: "Entrust Root Certification Authority - G4"
-# Serial: 289383649854506086828220374796556676440
-# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88
-# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01
-# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88
------BEGIN CERTIFICATE-----
-MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw
-gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL
-Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg
-MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw
-BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0
-MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT
-MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1
-c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ
-bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg
-Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B
-AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ
-2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E
-T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j
-5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM
-C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T
-DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX
-wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A
-2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm
-nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8
-dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl
-N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj
-c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
-VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS
-5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS
-Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr
-hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/
-B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI
-AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw
-H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+
-b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk
-2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol
-IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk
-5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY
-n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw==
------END CERTIFICATE-----
-
# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
# Label: "Microsoft ECC Root Certificate Authority 2017"
@@ -3485,6 +3371,46 @@ DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ
+RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A=
-----END CERTIFICATE-----
+# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
+# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH
+# Label: "GLOBALTRUST 2020"
+# Serial: 109160994242082918454945253
+# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8
+# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2
+# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a
+-----BEGIN CERTIFICATE-----
+MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG
+A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw
+FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx
+MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u
+aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq
+hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b
+RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z
+YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3
+QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw
+yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+
+BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ
+SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH
+r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0
+4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me
+dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw
+q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2
+nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu
+H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA
+VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC
+XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd
+6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf
++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi
+kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7
+wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB
+TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C
+MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn
+4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I
+aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy
+qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg==
+-----END CERTIFICATE-----
+
# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
# Label: "ANF Secure Server Root CA"
@@ -4214,46 +4140,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG
BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR
-----END CERTIFICATE-----
-# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
-# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
-# Label: "Security Communication RootCA3"
-# Serial: 16247922307909811815
-# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26
-# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a
-# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94
------BEGIN CERTIFICATE-----
-MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV
-BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw
-JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2
-MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc
-U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg
-Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
-CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r
-CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA
-lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG
-TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7
-9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7
-8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4
-g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we
-GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst
-+3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M
-0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ
-T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw
-HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP
-BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS
-YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA
-FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd
-9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI
-UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+
-OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke
-gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf
-iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV
-nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD
-2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI//
-1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad
-TdJ0MN1kURXbg4NR16/9M51NZg==
------END CERTIFICATE-----
-
# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
# Label: "Security Communication ECC RootCA1"
@@ -4927,3 +4813,85 @@ Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT
4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6
bkU6iYAZezKYVWOr62Nuk22rGwlgMU4=
-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH
+# Subject: CN=D-TRUST BR Root CA 2 2023 O=D-Trust GmbH
+# Label: "D-TRUST BR Root CA 2 2023"
+# Serial: 153168538924886464690566649552453098598
+# MD5 Fingerprint: e1:09:ed:d3:60:d4:56:1b:47:1f:b7:0c:5f:1b:5f:85
+# SHA1 Fingerprint: 2d:b0:70:ee:71:94:af:69:68:17:db:79:ce:58:9f:a0:6b:96:f7:87
+# SHA256 Fingerprint: 05:52:e6:f8:3f:df:65:e8:fa:96:70:e6:66:df:28:a4:e2:13:40:b5:10:cb:e5:25:66:f9:7c:4f:b9:4b:2b:d1
+-----BEGIN CERTIFICATE-----
+MIIFqTCCA5GgAwIBAgIQczswBEhb2U14LnNLyaHcZjANBgkqhkiG9w0BAQ0FADBI
+MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE
+LVRSVVNUIEJSIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA4NTYzMVoXDTM4MDUw
+OTA4NTYzMFowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi
+MCAGA1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAK7/CVmRgApKaOYkP7in5Mg6CjoWzckjYaCTcfKr
+i3OPoGdlYNJUa2NRb0kz4HIHE304zQaSBylSa053bATTlfrdTIzZXcFhfUvnKLNE
+gXtRr90zsWh81k5M/itoucpmacTsXld/9w3HnDY25QdgrMBM6ghs7wZ8T1soegj8
+k12b9py0i4a6Ibn08OhZWiihNIQaJZG2tY/vsvmA+vk9PBFy2OMvhnbFeSzBqZCT
+Rphny4NqoFAjpzv2gTng7fC5v2Xx2Mt6++9zA84A9H3X4F07ZrjcjrqDy4d2A/wl
+2ecjbwb9Z/Pg/4S8R7+1FhhGaRTMBffb00msa8yr5LULQyReS2tNZ9/WtT5PeB+U
+cSTq3nD88ZP+npNa5JRal1QMNXtfbO4AHyTsA7oC9Xb0n9Sa7YUsOCIvx9gvdhFP
+/Wxc6PWOJ4d/GUohR5AdeY0cW/jPSoXk7bNbjb7EZChdQcRurDhaTyN0dKkSw/bS
+uREVMweR2Ds3OmMwBtHFIjYoYiMQ4EbMl6zWK11kJNXuHA7e+whadSr2Y23OC0K+
+0bpwHJwh5Q8xaRfX/Aq03u2AnMuStIv13lmiWAmlY0cL4UEyNEHZmrHZqLAbWt4N
+DfTisl01gLmB1IRpkQLLddCNxbU9CZEJjxShFHR5PtbJFR2kWVki3PaKRT08EtY+
+XTIvAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUZ5Dw1t61
+GNVGKX5cq/ieCLxklRAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG
+OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfYnJfcm9vdF9jYV8y
+XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQA097N3U9swFrktpSHxQCF16+tI
+FoE9c+CeJyrrd6kTpGoKWloUMz1oH4Guaf2Mn2VsNELZLdB/eBaxOqwjMa1ef67n
+riv6uvw8l5VAk1/DLQOj7aRvU9f6QA4w9QAgLABMjDu0ox+2v5Eyq6+SmNMW5tTR
+VFxDWy6u71cqqLRvpO8NVhTaIasgdp4D/Ca4nj8+AybmTNudX0KEPUUDAxxZiMrc
+LmEkWqTqJwtzEr5SswrPMhfiHocaFpVIbVrg0M8JkiZmkdijYQ6qgYF/6FKC0ULn
+4B0Y+qSFNueG4A3rvNTJ1jxD8V1Jbn6Bm2m1iWKPiFLY1/4nwSPFyysCu7Ff/vtD
+hQNGvl3GyiEm/9cCnnRK3PgTFbGBVzbLZVzRHTF36SXDw7IyN9XxmAnkbWOACKsG
+koHU6XCPpz+y7YaMgmo1yEJagtFSGkUPFaUA8JR7ZSdXOUPPfH/mvTWze/EZTN46
+ls/pdu4D58JDUjxqgejBWoC9EV2Ta/vH5mQ/u2kc6d0li690yVRAysuTEwrt+2aS
+Ecr1wPrYg1UDfNPFIkZ1cGt5SAYqgpq/5usWDiJFAbzdNpQ0qTUmiteXue4Icr80
+knCDgKs4qllo3UCkGJCy89UDyibK79XH4I9TjvAA46jtn/mtd+ArY0+ew+43u3gJ
+hJ65bvspmZDogNOfJA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH
+# Subject: CN=D-TRUST EV Root CA 2 2023 O=D-Trust GmbH
+# Label: "D-TRUST EV Root CA 2 2023"
+# Serial: 139766439402180512324132425437959641711
+# MD5 Fingerprint: 96:b4:78:09:f0:09:cb:77:eb:bb:1b:4d:6f:36:bc:b6
+# SHA1 Fingerprint: a5:5b:d8:47:6c:8f:19:f7:4c:f4:6d:6b:b6:c2:79:82:22:df:54:8b
+# SHA256 Fingerprint: 8e:82:21:b2:e7:d4:00:78:36:a1:67:2f:0d:cc:29:9c:33:bc:07:d3:16:f1:32:fa:1a:20:6d:58:71:50:f1:ce
+-----BEGIN CERTIFICATE-----
+MIIFqTCCA5GgAwIBAgIQaSYJfoBLTKCnjHhiU19abzANBgkqhkiG9w0BAQ0FADBI
+MQswCQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlE
+LVRSVVNUIEVWIFJvb3QgQ0EgMiAyMDIzMB4XDTIzMDUwOTA5MTAzM1oXDTM4MDUw
+OTA5MTAzMlowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEi
+MCAGA1UEAxMZRC1UUlVTVCBFViBSb290IENBIDIgMjAyMzCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBANiOo4mAC7JXUtypU0w3uX9jFxPvp1sjW2l1sJkK
+F8GLxNuo4MwxusLyzV3pt/gdr2rElYfXR8mV2IIEUD2BCP/kPbOx1sWy/YgJ25yE
+7CUXFId/MHibaljJtnMoPDT3mfd/06b4HEV8rSyMlD/YZxBTfiLNTiVR8CUkNRFe
+EMbsh2aJgWi6zCudR3Mfvc2RpHJqnKIbGKBv7FD0fUDCqDDPvXPIEysQEx6Lmqg6
+lHPTGGkKSv/BAQP/eX+1SH977ugpbzZMlWGG2Pmic4ruri+W7mjNPU0oQvlFKzIb
+RlUWaqZLKfm7lVa/Rh3sHZMdwGWyH6FDrlaeoLGPaxK3YG14C8qKXO0elg6DpkiV
+jTujIcSuWMYAsoS0I6SWhjW42J7YrDRJmGOVxcttSEfi8i4YHtAxq9107PncjLgc
+jmgjutDzUNzPZY9zOjLHfP7KgiJPvo5iR2blzYfi6NUPGJ/lBHJLRjwQ8kTCZFZx
+TnXonMkmdMV9WdEKWw9t/p51HBjGGjp82A0EzM23RWV6sY+4roRIPrN6TagD4uJ+
+ARZZaBhDM7DS3LAaQzXupdqpRlyuhoFBAUp0JuyfBr/CBTdkdXgpaP3F9ev+R/nk
+hbDhezGdpn9yo7nELC7MmVcOIQxFAZRl62UJxmMiCzNJkkg8/M3OsD6Onov4/knF
+NXJHAgMBAAGjgY4wgYswDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUqvyREBuH
+kV8Wub9PS5FeAByxMoAwDgYDVR0PAQH/BAQDAgEGMEkGA1UdHwRCMEAwPqA8oDqG
+OGh0dHA6Ly9jcmwuZC10cnVzdC5uZXQvY3JsL2QtdHJ1c3RfZXZfcm9vdF9jYV8y
+XzIwMjMuY3JsMA0GCSqGSIb3DQEBDQUAA4ICAQCTy6UfmRHsmg1fLBWTxj++EI14
+QvBukEdHjqOSMo1wj/Zbjb6JzkcBahsgIIlbyIIQbODnmaprxiqgYzWRaoUlrRc4
+pZt+UPJ26oUFKidBK7GB0aL2QHWpDsvxVUjY7NHss+jOFKE17MJeNRqrphYBBo7q
+3C+jisosketSjl8MmxfPy3MHGcRqwnNU73xDUmPBEcrCRbH0O1P1aa4846XerOhU
+t7KR/aypH/KH5BfGSah82ApB9PI+53c0BFLd6IHyTS9URZ0V4U/M5d40VxDJI3IX
+cI1QcB9WbMy5/zpaT2N6w25lBx2Eof+pDGOJbbJAiDnXH3dotfyc1dZnaVuodNv8
+ifYbMvekJKZ2t0dT741Jj6m2g1qllpBFYfXeA08mD6iL8AOWsKwV0HFaanuU5nCT
+2vFp4LJiTZ6P/4mdm13NRemUAiKN4DV/6PEEeXFsVIP4M7kFMhtYVRFP0OUnR3Hs
+7dpn1mKmS00PaaLJvOwiS5THaJQXfuKOKD62xur1NGyfN4gHONuGcfrNlUhDbqNP
+gofXNJhuS5N5YHVpD/Aa1VP6IQzCP+k/HxiMkl14p3ZnGbuy6n/pcAlWVqOwDAst
+Nl7F6cTVg8uGF5csbBNvh1qvSaYd2804BC5f4ko1Di1L+KIkBI3Y4WNeApI02phh
+XBxvWHZks/wCuPWdCg==
+-----END CERTIFICATE-----
diff --git a/contrib/python/pip/pip/_vendor/dependency_groups/__init__.py b/contrib/python/pip/pip/_vendor/dependency_groups/__init__.py
new file mode 100644
index 00000000000..9fec2029949
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/dependency_groups/__init__.py
@@ -0,0 +1,13 @@
+from ._implementation import (
+ CyclicDependencyError,
+ DependencyGroupInclude,
+ DependencyGroupResolver,
+ resolve,
+)
+
+__all__ = (
+ "CyclicDependencyError",
+ "DependencyGroupInclude",
+ "DependencyGroupResolver",
+ "resolve",
+)
diff --git a/contrib/python/pip/pip/_vendor/dependency_groups/__main__.py b/contrib/python/pip/pip/_vendor/dependency_groups/__main__.py
new file mode 100644
index 00000000000..48ebb0d41cf
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/dependency_groups/__main__.py
@@ -0,0 +1,65 @@
+import argparse
+import sys
+
+from ._implementation import resolve
+from ._toml_compat import tomllib
+
+
+def main() -> None:
+ if tomllib is None:
+ print(
+ "Usage error: dependency-groups CLI requires tomli or Python 3.11+",
+ file=sys.stderr,
+ )
+ raise SystemExit(2)
+
+ parser = argparse.ArgumentParser(
+ description=(
+ "A dependency-groups CLI. Prints out a resolved group, newline-delimited."
+ )
+ )
+ parser.add_argument(
+ "GROUP_NAME", nargs="*", help="The dependency group(s) to resolve."
+ )
+ parser.add_argument(
+ "-f",
+ "--pyproject-file",
+ default="pyproject.toml",
+ help="The pyproject.toml file. Defaults to trying in the current directory.",
+ )
+ parser.add_argument(
+ "-o",
+ "--output",
+ help="An output file. Defaults to stdout.",
+ )
+ parser.add_argument(
+ "-l",
+ "--list",
+ action="store_true",
+ help="List the available dependency groups",
+ )
+ args = parser.parse_args()
+
+ with open(args.pyproject_file, "rb") as fp:
+ pyproject = tomllib.load(fp)
+
+ dependency_groups_raw = pyproject.get("dependency-groups", {})
+
+ if args.list:
+ print(*dependency_groups_raw.keys())
+ return
+ if not args.GROUP_NAME:
+ print("A GROUP_NAME is required", file=sys.stderr)
+ raise SystemExit(3)
+
+ content = "\n".join(resolve(dependency_groups_raw, *args.GROUP_NAME))
+
+ if args.output is None or args.output == "-":
+ print(content)
+ else:
+ with open(args.output, "w", encoding="utf-8") as fp:
+ print(content, file=fp)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/contrib/python/pip/pip/_vendor/dependency_groups/_implementation.py b/contrib/python/pip/pip/_vendor/dependency_groups/_implementation.py
new file mode 100644
index 00000000000..80d91693820
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/dependency_groups/_implementation.py
@@ -0,0 +1,213 @@
+from __future__ import annotations
+
+import dataclasses
+import re
+from collections.abc import Mapping
+
+from pip._vendor.packaging.requirements import Requirement
+
+
+def _normalize_name(name: str) -> str:
+ return re.sub(r"[-_.]+", "-", name).lower()
+
+
+def _normalize_group_names(
+ dependency_groups: Mapping[str, str | Mapping[str, str]]
+) -> Mapping[str, str | Mapping[str, str]]:
+ original_names: dict[str, list[str]] = {}
+ normalized_groups = {}
+
+ for group_name, value in dependency_groups.items():
+ normed_group_name = _normalize_name(group_name)
+ original_names.setdefault(normed_group_name, []).append(group_name)
+ normalized_groups[normed_group_name] = value
+
+ errors = []
+ for normed_name, names in original_names.items():
+ if len(names) > 1:
+ errors.append(f"{normed_name} ({', '.join(names)})")
+ if errors:
+ raise ValueError(f"Duplicate dependency group names: {', '.join(errors)}")
+
+ return normalized_groups
+
+
+@dataclasses.dataclass
+class DependencyGroupInclude:
+ include_group: str
+
+
+class CyclicDependencyError(ValueError):
+ """
+ An error representing the detection of a cycle.
+ """
+
+ def __init__(self, requested_group: str, group: str, include_group: str) -> None:
+ self.requested_group = requested_group
+ self.group = group
+ self.include_group = include_group
+
+ if include_group == group:
+ reason = f"{group} includes itself"
+ else:
+ reason = f"{include_group} -> {group}, {group} -> {include_group}"
+ super().__init__(
+ "Cyclic dependency group include while resolving "
+ f"{requested_group}: {reason}"
+ )
+
+
+class DependencyGroupResolver:
+ """
+ A resolver for Dependency Group data.
+
+ This class handles caching, name normalization, cycle detection, and other
+ parsing requirements. There are only two public methods for exploring the data:
+ ``lookup()`` and ``resolve()``.
+
+ :param dependency_groups: A mapping, as provided via pyproject
+ ``[dependency-groups]``.
+ """
+
+ def __init__(
+ self,
+ dependency_groups: Mapping[str, str | Mapping[str, str]],
+ ) -> None:
+ if not isinstance(dependency_groups, Mapping):
+ raise TypeError("Dependency Groups table is not a mapping")
+ self.dependency_groups = _normalize_group_names(dependency_groups)
+ # a map of group names to parsed data
+ self._parsed_groups: dict[
+ str, tuple[Requirement | DependencyGroupInclude, ...]
+ ] = {}
+ # a map of group names to their ancestors, used for cycle detection
+ self._include_graph_ancestors: dict[str, tuple[str, ...]] = {}
+ # a cache of completed resolutions to Requirement lists
+ self._resolve_cache: dict[str, tuple[Requirement, ...]] = {}
+
+ def lookup(self, group: str) -> tuple[Requirement | DependencyGroupInclude, ...]:
+ """
+ Lookup a group name, returning the parsed dependency data for that group.
+ This will not resolve includes.
+
+ :param group: the name of the group to lookup
+
+ :raises ValueError: if the data does not appear to be valid dependency group
+ data
+ :raises TypeError: if the data is not a string
+ :raises LookupError: if group name is absent
+ :raises packaging.requirements.InvalidRequirement: if a specifier is not valid
+ """
+ if not isinstance(group, str):
+ raise TypeError("Dependency group name is not a str")
+ group = _normalize_name(group)
+ return self._parse_group(group)
+
+ def resolve(self, group: str) -> tuple[Requirement, ...]:
+ """
+ Resolve a dependency group to a list of requirements.
+
+ :param group: the name of the group to resolve
+
+ :raises TypeError: if the inputs appear to be the wrong types
+ :raises ValueError: if the data does not appear to be valid dependency group
+ data
+ :raises LookupError: if group name is absent
+ :raises packaging.requirements.InvalidRequirement: if a specifier is not valid
+ """
+ if not isinstance(group, str):
+ raise TypeError("Dependency group name is not a str")
+ group = _normalize_name(group)
+ return self._resolve(group, group)
+
+ def _parse_group(
+ self, group: str
+ ) -> tuple[Requirement | DependencyGroupInclude, ...]:
+ # short circuit -- never do the work twice
+ if group in self._parsed_groups:
+ return self._parsed_groups[group]
+
+ if group not in self.dependency_groups:
+ raise LookupError(f"Dependency group '{group}' not found")
+
+ raw_group = self.dependency_groups[group]
+ if not isinstance(raw_group, list):
+ raise TypeError(f"Dependency group '{group}' is not a list")
+
+ elements: list[Requirement | DependencyGroupInclude] = []
+ for item in raw_group:
+ if isinstance(item, str):
+ # packaging.requirements.Requirement parsing ensures that this is a
+ # valid PEP 508 Dependency Specifier
+ # raises InvalidRequirement on failure
+ elements.append(Requirement(item))
+ elif isinstance(item, dict):
+ if tuple(item.keys()) != ("include-group",):
+ raise ValueError(f"Invalid dependency group item: {item}")
+
+ include_group = next(iter(item.values()))
+ elements.append(DependencyGroupInclude(include_group=include_group))
+ else:
+ raise ValueError(f"Invalid dependency group item: {item}")
+
+ self._parsed_groups[group] = tuple(elements)
+ return self._parsed_groups[group]
+
+ def _resolve(self, group: str, requested_group: str) -> tuple[Requirement, ...]:
+ """
+ This is a helper for cached resolution to strings.
+
+ :param group: The name of the group to resolve.
+ :param requested_group: The group which was used in the original, user-facing
+ request.
+ """
+ if group in self._resolve_cache:
+ return self._resolve_cache[group]
+
+ parsed = self._parse_group(group)
+
+ resolved_group = []
+ for item in parsed:
+ if isinstance(item, Requirement):
+ resolved_group.append(item)
+ elif isinstance(item, DependencyGroupInclude):
+ if item.include_group in self._include_graph_ancestors.get(group, ()):
+ raise CyclicDependencyError(
+ requested_group, group, item.include_group
+ )
+ self._include_graph_ancestors[item.include_group] = (
+ *self._include_graph_ancestors.get(group, ()),
+ group,
+ )
+ resolved_group.extend(
+ self._resolve(item.include_group, requested_group)
+ )
+ else: # unreachable
+ raise NotImplementedError(
+ f"Invalid dependency group item after parse: {item}"
+ )
+
+ self._resolve_cache[group] = tuple(resolved_group)
+ return self._resolve_cache[group]
+
+
+def resolve(
+ dependency_groups: Mapping[str, str | Mapping[str, str]], /, *groups: str
+) -> tuple[str, ...]:
+ """
+ Resolve a dependency group to a tuple of requirements, as strings.
+
+ :param dependency_groups: the parsed contents of the ``[dependency-groups]`` table
+ from ``pyproject.toml``
+ :param groups: the name of the group(s) to resolve
+
+ :raises TypeError: if the inputs appear to be the wrong types
+ :raises ValueError: if the data does not appear to be valid dependency group data
+ :raises LookupError: if group name is absent
+ :raises packaging.requirements.InvalidRequirement: if a specifier is not valid
+ """
+ return tuple(
+ str(r)
+ for group in groups
+ for r in DependencyGroupResolver(dependency_groups).resolve(group)
+ )
diff --git a/contrib/python/pip/pip/_vendor/dependency_groups/_lint_dependency_groups.py b/contrib/python/pip/pip/_vendor/dependency_groups/_lint_dependency_groups.py
new file mode 100644
index 00000000000..09454bdc280
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/dependency_groups/_lint_dependency_groups.py
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+import argparse
+import sys
+
+from ._implementation import DependencyGroupResolver
+from ._toml_compat import tomllib
+
+
+def main(*, argv: list[str] | None = None) -> None:
+ if tomllib is None:
+ print(
+ "Usage error: dependency-groups CLI requires tomli or Python 3.11+",
+ file=sys.stderr,
+ )
+ raise SystemExit(2)
+
+ parser = argparse.ArgumentParser(
+ description=(
+ "Lint Dependency Groups for validity. "
+ "This will eagerly load and check all of your Dependency Groups."
+ )
+ )
+ parser.add_argument(
+ "-f",
+ "--pyproject-file",
+ default="pyproject.toml",
+ help="The pyproject.toml file. Defaults to trying in the current directory.",
+ )
+ args = parser.parse_args(argv if argv is not None else sys.argv[1:])
+
+ with open(args.pyproject_file, "rb") as fp:
+ pyproject = tomllib.load(fp)
+ dependency_groups_raw = pyproject.get("dependency-groups", {})
+
+ errors: list[str] = []
+ try:
+ resolver = DependencyGroupResolver(dependency_groups_raw)
+ except (ValueError, TypeError) as e:
+ errors.append(f"{type(e).__name__}: {e}")
+ else:
+ for groupname in resolver.dependency_groups:
+ try:
+ resolver.resolve(groupname)
+ except (LookupError, ValueError, TypeError) as e:
+ errors.append(f"{type(e).__name__}: {e}")
+
+ if errors:
+ print("errors encountered while examining dependency groups:")
+ for msg in errors:
+ print(f" {msg}")
+ sys.exit(1)
+ else:
+ print("ok")
+ sys.exit(0)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/contrib/python/pip/pip/_vendor/dependency_groups/_pip_wrapper.py b/contrib/python/pip/pip/_vendor/dependency_groups/_pip_wrapper.py
new file mode 100644
index 00000000000..f86d8961ba2
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/dependency_groups/_pip_wrapper.py
@@ -0,0 +1,62 @@
+from __future__ import annotations
+
+import argparse
+import subprocess
+import sys
+
+from ._implementation import DependencyGroupResolver
+from ._toml_compat import tomllib
+
+
+def _invoke_pip(deps: list[str]) -> None:
+ subprocess.check_call([sys.executable, "-m", "pip", "install", *deps])
+
+
+def main(*, argv: list[str] | None = None) -> None:
+ if tomllib is None:
+ print(
+ "Usage error: dependency-groups CLI requires tomli or Python 3.11+",
+ file=sys.stderr,
+ )
+ raise SystemExit(2)
+
+ parser = argparse.ArgumentParser(description="Install Dependency Groups.")
+ parser.add_argument(
+ "DEPENDENCY_GROUP", nargs="+", help="The dependency groups to install."
+ )
+ parser.add_argument(
+ "-f",
+ "--pyproject-file",
+ default="pyproject.toml",
+ help="The pyproject.toml file. Defaults to trying in the current directory.",
+ )
+ args = parser.parse_args(argv if argv is not None else sys.argv[1:])
+
+ with open(args.pyproject_file, "rb") as fp:
+ pyproject = tomllib.load(fp)
+ dependency_groups_raw = pyproject.get("dependency-groups", {})
+
+ errors: list[str] = []
+ resolved: list[str] = []
+ try:
+ resolver = DependencyGroupResolver(dependency_groups_raw)
+ except (ValueError, TypeError) as e:
+ errors.append(f"{type(e).__name__}: {e}")
+ else:
+ for groupname in args.DEPENDENCY_GROUP:
+ try:
+ resolved.extend(str(r) for r in resolver.resolve(groupname))
+ except (LookupError, ValueError, TypeError) as e:
+ errors.append(f"{type(e).__name__}: {e}")
+
+ if errors:
+ print("errors encountered while examining dependency groups:")
+ for msg in errors:
+ print(f" {msg}")
+ sys.exit(1)
+
+ _invoke_pip(resolved)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/contrib/python/pip/pip/_vendor/dependency_groups/_toml_compat.py b/contrib/python/pip/pip/_vendor/dependency_groups/_toml_compat.py
new file mode 100644
index 00000000000..8d6f921c2a5
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/dependency_groups/_toml_compat.py
@@ -0,0 +1,9 @@
+try:
+ import tomllib
+except ImportError:
+ try:
+ from pip._vendor import tomli as tomllib # type: ignore[no-redef, unused-ignore]
+ except ModuleNotFoundError: # pragma: no cover
+ tomllib = None # type: ignore[assignment, unused-ignore]
+
+__all__ = ("tomllib",)
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/compat/__init__.py b/contrib/python/pip/pip/_vendor/dependency_groups/py.typed
index e69de29bb2d..e69de29bb2d 100644
--- a/contrib/python/pip/pip/_vendor/resolvelib/compat/__init__.py
+++ b/contrib/python/pip/pip/_vendor/dependency_groups/py.typed
diff --git a/contrib/python/pip/pip/_vendor/packaging/__init__.py b/contrib/python/pip/pip/_vendor/packaging/__init__.py
index d79f73c574f..d45c22cfd88 100644
--- a/contrib/python/pip/pip/_vendor/packaging/__init__.py
+++ b/contrib/python/pip/pip/_vendor/packaging/__init__.py
@@ -6,7 +6,7 @@ __title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
-__version__ = "24.2"
+__version__ = "25.0"
__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"
diff --git a/contrib/python/pip/pip/_vendor/packaging/_elffile.py b/contrib/python/pip/pip/_vendor/packaging/_elffile.py
index 25f4282cc29..7a5afc33b0a 100644
--- a/contrib/python/pip/pip/_vendor/packaging/_elffile.py
+++ b/contrib/python/pip/pip/_vendor/packaging/_elffile.py
@@ -69,8 +69,7 @@ class ELFFile:
}[(self.capacity, self.encoding)]
except KeyError as e:
raise ELFInvalid(
- f"unrecognized capacity ({self.capacity}) or "
- f"encoding ({self.encoding})"
+ f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})"
) from e
try:
diff --git a/contrib/python/pip/pip/_vendor/packaging/_manylinux.py b/contrib/python/pip/pip/_vendor/packaging/_manylinux.py
index 61339a6fcc1..95f55762e86 100644
--- a/contrib/python/pip/pip/_vendor/packaging/_manylinux.py
+++ b/contrib/python/pip/pip/_vendor/packaging/_manylinux.py
@@ -161,8 +161,7 @@ def _parse_glibc_version(version_str: str) -> tuple[int, int]:
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
if not m:
warnings.warn(
- f"Expected glibc version with 2 components major.minor,"
- f" got: {version_str}",
+ f"Expected glibc version with 2 components major.minor, got: {version_str}",
RuntimeWarning,
stacklevel=2,
)
diff --git a/contrib/python/pip/pip/_vendor/packaging/_parser.py b/contrib/python/pip/pip/_vendor/packaging/_parser.py
index c1238c06eab..0007c0aa64a 100644
--- a/contrib/python/pip/pip/_vendor/packaging/_parser.py
+++ b/contrib/python/pip/pip/_vendor/packaging/_parser.py
@@ -349,6 +349,5 @@ def _parse_marker_op(tokenizer: Tokenizer) -> Op:
return Op(tokenizer.read().text)
else:
return tokenizer.raise_syntax_error(
- "Expected marker operator, one of "
- "<=, <, !=, ==, >=, >, ~=, ===, in, not in"
+ "Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in"
)
diff --git a/contrib/python/pip/pip/_vendor/packaging/_tokenizer.py b/contrib/python/pip/pip/_vendor/packaging/_tokenizer.py
index 89d041605c0..d28a9b6cf5d 100644
--- a/contrib/python/pip/pip/_vendor/packaging/_tokenizer.py
+++ b/contrib/python/pip/pip/_vendor/packaging/_tokenizer.py
@@ -68,7 +68,8 @@ DEFAULT_RULES: dict[str, str | re.Pattern[str]] = {
|platform[._](version|machine|python_implementation)
|python_implementation
|implementation_(name|version)
- |extra
+ |extras?
+ |dependency_groups
)\b
""",
re.VERBOSE,
@@ -119,9 +120,9 @@ class Tokenizer:
another check. If `peek` is set to `True`, the token is not loaded and
would need to be checked again.
"""
- assert (
- self.next_token is None
- ), f"Cannot check for {name!r}, already have {self.next_token!r}"
+ assert self.next_token is None, (
+ f"Cannot check for {name!r}, already have {self.next_token!r}"
+ )
assert name in self.rules, f"Unknown token name: {name!r}"
expression = self.rules[name]
diff --git a/contrib/python/pip/pip/_vendor/packaging/licenses/__init__.py b/contrib/python/pip/pip/_vendor/packaging/licenses/__init__.py
index 71a1a7794e0..031f277fc63 100644
--- a/contrib/python/pip/pip/_vendor/packaging/licenses/__init__.py
+++ b/contrib/python/pip/pip/_vendor/packaging/licenses/__init__.py
@@ -37,8 +37,8 @@ from typing import NewType, cast
from pip._vendor.packaging.licenses._spdx import EXCEPTIONS, LICENSES
__all__ = [
- "NormalizedLicenseExpression",
"InvalidLicenseExpression",
+ "NormalizedLicenseExpression",
"canonicalize_license_expression",
]
diff --git a/contrib/python/pip/pip/_vendor/packaging/markers.py b/contrib/python/pip/pip/_vendor/packaging/markers.py
index fb7f49cf8cd..e7cea57297a 100644
--- a/contrib/python/pip/pip/_vendor/packaging/markers.py
+++ b/contrib/python/pip/pip/_vendor/packaging/markers.py
@@ -8,7 +8,7 @@ import operator
import os
import platform
import sys
-from typing import Any, Callable, TypedDict, cast
+from typing import AbstractSet, Any, Callable, Literal, TypedDict, Union, cast
from ._parser import MarkerAtom, MarkerList, Op, Value, Variable
from ._parser import parse_marker as _parse_marker
@@ -17,6 +17,7 @@ from .specifiers import InvalidSpecifier, Specifier
from .utils import canonicalize_name
__all__ = [
+ "EvaluateContext",
"InvalidMarker",
"Marker",
"UndefinedComparison",
@@ -24,7 +25,9 @@ __all__ = [
"default_environment",
]
-Operator = Callable[[str, str], bool]
+Operator = Callable[[str, Union[str, AbstractSet[str]]], bool]
+EvaluateContext = Literal["metadata", "lock_file", "requirement"]
+MARKERS_ALLOWING_SET = {"extras", "dependency_groups"}
class InvalidMarker(ValueError):
@@ -174,13 +177,14 @@ _operators: dict[str, Operator] = {
}
-def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
- try:
- spec = Specifier("".join([op.serialize(), rhs]))
- except InvalidSpecifier:
- pass
- else:
- return spec.contains(lhs, prereleases=True)
+def _eval_op(lhs: str, op: Op, rhs: str | AbstractSet[str]) -> bool:
+ if isinstance(rhs, str):
+ try:
+ spec = Specifier("".join([op.serialize(), rhs]))
+ except InvalidSpecifier:
+ pass
+ else:
+ return spec.contains(lhs, prereleases=True)
oper: Operator | None = _operators.get(op.serialize())
if oper is None:
@@ -189,19 +193,29 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
return oper(lhs, rhs)
-def _normalize(*values: str, key: str) -> tuple[str, ...]:
+def _normalize(
+ lhs: str, rhs: str | AbstractSet[str], key: str
+) -> tuple[str, str | AbstractSet[str]]:
# PEP 685 – Comparison of extra names for optional distribution dependencies
# https://peps.python.org/pep-0685/
# > When comparing extra names, tools MUST normalize the names being
# > compared using the semantics outlined in PEP 503 for names
if key == "extra":
- return tuple(canonicalize_name(v) for v in values)
+ assert isinstance(rhs, str), "extra value must be a string"
+ return (canonicalize_name(lhs), canonicalize_name(rhs))
+ if key in MARKERS_ALLOWING_SET:
+ if isinstance(rhs, str): # pragma: no cover
+ return (canonicalize_name(lhs), canonicalize_name(rhs))
+ else:
+ return (canonicalize_name(lhs), {canonicalize_name(v) for v in rhs})
# other environment markers don't have such standards
- return values
+ return lhs, rhs
-def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool:
+def _evaluate_markers(
+ markers: MarkerList, environment: dict[str, str | AbstractSet[str]]
+) -> bool:
groups: list[list[bool]] = [[]]
for marker in markers:
@@ -220,7 +234,7 @@ def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool:
lhs_value = lhs.value
environment_key = rhs.value
rhs_value = environment[environment_key]
-
+ assert isinstance(lhs_value, str), "lhs must be a string"
lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
groups[-1].append(_eval_op(lhs_value, op, rhs_value))
else:
@@ -298,22 +312,36 @@ class Marker:
return str(self) == str(other)
- def evaluate(self, environment: dict[str, str] | None = None) -> bool:
+ def evaluate(
+ self,
+ environment: dict[str, str] | None = None,
+ context: EvaluateContext = "metadata",
+ ) -> bool:
"""Evaluate a marker.
Return the boolean from evaluating the given marker against the
environment. environment is an optional argument to override all or
- part of the determined environment.
+ part of the determined environment. The *context* parameter specifies what
+ context the markers are being evaluated for, which influences what markers
+ are considered valid. Acceptable values are "metadata" (for core metadata;
+ default), "lock_file", and "requirement" (i.e. all other situations).
The environment is determined from the current Python process.
"""
- current_environment = cast("dict[str, str]", default_environment())
- current_environment["extra"] = ""
+ current_environment = cast(
+ "dict[str, str | AbstractSet[str]]", default_environment()
+ )
+ if context == "lock_file":
+ current_environment.update(
+ extras=frozenset(), dependency_groups=frozenset()
+ )
+ elif context == "metadata":
+ current_environment["extra"] = ""
if environment is not None:
current_environment.update(environment)
# The API used to allow setting extra to None. We need to handle this
# case for backwards compatibility.
- if current_environment["extra"] is None:
+ if "extra" in current_environment and current_environment["extra"] is None:
current_environment["extra"] = ""
return _evaluate_markers(
@@ -321,11 +349,14 @@ class Marker:
)
-def _repair_python_full_version(env: dict[str, str]) -> dict[str, str]:
+def _repair_python_full_version(
+ env: dict[str, str | AbstractSet[str]],
+) -> dict[str, str | AbstractSet[str]]:
"""
Work around platform.python_version() returning something that is not PEP 440
compliant for non-tagged Python builds.
"""
- if env["python_full_version"].endswith("+"):
- env["python_full_version"] += "local"
+ python_full_version = cast(str, env["python_full_version"])
+ if python_full_version.endswith("+"):
+ env["python_full_version"] = f"{python_full_version}local"
return env
diff --git a/contrib/python/pip/pip/_vendor/packaging/metadata.py b/contrib/python/pip/pip/_vendor/packaging/metadata.py
index 721f411cfc4..3bd8602d36c 100644
--- a/contrib/python/pip/pip/_vendor/packaging/metadata.py
+++ b/contrib/python/pip/pip/_vendor/packaging/metadata.py
@@ -678,8 +678,7 @@ class _Validator(Generic[T]):
)
if pathlib.PureWindowsPath(path).as_posix() != path:
raise self._invalid_metadata(
- f"{path!r} is invalid for {{field}}, "
- "paths must use '/' delimiter"
+ f"{path!r} is invalid for {{field}}, paths must use '/' delimiter"
)
paths.append(path)
return paths
diff --git a/contrib/python/pip/pip/_vendor/packaging/specifiers.py b/contrib/python/pip/pip/_vendor/packaging/specifiers.py
index f18016e1663..47c3929a1d8 100644
--- a/contrib/python/pip/pip/_vendor/packaging/specifiers.py
+++ b/contrib/python/pip/pip/_vendor/packaging/specifiers.py
@@ -816,8 +816,7 @@ class SpecifierSet(BaseSpecifier):
specifier._prereleases = self._prereleases
else:
raise ValueError(
- "Cannot combine SpecifierSets with True and False prerelease "
- "overrides."
+ "Cannot combine SpecifierSets with True and False prerelease overrides."
)
return specifier
diff --git a/contrib/python/pip/pip/_vendor/packaging/tags.py b/contrib/python/pip/pip/_vendor/packaging/tags.py
index f5903402abb..8522f59c4f2 100644
--- a/contrib/python/pip/pip/_vendor/packaging/tags.py
+++ b/contrib/python/pip/pip/_vendor/packaging/tags.py
@@ -530,6 +530,43 @@ def ios_platforms(
)
+def android_platforms(
+ api_level: int | None = None, abi: str | None = None
+) -> Iterator[str]:
+ """
+ Yields the :attr:`~Tag.platform` tags for Android. If this function is invoked on
+ non-Android platforms, the ``api_level`` and ``abi`` arguments are required.
+
+ :param int api_level: The maximum `API level
+ <https://developer.android.com/tools/releases/platforms>`__ to return. Defaults
+ to the current system's version, as returned by ``platform.android_ver``.
+ :param str abi: The `Android ABI <https://developer.android.com/ndk/guides/abis>`__,
+        e.g. ``arm64_v8a``. Defaults to the current system's ABI, as returned by
+ ``sysconfig.get_platform``. Hyphens and periods will be replaced with
+ underscores.
+ """
+ if platform.system() != "Android" and (api_level is None or abi is None):
+ raise TypeError(
+ "on non-Android platforms, the api_level and abi arguments are required"
+ )
+
+ if api_level is None:
+ # Python 3.13 was the first version to return platform.system() == "Android",
+ # and also the first version to define platform.android_ver().
+ api_level = platform.android_ver().api_level # type: ignore[attr-defined]
+
+ if abi is None:
+ abi = sysconfig.get_platform().split("-")[-1]
+ abi = _normalize_string(abi)
+
+ # 16 is the minimum API level known to have enough features to support CPython
+ # without major patching. Yield every API level from the maximum down to the
+ # minimum, inclusive.
+ min_api_level = 16
+ for ver in range(api_level, min_api_level - 1, -1):
+ yield f"android_{ver}_{abi}"
+
+
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
linux = _normalize_string(sysconfig.get_platform())
if not linux.startswith("linux_"):
@@ -561,6 +598,8 @@ def platform_tags() -> Iterator[str]:
return mac_platforms()
elif platform.system() == "iOS":
return ios_platforms()
+ elif platform.system() == "Android":
+ return android_platforms()
elif platform.system() == "Linux":
return _linux_platforms()
else:
diff --git a/contrib/python/pip/pip/_vendor/platformdirs/__init__.py b/contrib/python/pip/pip/_vendor/platformdirs/__init__.py
index edc21fad2e9..2325ec2eb6f 100644
--- a/contrib/python/pip/pip/_vendor/platformdirs/__init__.py
+++ b/contrib/python/pip/pip/_vendor/platformdirs/__init__.py
@@ -52,7 +52,7 @@ AppDirs = PlatformDirs #: Backwards compatibility with appdirs
def user_data_dir(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -76,7 +76,7 @@ def user_data_dir(
def site_data_dir(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
multipath: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -100,7 +100,7 @@ def site_data_dir(
def user_config_dir(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -124,7 +124,7 @@ def user_config_dir(
def site_config_dir(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
multipath: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -148,7 +148,7 @@ def site_config_dir(
def user_cache_dir(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -172,7 +172,7 @@ def user_cache_dir(
def site_cache_dir(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -196,7 +196,7 @@ def site_cache_dir(
def user_state_dir(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -220,7 +220,7 @@ def user_state_dir(
def user_log_dir(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -274,7 +274,7 @@ def user_desktop_dir() -> str:
def user_runtime_dir(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -298,7 +298,7 @@ def user_runtime_dir(
def site_runtime_dir(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -322,7 +322,7 @@ def site_runtime_dir(
def user_data_path(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -346,7 +346,7 @@ def user_data_path(
def site_data_path(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
multipath: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -370,7 +370,7 @@ def site_data_path(
def user_config_path(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -394,7 +394,7 @@ def user_config_path(
def site_config_path(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
multipath: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -418,7 +418,7 @@ def site_config_path(
def site_cache_path(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -442,7 +442,7 @@ def site_cache_path(
def user_cache_path(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -466,7 +466,7 @@ def user_cache_path(
def user_state_path(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -490,7 +490,7 @@ def user_state_path(
def user_log_path(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -544,7 +544,7 @@ def user_desktop_path() -> Path:
def user_runtime_path(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
@@ -568,7 +568,7 @@ def user_runtime_path(
def site_runtime_path(
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
opinion: bool = True, # noqa: FBT001, FBT002
ensure_exists: bool = False, # noqa: FBT001, FBT002
diff --git a/contrib/python/pip/pip/_vendor/platformdirs/android.py b/contrib/python/pip/pip/_vendor/platformdirs/android.py
index 7004a852422..92efc852d38 100644
--- a/contrib/python/pip/pip/_vendor/platformdirs/android.py
+++ b/contrib/python/pip/pip/_vendor/platformdirs/android.py
@@ -23,7 +23,7 @@ class Android(PlatformDirsABC):
@property
def user_data_dir(self) -> str:
""":return: data directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/files/<AppName>``"""
- return self._append_app_name_and_version(cast(str, _android_folder()), "files")
+ return self._append_app_name_and_version(cast("str", _android_folder()), "files")
@property
def site_data_dir(self) -> str:
@@ -36,7 +36,7 @@ class Android(PlatformDirsABC):
:return: config directory tied to the user, e.g. \
``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
"""
- return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs")
+ return self._append_app_name_and_version(cast("str", _android_folder()), "shared_prefs")
@property
def site_config_dir(self) -> str:
@@ -46,7 +46,7 @@ class Android(PlatformDirsABC):
@property
def user_cache_dir(self) -> str:
""":return: cache directory tied to the user, e.g.,``/data/user/<userid>/<packagename>/cache/<AppName>``"""
- return self._append_app_name_and_version(cast(str, _android_folder()), "cache")
+ return self._append_app_name_and_version(cast("str", _android_folder()), "cache")
@property
def site_cache_dir(self) -> str:
diff --git a/contrib/python/pip/pip/_vendor/platformdirs/api.py b/contrib/python/pip/pip/_vendor/platformdirs/api.py
index 18d660e4f8c..a352035ec69 100644
--- a/contrib/python/pip/pip/_vendor/platformdirs/api.py
+++ b/contrib/python/pip/pip/_vendor/platformdirs/api.py
@@ -8,7 +8,8 @@ from pathlib import Path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
- from typing import Iterator, Literal
+ from collections.abc import Iterator
+ from typing import Literal
class PlatformDirsABC(ABC): # noqa: PLR0904
@@ -17,7 +18,7 @@ class PlatformDirsABC(ABC): # noqa: PLR0904
def __init__( # noqa: PLR0913, PLR0917
self,
appname: str | None = None,
- appauthor: str | None | Literal[False] = None,
+ appauthor: str | Literal[False] | None = None,
version: str | None = None,
roaming: bool = False, # noqa: FBT001, FBT002
multipath: bool = False, # noqa: FBT001, FBT002
diff --git a/contrib/python/pip/pip/_vendor/platformdirs/unix.py b/contrib/python/pip/pip/_vendor/platformdirs/unix.py
index f1942e92ef4..fc75d8d0747 100644
--- a/contrib/python/pip/pip/_vendor/platformdirs/unix.py
+++ b/contrib/python/pip/pip/_vendor/platformdirs/unix.py
@@ -6,10 +6,13 @@ import os
import sys
from configparser import ConfigParser
from pathlib import Path
-from typing import Iterator, NoReturn
+from typing import TYPE_CHECKING, NoReturn
from .api import PlatformDirsABC
+if TYPE_CHECKING:
+ from collections.abc import Iterator
+
if sys.platform == "win32":
def getuid() -> NoReturn:
diff --git a/contrib/python/pip/pip/_vendor/platformdirs/version.py b/contrib/python/pip/pip/_vendor/platformdirs/version.py
index afb49243e3d..ed85187adca 100644
--- a/contrib/python/pip/pip/_vendor/platformdirs/version.py
+++ b/contrib/python/pip/pip/_vendor/platformdirs/version.py
@@ -1,8 +1,13 @@
-# file generated by setuptools_scm
+# file generated by setuptools-scm
# don't change, don't track in version control
+
+__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
TYPE_CHECKING = False
if TYPE_CHECKING:
- from typing import Tuple, Union
+ from typing import Tuple
+ from typing import Union
+
VERSION_TUPLE = Tuple[Union[int, str], ...]
else:
VERSION_TUPLE = object
@@ -12,5 +17,5 @@ __version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE
-__version__ = version = '4.3.6'
-__version_tuple__ = version_tuple = (4, 3, 6)
+__version__ = version = '4.3.7'
+__version_tuple__ = version_tuple = (4, 3, 7)
diff --git a/contrib/python/pip/pip/_vendor/pygments/__init__.py b/contrib/python/pip/pip/_vendor/pygments/__init__.py
index 60ae9bb8508..38e059a35d9 100644
--- a/contrib/python/pip/pip/_vendor/pygments/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/__init__.py
@@ -21,12 +21,12 @@
.. _Pygments master branch:
https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from io import StringIO, BytesIO
-__version__ = '2.18.0'
+__version__ = '2.19.1'
__docformat__ = 'restructuredtext'
__all__ = ['lex', 'format', 'highlight']
diff --git a/contrib/python/pip/pip/_vendor/pygments/__main__.py b/contrib/python/pip/pip/_vendor/pygments/__main__.py
index dcc6e5add71..a2e612f51a8 100644
--- a/contrib/python/pip/pip/_vendor/pygments/__main__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/__main__.py
@@ -4,7 +4,7 @@
Main entry point for ``python -m pygments``.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/cmdline.py b/contrib/python/pip/pip/_vendor/pygments/cmdline.py
deleted file mode 100644
index 0a7072eff3e..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/cmdline.py
+++ /dev/null
@@ -1,668 +0,0 @@
-"""
- pygments.cmdline
- ~~~~~~~~~~~~~~~~
-
- Command line interface.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import os
-import sys
-import shutil
-import argparse
-from textwrap import dedent
-
-from pip._vendor.pygments import __version__, highlight
-from pip._vendor.pygments.util import ClassNotFound, OptionError, docstring_headline, \
- guess_decode, guess_decode_from_terminal, terminal_encoding, \
- UnclosingTextIOWrapper
-from pip._vendor.pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \
- load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename
-from pip._vendor.pygments.lexers.special import TextLexer
-from pip._vendor.pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter
-from pip._vendor.pygments.formatters import get_all_formatters, get_formatter_by_name, \
- load_formatter_from_file, get_formatter_for_filename, find_formatter_class
-from pip._vendor.pygments.formatters.terminal import TerminalFormatter
-from pip._vendor.pygments.formatters.terminal256 import Terminal256Formatter, TerminalTrueColorFormatter
-from pip._vendor.pygments.filters import get_all_filters, find_filter_class
-from pip._vendor.pygments.styles import get_all_styles, get_style_by_name
-
-
-def _parse_options(o_strs):
- opts = {}
- if not o_strs:
- return opts
- for o_str in o_strs:
- if not o_str.strip():
- continue
- o_args = o_str.split(',')
- for o_arg in o_args:
- o_arg = o_arg.strip()
- try:
- o_key, o_val = o_arg.split('=', 1)
- o_key = o_key.strip()
- o_val = o_val.strip()
- except ValueError:
- opts[o_arg] = True
- else:
- opts[o_key] = o_val
- return opts
-
-
-def _parse_filters(f_strs):
- filters = []
- if not f_strs:
- return filters
- for f_str in f_strs:
- if ':' in f_str:
- fname, fopts = f_str.split(':', 1)
- filters.append((fname, _parse_options([fopts])))
- else:
- filters.append((f_str, {}))
- return filters
-
-
-def _print_help(what, name):
- try:
- if what == 'lexer':
- cls = get_lexer_by_name(name)
- print(f"Help on the {cls.name} lexer:")
- print(dedent(cls.__doc__))
- elif what == 'formatter':
- cls = find_formatter_class(name)
- print(f"Help on the {cls.name} formatter:")
- print(dedent(cls.__doc__))
- elif what == 'filter':
- cls = find_filter_class(name)
- print(f"Help on the {name} filter:")
- print(dedent(cls.__doc__))
- return 0
- except (AttributeError, ValueError):
- print(f"{what} not found!", file=sys.stderr)
- return 1
-
-
-def _print_list(what):
- if what == 'lexer':
- print()
- print("Lexers:")
- print("~~~~~~~")
-
- info = []
- for fullname, names, exts, _ in get_all_lexers():
- tup = (', '.join(names)+':', fullname,
- exts and '(filenames ' + ', '.join(exts) + ')' or '')
- info.append(tup)
- info.sort()
- for i in info:
- print(('* {}\n {} {}').format(*i))
-
- elif what == 'formatter':
- print()
- print("Formatters:")
- print("~~~~~~~~~~~")
-
- info = []
- for cls in get_all_formatters():
- doc = docstring_headline(cls)
- tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and
- '(filenames ' + ', '.join(cls.filenames) + ')' or '')
- info.append(tup)
- info.sort()
- for i in info:
- print(('* {}\n {} {}').format(*i))
-
- elif what == 'filter':
- print()
- print("Filters:")
- print("~~~~~~~~")
-
- for name in get_all_filters():
- cls = find_filter_class(name)
- print("* " + name + ':')
- print(f" {docstring_headline(cls)}")
-
- elif what == 'style':
- print()
- print("Styles:")
- print("~~~~~~~")
-
- for name in get_all_styles():
- cls = get_style_by_name(name)
- print("* " + name + ':')
- print(f" {docstring_headline(cls)}")
-
-
-def _print_list_as_json(requested_items):
- import json
- result = {}
- if 'lexer' in requested_items:
- info = {}
- for fullname, names, filenames, mimetypes in get_all_lexers():
- info[fullname] = {
- 'aliases': names,
- 'filenames': filenames,
- 'mimetypes': mimetypes
- }
- result['lexers'] = info
-
- if 'formatter' in requested_items:
- info = {}
- for cls in get_all_formatters():
- doc = docstring_headline(cls)
- info[cls.name] = {
- 'aliases': cls.aliases,
- 'filenames': cls.filenames,
- 'doc': doc
- }
- result['formatters'] = info
-
- if 'filter' in requested_items:
- info = {}
- for name in get_all_filters():
- cls = find_filter_class(name)
- info[name] = {
- 'doc': docstring_headline(cls)
- }
- result['filters'] = info
-
- if 'style' in requested_items:
- info = {}
- for name in get_all_styles():
- cls = get_style_by_name(name)
- info[name] = {
- 'doc': docstring_headline(cls)
- }
- result['styles'] = info
-
- json.dump(result, sys.stdout)
-
-def main_inner(parser, argns):
- if argns.help:
- parser.print_help()
- return 0
-
- if argns.V:
- print(f'Pygments version {__version__}, (c) 2006-2024 by Georg Brandl, Matthäus '
- 'Chajdas and contributors.')
- return 0
-
- def is_only_option(opt):
- return not any(v for (k, v) in vars(argns).items() if k != opt)
-
- # handle ``pygmentize -L``
- if argns.L is not None:
- arg_set = set()
- for k, v in vars(argns).items():
- if v:
- arg_set.add(k)
-
- arg_set.discard('L')
- arg_set.discard('json')
-
- if arg_set:
- parser.print_help(sys.stderr)
- return 2
-
- # print version
- if not argns.json:
- main(['', '-V'])
- allowed_types = {'lexer', 'formatter', 'filter', 'style'}
- largs = [arg.rstrip('s') for arg in argns.L]
- if any(arg not in allowed_types for arg in largs):
- parser.print_help(sys.stderr)
- return 0
- if not largs:
- largs = allowed_types
- if not argns.json:
- for arg in largs:
- _print_list(arg)
- else:
- _print_list_as_json(largs)
- return 0
-
- # handle ``pygmentize -H``
- if argns.H:
- if not is_only_option('H'):
- parser.print_help(sys.stderr)
- return 2
- what, name = argns.H
- if what not in ('lexer', 'formatter', 'filter'):
- parser.print_help(sys.stderr)
- return 2
- return _print_help(what, name)
-
- # parse -O options
- parsed_opts = _parse_options(argns.O or [])
-
- # parse -P options
- for p_opt in argns.P or []:
- try:
- name, value = p_opt.split('=', 1)
- except ValueError:
- parsed_opts[p_opt] = True
- else:
- parsed_opts[name] = value
-
- # encodings
- inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding'))
- outencoding = parsed_opts.get('outencoding', parsed_opts.get('encoding'))
-
- # handle ``pygmentize -N``
- if argns.N:
- lexer = find_lexer_class_for_filename(argns.N)
- if lexer is None:
- lexer = TextLexer
-
- print(lexer.aliases[0])
- return 0
-
- # handle ``pygmentize -C``
- if argns.C:
- inp = sys.stdin.buffer.read()
- try:
- lexer = guess_lexer(inp, inencoding=inencoding)
- except ClassNotFound:
- lexer = TextLexer
-
- print(lexer.aliases[0])
- return 0
-
- # handle ``pygmentize -S``
- S_opt = argns.S
- a_opt = argns.a
- if S_opt is not None:
- f_opt = argns.f
- if not f_opt:
- parser.print_help(sys.stderr)
- return 2
- if argns.l or argns.INPUTFILE:
- parser.print_help(sys.stderr)
- return 2
-
- try:
- parsed_opts['style'] = S_opt
- fmter = get_formatter_by_name(f_opt, **parsed_opts)
- except ClassNotFound as err:
- print(err, file=sys.stderr)
- return 1
-
- print(fmter.get_style_defs(a_opt or ''))
- return 0
-
- # if no -S is given, -a is not allowed
- if argns.a is not None:
- parser.print_help(sys.stderr)
- return 2
-
- # parse -F options
- F_opts = _parse_filters(argns.F or [])
-
- # -x: allow custom (eXternal) lexers and formatters
- allow_custom_lexer_formatter = bool(argns.x)
-
- # select lexer
- lexer = None
-
- # given by name?
- lexername = argns.l
- if lexername:
- # custom lexer, located relative to user's cwd
- if allow_custom_lexer_formatter and '.py' in lexername:
- try:
- filename = None
- name = None
- if ':' in lexername:
- filename, name = lexername.rsplit(':', 1)
-
- if '.py' in name:
- # This can happen on Windows: If the lexername is
- # C:\lexer.py -- return to normal load path in that case
- name = None
-
- if filename and name:
- lexer = load_lexer_from_file(filename, name,
- **parsed_opts)
- else:
- lexer = load_lexer_from_file(lexername, **parsed_opts)
- except ClassNotFound as err:
- print('Error:', err, file=sys.stderr)
- return 1
- else:
- try:
- lexer = get_lexer_by_name(lexername, **parsed_opts)
- except (OptionError, ClassNotFound) as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- # read input code
- code = None
-
- if argns.INPUTFILE:
- if argns.s:
- print('Error: -s option not usable when input file specified',
- file=sys.stderr)
- return 2
-
- infn = argns.INPUTFILE
- try:
- with open(infn, 'rb') as infp:
- code = infp.read()
- except Exception as err:
- print('Error: cannot read infile:', err, file=sys.stderr)
- return 1
- if not inencoding:
- code, inencoding = guess_decode(code)
-
- # do we have to guess the lexer?
- if not lexer:
- try:
- lexer = get_lexer_for_filename(infn, code, **parsed_opts)
- except ClassNotFound as err:
- if argns.g:
- try:
- lexer = guess_lexer(code, **parsed_opts)
- except ClassNotFound:
- lexer = TextLexer(**parsed_opts)
- else:
- print('Error:', err, file=sys.stderr)
- return 1
- except OptionError as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- elif not argns.s: # treat stdin as full file (-s support is later)
- # read code from terminal, always in binary mode since we want to
- # decode ourselves and be tolerant with it
- code = sys.stdin.buffer.read() # use .buffer to get a binary stream
- if not inencoding:
- code, inencoding = guess_decode_from_terminal(code, sys.stdin)
- # else the lexer will do the decoding
- if not lexer:
- try:
- lexer = guess_lexer(code, **parsed_opts)
- except ClassNotFound:
- lexer = TextLexer(**parsed_opts)
-
- else: # -s option needs a lexer with -l
- if not lexer:
- print('Error: when using -s a lexer has to be selected with -l',
- file=sys.stderr)
- return 2
-
- # process filters
- for fname, fopts in F_opts:
- try:
- lexer.add_filter(fname, **fopts)
- except ClassNotFound as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- # select formatter
- outfn = argns.o
- fmter = argns.f
- if fmter:
- # custom formatter, located relative to user's cwd
- if allow_custom_lexer_formatter and '.py' in fmter:
- try:
- filename = None
- name = None
- if ':' in fmter:
- # Same logic as above for custom lexer
- filename, name = fmter.rsplit(':', 1)
-
- if '.py' in name:
- name = None
-
- if filename and name:
- fmter = load_formatter_from_file(filename, name,
- **parsed_opts)
- else:
- fmter = load_formatter_from_file(fmter, **parsed_opts)
- except ClassNotFound as err:
- print('Error:', err, file=sys.stderr)
- return 1
- else:
- try:
- fmter = get_formatter_by_name(fmter, **parsed_opts)
- except (OptionError, ClassNotFound) as err:
- print('Error:', err, file=sys.stderr)
- return 1
-
- if outfn:
- if not fmter:
- try:
- fmter = get_formatter_for_filename(outfn, **parsed_opts)
- except (OptionError, ClassNotFound) as err:
- print('Error:', err, file=sys.stderr)
- return 1
- try:
- outfile = open(outfn, 'wb')
- except Exception as err:
- print('Error: cannot open outfile:', err, file=sys.stderr)
- return 1
- else:
- if not fmter:
- if os.environ.get('COLORTERM','') in ('truecolor', '24bit'):
- fmter = TerminalTrueColorFormatter(**parsed_opts)
- elif '256' in os.environ.get('TERM', ''):
- fmter = Terminal256Formatter(**parsed_opts)
- else:
- fmter = TerminalFormatter(**parsed_opts)
- outfile = sys.stdout.buffer
-
- # determine output encoding if not explicitly selected
- if not outencoding:
- if outfn:
- # output file? use lexer encoding for now (can still be None)
- fmter.encoding = inencoding
- else:
- # else use terminal encoding
- fmter.encoding = terminal_encoding(sys.stdout)
-
- # provide coloring under Windows, if possible
- if not outfn and sys.platform in ('win32', 'cygwin') and \
- fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover
- # unfortunately colorama doesn't support binary streams on Py3
- outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding)
- fmter.encoding = None
- try:
- import colorama.initialise
- except ImportError:
- pass
- else:
- outfile = colorama.initialise.wrap_stream(
- outfile, convert=None, strip=None, autoreset=False, wrap=True)
-
- # When using the LaTeX formatter and the option `escapeinside` is
- # specified, we need a special lexer which collects escaped text
- # before running the chosen language lexer.
- escapeinside = parsed_opts.get('escapeinside', '')
- if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter):
- left = escapeinside[0]
- right = escapeinside[1]
- lexer = LatexEmbeddedLexer(left, right, lexer)
-
- # ... and do it!
- if not argns.s:
- # process whole input as per normal...
- try:
- highlight(code, lexer, fmter, outfile)
- finally:
- if outfn:
- outfile.close()
- return 0
- else:
- # line by line processing of stdin (eg: for 'tail -f')...
- try:
- while 1:
- line = sys.stdin.buffer.readline()
- if not line:
- break
- if not inencoding:
- line = guess_decode_from_terminal(line, sys.stdin)[0]
- highlight(line, lexer, fmter, outfile)
- if hasattr(outfile, 'flush'):
- outfile.flush()
- return 0
- except KeyboardInterrupt: # pragma: no cover
- return 0
- finally:
- if outfn:
- outfile.close()
-
-
-class HelpFormatter(argparse.HelpFormatter):
- def __init__(self, prog, indent_increment=2, max_help_position=16, width=None):
- if width is None:
- try:
- width = shutil.get_terminal_size().columns - 2
- except Exception:
- pass
- argparse.HelpFormatter.__init__(self, prog, indent_increment,
- max_help_position, width)
-
-
-def main(args=sys.argv):
- """
- Main command line entry point.
- """
- desc = "Highlight an input file and write the result to an output file."
- parser = argparse.ArgumentParser(description=desc, add_help=False,
- formatter_class=HelpFormatter)
-
- operation = parser.add_argument_group('Main operation')
- lexersel = operation.add_mutually_exclusive_group()
- lexersel.add_argument(
- '-l', metavar='LEXER',
- help='Specify the lexer to use. (Query names with -L.) If not '
- 'given and -g is not present, the lexer is guessed from the filename.')
- lexersel.add_argument(
- '-g', action='store_true',
- help='Guess the lexer from the file contents, or pass through '
- 'as plain text if nothing can be guessed.')
- operation.add_argument(
- '-F', metavar='FILTER[:options]', action='append',
- help='Add a filter to the token stream. (Query names with -L.) '
- 'Filter options are given after a colon if necessary.')
- operation.add_argument(
- '-f', metavar='FORMATTER',
- help='Specify the formatter to use. (Query names with -L.) '
- 'If not given, the formatter is guessed from the output filename, '
- 'and defaults to the terminal formatter if the output is to the '
- 'terminal or an unknown file extension.')
- operation.add_argument(
- '-O', metavar='OPTION=value[,OPTION=value,...]', action='append',
- help='Give options to the lexer and formatter as a comma-separated '
- 'list of key-value pairs. '
- 'Example: `-O bg=light,python=cool`.')
- operation.add_argument(
- '-P', metavar='OPTION=value', action='append',
- help='Give a single option to the lexer and formatter - with this '
- 'you can pass options whose value contains commas and equal signs. '
- 'Example: `-P "heading=Pygments, the Python highlighter"`.')
- operation.add_argument(
- '-o', metavar='OUTPUTFILE',
- help='Where to write the output. Defaults to standard output.')
-
- operation.add_argument(
- 'INPUTFILE', nargs='?',
- help='Where to read the input. Defaults to standard input.')
-
- flags = parser.add_argument_group('Operation flags')
- flags.add_argument(
- '-v', action='store_true',
- help='Print a detailed traceback on unhandled exceptions, which '
- 'is useful for debugging and bug reports.')
- flags.add_argument(
- '-s', action='store_true',
- help='Process lines one at a time until EOF, rather than waiting to '
- 'process the entire file. This only works for stdin, only for lexers '
- 'with no line-spanning constructs, and is intended for streaming '
- 'input such as you get from `tail -f`. '
- 'Example usage: `tail -f sql.log | pygmentize -s -l sql`.')
- flags.add_argument(
- '-x', action='store_true',
- help='Allow custom lexers and formatters to be loaded from a .py file '
- 'relative to the current working directory. For example, '
- '`-l ./customlexer.py -x`. By default, this option expects a file '
- 'with a class named CustomLexer or CustomFormatter; you can also '
- 'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). '
- 'Users should be very careful not to use this option with untrusted '
- 'files, because it will import and run them.')
- flags.add_argument('--json', help='Output as JSON. This can '
- 'be only used in conjunction with -L.',
- default=False,
- action='store_true')
-
- special_modes_group = parser.add_argument_group(
- 'Special modes - do not do any highlighting')
- special_modes = special_modes_group.add_mutually_exclusive_group()
- special_modes.add_argument(
- '-S', metavar='STYLE -f formatter',
- help='Print style definitions for STYLE for a formatter '
- 'given with -f. The argument given by -a is formatter '
- 'dependent.')
- special_modes.add_argument(
- '-L', nargs='*', metavar='WHAT',
- help='List lexers, formatters, styles or filters -- '
- 'give additional arguments for the thing(s) you want to list '
- '(e.g. "styles"), or omit them to list everything.')
- special_modes.add_argument(
- '-N', metavar='FILENAME',
- help='Guess and print out a lexer name based solely on the given '
- 'filename. Does not take input or highlight anything. If no specific '
- 'lexer can be determined, "text" is printed.')
- special_modes.add_argument(
- '-C', action='store_true',
- help='Like -N, but print out a lexer name based solely on '
- 'a given content from standard input.')
- special_modes.add_argument(
- '-H', action='store', nargs=2, metavar=('NAME', 'TYPE'),
- help='Print detailed help for the object <name> of type <type>, '
- 'where <type> is one of "lexer", "formatter" or "filter".')
- special_modes.add_argument(
- '-V', action='store_true',
- help='Print the package version.')
- special_modes.add_argument(
- '-h', '--help', action='store_true',
- help='Print this help.')
- special_modes_group.add_argument(
- '-a', metavar='ARG',
- help='Formatter-specific additional argument for the -S (print '
- 'style sheet) mode.')
-
- argns = parser.parse_args(args[1:])
-
- try:
- return main_inner(parser, argns)
- except BrokenPipeError:
- # someone closed our stdout, e.g. by quitting a pager.
- return 0
- except Exception:
- if argns.v:
- print(file=sys.stderr)
- print('*' * 65, file=sys.stderr)
- print('An unhandled exception occurred while highlighting.',
- file=sys.stderr)
- print('Please report the whole traceback to the issue tracker at',
- file=sys.stderr)
- print('<https://github.com/pygments/pygments/issues>.',
- file=sys.stderr)
- print('*' * 65, file=sys.stderr)
- print(file=sys.stderr)
- raise
- import traceback
- info = traceback.format_exception(*sys.exc_info())
- msg = info[-1].strip()
- if len(info) >= 3:
- # extract relevant file and position info
- msg += '\n (f{})'.format(info[-2].split('\n')[0].strip()[1:])
- print(file=sys.stderr)
- print('*** Error while highlighting:', file=sys.stderr)
- print(msg, file=sys.stderr)
- print('*** If this is a bug you want to report, please rerun with -v.',
- file=sys.stderr)
- return 1
diff --git a/contrib/python/pip/pip/_vendor/pygments/console.py b/contrib/python/pip/pip/_vendor/pygments/console.py
index 4c1a06219ca..ee1ac27a2ff 100644
--- a/contrib/python/pip/pip/_vendor/pygments/console.py
+++ b/contrib/python/pip/pip/_vendor/pygments/console.py
@@ -4,7 +4,7 @@
Format colored console output.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/filter.py b/contrib/python/pip/pip/_vendor/pygments/filter.py
index aa6f76041b6..5efff438d2b 100644
--- a/contrib/python/pip/pip/_vendor/pygments/filter.py
+++ b/contrib/python/pip/pip/_vendor/pygments/filter.py
@@ -4,7 +4,7 @@
Module that implements the default filter.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/filters/__init__.py b/contrib/python/pip/pip/_vendor/pygments/filters/__init__.py
index 9255ca224db..97380c92d48 100644
--- a/contrib/python/pip/pip/_vendor/pygments/filters/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/filters/__init__.py
@@ -5,7 +5,7 @@
Module containing filter lookup functions and default
filters.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatter.py b/contrib/python/pip/pip/_vendor/pygments/formatter.py
index d2666037f7a..0041e41a187 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatter.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatter.py
@@ -4,7 +4,7 @@
Base formatter class.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/__init__.py b/contrib/python/pip/pip/_vendor/pygments/formatters/__init__.py
index f19e9931f07..014f2ee8d15 100644
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/formatters/__init__.py
@@ -4,7 +4,7 @@
Pygments formatters.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/bbcode.py b/contrib/python/pip/pip/_vendor/pygments/formatters/bbcode.py
deleted file mode 100644
index 5a05bd961de..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/bbcode.py
+++ /dev/null
@@ -1,108 +0,0 @@
-"""
- pygments.formatters.bbcode
- ~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- BBcode formatter.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.util import get_bool_opt
-
-__all__ = ['BBCodeFormatter']
-
-
-class BBCodeFormatter(Formatter):
- """
- Format tokens with BBcodes. These formatting codes are used by many
- bulletin boards, so you can highlight your sourcecode with pygments before
- posting it there.
-
- This formatter has no support for background colors and borders, as there
- are no common BBcode tags for that.
-
- Some board systems (e.g. phpBB) don't support colors in their [code] tag,
- so you can't use the highlighting together with that tag.
- Text in a [code] tag usually is shown with a monospace font (which this
- formatter can do with the ``monofont`` option) and no spaces (which you
- need for indentation) are removed.
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `codetag`
- If set to true, put the output into ``[code]`` tags (default:
- ``false``)
-
- `monofont`
- If set to true, add a tag to show the code with a monospace font
- (default: ``false``).
- """
- name = 'BBCode'
- aliases = ['bbcode', 'bb']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self._code = get_bool_opt(options, 'codetag', False)
- self._mono = get_bool_opt(options, 'monofont', False)
-
- self.styles = {}
- self._make_styles()
-
- def _make_styles(self):
- for ttype, ndef in self.style:
- start = end = ''
- if ndef['color']:
- start += '[color=#{}]'.format(ndef['color'])
- end = '[/color]' + end
- if ndef['bold']:
- start += '[b]'
- end = '[/b]' + end
- if ndef['italic']:
- start += '[i]'
- end = '[/i]' + end
- if ndef['underline']:
- start += '[u]'
- end = '[/u]' + end
- # there are no common BBcodes for background-color and border
-
- self.styles[ttype] = start, end
-
- def format_unencoded(self, tokensource, outfile):
- if self._code:
- outfile.write('[code]')
- if self._mono:
- outfile.write('[font=monospace]')
-
- lastval = ''
- lasttype = None
-
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- if ttype == lasttype:
- lastval += value
- else:
- if lastval:
- start, end = self.styles[lasttype]
- outfile.write(''.join((start, lastval, end)))
- lastval = value
- lasttype = ttype
-
- if lastval:
- start, end = self.styles[lasttype]
- outfile.write(''.join((start, lastval, end)))
-
- if self._mono:
- outfile.write('[/font]')
- if self._code:
- outfile.write('[/code]')
- if self._code or self._mono:
- outfile.write('\n')
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/groff.py b/contrib/python/pip/pip/_vendor/pygments/formatters/groff.py
deleted file mode 100644
index 5c8a958f8d7..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/groff.py
+++ /dev/null
@@ -1,170 +0,0 @@
-"""
- pygments.formatters.groff
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for groff output.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import math
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.util import get_bool_opt, get_int_opt
-
-__all__ = ['GroffFormatter']
-
-
-class GroffFormatter(Formatter):
- """
- Format tokens with groff escapes to change their color and font style.
-
- .. versionadded:: 2.11
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `monospaced`
- If set to true, monospace font will be used (default: ``true``).
-
- `linenos`
- If set to true, print the line numbers (default: ``false``).
-
- `wrap`
- Wrap lines to the specified number of characters. Disabled if set to 0
- (default: ``0``).
- """
-
- name = 'groff'
- aliases = ['groff','troff','roff']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
-
- self.monospaced = get_bool_opt(options, 'monospaced', True)
- self.linenos = get_bool_opt(options, 'linenos', False)
- self._lineno = 0
- self.wrap = get_int_opt(options, 'wrap', 0)
- self._linelen = 0
-
- self.styles = {}
- self._make_styles()
-
-
- def _make_styles(self):
- regular = '\\f[CR]' if self.monospaced else '\\f[R]'
- bold = '\\f[CB]' if self.monospaced else '\\f[B]'
- italic = '\\f[CI]' if self.monospaced else '\\f[I]'
-
- for ttype, ndef in self.style:
- start = end = ''
- if ndef['color']:
- start += '\\m[{}]'.format(ndef['color'])
- end = '\\m[]' + end
- if ndef['bold']:
- start += bold
- end = regular + end
- if ndef['italic']:
- start += italic
- end = regular + end
- if ndef['bgcolor']:
- start += '\\M[{}]'.format(ndef['bgcolor'])
- end = '\\M[]' + end
-
- self.styles[ttype] = start, end
-
-
- def _define_colors(self, outfile):
- colors = set()
- for _, ndef in self.style:
- if ndef['color'] is not None:
- colors.add(ndef['color'])
-
- for color in sorted(colors):
- outfile.write('.defcolor ' + color + ' rgb #' + color + '\n')
-
-
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("%s% 4d " % (self._lineno != 1 and '\n' or '', self._lineno))
-
-
- def _wrap_line(self, line):
- length = len(line.rstrip('\n'))
- space = ' ' if self.linenos else ''
- newline = ''
-
- if length > self.wrap:
- for i in range(0, math.floor(length / self.wrap)):
- chunk = line[i*self.wrap:i*self.wrap+self.wrap]
- newline += (chunk + '\n' + space)
- remainder = length % self.wrap
- if remainder > 0:
- newline += line[-remainder-1:]
- self._linelen = remainder
- elif self._linelen + length > self.wrap:
- newline = ('\n' + space) + line
- self._linelen = length
- else:
- newline = line
- self._linelen += length
-
- return newline
-
-
- def _escape_chars(self, text):
- text = text.replace('\\', '\\[u005C]'). \
- replace('.', '\\[char46]'). \
- replace('\'', '\\[u0027]'). \
- replace('`', '\\[u0060]'). \
- replace('~', '\\[u007E]')
- copy = text
-
- for char in copy:
- if len(char) != len(char.encode()):
- uni = char.encode('unicode_escape') \
- .decode()[1:] \
- .replace('x', 'u00') \
- .upper()
- text = text.replace(char, '\\[u' + uni[1:] + ']')
-
- return text
-
-
- def format_unencoded(self, tokensource, outfile):
- self._define_colors(outfile)
-
- outfile.write('.nf\n\\f[CR]\n')
-
- if self.linenos:
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- start, end = self.styles[ttype]
-
- for line in value.splitlines(True):
- if self.wrap > 0:
- line = self._wrap_line(line)
-
- if start and end:
- text = self._escape_chars(line.rstrip('\n'))
- if text != '':
- outfile.write(''.join((start, text, end)))
- else:
- outfile.write(self._escape_chars(line.rstrip('\n')))
-
- if line.endswith('\n'):
- if self.linenos:
- self._write_lineno(outfile)
- self._linelen = 0
- else:
- outfile.write('\n')
- self._linelen = 0
-
- outfile.write('\n.fi')
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/html.py b/contrib/python/pip/pip/_vendor/pygments/formatters/html.py
deleted file mode 100644
index 7aa938f5119..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/html.py
+++ /dev/null
@@ -1,987 +0,0 @@
-"""
- pygments.formatters.html
- ~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for HTML output.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-import functools
-import os
-import sys
-import os.path
-from io import StringIO
-
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.token import Token, Text, STANDARD_TYPES
-from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt
-
-try:
- import ctags
-except ImportError:
- ctags = None
-
-__all__ = ['HtmlFormatter']
-
-
-_escape_html_table = {
- ord('&'): '&amp;',
- ord('<'): '&lt;',
- ord('>'): '&gt;',
- ord('"'): '&quot;',
- ord("'"): '&#39;',
-}
-
-
-def escape_html(text, table=_escape_html_table):
- """Escape &, <, > as well as single and double quotes for HTML."""
- return text.translate(table)
-
-
-def webify(color):
- if color.startswith('calc') or color.startswith('var'):
- return color
- else:
- return '#' + color
-
-
-def _get_ttype_class(ttype):
- fname = STANDARD_TYPES.get(ttype)
- if fname:
- return fname
- aname = ''
- while fname is None:
- aname = '-' + ttype[-1] + aname
- ttype = ttype.parent
- fname = STANDARD_TYPES.get(ttype)
- return fname + aname
-
-
-CSSFILE_TEMPLATE = '''\
-/*
-generated by Pygments <https://pygments.org/>
-Copyright 2006-2024 by the Pygments team.
-Licensed under the BSD license, see LICENSE for details.
-*/
-%(styledefs)s
-'''
-
-DOC_HEADER = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
- "http://www.w3.org/TR/html4/strict.dtd">
-<!--
-generated by Pygments <https://pygments.org/>
-Copyright 2006-2024 by the Pygments team.
-Licensed under the BSD license, see LICENSE for details.
--->
-<html>
-<head>
- <title>%(title)s</title>
- <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
- <style type="text/css">
-''' + CSSFILE_TEMPLATE + '''
- </style>
-</head>
-<body>
-<h2>%(title)s</h2>
-
-'''
-
-DOC_HEADER_EXTERNALCSS = '''\
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
- "http://www.w3.org/TR/html4/strict.dtd">
-
-<html>
-<head>
- <title>%(title)s</title>
- <meta http-equiv="content-type" content="text/html; charset=%(encoding)s">
- <link rel="stylesheet" href="%(cssfile)s" type="text/css">
-</head>
-<body>
-<h2>%(title)s</h2>
-
-'''
-
-DOC_FOOTER = '''\
-</body>
-</html>
-'''
-
-
-class HtmlFormatter(Formatter):
- r"""
- Format tokens as HTML 4 ``<span>`` tags. By default, the content is enclosed
- in a ``<pre>`` tag, itself wrapped in a ``<div>`` tag (but see the `nowrap` option).
- The ``<div>``'s CSS class can be set by the `cssclass` option.
-
- If the `linenos` option is set to ``"table"``, the ``<pre>`` is
- additionally wrapped inside a ``<table>`` which has one row and two
- cells: one containing the line numbers and one containing the code.
- Example:
-
- .. sourcecode:: html
-
- <div class="highlight" >
- <table><tr>
- <td class="linenos" title="click to toggle"
- onclick="with (this.firstChild.style)
- { display = (display == '') ? 'none' : '' }">
- <pre>1
- 2</pre>
- </td>
- <td class="code">
- <pre><span class="Ke">def </span><span class="NaFu">foo</span>(bar):
- <span class="Ke">pass</span>
- </pre>
- </td>
- </tr></table></div>
-
- (whitespace added to improve clarity).
-
- A list of lines can be specified using the `hl_lines` option to make these
- lines highlighted (as of Pygments 0.11).
-
- With the `full` option, a complete HTML 4 document is output, including
- the style definitions inside a ``<style>`` tag, or in a separate file if
- the `cssfile` option is given.
-
- When `tagsfile` is set to the path of a ctags index file, it is used to
- generate hyperlinks from names to their definition. You must enable
- `lineanchors` and run ctags with the `-n` option for this to work. The
- `python-ctags` module from PyPI must be installed to use this feature;
- otherwise a `RuntimeError` will be raised.
-
- The `get_style_defs(arg='')` method of a `HtmlFormatter` returns a string
- containing CSS rules for the CSS classes used by the formatter. The
- argument `arg` can be used to specify additional CSS selectors that
- are prepended to the classes. A call `fmter.get_style_defs('td .code')`
- would result in the following CSS classes:
-
- .. sourcecode:: css
-
- td .code .kw { font-weight: bold; color: #00FF00 }
- td .code .cm { color: #999999 }
- ...
-
- If you have Pygments 0.6 or higher, you can also pass a list or tuple to the
- `get_style_defs()` method to request multiple prefixes for the tokens:
-
- .. sourcecode:: python
-
- formatter.get_style_defs(['div.syntax pre', 'pre.syntax'])
-
- The output would then look like this:
-
- .. sourcecode:: css
-
- div.syntax pre .kw,
- pre.syntax .kw { font-weight: bold; color: #00FF00 }
- div.syntax pre .cm,
- pre.syntax .cm { color: #999999 }
- ...
-
- Additional options accepted:
-
- `nowrap`
- If set to ``True``, don't add a ``<pre>`` and a ``<div>`` tag
- around the tokens. This disables most other options (default: ``False``).
-
- `full`
- Tells the formatter to output a "full" document, i.e. a complete
- self-contained document (default: ``False``).
-
- `title`
- If `full` is true, the title that should be used to caption the
- document (default: ``''``).
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``). This option has no effect if the `cssfile`
- and `noclobber_cssfile` option are given and the file specified in
- `cssfile` exists.
-
- `noclasses`
- If set to true, token ``<span>`` tags (as well as line number elements)
- will not use CSS classes, but inline styles. This is not recommended
- for larger pieces of code since it increases output size by quite a bit
- (default: ``False``).
-
- `classprefix`
- Since the token types use relatively short class names, they may clash
- with some of your own class names. In this case you can use the
- `classprefix` option to give a string to prepend to all Pygments-generated
- CSS class names for token types.
- Note that this option also affects the output of `get_style_defs()`.
-
- `cssclass`
- CSS class for the wrapping ``<div>`` tag (default: ``'highlight'``).
- If you set this option, the default selector for `get_style_defs()`
- will be this class.
-
- .. versionadded:: 0.9
- If you select the ``'table'`` line numbers, the wrapping table will
- have a CSS class of this string plus ``'table'``, the default is
- accordingly ``'highlighttable'``.
-
- `cssstyles`
- Inline CSS styles for the wrapping ``<div>`` tag (default: ``''``).
-
- `prestyles`
- Inline CSS styles for the ``<pre>`` tag (default: ``''``).
-
- .. versionadded:: 0.11
-
- `cssfile`
- If the `full` option is true and this option is given, it must be the
- name of an external file. If the filename does not include an absolute
- path, the file's path will be assumed to be relative to the main output
- file's path, if the latter can be found. The stylesheet is then written
- to this file instead of the HTML file.
-
- .. versionadded:: 0.6
-
- `noclobber_cssfile`
- If `cssfile` is given and the specified file exists, the css file will
- not be overwritten. This allows the use of the `full` option in
- combination with a user specified css file. Default is ``False``.
-
- .. versionadded:: 1.1
-
- `linenos`
- If set to ``'table'``, output line numbers as a table with two cells,
- one containing the line numbers, the other the whole code. This is
- copy-and-paste-friendly, but may cause alignment problems with some
- browsers or fonts. If set to ``'inline'``, the line numbers will be
- integrated in the ``<pre>`` tag that contains the code (that setting
- is *new in Pygments 0.8*).
-
- For compatibility with Pygments 0.7 and earlier, every true value
- except ``'inline'`` means the same as ``'table'`` (in particular, that
- means also ``True``).
-
- The default value is ``False``, which means no line numbers at all.
-
- **Note:** with the default ("table") line number mechanism, the line
- numbers and code can have different line heights in Internet Explorer
- unless you give the enclosing ``<pre>`` tags an explicit ``line-height``
- CSS property (you get the default line spacing with ``line-height:
- 125%``).
-
- `hl_lines`
- Specify a list of lines to be highlighted. The line numbers are always
- relative to the input (i.e. the first line is line 1) and are
- independent of `linenostart`.
-
- .. versionadded:: 0.11
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `linenospecial`
- If set to a number n > 0, every nth line number is given the CSS
- class ``"special"`` (default: ``0``).
-
- `nobackground`
- If set to ``True``, the formatter won't output the background color
- for the wrapping element (this automatically defaults to ``False``
- when there is no wrapping element [eg: no argument for the
- `get_syntax_defs` method given]) (default: ``False``).
-
- .. versionadded:: 0.6
-
- `lineseparator`
- This string is output between lines of code. It defaults to ``"\n"``,
- which is enough to break a line inside ``<pre>`` tags, but you can
- e.g. set it to ``"<br>"`` to get HTML line breaks.
-
- .. versionadded:: 0.7
-
- `lineanchors`
- If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
- output line in an anchor tag with an ``id`` (and `name`) of ``foo-linenumber``.
- This allows easy linking to certain lines.
-
- .. versionadded:: 0.9
-
- `linespans`
- If set to a nonempty string, e.g. ``foo``, the formatter will wrap each
- output line in a span tag with an ``id`` of ``foo-linenumber``.
- This allows easy access to lines via javascript.
-
- .. versionadded:: 1.6
-
- `anchorlinenos`
- If set to `True`, will wrap line numbers in <a> tags. Used in
- combination with `linenos` and `lineanchors`.
-
- `tagsfile`
- If set to the path of a ctags file, wrap names in anchor tags that
- link to their definitions. `lineanchors` should be used, and the
- tags file should specify line numbers (see the `-n` option to ctags).
- The tags file is assumed to be encoded in UTF-8.
-
- .. versionadded:: 1.6
-
- `tagurlformat`
- A string formatting pattern used to generate links to ctags definitions.
- Available variables are `%(path)s`, `%(fname)s` and `%(fext)s`.
- Defaults to an empty string, resulting in just `#prefix-number` links.
-
- .. versionadded:: 1.6
-
- `filename`
- A string used to generate a filename when rendering ``<pre>`` blocks,
- for example if displaying source code. If `linenos` is set to
- ``'table'`` then the filename will be rendered in an initial row
- containing a single `<th>` which spans both columns.
-
- .. versionadded:: 2.1
-
- `wrapcode`
- Wrap the code inside ``<pre>`` blocks using ``<code>``, as recommended
- by the HTML5 specification.
-
- .. versionadded:: 2.4
-
- `debug_token_types`
- Add ``title`` attributes to all token ``<span>`` tags that show the
- name of the token.
-
- .. versionadded:: 2.10
-
-
- **Subclassing the HTML formatter**
-
- .. versionadded:: 0.7
-
- The HTML formatter is now built in a way that allows easy subclassing, thus
- customizing the output HTML code. The `format()` method calls
- `self._format_lines()` which returns a generator that yields tuples of ``(1,
- line)``, where the ``1`` indicates that the ``line`` is a line of the
- formatted source code.
-
- If the `nowrap` option is set, the generator is the iterated over and the
- resulting HTML is output.
-
- Otherwise, `format()` calls `self.wrap()`, which wraps the generator with
- other generators. These may add some HTML code to the one generated by
- `_format_lines()`, either by modifying the lines generated by the latter,
- then yielding them again with ``(1, line)``, and/or by yielding other HTML
- code before or after the lines, with ``(0, html)``. The distinction between
- source lines and other code makes it possible to wrap the generator multiple
- times.
-
- The default `wrap()` implementation adds a ``<div>`` and a ``<pre>`` tag.
-
- A custom `HtmlFormatter` subclass could look like this:
-
- .. sourcecode:: python
-
- class CodeHtmlFormatter(HtmlFormatter):
-
- def wrap(self, source, *, include_div):
- return self._wrap_code(source)
-
- def _wrap_code(self, source):
- yield 0, '<code>'
- for i, t in source:
- if i == 1:
- # it's a line of formatted code
- t += '<br>'
- yield i, t
- yield 0, '</code>'
-
- This results in wrapping the formatted lines with a ``<code>`` tag, where the
- source lines are broken using ``<br>`` tags.
-
- After calling `wrap()`, the `format()` method also adds the "line numbers"
- and/or "full document" wrappers if the respective options are set. Then, all
- HTML yielded by the wrapped generator is output.
- """
-
- name = 'HTML'
- aliases = ['html']
- filenames = ['*.html', '*.htm']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.title = self._decodeifneeded(self.title)
- self.nowrap = get_bool_opt(options, 'nowrap', False)
- self.noclasses = get_bool_opt(options, 'noclasses', False)
- self.classprefix = options.get('classprefix', '')
- self.cssclass = self._decodeifneeded(options.get('cssclass', 'highlight'))
- self.cssstyles = self._decodeifneeded(options.get('cssstyles', ''))
- self.prestyles = self._decodeifneeded(options.get('prestyles', ''))
- self.cssfile = self._decodeifneeded(options.get('cssfile', ''))
- self.noclobber_cssfile = get_bool_opt(options, 'noclobber_cssfile', False)
- self.tagsfile = self._decodeifneeded(options.get('tagsfile', ''))
- self.tagurlformat = self._decodeifneeded(options.get('tagurlformat', ''))
- self.filename = self._decodeifneeded(options.get('filename', ''))
- self.wrapcode = get_bool_opt(options, 'wrapcode', False)
- self.span_element_openers = {}
- self.debug_token_types = get_bool_opt(options, 'debug_token_types', False)
-
- if self.tagsfile:
- if not ctags:
- raise RuntimeError('The "ctags" package must to be installed '
- 'to be able to use the "tagsfile" feature.')
- self._ctags = ctags.CTags(self.tagsfile)
-
- linenos = options.get('linenos', False)
- if linenos == 'inline':
- self.linenos = 2
- elif linenos:
- # compatibility with <= 0.7
- self.linenos = 1
- else:
- self.linenos = 0
- self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
- self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
- self.linenospecial = abs(get_int_opt(options, 'linenospecial', 0))
- self.nobackground = get_bool_opt(options, 'nobackground', False)
- self.lineseparator = options.get('lineseparator', '\n')
- self.lineanchors = options.get('lineanchors', '')
- self.linespans = options.get('linespans', '')
- self.anchorlinenos = get_bool_opt(options, 'anchorlinenos', False)
- self.hl_lines = set()
- for lineno in get_list_opt(options, 'hl_lines', []):
- try:
- self.hl_lines.add(int(lineno))
- except ValueError:
- pass
-
- self._create_stylesheet()
-
- def _get_css_class(self, ttype):
- """Return the css class of this token type prefixed with
- the classprefix option."""
- ttypeclass = _get_ttype_class(ttype)
- if ttypeclass:
- return self.classprefix + ttypeclass
- return ''
-
- def _get_css_classes(self, ttype):
- """Return the CSS classes of this token type prefixed with the classprefix option."""
- cls = self._get_css_class(ttype)
- while ttype not in STANDARD_TYPES:
- ttype = ttype.parent
- cls = self._get_css_class(ttype) + ' ' + cls
- return cls or ''
-
- def _get_css_inline_styles(self, ttype):
- """Return the inline CSS styles for this token type."""
- cclass = self.ttype2class.get(ttype)
- while cclass is None:
- ttype = ttype.parent
- cclass = self.ttype2class.get(ttype)
- return cclass or ''
-
- def _create_stylesheet(self):
- t2c = self.ttype2class = {Token: ''}
- c2s = self.class2style = {}
- for ttype, ndef in self.style:
- name = self._get_css_class(ttype)
- style = ''
- if ndef['color']:
- style += 'color: {}; '.format(webify(ndef['color']))
- if ndef['bold']:
- style += 'font-weight: bold; '
- if ndef['italic']:
- style += 'font-style: italic; '
- if ndef['underline']:
- style += 'text-decoration: underline; '
- if ndef['bgcolor']:
- style += 'background-color: {}; '.format(webify(ndef['bgcolor']))
- if ndef['border']:
- style += 'border: 1px solid {}; '.format(webify(ndef['border']))
- if style:
- t2c[ttype] = name
- # save len(ttype) to enable ordering the styles by
- # hierarchy (necessary for CSS cascading rules!)
- c2s[name] = (style[:-2], ttype, len(ttype))
-
- def get_style_defs(self, arg=None):
- """
- Return CSS style definitions for the classes produced by the current
- highlighting style. ``arg`` can be a string or list of selectors to
- insert before the token type classes.
- """
- style_lines = []
-
- style_lines.extend(self.get_linenos_style_defs())
- style_lines.extend(self.get_background_style_defs(arg))
- style_lines.extend(self.get_token_style_defs(arg))
-
- return '\n'.join(style_lines)
-
- def get_token_style_defs(self, arg=None):
- prefix = self.get_css_prefix(arg)
-
- styles = [
- (level, ttype, cls, style)
- for cls, (style, ttype, level) in self.class2style.items()
- if cls and style
- ]
- styles.sort()
-
- lines = [
- f'{prefix(cls)} {{ {style} }} /* {repr(ttype)[6:]} */'
- for (level, ttype, cls, style) in styles
- ]
-
- return lines
-
- def get_background_style_defs(self, arg=None):
- prefix = self.get_css_prefix(arg)
- bg_color = self.style.background_color
- hl_color = self.style.highlight_color
-
- lines = []
-
- if arg and not self.nobackground and bg_color is not None:
- text_style = ''
- if Text in self.ttype2class:
- text_style = ' ' + self.class2style[self.ttype2class[Text]][0]
- lines.insert(
- 0, '{}{{ background: {};{} }}'.format(
- prefix(''), bg_color, text_style
- )
- )
- if hl_color is not None:
- lines.insert(
- 0, '{} {{ background-color: {} }}'.format(prefix('hll'), hl_color)
- )
-
- return lines
-
- def get_linenos_style_defs(self):
- lines = [
- f'pre {{ {self._pre_style} }}',
- f'td.linenos .normal {{ {self._linenos_style} }}',
- f'span.linenos {{ {self._linenos_style} }}',
- f'td.linenos .special {{ {self._linenos_special_style} }}',
- f'span.linenos.special {{ {self._linenos_special_style} }}',
- ]
-
- return lines
-
- def get_css_prefix(self, arg):
- if arg is None:
- arg = ('cssclass' in self.options and '.'+self.cssclass or '')
- if isinstance(arg, str):
- args = [arg]
- else:
- args = list(arg)
-
- def prefix(cls):
- if cls:
- cls = '.' + cls
- tmp = []
- for arg in args:
- tmp.append((arg and arg + ' ' or '') + cls)
- return ', '.join(tmp)
-
- return prefix
-
- @property
- def _pre_style(self):
- return 'line-height: 125%;'
-
- @property
- def _linenos_style(self):
- color = self.style.line_number_color
- background_color = self.style.line_number_background_color
- return f'color: {color}; background-color: {background_color}; padding-left: 5px; padding-right: 5px;'
-
- @property
- def _linenos_special_style(self):
- color = self.style.line_number_special_color
- background_color = self.style.line_number_special_background_color
- return f'color: {color}; background-color: {background_color}; padding-left: 5px; padding-right: 5px;'
-
- def _decodeifneeded(self, value):
- if isinstance(value, bytes):
- if self.encoding:
- return value.decode(self.encoding)
- return value.decode()
- return value
-
- def _wrap_full(self, inner, outfile):
- if self.cssfile:
- if os.path.isabs(self.cssfile):
- # it's an absolute filename
- cssfilename = self.cssfile
- else:
- try:
- filename = outfile.name
- if not filename or filename[0] == '<':
- # pseudo files, e.g. name == '<fdopen>'
- raise AttributeError
- cssfilename = os.path.join(os.path.dirname(filename),
- self.cssfile)
- except AttributeError:
- print('Note: Cannot determine output file name, '
- 'using current directory as base for the CSS file name',
- file=sys.stderr)
- cssfilename = self.cssfile
- # write CSS file only if noclobber_cssfile isn't given as an option.
- try:
- if not os.path.exists(cssfilename) or not self.noclobber_cssfile:
- with open(cssfilename, "w", encoding="utf-8") as cf:
- cf.write(CSSFILE_TEMPLATE %
- {'styledefs': self.get_style_defs('body')})
- except OSError as err:
- err.strerror = 'Error writing CSS file: ' + err.strerror
- raise
-
- yield 0, (DOC_HEADER_EXTERNALCSS %
- dict(title=self.title,
- cssfile=self.cssfile,
- encoding=self.encoding))
- else:
- yield 0, (DOC_HEADER %
- dict(title=self.title,
- styledefs=self.get_style_defs('body'),
- encoding=self.encoding))
-
- yield from inner
- yield 0, DOC_FOOTER
-
- def _wrap_tablelinenos(self, inner):
- dummyoutfile = StringIO()
- lncount = 0
- for t, line in inner:
- if t:
- lncount += 1
- dummyoutfile.write(line)
-
- fl = self.linenostart
- mw = len(str(lncount + fl - 1))
- sp = self.linenospecial
- st = self.linenostep
- anchor_name = self.lineanchors or self.linespans
- aln = self.anchorlinenos
- nocls = self.noclasses
-
- lines = []
-
- for i in range(fl, fl+lncount):
- print_line = i % st == 0
- special_line = sp and i % sp == 0
-
- if print_line:
- line = '%*d' % (mw, i)
- if aln:
- line = '<a href="#%s-%d">%s</a>' % (anchor_name, i, line)
- else:
- line = ' ' * mw
-
- if nocls:
- if special_line:
- style = f' style="{self._linenos_special_style}"'
- else:
- style = f' style="{self._linenos_style}"'
- else:
- if special_line:
- style = ' class="special"'
- else:
- style = ' class="normal"'
-
- if style:
- line = f'<span{style}>{line}</span>'
-
- lines.append(line)
-
- ls = '\n'.join(lines)
-
- # If a filename was specified, we can't put it into the code table as it
- # would misalign the line numbers. Hence we emit a separate row for it.
- filename_tr = ""
- if self.filename:
- filename_tr = (
- '<tr><th colspan="2" class="filename">'
- '<span class="filename">' + self.filename + '</span>'
- '</th></tr>')
-
- # in case you wonder about the seemingly redundant <div> here: since the
- # content in the other cell also is wrapped in a div, some browsers in
- # some configurations seem to mess up the formatting...
- yield 0, (f'<table class="{self.cssclass}table">' + filename_tr +
- '<tr><td class="linenos"><div class="linenodiv"><pre>' +
- ls + '</pre></div></td><td class="code">')
- yield 0, '<div>'
- yield 0, dummyoutfile.getvalue()
- yield 0, '</div>'
- yield 0, '</td></tr></table>'
-
-
- def _wrap_inlinelinenos(self, inner):
- # need a list of lines since we need the width of a single number :(
- inner_lines = list(inner)
- sp = self.linenospecial
- st = self.linenostep
- num = self.linenostart
- mw = len(str(len(inner_lines) + num - 1))
- anchor_name = self.lineanchors or self.linespans
- aln = self.anchorlinenos
- nocls = self.noclasses
-
- for _, inner_line in inner_lines:
- print_line = num % st == 0
- special_line = sp and num % sp == 0
-
- if print_line:
- line = '%*d' % (mw, num)
- else:
- line = ' ' * mw
-
- if nocls:
- if special_line:
- style = f' style="{self._linenos_special_style}"'
- else:
- style = f' style="{self._linenos_style}"'
- else:
- if special_line:
- style = ' class="linenos special"'
- else:
- style = ' class="linenos"'
-
- if style:
- linenos = f'<span{style}>{line}</span>'
- else:
- linenos = line
-
- if aln:
- yield 1, ('<a href="#%s-%d">%s</a>' % (anchor_name, num, linenos) +
- inner_line)
- else:
- yield 1, linenos + inner_line
- num += 1
-
- def _wrap_lineanchors(self, inner):
- s = self.lineanchors
- # subtract 1 since we have to increment i *before* yielding
- i = self.linenostart - 1
- for t, line in inner:
- if t:
- i += 1
- href = "" if self.linenos else ' href="#%s-%d"' % (s, i)
- yield 1, '<a id="%s-%d" name="%s-%d"%s></a>' % (s, i, s, i, href) + line
- else:
- yield 0, line
-
- def _wrap_linespans(self, inner):
- s = self.linespans
- i = self.linenostart - 1
- for t, line in inner:
- if t:
- i += 1
- yield 1, '<span id="%s-%d">%s</span>' % (s, i, line)
- else:
- yield 0, line
-
- def _wrap_div(self, inner):
- style = []
- if (self.noclasses and not self.nobackground and
- self.style.background_color is not None):
- style.append(f'background: {self.style.background_color}')
- if self.cssstyles:
- style.append(self.cssstyles)
- style = '; '.join(style)
-
- yield 0, ('<div' + (self.cssclass and f' class="{self.cssclass}"') +
- (style and (f' style="{style}"')) + '>')
- yield from inner
- yield 0, '</div>\n'
-
- def _wrap_pre(self, inner):
- style = []
- if self.prestyles:
- style.append(self.prestyles)
- if self.noclasses:
- style.append(self._pre_style)
- style = '; '.join(style)
-
- if self.filename and self.linenos != 1:
- yield 0, ('<span class="filename">' + self.filename + '</span>')
-
- # the empty span here is to keep leading empty lines from being
- # ignored by HTML parsers
- yield 0, ('<pre' + (style and f' style="{style}"') + '><span></span>')
- yield from inner
- yield 0, '</pre>'
-
- def _wrap_code(self, inner):
- yield 0, '<code>'
- yield from inner
- yield 0, '</code>'
-
- @functools.lru_cache(maxsize=100)
- def _translate_parts(self, value):
- """HTML-escape a value and split it by newlines."""
- return value.translate(_escape_html_table).split('\n')
-
- def _format_lines(self, tokensource):
- """
- Just format the tokens, without any wrapping tags.
- Yield individual lines.
- """
- nocls = self.noclasses
- lsep = self.lineseparator
- tagsfile = self.tagsfile
-
- lspan = ''
- line = []
- for ttype, value in tokensource:
- try:
- cspan = self.span_element_openers[ttype]
- except KeyError:
- title = ' title="{}"'.format('.'.join(ttype)) if self.debug_token_types else ''
- if nocls:
- css_style = self._get_css_inline_styles(ttype)
- if css_style:
- css_style = self.class2style[css_style][0]
- cspan = f'<span style="{css_style}"{title}>'
- else:
- cspan = ''
- else:
- css_class = self._get_css_classes(ttype)
- if css_class:
- cspan = f'<span class="{css_class}"{title}>'
- else:
- cspan = ''
- self.span_element_openers[ttype] = cspan
-
- parts = self._translate_parts(value)
-
- if tagsfile and ttype in Token.Name:
- filename, linenumber = self._lookup_ctag(value)
- if linenumber:
- base, filename = os.path.split(filename)
- if base:
- base += '/'
- filename, extension = os.path.splitext(filename)
- url = self.tagurlformat % {'path': base, 'fname': filename,
- 'fext': extension}
- parts[0] = "<a href=\"%s#%s-%d\">%s" % \
- (url, self.lineanchors, linenumber, parts[0])
- parts[-1] = parts[-1] + "</a>"
-
- # for all but the last line
- for part in parts[:-1]:
- if line:
- # Also check for part being non-empty, so we avoid creating
- # empty <span> tags
- if lspan != cspan and part:
- line.extend(((lspan and '</span>'), cspan, part,
- (cspan and '</span>'), lsep))
- else: # both are the same, or the current part was empty
- line.extend((part, (lspan and '</span>'), lsep))
- yield 1, ''.join(line)
- line = []
- elif part:
- yield 1, ''.join((cspan, part, (cspan and '</span>'), lsep))
- else:
- yield 1, lsep
- # for the last line
- if line and parts[-1]:
- if lspan != cspan:
- line.extend(((lspan and '</span>'), cspan, parts[-1]))
- lspan = cspan
- else:
- line.append(parts[-1])
- elif parts[-1]:
- line = [cspan, parts[-1]]
- lspan = cspan
- # else we neither have to open a new span nor set lspan
-
- if line:
- line.extend(((lspan and '</span>'), lsep))
- yield 1, ''.join(line)
-
- def _lookup_ctag(self, token):
- entry = ctags.TagEntry()
- if self._ctags.find(entry, token.encode(), 0):
- return entry['file'].decode(), entry['lineNumber']
- else:
- return None, None
-
- def _highlight_lines(self, tokensource):
- """
- Highlighted the lines specified in the `hl_lines` option by
- post-processing the token stream coming from `_format_lines`.
- """
- hls = self.hl_lines
-
- for i, (t, value) in enumerate(tokensource):
- if t != 1:
- yield t, value
- if i + 1 in hls: # i + 1 because Python indexes start at 0
- if self.noclasses:
- style = ''
- if self.style.highlight_color is not None:
- style = (f' style="background-color: {self.style.highlight_color}"')
- yield 1, f'<span{style}>{value}</span>'
- else:
- yield 1, f'<span class="hll">{value}</span>'
- else:
- yield 1, value
-
- def wrap(self, source):
- """
- Wrap the ``source``, which is a generator yielding
- individual lines, in custom generators. See docstring
- for `format`. Can be overridden.
- """
-
- output = source
- if self.wrapcode:
- output = self._wrap_code(output)
-
- output = self._wrap_pre(output)
-
- return output
-
- def format_unencoded(self, tokensource, outfile):
- """
- The formatting process uses several nested generators; which of
- them are used is determined by the user's options.
-
- Each generator should take at least one argument, ``inner``,
- and wrap the pieces of text generated by this.
-
- Always yield 2-tuples: (code, text). If "code" is 1, the text
- is part of the original tokensource being highlighted, if it's
- 0, the text is some piece of wrapping. This makes it possible to
- use several different wrappers that process the original source
- linewise, e.g. line number generators.
- """
- source = self._format_lines(tokensource)
-
- # As a special case, we wrap line numbers before line highlighting
- # so the line numbers get wrapped in the highlighting tag.
- if not self.nowrap and self.linenos == 2:
- source = self._wrap_inlinelinenos(source)
-
- if self.hl_lines:
- source = self._highlight_lines(source)
-
- if not self.nowrap:
- if self.lineanchors:
- source = self._wrap_lineanchors(source)
- if self.linespans:
- source = self._wrap_linespans(source)
- source = self.wrap(source)
- if self.linenos == 1:
- source = self._wrap_tablelinenos(source)
- source = self._wrap_div(source)
- if self.full:
- source = self._wrap_full(source, outfile)
-
- for t, piece in source:
- outfile.write(piece)
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/img.py b/contrib/python/pip/pip/_vendor/pygments/formatters/img.py
deleted file mode 100644
index 7542cfad9da..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/img.py
+++ /dev/null
@@ -1,685 +0,0 @@
-"""
- pygments.formatters.img
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for Pixmap output.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-import os
-import sys
-
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, \
- get_choice_opt
-
-import subprocess
-
-# Import this carefully
-try:
- from PIL import Image, ImageDraw, ImageFont
- pil_available = True
-except ImportError:
- pil_available = False
-
-try:
- import _winreg
-except ImportError:
- try:
- import winreg as _winreg
- except ImportError:
- _winreg = None
-
-__all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter',
- 'BmpImageFormatter']
-
-
-# For some unknown reason every font calls it something different
-STYLES = {
- 'NORMAL': ['', 'Roman', 'Book', 'Normal', 'Regular', 'Medium'],
- 'ITALIC': ['Oblique', 'Italic'],
- 'BOLD': ['Bold'],
- 'BOLDITALIC': ['Bold Oblique', 'Bold Italic'],
-}
-
-# A sane default for modern systems
-DEFAULT_FONT_NAME_NIX = 'DejaVu Sans Mono'
-DEFAULT_FONT_NAME_WIN = 'Courier New'
-DEFAULT_FONT_NAME_MAC = 'Menlo'
-
-
-class PilNotAvailable(ImportError):
- """When Python imaging library is not available"""
-
-
-class FontNotFound(Exception):
- """When there are no usable fonts specified"""
-
-
-class FontManager:
- """
- Manages a set of fonts: normal, italic, bold, etc...
- """
-
- def __init__(self, font_name, font_size=14):
- self.font_name = font_name
- self.font_size = font_size
- self.fonts = {}
- self.encoding = None
- self.variable = False
- if hasattr(font_name, 'read') or os.path.isfile(font_name):
- font = ImageFont.truetype(font_name, self.font_size)
- self.variable = True
- for style in STYLES:
- self.fonts[style] = font
-
- return
-
- if sys.platform.startswith('win'):
- if not font_name:
- self.font_name = DEFAULT_FONT_NAME_WIN
- self._create_win()
- elif sys.platform.startswith('darwin'):
- if not font_name:
- self.font_name = DEFAULT_FONT_NAME_MAC
- self._create_mac()
- else:
- if not font_name:
- self.font_name = DEFAULT_FONT_NAME_NIX
- self._create_nix()
-
- def _get_nix_font_path(self, name, style):
- proc = subprocess.Popen(['fc-list', f"{name}:style={style}", 'file'],
- stdout=subprocess.PIPE, stderr=None)
- stdout, _ = proc.communicate()
- if proc.returncode == 0:
- lines = stdout.splitlines()
- for line in lines:
- if line.startswith(b'Fontconfig warning:'):
- continue
- path = line.decode().strip().strip(':')
- if path:
- return path
- return None
-
- def _create_nix(self):
- for name in STYLES['NORMAL']:
- path = self._get_nix_font_path(self.font_name, name)
- if path is not None:
- self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
- break
- else:
- raise FontNotFound(f'No usable fonts named: "{self.font_name}"')
- for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
- for stylename in STYLES[style]:
- path = self._get_nix_font_path(self.font_name, stylename)
- if path is not None:
- self.fonts[style] = ImageFont.truetype(path, self.font_size)
- break
- else:
- if style == 'BOLDITALIC':
- self.fonts[style] = self.fonts['BOLD']
- else:
- self.fonts[style] = self.fonts['NORMAL']
-
- def _get_mac_font_path(self, font_map, name, style):
- return font_map.get((name + ' ' + style).strip().lower())
-
- def _create_mac(self):
- font_map = {}
- for font_dir in (os.path.join(os.getenv("HOME"), 'Library/Fonts/'),
- '/Library/Fonts/', '/System/Library/Fonts/'):
- font_map.update(
- (os.path.splitext(f)[0].lower(), os.path.join(font_dir, f))
- for f in os.listdir(font_dir)
- if f.lower().endswith(('ttf', 'ttc')))
-
- for name in STYLES['NORMAL']:
- path = self._get_mac_font_path(font_map, self.font_name, name)
- if path is not None:
- self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
- break
- else:
- raise FontNotFound(f'No usable fonts named: "{self.font_name}"')
- for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
- for stylename in STYLES[style]:
- path = self._get_mac_font_path(font_map, self.font_name, stylename)
- if path is not None:
- self.fonts[style] = ImageFont.truetype(path, self.font_size)
- break
- else:
- if style == 'BOLDITALIC':
- self.fonts[style] = self.fonts['BOLD']
- else:
- self.fonts[style] = self.fonts['NORMAL']
-
- def _lookup_win(self, key, basename, styles, fail=False):
- for suffix in ('', ' (TrueType)'):
- for style in styles:
- try:
- valname = '{}{}{}'.format(basename, style and ' '+style, suffix)
- val, _ = _winreg.QueryValueEx(key, valname)
- return val
- except OSError:
- continue
- else:
- if fail:
- raise FontNotFound(f'Font {basename} ({styles[0]}) not found in registry')
- return None
-
- def _create_win(self):
- lookuperror = None
- keynames = [ (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
- (_winreg.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Fonts'),
- (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts'),
- (_winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows\CurrentVersion\Fonts') ]
- for keyname in keynames:
- try:
- key = _winreg.OpenKey(*keyname)
- try:
- path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True)
- self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size)
- for style in ('ITALIC', 'BOLD', 'BOLDITALIC'):
- path = self._lookup_win(key, self.font_name, STYLES[style])
- if path:
- self.fonts[style] = ImageFont.truetype(path, self.font_size)
- else:
- if style == 'BOLDITALIC':
- self.fonts[style] = self.fonts['BOLD']
- else:
- self.fonts[style] = self.fonts['NORMAL']
- return
- except FontNotFound as err:
- lookuperror = err
- finally:
- _winreg.CloseKey(key)
- except OSError:
- pass
- else:
- # If we get here, we checked all registry keys and had no luck
- # We can be in one of two situations now:
- # * All key lookups failed. In this case lookuperror is None and we
- # will raise a generic error
- # * At least one lookup failed with a FontNotFound error. In this
- # case, we will raise that as a more specific error
- if lookuperror:
- raise lookuperror
- raise FontNotFound('Can\'t open Windows font registry key')
-
- def get_char_size(self):
- """
- Get the character size.
- """
- return self.get_text_size('M')
-
- def get_text_size(self, text):
- """
- Get the text size (width, height).
- """
- font = self.fonts['NORMAL']
- if hasattr(font, 'getbbox'): # Pillow >= 9.2.0
- return font.getbbox(text)[2:4]
- else:
- return font.getsize(text)
-
- def get_font(self, bold, oblique):
- """
- Get the font based on bold and italic flags.
- """
- if bold and oblique:
- if self.variable:
- return self.get_style('BOLDITALIC')
-
- return self.fonts['BOLDITALIC']
- elif bold:
- if self.variable:
- return self.get_style('BOLD')
-
- return self.fonts['BOLD']
- elif oblique:
- if self.variable:
- return self.get_style('ITALIC')
-
- return self.fonts['ITALIC']
- else:
- if self.variable:
- return self.get_style('NORMAL')
-
- return self.fonts['NORMAL']
-
- def get_style(self, style):
- """
- Get the specified style of the font if it is a variable font.
- If not found, return the normal font.
- """
- font = self.fonts[style]
- for style_name in STYLES[style]:
- try:
- font.set_variation_by_name(style_name)
- return font
- except ValueError:
- pass
- except OSError:
- return font
-
- return font
-
-
-class ImageFormatter(Formatter):
- """
- Create a PNG image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 0.10
-
- Additional options accepted:
-
- `image_format`
- An image format to output to that is recognised by PIL, these include:
-
- * "PNG" (default)
- * "JPEG"
- * "BMP"
- * "GIF"
-
- `line_pad`
- The extra spacing (in pixels) between each line of text.
-
- Default: 2
-
- `font_name`
- The font name to be used as the base font from which others, such as
- bold and italic fonts will be generated. This really should be a
- monospace font to look sane.
- If a filename or a file-like object is specified, the user must
- provide different styles of the font.
-
- Default: "Courier New" on Windows, "Menlo" on Mac OS, and
- "DejaVu Sans Mono" on \\*nix
-
- `font_size`
- The font size in points to be used.
-
- Default: 14
-
- `image_pad`
- The padding, in pixels to be used at each edge of the resulting image.
-
- Default: 10
-
- `line_numbers`
- Whether line numbers should be shown: True/False
-
- Default: True
-
- `line_number_start`
- The line number of the first line.
-
- Default: 1
-
- `line_number_step`
- The step used when printing line numbers.
-
- Default: 1
-
- `line_number_bg`
- The background colour (in "#123456" format) of the line number bar, or
- None to use the style background color.
-
- Default: "#eed"
-
- `line_number_fg`
- The text color of the line numbers (in "#123456"-like format).
-
- Default: "#886"
-
- `line_number_chars`
- The number of columns of line numbers allowable in the line number
- margin.
-
- Default: 2
-
- `line_number_bold`
- Whether line numbers will be bold: True/False
-
- Default: False
-
- `line_number_italic`
- Whether line numbers will be italicized: True/False
-
- Default: False
-
- `line_number_separator`
- Whether a line will be drawn between the line number area and the
- source code area: True/False
-
- Default: True
-
- `line_number_pad`
- The horizontal padding (in pixels) between the line number margin, and
- the source code area.
-
- Default: 6
-
- `hl_lines`
- Specify a list of lines to be highlighted.
-
- .. versionadded:: 1.2
-
- Default: empty list
-
- `hl_color`
- Specify the color for highlighting lines.
-
- .. versionadded:: 1.2
-
- Default: highlight color of the selected style
- """
-
- # Required by the pygments mapper
- name = 'img'
- aliases = ['img', 'IMG', 'png']
- filenames = ['*.png']
-
- unicodeoutput = False
-
- default_image_format = 'png'
-
- def __init__(self, **options):
- """
- See the class docstring for explanation of options.
- """
- if not pil_available:
- raise PilNotAvailable(
- 'Python Imaging Library is required for this formatter')
- Formatter.__init__(self, **options)
- self.encoding = 'latin1' # let pygments.format() do the right thing
- # Read the style
- self.styles = dict(self.style)
- if self.style.background_color is None:
- self.background_color = '#fff'
- else:
- self.background_color = self.style.background_color
- # Image options
- self.image_format = get_choice_opt(
- options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'],
- self.default_image_format, normcase=True)
- self.image_pad = get_int_opt(options, 'image_pad', 10)
- self.line_pad = get_int_opt(options, 'line_pad', 2)
- # The fonts
- fontsize = get_int_opt(options, 'font_size', 14)
- self.fonts = FontManager(options.get('font_name', ''), fontsize)
- self.fontw, self.fonth = self.fonts.get_char_size()
- # Line number options
- self.line_number_fg = options.get('line_number_fg', '#886')
- self.line_number_bg = options.get('line_number_bg', '#eed')
- self.line_number_chars = get_int_opt(options,
- 'line_number_chars', 2)
- self.line_number_bold = get_bool_opt(options,
- 'line_number_bold', False)
- self.line_number_italic = get_bool_opt(options,
- 'line_number_italic', False)
- self.line_number_pad = get_int_opt(options, 'line_number_pad', 6)
- self.line_numbers = get_bool_opt(options, 'line_numbers', True)
- self.line_number_separator = get_bool_opt(options,
- 'line_number_separator', True)
- self.line_number_step = get_int_opt(options, 'line_number_step', 1)
- self.line_number_start = get_int_opt(options, 'line_number_start', 1)
- if self.line_numbers:
- self.line_number_width = (self.fontw * self.line_number_chars +
- self.line_number_pad * 2)
- else:
- self.line_number_width = 0
- self.hl_lines = []
- hl_lines_str = get_list_opt(options, 'hl_lines', [])
- for line in hl_lines_str:
- try:
- self.hl_lines.append(int(line))
- except ValueError:
- pass
- self.hl_color = options.get('hl_color',
- self.style.highlight_color) or '#f90'
- self.drawables = []
-
- def get_style_defs(self, arg=''):
- raise NotImplementedError('The -S option is meaningless for the image '
- 'formatter. Use -O style=<stylename> instead.')
-
- def _get_line_height(self):
- """
- Get the height of a line.
- """
- return self.fonth + self.line_pad
-
- def _get_line_y(self, lineno):
- """
- Get the Y coordinate of a line number.
- """
- return lineno * self._get_line_height() + self.image_pad
-
- def _get_char_width(self):
- """
- Get the width of a character.
- """
- return self.fontw
-
- def _get_char_x(self, linelength):
- """
- Get the X coordinate of a character position.
- """
- return linelength + self.image_pad + self.line_number_width
-
- def _get_text_pos(self, linelength, lineno):
- """
- Get the actual position for a character and line position.
- """
- return self._get_char_x(linelength), self._get_line_y(lineno)
-
- def _get_linenumber_pos(self, lineno):
- """
- Get the actual position for the start of a line number.
- """
- return (self.image_pad, self._get_line_y(lineno))
-
- def _get_text_color(self, style):
- """
- Get the correct color for the token from the style.
- """
- if style['color'] is not None:
- fill = '#' + style['color']
- else:
- fill = '#000'
- return fill
-
- def _get_text_bg_color(self, style):
- """
- Get the correct background color for the token from the style.
- """
- if style['bgcolor'] is not None:
- bg_color = '#' + style['bgcolor']
- else:
- bg_color = None
- return bg_color
-
- def _get_style_font(self, style):
- """
- Get the correct font for the style.
- """
- return self.fonts.get_font(style['bold'], style['italic'])
-
- def _get_image_size(self, maxlinelength, maxlineno):
- """
- Get the required image size.
- """
- return (self._get_char_x(maxlinelength) + self.image_pad,
- self._get_line_y(maxlineno + 0) + self.image_pad)
-
- def _draw_linenumber(self, posno, lineno):
- """
- Remember a line number drawable to paint later.
- """
- self._draw_text(
- self._get_linenumber_pos(posno),
- str(lineno).rjust(self.line_number_chars),
- font=self.fonts.get_font(self.line_number_bold,
- self.line_number_italic),
- text_fg=self.line_number_fg,
- text_bg=None,
- )
-
- def _draw_text(self, pos, text, font, text_fg, text_bg):
- """
- Remember a single drawable tuple to paint later.
- """
- self.drawables.append((pos, text, font, text_fg, text_bg))
-
- def _create_drawables(self, tokensource):
- """
- Create drawables for the token content.
- """
- lineno = charno = maxcharno = 0
- maxlinelength = linelength = 0
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- style = self.styles[ttype]
- # TODO: make sure tab expansion happens earlier in the chain. It
- # really ought to be done on the input, as to do it right here is
- # quite complex.
- value = value.expandtabs(4)
- lines = value.splitlines(True)
- # print lines
- for i, line in enumerate(lines):
- temp = line.rstrip('\n')
- if temp:
- self._draw_text(
- self._get_text_pos(linelength, lineno),
- temp,
- font = self._get_style_font(style),
- text_fg = self._get_text_color(style),
- text_bg = self._get_text_bg_color(style),
- )
- temp_width, _ = self.fonts.get_text_size(temp)
- linelength += temp_width
- maxlinelength = max(maxlinelength, linelength)
- charno += len(temp)
- maxcharno = max(maxcharno, charno)
- if line.endswith('\n'):
- # add a line for each extra line in the value
- linelength = 0
- charno = 0
- lineno += 1
- self.maxlinelength = maxlinelength
- self.maxcharno = maxcharno
- self.maxlineno = lineno
-
- def _draw_line_numbers(self):
- """
- Create drawables for the line numbers.
- """
- if not self.line_numbers:
- return
- for p in range(self.maxlineno):
- n = p + self.line_number_start
- if (n % self.line_number_step) == 0:
- self._draw_linenumber(p, n)
-
- def _paint_line_number_bg(self, im):
- """
- Paint the line number background on the image.
- """
- if not self.line_numbers:
- return
- if self.line_number_fg is None:
- return
- draw = ImageDraw.Draw(im)
- recth = im.size[-1]
- rectw = self.image_pad + self.line_number_width - self.line_number_pad
- draw.rectangle([(0, 0), (rectw, recth)],
- fill=self.line_number_bg)
- if self.line_number_separator:
- draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg)
- del draw
-
- def format(self, tokensource, outfile):
- """
- Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
- tuples and write it into ``outfile``.
-
- This implementation calculates where it should draw each token on the
- pixmap, then calculates the required pixmap size and draws the items.
- """
- self._create_drawables(tokensource)
- self._draw_line_numbers()
- im = Image.new(
- 'RGB',
- self._get_image_size(self.maxlinelength, self.maxlineno),
- self.background_color
- )
- self._paint_line_number_bg(im)
- draw = ImageDraw.Draw(im)
- # Highlight
- if self.hl_lines:
- x = self.image_pad + self.line_number_width - self.line_number_pad + 1
- recth = self._get_line_height()
- rectw = im.size[0] - x
- for linenumber in self.hl_lines:
- y = self._get_line_y(linenumber - 1)
- draw.rectangle([(x, y), (x + rectw, y + recth)],
- fill=self.hl_color)
- for pos, value, font, text_fg, text_bg in self.drawables:
- if text_bg:
- # see deprecations https://pillow.readthedocs.io/en/stable/releasenotes/9.2.0.html#font-size-and-offset-methods
- if hasattr(draw, 'textsize'):
- text_size = draw.textsize(text=value, font=font)
- else:
- text_size = font.getbbox(value)[2:]
- draw.rectangle([pos[0], pos[1], pos[0] + text_size[0], pos[1] + text_size[1]], fill=text_bg)
- draw.text(pos, value, font=font, fill=text_fg)
- im.save(outfile, self.image_format.upper())
-
-
-# Add one formatter per format, so that the "-f gif" option gives the correct result
-# when used in pygmentize.
-
-class GifImageFormatter(ImageFormatter):
- """
- Create a GIF image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_gif'
- aliases = ['gif']
- filenames = ['*.gif']
- default_image_format = 'gif'
-
-
-class JpgImageFormatter(ImageFormatter):
- """
- Create a JPEG image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_jpg'
- aliases = ['jpg', 'jpeg']
- filenames = ['*.jpg']
- default_image_format = 'jpeg'
-
-
-class BmpImageFormatter(ImageFormatter):
- """
- Create a bitmap image from source code. This uses the Python Imaging Library to
- generate a pixmap from the source code.
-
- .. versionadded:: 1.0
- """
-
- name = 'img_bmp'
- aliases = ['bmp', 'bitmap']
- filenames = ['*.bmp']
- default_image_format = 'bmp'
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/irc.py b/contrib/python/pip/pip/_vendor/pygments/formatters/irc.py
deleted file mode 100644
index 468c2876053..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/irc.py
+++ /dev/null
@@ -1,154 +0,0 @@
-"""
- pygments.formatters.irc
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for IRC output
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-from pip._vendor.pygments.util import get_choice_opt
-
-
-__all__ = ['IRCFormatter']
-
-
-#: Map token types to a tuple of color values for light and dark
-#: backgrounds.
-IRC_COLORS = {
- Token: ('', ''),
-
- Whitespace: ('gray', 'brightblack'),
- Comment: ('gray', 'brightblack'),
- Comment.Preproc: ('cyan', 'brightcyan'),
- Keyword: ('blue', 'brightblue'),
- Keyword.Type: ('cyan', 'brightcyan'),
- Operator.Word: ('magenta', 'brightcyan'),
- Name.Builtin: ('cyan', 'brightcyan'),
- Name.Function: ('green', 'brightgreen'),
- Name.Namespace: ('_cyan_', '_brightcyan_'),
- Name.Class: ('_green_', '_brightgreen_'),
- Name.Exception: ('cyan', 'brightcyan'),
- Name.Decorator: ('brightblack', 'gray'),
- Name.Variable: ('red', 'brightred'),
- Name.Constant: ('red', 'brightred'),
- Name.Attribute: ('cyan', 'brightcyan'),
- Name.Tag: ('brightblue', 'brightblue'),
- String: ('yellow', 'yellow'),
- Number: ('blue', 'brightblue'),
-
- Generic.Deleted: ('brightred', 'brightred'),
- Generic.Inserted: ('green', 'brightgreen'),
- Generic.Heading: ('**', '**'),
- Generic.Subheading: ('*magenta*', '*brightmagenta*'),
- Generic.Error: ('brightred', 'brightred'),
-
- Error: ('_brightred_', '_brightred_'),
-}
-
-
-IRC_COLOR_MAP = {
- 'white': 0,
- 'black': 1,
- 'blue': 2,
- 'brightgreen': 3,
- 'brightred': 4,
- 'yellow': 5,
- 'magenta': 6,
- 'orange': 7,
- 'green': 7, #compat w/ ansi
- 'brightyellow': 8,
- 'lightgreen': 9,
- 'brightcyan': 9, # compat w/ ansi
- 'cyan': 10,
- 'lightblue': 11,
- 'red': 11, # compat w/ ansi
- 'brightblue': 12,
- 'brightmagenta': 13,
- 'brightblack': 14,
- 'gray': 15,
-}
-
-def ircformat(color, text):
- if len(color) < 1:
- return text
- add = sub = ''
- if '_' in color: # italic
- add += '\x1D'
- sub = '\x1D' + sub
- color = color.strip('_')
- if '*' in color: # bold
- add += '\x02'
- sub = '\x02' + sub
- color = color.strip('*')
- # underline (\x1F) not supported
- # backgrounds (\x03FF,BB) not supported
- if len(color) > 0: # actual color - may have issues with ircformat("red", "blah")+"10" type stuff
- add += '\x03' + str(IRC_COLOR_MAP[color]).zfill(2)
- sub = '\x03' + sub
- return add + text + sub
- return '<'+add+'>'+text+'</'+sub+'>'
-
-
-class IRCFormatter(Formatter):
- r"""
- Format tokens with IRC color sequences
-
- The `get_style_defs()` method doesn't do anything special since there is
- no support for common styles.
-
- Options accepted:
-
- `bg`
- Set to ``"light"`` or ``"dark"`` depending on the terminal's background
- (default: ``"light"``).
-
- `colorscheme`
- A dictionary mapping token types to (lightbg, darkbg) color names or
- ``None`` (default: ``None`` = use builtin colorscheme).
-
- `linenos`
- Set to ``True`` to have line numbers in the output as well
- (default: ``False`` = no line numbers).
- """
- name = 'IRC'
- aliases = ['irc', 'IRC']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.darkbg = get_choice_opt(options, 'bg',
- ['light', 'dark'], 'light') == 'dark'
- self.colorscheme = options.get('colorscheme', None) or IRC_COLORS
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
- def _write_lineno(self, outfile):
- if self.linenos:
- self._lineno += 1
- outfile.write("%04d: " % self._lineno)
-
- def format_unencoded(self, tokensource, outfile):
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- color = self.colorscheme.get(ttype)
- while color is None:
- ttype = ttype[:-1]
- color = self.colorscheme.get(ttype)
- if color:
- color = color[self.darkbg]
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write(ircformat(color, line))
- outfile.write('\n')
- self._write_lineno(outfile)
- if spl[-1]:
- outfile.write(ircformat(color, spl[-1]))
- else:
- outfile.write(value)
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/latex.py b/contrib/python/pip/pip/_vendor/pygments/formatters/latex.py
deleted file mode 100644
index 0ec9089b937..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/latex.py
+++ /dev/null
@@ -1,518 +0,0 @@
-"""
- pygments.formatters.latex
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for LaTeX fancyvrb output.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from io import StringIO
-
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.lexer import Lexer, do_insertions
-from pip._vendor.pygments.token import Token, STANDARD_TYPES
-from pip._vendor.pygments.util import get_bool_opt, get_int_opt
-
-
-__all__ = ['LatexFormatter']
-
-
-def escape_tex(text, commandprefix):
- return text.replace('\\', '\x00'). \
- replace('{', '\x01'). \
- replace('}', '\x02'). \
- replace('\x00', rf'\{commandprefix}Zbs{{}}'). \
- replace('\x01', rf'\{commandprefix}Zob{{}}'). \
- replace('\x02', rf'\{commandprefix}Zcb{{}}'). \
- replace('^', rf'\{commandprefix}Zca{{}}'). \
- replace('_', rf'\{commandprefix}Zus{{}}'). \
- replace('&', rf'\{commandprefix}Zam{{}}'). \
- replace('<', rf'\{commandprefix}Zlt{{}}'). \
- replace('>', rf'\{commandprefix}Zgt{{}}'). \
- replace('#', rf'\{commandprefix}Zsh{{}}'). \
- replace('%', rf'\{commandprefix}Zpc{{}}'). \
- replace('$', rf'\{commandprefix}Zdl{{}}'). \
- replace('-', rf'\{commandprefix}Zhy{{}}'). \
- replace("'", rf'\{commandprefix}Zsq{{}}'). \
- replace('"', rf'\{commandprefix}Zdq{{}}'). \
- replace('~', rf'\{commandprefix}Zti{{}}')
-
-
-DOC_TEMPLATE = r'''
-\documentclass{%(docclass)s}
-\usepackage{fancyvrb}
-\usepackage{color}
-\usepackage[%(encoding)s]{inputenc}
-%(preamble)s
-
-%(styledefs)s
-
-\begin{document}
-
-\section*{%(title)s}
-
-%(code)s
-\end{document}
-'''
-
-## Small explanation of the mess below :)
-#
-# The previous version of the LaTeX formatter just assigned a command to
-# each token type defined in the current style. That obviously is
-# problematic if the highlighted code is produced for a different style
-# than the style commands themselves.
-#
-# This version works much like the HTML formatter which assigns multiple
-# CSS classes to each <span> tag, from the most specific to the least
-# specific token type, thus falling back to the parent token type if one
-# is not defined. Here, the classes are there too and use the same short
-# forms given in token.STANDARD_TYPES.
-#
-# Highlighted code now only uses one custom command, which by default is
-# \PY and selectable by the commandprefix option (and in addition the
-# escapes \PYZat, \PYZlb and \PYZrb which haven't been renamed for
-# backwards compatibility purposes).
-#
-# \PY has two arguments: the classes, separated by +, and the text to
-# render in that style. The classes are resolved into the respective
-# style commands by magic, which serves to ignore unknown classes.
-#
-# The magic macros are:
-# * \PY@it, \PY@bf, etc. are unconditionally wrapped around the text
-# to render in \PY@do. Their definition determines the style.
-# * \PY@reset resets \PY@it etc. to do nothing.
-# * \PY@toks parses the list of classes, using magic inspired by the
-# keyval package (but modified to use plusses instead of commas
-# because fancyvrb redefines commas inside its environments).
-# * \PY@tok processes one class, calling the \PY@tok@classname command
-# if it exists.
-# * \PY@tok@classname sets the \PY@it etc. to reflect the chosen style
-# for its class.
-# * \PY resets the style, parses the classnames and then calls \PY@do.
-#
-# Tip: to read this code, print it out in substituted form using e.g.
-# >>> print STYLE_TEMPLATE % {'cp': 'PY'}
-
-STYLE_TEMPLATE = r'''
-\makeatletter
-\def\%(cp)s@reset{\let\%(cp)s@it=\relax \let\%(cp)s@bf=\relax%%
- \let\%(cp)s@ul=\relax \let\%(cp)s@tc=\relax%%
- \let\%(cp)s@bc=\relax \let\%(cp)s@ff=\relax}
-\def\%(cp)s@tok#1{\csname %(cp)s@tok@#1\endcsname}
-\def\%(cp)s@toks#1+{\ifx\relax#1\empty\else%%
- \%(cp)s@tok{#1}\expandafter\%(cp)s@toks\fi}
-\def\%(cp)s@do#1{\%(cp)s@bc{\%(cp)s@tc{\%(cp)s@ul{%%
- \%(cp)s@it{\%(cp)s@bf{\%(cp)s@ff{#1}}}}}}}
-\def\%(cp)s#1#2{\%(cp)s@reset\%(cp)s@toks#1+\relax+\%(cp)s@do{#2}}
-
-%(styles)s
-
-\def\%(cp)sZbs{\char`\\}
-\def\%(cp)sZus{\char`\_}
-\def\%(cp)sZob{\char`\{}
-\def\%(cp)sZcb{\char`\}}
-\def\%(cp)sZca{\char`\^}
-\def\%(cp)sZam{\char`\&}
-\def\%(cp)sZlt{\char`\<}
-\def\%(cp)sZgt{\char`\>}
-\def\%(cp)sZsh{\char`\#}
-\def\%(cp)sZpc{\char`\%%}
-\def\%(cp)sZdl{\char`\$}
-\def\%(cp)sZhy{\char`\-}
-\def\%(cp)sZsq{\char`\'}
-\def\%(cp)sZdq{\char`\"}
-\def\%(cp)sZti{\char`\~}
-%% for compatibility with earlier versions
-\def\%(cp)sZat{@}
-\def\%(cp)sZlb{[}
-\def\%(cp)sZrb{]}
-\makeatother
-'''
-
-
-def _get_ttype_name(ttype):
- fname = STANDARD_TYPES.get(ttype)
- if fname:
- return fname
- aname = ''
- while fname is None:
- aname = ttype[-1] + aname
- ttype = ttype.parent
- fname = STANDARD_TYPES.get(ttype)
- return fname + aname
-
-
-class LatexFormatter(Formatter):
- r"""
- Format tokens as LaTeX code. This needs the `fancyvrb` and `color`
- standard packages.
-
- Without the `full` option, code is formatted as one ``Verbatim``
- environment, like this:
-
- .. sourcecode:: latex
-
- \begin{Verbatim}[commandchars=\\\{\}]
- \PY{k}{def }\PY{n+nf}{foo}(\PY{n}{bar}):
- \PY{k}{pass}
- \end{Verbatim}
-
- Wrapping can be disabled using the `nowrap` option.
-
- The special command used here (``\PY``) and all the other macros it needs
- are output by the `get_style_defs` method.
-
- With the `full` option, a complete LaTeX document is output, including
- the command definitions in the preamble.
-
- The `get_style_defs()` method of a `LatexFormatter` returns a string
- containing ``\def`` commands defining the macros needed inside the
- ``Verbatim`` environments.
-
- Additional options accepted:
-
- `nowrap`
- If set to ``True``, don't wrap the tokens at all, not even inside a
- ``\begin{Verbatim}`` environment. This disables most other options
- (default: ``False``).
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `full`
- Tells the formatter to output a "full" document, i.e. a complete
- self-contained document (default: ``False``).
-
- `title`
- If `full` is true, the title that should be used to caption the
- document (default: ``''``).
-
- `docclass`
- If the `full` option is enabled, this is the document class to use
- (default: ``'article'``).
-
- `preamble`
- If the `full` option is enabled, this can be further preamble commands,
- e.g. ``\usepackage`` (default: ``''``).
-
- `linenos`
- If set to ``True``, output line numbers (default: ``False``).
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `verboptions`
- Additional options given to the Verbatim environment (see the *fancyvrb*
- docs for possible values) (default: ``''``).
-
- `commandprefix`
- The LaTeX commands used to produce colored output are constructed
- using this prefix and some letters (default: ``'PY'``).
-
- .. versionadded:: 0.7
- .. versionchanged:: 0.10
- The default is now ``'PY'`` instead of ``'C'``.
-
- `texcomments`
- If set to ``True``, enables LaTeX comment lines. That is, LaTex markup
- in comment tokens is not escaped so that LaTeX can render it (default:
- ``False``).
-
- .. versionadded:: 1.2
-
- `mathescape`
- If set to ``True``, enables LaTeX math mode escape in comments. That
- is, ``'$...$'`` inside a comment will trigger math mode (default:
- ``False``).
-
- .. versionadded:: 1.2
-
- `escapeinside`
- If set to a string of length 2, enables escaping to LaTeX. Text
- delimited by these 2 characters is read as LaTeX code and
- typeset accordingly. It has no effect in string literals. It has
- no effect in comments if `texcomments` or `mathescape` is
- set. (default: ``''``).
-
- .. versionadded:: 2.0
-
- `envname`
- Allows you to pick an alternative environment name replacing Verbatim.
- The alternate environment still has to support Verbatim's option syntax.
- (default: ``'Verbatim'``).
-
- .. versionadded:: 2.0
- """
- name = 'LaTeX'
- aliases = ['latex', 'tex']
- filenames = ['*.tex']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.nowrap = get_bool_opt(options, 'nowrap', False)
- self.docclass = options.get('docclass', 'article')
- self.preamble = options.get('preamble', '')
- self.linenos = get_bool_opt(options, 'linenos', False)
- self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
- self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
- self.verboptions = options.get('verboptions', '')
- self.nobackground = get_bool_opt(options, 'nobackground', False)
- self.commandprefix = options.get('commandprefix', 'PY')
- self.texcomments = get_bool_opt(options, 'texcomments', False)
- self.mathescape = get_bool_opt(options, 'mathescape', False)
- self.escapeinside = options.get('escapeinside', '')
- if len(self.escapeinside) == 2:
- self.left = self.escapeinside[0]
- self.right = self.escapeinside[1]
- else:
- self.escapeinside = ''
- self.envname = options.get('envname', 'Verbatim')
-
- self._create_stylesheet()
-
- def _create_stylesheet(self):
- t2n = self.ttype2name = {Token: ''}
- c2d = self.cmd2def = {}
- cp = self.commandprefix
-
- def rgbcolor(col):
- if col:
- return ','.join(['%.2f' % (int(col[i] + col[i + 1], 16) / 255.0)
- for i in (0, 2, 4)])
- else:
- return '1,1,1'
-
- for ttype, ndef in self.style:
- name = _get_ttype_name(ttype)
- cmndef = ''
- if ndef['bold']:
- cmndef += r'\let\$$@bf=\textbf'
- if ndef['italic']:
- cmndef += r'\let\$$@it=\textit'
- if ndef['underline']:
- cmndef += r'\let\$$@ul=\underline'
- if ndef['roman']:
- cmndef += r'\let\$$@ff=\textrm'
- if ndef['sans']:
- cmndef += r'\let\$$@ff=\textsf'
- if ndef['mono']:
- cmndef += r'\let\$$@ff=\textsf'
- if ndef['color']:
- cmndef += (r'\def\$$@tc##1{{\textcolor[rgb]{{{}}}{{##1}}}}'.format(rgbcolor(ndef['color'])))
- if ndef['border']:
- cmndef += (r'\def\$$@bc##1{{{{\setlength{{\fboxsep}}{{\string -\fboxrule}}'
- r'\fcolorbox[rgb]{{{}}}{{{}}}{{\strut ##1}}}}}}'.format(rgbcolor(ndef['border']),
- rgbcolor(ndef['bgcolor'])))
- elif ndef['bgcolor']:
- cmndef += (r'\def\$$@bc##1{{{{\setlength{{\fboxsep}}{{0pt}}'
- r'\colorbox[rgb]{{{}}}{{\strut ##1}}}}}}'.format(rgbcolor(ndef['bgcolor'])))
- if cmndef == '':
- continue
- cmndef = cmndef.replace('$$', cp)
- t2n[ttype] = name
- c2d[name] = cmndef
-
- def get_style_defs(self, arg=''):
- """
- Return the command sequences needed to define the commands
- used to format text in the verbatim environment. ``arg`` is ignored.
- """
- cp = self.commandprefix
- styles = []
- for name, definition in self.cmd2def.items():
- styles.append(rf'\@namedef{{{cp}@tok@{name}}}{{{definition}}}')
- return STYLE_TEMPLATE % {'cp': self.commandprefix,
- 'styles': '\n'.join(styles)}
-
- def format_unencoded(self, tokensource, outfile):
- # TODO: add support for background colors
- t2n = self.ttype2name
- cp = self.commandprefix
-
- if self.full:
- realoutfile = outfile
- outfile = StringIO()
-
- if not self.nowrap:
- outfile.write('\\begin{' + self.envname + '}[commandchars=\\\\\\{\\}')
- if self.linenos:
- start, step = self.linenostart, self.linenostep
- outfile.write(',numbers=left' +
- (start and ',firstnumber=%d' % start or '') +
- (step and ',stepnumber=%d' % step or ''))
- if self.mathescape or self.texcomments or self.escapeinside:
- outfile.write(',codes={\\catcode`\\$=3\\catcode`\\^=7'
- '\\catcode`\\_=8\\relax}')
- if self.verboptions:
- outfile.write(',' + self.verboptions)
- outfile.write(']\n')
-
- for ttype, value in tokensource:
- if ttype in Token.Comment:
- if self.texcomments:
- # Try to guess comment starting lexeme and escape it ...
- start = value[0:1]
- for i in range(1, len(value)):
- if start[0] != value[i]:
- break
- start += value[i]
-
- value = value[len(start):]
- start = escape_tex(start, cp)
-
- # ... but do not escape inside comment.
- value = start + value
- elif self.mathescape:
- # Only escape parts not inside a math environment.
- parts = value.split('$')
- in_math = False
- for i, part in enumerate(parts):
- if not in_math:
- parts[i] = escape_tex(part, cp)
- in_math = not in_math
- value = '$'.join(parts)
- elif self.escapeinside:
- text = value
- value = ''
- while text:
- a, sep1, text = text.partition(self.left)
- if sep1:
- b, sep2, text = text.partition(self.right)
- if sep2:
- value += escape_tex(a, cp) + b
- else:
- value += escape_tex(a + sep1 + b, cp)
- else:
- value += escape_tex(a, cp)
- else:
- value = escape_tex(value, cp)
- elif ttype not in Token.Escape:
- value = escape_tex(value, cp)
- styles = []
- while ttype is not Token:
- try:
- styles.append(t2n[ttype])
- except KeyError:
- # not in current style
- styles.append(_get_ttype_name(ttype))
- ttype = ttype.parent
- styleval = '+'.join(reversed(styles))
- if styleval:
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write(f"\\{cp}{{{styleval}}}{{{line}}}")
- outfile.write('\n')
- if spl[-1]:
- outfile.write(f"\\{cp}{{{styleval}}}{{{spl[-1]}}}")
- else:
- outfile.write(value)
-
- if not self.nowrap:
- outfile.write('\\end{' + self.envname + '}\n')
-
- if self.full:
- encoding = self.encoding or 'utf8'
- # map known existings encodings from LaTeX distribution
- encoding = {
- 'utf_8': 'utf8',
- 'latin_1': 'latin1',
- 'iso_8859_1': 'latin1',
- }.get(encoding.replace('-', '_'), encoding)
- realoutfile.write(DOC_TEMPLATE %
- dict(docclass = self.docclass,
- preamble = self.preamble,
- title = self.title,
- encoding = encoding,
- styledefs = self.get_style_defs(),
- code = outfile.getvalue()))
-
-
-class LatexEmbeddedLexer(Lexer):
- """
- This lexer takes one lexer as argument, the lexer for the language
- being formatted, and the left and right delimiters for escaped text.
-
- First everything is scanned using the language lexer to obtain
- strings and comments. All other consecutive tokens are merged and
- the resulting text is scanned for escaped segments, which are given
- the Token.Escape type. Finally text that is not escaped is scanned
- again with the language lexer.
- """
- def __init__(self, left, right, lang, **options):
- self.left = left
- self.right = right
- self.lang = lang
- Lexer.__init__(self, **options)
-
- def get_tokens_unprocessed(self, text):
- # find and remove all the escape tokens (replace with an empty string)
- # this is very similar to DelegatingLexer.get_tokens_unprocessed.
- buffered = ''
- insertions = []
- insertion_buf = []
- for i, t, v in self._find_safe_escape_tokens(text):
- if t is None:
- if insertion_buf:
- insertions.append((len(buffered), insertion_buf))
- insertion_buf = []
- buffered += v
- else:
- insertion_buf.append((i, t, v))
- if insertion_buf:
- insertions.append((len(buffered), insertion_buf))
- return do_insertions(insertions,
- self.lang.get_tokens_unprocessed(buffered))
-
- def _find_safe_escape_tokens(self, text):
- """ find escape tokens that are not in strings or comments """
- for i, t, v in self._filter_to(
- self.lang.get_tokens_unprocessed(text),
- lambda t: t in Token.Comment or t in Token.String
- ):
- if t is None:
- for i2, t2, v2 in self._find_escape_tokens(v):
- yield i + i2, t2, v2
- else:
- yield i, None, v
-
- def _filter_to(self, it, pred):
- """ Keep only the tokens that match `pred`, merge the others together """
- buf = ''
- idx = 0
- for i, t, v in it:
- if pred(t):
- if buf:
- yield idx, None, buf
- buf = ''
- yield i, t, v
- else:
- if not buf:
- idx = i
- buf += v
- if buf:
- yield idx, None, buf
-
- def _find_escape_tokens(self, text):
- """ Find escape tokens within text, give token=None otherwise """
- index = 0
- while text:
- a, sep1, text = text.partition(self.left)
- if a:
- yield index, None, a
- index += len(a)
- if sep1:
- b, sep2, text = text.partition(self.right)
- if sep2:
- yield index + len(sep1), Token.Escape, b
- index += len(sep1) + len(b) + len(sep2)
- else:
- yield index, Token.Error, sep1
- index += len(sep1)
- text = b
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/other.py b/contrib/python/pip/pip/_vendor/pygments/formatters/other.py
deleted file mode 100644
index de8d9dcf896..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/other.py
+++ /dev/null
@@ -1,160 +0,0 @@
-"""
- pygments.formatters.other
- ~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Other formatters: NullFormatter, RawTokenFormatter.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.util import get_choice_opt
-from pip._vendor.pygments.token import Token
-from pip._vendor.pygments.console import colorize
-
-__all__ = ['NullFormatter', 'RawTokenFormatter', 'TestcaseFormatter']
-
-
-class NullFormatter(Formatter):
- """
- Output the text unchanged without any formatting.
- """
- name = 'Text only'
- aliases = ['text', 'null']
- filenames = ['*.txt']
-
- def format(self, tokensource, outfile):
- enc = self.encoding
- for ttype, value in tokensource:
- if enc:
- outfile.write(value.encode(enc))
- else:
- outfile.write(value)
-
-
-class RawTokenFormatter(Formatter):
- r"""
- Format tokens as a raw representation for storing token streams.
-
- The format is ``tokentype<TAB>repr(tokenstring)\n``. The output can later
- be converted to a token stream with the `RawTokenLexer`, described in the
- :doc:`lexer list <lexers>`.
-
- Only two options are accepted:
-
- `compress`
- If set to ``'gz'`` or ``'bz2'``, compress the output with the given
- compression algorithm after encoding (default: ``''``).
- `error_color`
- If set to a color name, highlight error tokens using that color. If
- set but with no value, defaults to ``'red'``.
-
- .. versionadded:: 0.11
-
- """
- name = 'Raw tokens'
- aliases = ['raw', 'tokens']
- filenames = ['*.raw']
-
- unicodeoutput = False
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- # We ignore self.encoding if it is set, since it gets set for lexer
- # and formatter if given with -Oencoding on the command line.
- # The RawTokenFormatter outputs only ASCII. Override here.
- self.encoding = 'ascii' # let pygments.format() do the right thing
- self.compress = get_choice_opt(options, 'compress',
- ['', 'none', 'gz', 'bz2'], '')
- self.error_color = options.get('error_color', None)
- if self.error_color is True:
- self.error_color = 'red'
- if self.error_color is not None:
- try:
- colorize(self.error_color, '')
- except KeyError:
- raise ValueError(f"Invalid color {self.error_color!r} specified")
-
- def format(self, tokensource, outfile):
- try:
- outfile.write(b'')
- except TypeError:
- raise TypeError('The raw tokens formatter needs a binary '
- 'output file')
- if self.compress == 'gz':
- import gzip
- outfile = gzip.GzipFile('', 'wb', 9, outfile)
-
- write = outfile.write
- flush = outfile.close
- elif self.compress == 'bz2':
- import bz2
- compressor = bz2.BZ2Compressor(9)
-
- def write(text):
- outfile.write(compressor.compress(text))
-
- def flush():
- outfile.write(compressor.flush())
- outfile.flush()
- else:
- write = outfile.write
- flush = outfile.flush
-
- if self.error_color:
- for ttype, value in tokensource:
- line = b"%r\t%r\n" % (ttype, value)
- if ttype is Token.Error:
- write(colorize(self.error_color, line))
- else:
- write(line)
- else:
- for ttype, value in tokensource:
- write(b"%r\t%r\n" % (ttype, value))
- flush()
-
-
-TESTCASE_BEFORE = '''\
- def testNeedsName(lexer):
- fragment = %r
- tokens = [
-'''
-TESTCASE_AFTER = '''\
- ]
- assert list(lexer.get_tokens(fragment)) == tokens
-'''
-
-
-class TestcaseFormatter(Formatter):
- """
- Format tokens as appropriate for a new testcase.
-
- .. versionadded:: 2.0
- """
- name = 'Testcase'
- aliases = ['testcase']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- if self.encoding is not None and self.encoding != 'utf-8':
- raise ValueError("Only None and utf-8 are allowed encodings.")
-
- def format(self, tokensource, outfile):
- indentation = ' ' * 12
- rawbuf = []
- outbuf = []
- for ttype, value in tokensource:
- rawbuf.append(value)
- outbuf.append(f'{indentation}({ttype}, {value!r}),\n')
-
- before = TESTCASE_BEFORE % (''.join(rawbuf),)
- during = ''.join(outbuf)
- after = TESTCASE_AFTER
- if self.encoding is None:
- outfile.write(before + during + after)
- else:
- outfile.write(before.encode('utf-8'))
- outfile.write(during.encode('utf-8'))
- outfile.write(after.encode('utf-8'))
- outfile.flush()
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/pangomarkup.py b/contrib/python/pip/pip/_vendor/pygments/formatters/pangomarkup.py
deleted file mode 100644
index dfed53ab768..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/pangomarkup.py
+++ /dev/null
@@ -1,83 +0,0 @@
-"""
- pygments.formatters.pangomarkup
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for Pango markup output.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pip._vendor.pygments.formatter import Formatter
-
-
-__all__ = ['PangoMarkupFormatter']
-
-
-_escape_table = {
- ord('&'): '&amp;',
- ord('<'): '&lt;',
-}
-
-
-def escape_special_chars(text, table=_escape_table):
- """Escape & and < for Pango Markup."""
- return text.translate(table)
-
-
-class PangoMarkupFormatter(Formatter):
- """
- Format tokens as Pango Markup code. It can then be rendered to an SVG.
-
- .. versionadded:: 2.9
- """
-
- name = 'Pango Markup'
- aliases = ['pango', 'pangomarkup']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
-
- self.styles = {}
-
- for token, style in self.style:
- start = ''
- end = ''
- if style['color']:
- start += '<span fgcolor="#{}">'.format(style['color'])
- end = '</span>' + end
- if style['bold']:
- start += '<b>'
- end = '</b>' + end
- if style['italic']:
- start += '<i>'
- end = '</i>' + end
- if style['underline']:
- start += '<u>'
- end = '</u>' + end
- self.styles[token] = (start, end)
-
- def format_unencoded(self, tokensource, outfile):
- lastval = ''
- lasttype = None
-
- outfile.write('<tt>')
-
- for ttype, value in tokensource:
- while ttype not in self.styles:
- ttype = ttype.parent
- if ttype == lasttype:
- lastval += escape_special_chars(value)
- else:
- if lastval:
- stylebegin, styleend = self.styles[lasttype]
- outfile.write(stylebegin + lastval + styleend)
- lastval = escape_special_chars(value)
- lasttype = ttype
-
- if lastval:
- stylebegin, styleend = self.styles[lasttype]
- outfile.write(stylebegin + lastval + styleend)
-
- outfile.write('</tt>')
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/rtf.py b/contrib/python/pip/pip/_vendor/pygments/formatters/rtf.py
deleted file mode 100644
index eca2a41a1cd..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/rtf.py
+++ /dev/null
@@ -1,349 +0,0 @@
-"""
- pygments.formatters.rtf
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- A formatter that generates RTF files.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from collections import OrderedDict
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.style import _ansimap
-from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, surrogatepair
-
-
-__all__ = ['RtfFormatter']
-
-
-class RtfFormatter(Formatter):
- """
- Format tokens as RTF markup. This formatter automatically outputs full RTF
- documents with color information and other useful stuff. Perfect for Copy and
- Paste into Microsoft(R) Word(R) documents.
-
- Please note that ``encoding`` and ``outencoding`` options are ignored.
- The RTF format is ASCII natively, but handles unicode characters correctly
- thanks to escape sequences.
-
- .. versionadded:: 0.6
-
- Additional options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `fontface`
- The used font family, for example ``Bitstream Vera Sans``. Defaults to
- some generic font which is supposed to have fixed width.
-
- `fontsize`
- Size of the font used. Size is specified in half points. The
- default is 24 half-points, giving a size 12 font.
-
- .. versionadded:: 2.0
-
- `linenos`
- Turn on line numbering (default: ``False``).
-
- .. versionadded:: 2.18
-
- `lineno_fontsize`
- Font size for line numbers. Size is specified in half points
- (default: `fontsize`).
-
- .. versionadded:: 2.18
-
- `lineno_padding`
- Number of spaces between the (inline) line numbers and the
- source code (default: ``2``).
-
- .. versionadded:: 2.18
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- .. versionadded:: 2.18
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- .. versionadded:: 2.18
-
- `lineno_color`
- Color for line numbers specified as a hex triplet, e.g. ``'5e5e5e'``.
- Defaults to the style's line number color if it is a hex triplet,
- otherwise ansi bright black.
-
- .. versionadded:: 2.18
-
- `hl_lines`
- Specify a list of lines to be highlighted, as line numbers separated by
- spaces, e.g. ``'3 7 8'``. The line numbers are relative to the input
- (i.e. the first line is line 1) unless `hl_linenostart` is set.
-
- .. versionadded:: 2.18
-
- `hl_color`
- Color for highlighting the lines specified in `hl_lines`, specified as
- a hex triplet (default: style's `highlight_color`).
-
- .. versionadded:: 2.18
-
- `hl_linenostart`
- If set to ``True`` line numbers in `hl_lines` are specified
- relative to `linenostart` (default ``False``).
-
- .. versionadded:: 2.18
- """
- name = 'RTF'
- aliases = ['rtf']
- filenames = ['*.rtf']
-
- def __init__(self, **options):
- r"""
- Additional options accepted:
-
- ``fontface``
- Name of the font used. Could for example be ``'Courier New'``
- to further specify the default which is ``'\fmodern'``. The RTF
- specification claims that ``\fmodern`` are "Fixed-pitch serif
- and sans serif fonts". Hope every RTF implementation thinks
- the same about modern...
-
- """
- Formatter.__init__(self, **options)
- self.fontface = options.get('fontface') or ''
- self.fontsize = get_int_opt(options, 'fontsize', 0)
- self.linenos = get_bool_opt(options, 'linenos', False)
- self.lineno_fontsize = get_int_opt(options, 'lineno_fontsize',
- self.fontsize)
- self.lineno_padding = get_int_opt(options, 'lineno_padding', 2)
- self.linenostart = abs(get_int_opt(options, 'linenostart', 1))
- self.linenostep = abs(get_int_opt(options, 'linenostep', 1))
- self.hl_linenostart = get_bool_opt(options, 'hl_linenostart', False)
-
- self.hl_color = options.get('hl_color', '')
- if not self.hl_color:
- self.hl_color = self.style.highlight_color
-
- self.hl_lines = []
- for lineno in get_list_opt(options, 'hl_lines', []):
- try:
- lineno = int(lineno)
- if self.hl_linenostart:
- lineno = lineno - self.linenostart + 1
- self.hl_lines.append(lineno)
- except ValueError:
- pass
-
- self.lineno_color = options.get('lineno_color', '')
- if not self.lineno_color:
- if self.style.line_number_color == 'inherit':
- # style color is the css value 'inherit'
- # default to ansi bright-black
- self.lineno_color = _ansimap['ansibrightblack']
- else:
- # style color is assumed to be a hex triplet as other
- # colors in pygments/style.py
- self.lineno_color = self.style.line_number_color
-
- self.color_mapping = self._create_color_mapping()
-
- def _escape(self, text):
- return text.replace('\\', '\\\\') \
- .replace('{', '\\{') \
- .replace('}', '\\}')
-
- def _escape_text(self, text):
- # empty strings, should give a small performance improvement
- if not text:
- return ''
-
- # escape text
- text = self._escape(text)
-
- buf = []
- for c in text:
- cn = ord(c)
- if cn < (2**7):
- # ASCII character
- buf.append(str(c))
- elif (2**7) <= cn < (2**16):
- # single unicode escape sequence
- buf.append('{\\u%d}' % cn)
- elif (2**16) <= cn:
- # RTF limits unicode to 16 bits.
- # Force surrogate pairs
- buf.append('{\\u%d}{\\u%d}' % surrogatepair(cn))
-
- return ''.join(buf).replace('\n', '\\par')
-
- @staticmethod
- def hex_to_rtf_color(hex_color):
- if hex_color[0] == "#":
- hex_color = hex_color[1:]
-
- return '\\red%d\\green%d\\blue%d;' % (
- int(hex_color[0:2], 16),
- int(hex_color[2:4], 16),
- int(hex_color[4:6], 16)
- )
-
- def _split_tokens_on_newlines(self, tokensource):
- """
- Split tokens containing newline characters into multiple token
- each representing a line of the input file. Needed for numbering
- lines of e.g. multiline comments.
- """
- for ttype, value in tokensource:
- if value == '\n':
- yield (ttype, value)
- elif "\n" in value:
- lines = value.split("\n")
- for line in lines[:-1]:
- yield (ttype, line+"\n")
- if lines[-1]:
- yield (ttype, lines[-1])
- else:
- yield (ttype, value)
-
- def _create_color_mapping(self):
- """
- Create a mapping of style hex colors to index/offset in
- the RTF color table.
- """
- color_mapping = OrderedDict()
- offset = 1
-
- if self.linenos:
- color_mapping[self.lineno_color] = offset
- offset += 1
-
- if self.hl_lines:
- color_mapping[self.hl_color] = offset
- offset += 1
-
- for _, style in self.style:
- for color in style['color'], style['bgcolor'], style['border']:
- if color and color not in color_mapping:
- color_mapping[color] = offset
- offset += 1
-
- return color_mapping
-
- @property
- def _lineno_template(self):
- if self.lineno_fontsize != self.fontsize:
- return '{{\\fs{} \\cf{} %s{}}}'.format(self.lineno_fontsize,
- self.color_mapping[self.lineno_color],
- " " * self.lineno_padding)
-
- return '{{\\cf{} %s{}}}'.format(self.color_mapping[self.lineno_color],
- " " * self.lineno_padding)
-
- @property
- def _hl_open_str(self):
- return rf'{{\highlight{self.color_mapping[self.hl_color]} '
-
- @property
- def _rtf_header(self):
- lines = []
- # rtf 1.8 header
- lines.append('{\\rtf1\\ansi\\uc0\\deff0'
- '{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
- % (self.fontface and ' '
- + self._escape(self.fontface) or ''))
-
- # color table
- lines.append('{\\colortbl;')
- for color, _ in self.color_mapping.items():
- lines.append(self.hex_to_rtf_color(color))
- lines.append('}')
-
- # font and fontsize
- lines.append('\\f0\\sa0')
- if self.fontsize:
- lines.append('\\fs%d' % self.fontsize)
-
- # ensure Libre Office Writer imports and renders consecutive
- # space characters the same width, needed for line numbering.
- # https://bugs.documentfoundation.org/show_bug.cgi?id=144050
- lines.append('\\dntblnsbdb')
-
- return lines
-
- def format_unencoded(self, tokensource, outfile):
- for line in self._rtf_header:
- outfile.write(line + "\n")
-
- tokensource = self._split_tokens_on_newlines(tokensource)
-
- # first pass of tokens to count lines, needed for line numbering
- if self.linenos:
- line_count = 0
- tokens = [] # for copying the token source generator
- for ttype, value in tokensource:
- tokens.append((ttype, value))
- if value.endswith("\n"):
- line_count += 1
-
- # width of line number strings (for padding with spaces)
- linenos_width = len(str(line_count+self.linenostart-1))
-
- tokensource = tokens
-
- # highlight stream
- lineno = 1
- start_new_line = True
- for ttype, value in tokensource:
- if start_new_line and lineno in self.hl_lines:
- outfile.write(self._hl_open_str)
-
- if start_new_line and self.linenos:
- if (lineno-self.linenostart+1)%self.linenostep == 0:
- current_lineno = lineno + self.linenostart - 1
- lineno_str = str(current_lineno).rjust(linenos_width)
- else:
- lineno_str = "".rjust(linenos_width)
- outfile.write(self._lineno_template % lineno_str)
-
- while not self.style.styles_token(ttype) and ttype.parent:
- ttype = ttype.parent
- style = self.style.style_for_token(ttype)
- buf = []
- if style['bgcolor']:
- buf.append('\\cb%d' % self.color_mapping[style['bgcolor']])
- if style['color']:
- buf.append('\\cf%d' % self.color_mapping[style['color']])
- if style['bold']:
- buf.append('\\b')
- if style['italic']:
- buf.append('\\i')
- if style['underline']:
- buf.append('\\ul')
- if style['border']:
- buf.append('\\chbrdr\\chcfpat%d' %
- self.color_mapping[style['border']])
- start = ''.join(buf)
- if start:
- outfile.write(f'{{{start} ')
- outfile.write(self._escape_text(value))
- if start:
- outfile.write('}')
- start_new_line = False
-
- # complete line of input
- if value.endswith("\n"):
- # close line highlighting
- if lineno in self.hl_lines:
- outfile.write('}')
- # newline in RTF file after closing }
- outfile.write("\n")
-
- start_new_line = True
- lineno += 1
-
- outfile.write('}\n')
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/svg.py b/contrib/python/pip/pip/_vendor/pygments/formatters/svg.py
deleted file mode 100644
index d3e018ffd80..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/svg.py
+++ /dev/null
@@ -1,185 +0,0 @@
-"""
- pygments.formatters.svg
- ~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for SVG output.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.token import Comment
-from pip._vendor.pygments.util import get_bool_opt, get_int_opt
-
-__all__ = ['SvgFormatter']
-
-
-def escape_html(text):
- """Escape &, <, > as well as single and double quotes for HTML."""
- return text.replace('&', '&amp;'). \
- replace('<', '&lt;'). \
- replace('>', '&gt;'). \
- replace('"', '&quot;'). \
- replace("'", '&#39;')
-
-
-class2style = {}
-
-class SvgFormatter(Formatter):
- """
- Format tokens as an SVG graphics file. This formatter is still experimental.
- Each line of code is a ``<text>`` element with explicit ``x`` and ``y``
- coordinates containing ``<tspan>`` elements with the individual token styles.
-
- By default, this formatter outputs a full SVG document including doctype
- declaration and the ``<svg>`` root element.
-
- .. versionadded:: 0.9
-
- Additional options accepted:
-
- `nowrap`
- Don't wrap the SVG ``<text>`` elements in ``<svg><g>`` elements and
- don't add a XML declaration and a doctype. If true, the `fontfamily`
- and `fontsize` options are ignored. Defaults to ``False``.
-
- `fontfamily`
- The value to give the wrapping ``<g>`` element's ``font-family``
- attribute, defaults to ``"monospace"``.
-
- `fontsize`
- The value to give the wrapping ``<g>`` element's ``font-size``
- attribute, defaults to ``"14px"``.
-
- `linenos`
- If ``True``, add line numbers (default: ``False``).
-
- `linenostart`
- The line number for the first line (default: ``1``).
-
- `linenostep`
- If set to a number n > 1, only every nth line number is printed.
-
- `linenowidth`
- Maximum width devoted to line numbers (default: ``3*ystep``, sufficient
- for up to 4-digit line numbers. Increase width for longer code blocks).
-
- `xoffset`
- Starting offset in X direction, defaults to ``0``.
-
- `yoffset`
- Starting offset in Y direction, defaults to the font size if it is given
- in pixels, or ``20`` else. (This is necessary since text coordinates
- refer to the text baseline, not the top edge.)
-
- `ystep`
- Offset to add to the Y coordinate for each subsequent line. This should
- roughly be the text size plus 5. It defaults to that value if the text
- size is given in pixels, or ``25`` else.
-
- `spacehack`
- Convert spaces in the source to ``&#160;``, which are non-breaking
- spaces. SVG provides the ``xml:space`` attribute to control how
- whitespace inside tags is handled, in theory, the ``preserve`` value
- could be used to keep all whitespace as-is. However, many current SVG
- viewers don't obey that rule, so this option is provided as a workaround
- and defaults to ``True``.
- """
- name = 'SVG'
- aliases = ['svg']
- filenames = ['*.svg']
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.nowrap = get_bool_opt(options, 'nowrap', False)
- self.fontfamily = options.get('fontfamily', 'monospace')
- self.fontsize = options.get('fontsize', '14px')
- self.xoffset = get_int_opt(options, 'xoffset', 0)
- fs = self.fontsize.strip()
- if fs.endswith('px'):
- fs = fs[:-2].strip()
- try:
- int_fs = int(fs)
- except ValueError:
- int_fs = 20
- self.yoffset = get_int_opt(options, 'yoffset', int_fs)
- self.ystep = get_int_opt(options, 'ystep', int_fs + 5)
- self.spacehack = get_bool_opt(options, 'spacehack', True)
- self.linenos = get_bool_opt(options,'linenos',False)
- self.linenostart = get_int_opt(options,'linenostart',1)
- self.linenostep = get_int_opt(options,'linenostep',1)
- self.linenowidth = get_int_opt(options,'linenowidth', 3*self.ystep)
- self._stylecache = {}
-
- def format_unencoded(self, tokensource, outfile):
- """
- Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
- tuples and write it into ``outfile``.
-
- For our implementation we put all lines in their own 'line group'.
- """
- x = self.xoffset
- y = self.yoffset
- if not self.nowrap:
- if self.encoding:
- outfile.write(f'<?xml version="1.0" encoding="{self.encoding}"?>\n')
- else:
- outfile.write('<?xml version="1.0"?>\n')
- outfile.write('<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" '
- '"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/'
- 'svg10.dtd">\n')
- outfile.write('<svg xmlns="http://www.w3.org/2000/svg">\n')
- outfile.write(f'<g font-family="{self.fontfamily}" font-size="{self.fontsize}">\n')
-
- counter = self.linenostart
- counter_step = self.linenostep
- counter_style = self._get_style(Comment)
- line_x = x
-
- if self.linenos:
- if counter % counter_step == 0:
- outfile.write(f'<text x="{x+self.linenowidth}" y="{y}" {counter_style} text-anchor="end">{counter}</text>')
- line_x += self.linenowidth + self.ystep
- counter += 1
-
- outfile.write(f'<text x="{line_x}" y="{y}" xml:space="preserve">')
- for ttype, value in tokensource:
- style = self._get_style(ttype)
- tspan = style and '<tspan' + style + '>' or ''
- tspanend = tspan and '</tspan>' or ''
- value = escape_html(value)
- if self.spacehack:
- value = value.expandtabs().replace(' ', '&#160;')
- parts = value.split('\n')
- for part in parts[:-1]:
- outfile.write(tspan + part + tspanend)
- y += self.ystep
- outfile.write('</text>\n')
- if self.linenos and counter % counter_step == 0:
- outfile.write(f'<text x="{x+self.linenowidth}" y="{y}" text-anchor="end" {counter_style}>{counter}</text>')
-
- counter += 1
- outfile.write(f'<text x="{line_x}" y="{y}" ' 'xml:space="preserve">')
- outfile.write(tspan + parts[-1] + tspanend)
- outfile.write('</text>')
-
- if not self.nowrap:
- outfile.write('</g></svg>\n')
-
- def _get_style(self, tokentype):
- if tokentype in self._stylecache:
- return self._stylecache[tokentype]
- otokentype = tokentype
- while not self.style.styles_token(tokentype):
- tokentype = tokentype.parent
- value = self.style.style_for_token(tokentype)
- result = ''
- if value['color']:
- result = ' fill="#' + value['color'] + '"'
- if value['bold']:
- result += ' font-weight="bold"'
- if value['italic']:
- result += ' font-style="italic"'
- self._stylecache[otokentype] = result
- return result
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/terminal.py b/contrib/python/pip/pip/_vendor/pygments/formatters/terminal.py
deleted file mode 100644
index 51b902d3e24..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/terminal.py
+++ /dev/null
@@ -1,127 +0,0 @@
-"""
- pygments.formatters.terminal
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for terminal output with ANSI sequences.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.token import Keyword, Name, Comment, String, Error, \
- Number, Operator, Generic, Token, Whitespace
-from pip._vendor.pygments.console import ansiformat
-from pip._vendor.pygments.util import get_choice_opt
-
-
-__all__ = ['TerminalFormatter']
-
-
-#: Map token types to a tuple of color values for light and dark
-#: backgrounds.
-TERMINAL_COLORS = {
- Token: ('', ''),
-
- Whitespace: ('gray', 'brightblack'),
- Comment: ('gray', 'brightblack'),
- Comment.Preproc: ('cyan', 'brightcyan'),
- Keyword: ('blue', 'brightblue'),
- Keyword.Type: ('cyan', 'brightcyan'),
- Operator.Word: ('magenta', 'brightmagenta'),
- Name.Builtin: ('cyan', 'brightcyan'),
- Name.Function: ('green', 'brightgreen'),
- Name.Namespace: ('_cyan_', '_brightcyan_'),
- Name.Class: ('_green_', '_brightgreen_'),
- Name.Exception: ('cyan', 'brightcyan'),
- Name.Decorator: ('brightblack', 'gray'),
- Name.Variable: ('red', 'brightred'),
- Name.Constant: ('red', 'brightred'),
- Name.Attribute: ('cyan', 'brightcyan'),
- Name.Tag: ('brightblue', 'brightblue'),
- String: ('yellow', 'yellow'),
- Number: ('blue', 'brightblue'),
-
- Generic.Deleted: ('brightred', 'brightred'),
- Generic.Inserted: ('green', 'brightgreen'),
- Generic.Heading: ('**', '**'),
- Generic.Subheading: ('*magenta*', '*brightmagenta*'),
- Generic.Prompt: ('**', '**'),
- Generic.Error: ('brightred', 'brightred'),
-
- Error: ('_brightred_', '_brightred_'),
-}
-
-
-class TerminalFormatter(Formatter):
- r"""
- Format tokens with ANSI color sequences, for output in a text console.
- Color sequences are terminated at newlines, so that paging the output
- works correctly.
-
- The `get_style_defs()` method doesn't do anything special since there is
- no support for common styles.
-
- Options accepted:
-
- `bg`
- Set to ``"light"`` or ``"dark"`` depending on the terminal's background
- (default: ``"light"``).
-
- `colorscheme`
- A dictionary mapping token types to (lightbg, darkbg) color names or
- ``None`` (default: ``None`` = use builtin colorscheme).
-
- `linenos`
- Set to ``True`` to have line numbers on the terminal output as well
- (default: ``False`` = no line numbers).
- """
- name = 'Terminal'
- aliases = ['terminal', 'console']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
- self.darkbg = get_choice_opt(options, 'bg',
- ['light', 'dark'], 'light') == 'dark'
- self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
- def format(self, tokensource, outfile):
- return Formatter.format(self, tokensource, outfile)
-
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
-
- def _get_color(self, ttype):
- # self.colorscheme is a dict containing usually generic types, so we
- # have to walk the tree of dots. The base Token type must be a key,
- # even if it's empty string, as in the default above.
- colors = self.colorscheme.get(ttype)
- while colors is None:
- ttype = ttype.parent
- colors = self.colorscheme.get(ttype)
- return colors[self.darkbg]
-
- def format_unencoded(self, tokensource, outfile):
- if self.linenos:
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- color = self._get_color(ttype)
-
- for line in value.splitlines(True):
- if color:
- outfile.write(ansiformat(color, line.rstrip('\n')))
- else:
- outfile.write(line.rstrip('\n'))
- if line.endswith('\n'):
- if self.linenos:
- self._write_lineno(outfile)
- else:
- outfile.write('\n')
-
- if self.linenos:
- outfile.write("\n")
diff --git a/contrib/python/pip/pip/_vendor/pygments/formatters/terminal256.py b/contrib/python/pip/pip/_vendor/pygments/formatters/terminal256.py
deleted file mode 100644
index 5f254051a80..00000000000
--- a/contrib/python/pip/pip/_vendor/pygments/formatters/terminal256.py
+++ /dev/null
@@ -1,338 +0,0 @@
-"""
- pygments.formatters.terminal256
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
- Formatter for 256-color terminal output with ANSI sequences.
-
- RGB-to-XTERM color conversion routines adapted from xterm256-conv
- tool (http://frexx.de/xterm-256-notes/data/xterm256-conv2.tar.bz2)
- by Wolfgang Frisch.
-
- Formatter version 1.
-
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
- :license: BSD, see LICENSE for details.
-"""
-
-# TODO:
-# - Options to map style's bold/underline/italic/border attributes
-# to some ANSI attrbutes (something like 'italic=underline')
-# - An option to output "style RGB to xterm RGB/index" conversion table
-# - An option to indicate that we are running in "reverse background"
-# xterm. This means that default colors are white-on-black, not
-# black-on-while, so colors like "white background" need to be converted
-# to "white background, black foreground", etc...
-
-from pip._vendor.pygments.formatter import Formatter
-from pip._vendor.pygments.console import codes
-from pip._vendor.pygments.style import ansicolors
-
-
-__all__ = ['Terminal256Formatter', 'TerminalTrueColorFormatter']
-
-
-class EscapeSequence:
- def __init__(self, fg=None, bg=None, bold=False, underline=False, italic=False):
- self.fg = fg
- self.bg = bg
- self.bold = bold
- self.underline = underline
- self.italic = italic
-
- def escape(self, attrs):
- if len(attrs):
- return "\x1b[" + ";".join(attrs) + "m"
- return ""
-
- def color_string(self):
- attrs = []
- if self.fg is not None:
- if self.fg in ansicolors:
- esc = codes[self.fg.replace('ansi','')]
- if ';01m' in esc:
- self.bold = True
- # extract fg color code.
- attrs.append(esc[2:4])
- else:
- attrs.extend(("38", "5", "%i" % self.fg))
- if self.bg is not None:
- if self.bg in ansicolors:
- esc = codes[self.bg.replace('ansi','')]
- # extract fg color code, add 10 for bg.
- attrs.append(str(int(esc[2:4])+10))
- else:
- attrs.extend(("48", "5", "%i" % self.bg))
- if self.bold:
- attrs.append("01")
- if self.underline:
- attrs.append("04")
- if self.italic:
- attrs.append("03")
- return self.escape(attrs)
-
- def true_color_string(self):
- attrs = []
- if self.fg:
- attrs.extend(("38", "2", str(self.fg[0]), str(self.fg[1]), str(self.fg[2])))
- if self.bg:
- attrs.extend(("48", "2", str(self.bg[0]), str(self.bg[1]), str(self.bg[2])))
- if self.bold:
- attrs.append("01")
- if self.underline:
- attrs.append("04")
- if self.italic:
- attrs.append("03")
- return self.escape(attrs)
-
- def reset_string(self):
- attrs = []
- if self.fg is not None:
- attrs.append("39")
- if self.bg is not None:
- attrs.append("49")
- if self.bold or self.underline or self.italic:
- attrs.append("00")
- return self.escape(attrs)
-
-
-class Terminal256Formatter(Formatter):
- """
- Format tokens with ANSI color sequences, for output in a 256-color
- terminal or console. Like in `TerminalFormatter` color sequences
- are terminated at newlines, so that paging the output works correctly.
-
- The formatter takes colors from a style defined by the `style` option
- and converts them to nearest ANSI 256-color escape sequences. Bold and
- underline attributes from the style are preserved (and displayed).
-
- .. versionadded:: 0.9
-
- .. versionchanged:: 2.2
- If the used style defines foreground colors in the form ``#ansi*``, then
- `Terminal256Formatter` will map these to non extended foreground color.
- See :ref:`AnsiTerminalStyle` for more information.
-
- .. versionchanged:: 2.4
- The ANSI color names have been updated with names that are easier to
- understand and align with colornames of other projects and terminals.
- See :ref:`this table <new-ansi-color-names>` for more information.
-
-
- Options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
-
- `linenos`
- Set to ``True`` to have line numbers on the terminal output as well
- (default: ``False`` = no line numbers).
- """
- name = 'Terminal256'
- aliases = ['terminal256', 'console256', '256']
- filenames = []
-
- def __init__(self, **options):
- Formatter.__init__(self, **options)
-
- self.xterm_colors = []
- self.best_match = {}
- self.style_string = {}
-
- self.usebold = 'nobold' not in options
- self.useunderline = 'nounderline' not in options
- self.useitalic = 'noitalic' not in options
-
- self._build_color_table() # build an RGB-to-256 color conversion table
- self._setup_styles() # convert selected style's colors to term. colors
-
- self.linenos = options.get('linenos', False)
- self._lineno = 0
-
- def _build_color_table(self):
- # colors 0..15: 16 basic colors
-
- self.xterm_colors.append((0x00, 0x00, 0x00)) # 0
- self.xterm_colors.append((0xcd, 0x00, 0x00)) # 1
- self.xterm_colors.append((0x00, 0xcd, 0x00)) # 2
- self.xterm_colors.append((0xcd, 0xcd, 0x00)) # 3
- self.xterm_colors.append((0x00, 0x00, 0xee)) # 4
- self.xterm_colors.append((0xcd, 0x00, 0xcd)) # 5
- self.xterm_colors.append((0x00, 0xcd, 0xcd)) # 6
- self.xterm_colors.append((0xe5, 0xe5, 0xe5)) # 7
- self.xterm_colors.append((0x7f, 0x7f, 0x7f)) # 8
- self.xterm_colors.append((0xff, 0x00, 0x00)) # 9
- self.xterm_colors.append((0x00, 0xff, 0x00)) # 10
- self.xterm_colors.append((0xff, 0xff, 0x00)) # 11
- self.xterm_colors.append((0x5c, 0x5c, 0xff)) # 12
- self.xterm_colors.append((0xff, 0x00, 0xff)) # 13
- self.xterm_colors.append((0x00, 0xff, 0xff)) # 14
- self.xterm_colors.append((0xff, 0xff, 0xff)) # 15
-
- # colors 16..232: the 6x6x6 color cube
-
- valuerange = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
-
- for i in range(217):
- r = valuerange[(i // 36) % 6]
- g = valuerange[(i // 6) % 6]
- b = valuerange[i % 6]
- self.xterm_colors.append((r, g, b))
-
- # colors 233..253: grayscale
-
- for i in range(1, 22):
- v = 8 + i * 10
- self.xterm_colors.append((v, v, v))
-
- def _closest_color(self, r, g, b):
- distance = 257*257*3 # "infinity" (>distance from #000000 to #ffffff)
- match = 0
-
- for i in range(0, 254):
- values = self.xterm_colors[i]
-
- rd = r - values[0]
- gd = g - values[1]
- bd = b - values[2]
- d = rd*rd + gd*gd + bd*bd
-
- if d < distance:
- match = i
- distance = d
- return match
-
- def _color_index(self, color):
- index = self.best_match.get(color, None)
- if color in ansicolors:
- # strip the `ansi/#ansi` part and look up code
- index = color
- self.best_match[color] = index
- if index is None:
- try:
- rgb = int(str(color), 16)
- except ValueError:
- rgb = 0
-
- r = (rgb >> 16) & 0xff
- g = (rgb >> 8) & 0xff
- b = rgb & 0xff
- index = self._closest_color(r, g, b)
- self.best_match[color] = index
- return index
-
- def _setup_styles(self):
- for ttype, ndef in self.style:
- escape = EscapeSequence()
- # get foreground from ansicolor if set
- if ndef['ansicolor']:
- escape.fg = self._color_index(ndef['ansicolor'])
- elif ndef['color']:
- escape.fg = self._color_index(ndef['color'])
- if ndef['bgansicolor']:
- escape.bg = self._color_index(ndef['bgansicolor'])
- elif ndef['bgcolor']:
- escape.bg = self._color_index(ndef['bgcolor'])
- if self.usebold and ndef['bold']:
- escape.bold = True
- if self.useunderline and ndef['underline']:
- escape.underline = True
- if self.useitalic and ndef['italic']:
- escape.italic = True
- self.style_string[str(ttype)] = (escape.color_string(),
- escape.reset_string())
-
- def _write_lineno(self, outfile):
- self._lineno += 1
- outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))
-
- def format(self, tokensource, outfile):
- return Formatter.format(self, tokensource, outfile)
-
- def format_unencoded(self, tokensource, outfile):
- if self.linenos:
- self._write_lineno(outfile)
-
- for ttype, value in tokensource:
- not_found = True
- while ttype and not_found:
- try:
- # outfile.write( "<" + str(ttype) + ">" )
- on, off = self.style_string[str(ttype)]
-
- # Like TerminalFormatter, add "reset colors" escape sequence
- # on newline.
- spl = value.split('\n')
- for line in spl[:-1]:
- if line:
- outfile.write(on + line + off)
- if self.linenos:
- self._write_lineno(outfile)
- else:
- outfile.write('\n')
-
- if spl[-1]:
- outfile.write(on + spl[-1] + off)
-
- not_found = False
- # outfile.write( '#' + str(ttype) + '#' )
-
- except KeyError:
- # ottype = ttype
- ttype = ttype.parent
- # outfile.write( '!' + str(ottype) + '->' + str(ttype) + '!' )
-
- if not_found:
- outfile.write(value)
-
- if self.linenos:
- outfile.write("\n")
-
-
-
-class TerminalTrueColorFormatter(Terminal256Formatter):
- r"""
- Format tokens with ANSI color sequences, for output in a true-color
- terminal or console. Like in `TerminalFormatter` color sequences
- are terminated at newlines, so that paging the output works correctly.
-
- .. versionadded:: 2.1
-
- Options accepted:
-
- `style`
- The style to use, can be a string or a Style subclass (default:
- ``'default'``).
- """
- name = 'TerminalTrueColor'
- aliases = ['terminal16m', 'console16m', '16m']
- filenames = []
-
- def _build_color_table(self):
- pass
-
- def _color_tuple(self, color):
- try:
- rgb = int(str(color), 16)
- except ValueError:
- return None
- r = (rgb >> 16) & 0xff
- g = (rgb >> 8) & 0xff
- b = rgb & 0xff
- return (r, g, b)
-
- def _setup_styles(self):
- for ttype, ndef in self.style:
- escape = EscapeSequence()
- if ndef['color']:
- escape.fg = self._color_tuple(ndef['color'])
- if ndef['bgcolor']:
- escape.bg = self._color_tuple(ndef['bgcolor'])
- if self.usebold and ndef['bold']:
- escape.bold = True
- if self.useunderline and ndef['underline']:
- escape.underline = True
- if self.useitalic and ndef['italic']:
- escape.italic = True
- self.style_string[str(ttype)] = (escape.true_color_string(),
- escape.reset_string())
diff --git a/contrib/python/pip/pip/_vendor/pygments/lexer.py b/contrib/python/pip/pip/_vendor/pygments/lexer.py
index 1348be58782..c05aa8196d4 100644
--- a/contrib/python/pip/pip/_vendor/pygments/lexer.py
+++ b/contrib/python/pip/pip/_vendor/pygments/lexer.py
@@ -4,7 +4,7 @@
Base lexer classes.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/lexers/__init__.py b/contrib/python/pip/pip/_vendor/pygments/lexers/__init__.py
index ac88645a1b0..49184ec8a32 100644
--- a/contrib/python/pip/pip/_vendor/pygments/lexers/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/lexers/__init__.py
@@ -4,7 +4,7 @@
Pygments lexers.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/lexers/_mapping.py b/contrib/python/pip/pip/_vendor/pygments/lexers/_mapping.py
index f3e5c460db3..c0d6a8ad285 100644
--- a/contrib/python/pip/pip/_vendor/pygments/lexers/_mapping.py
+++ b/contrib/python/pip/pip/_vendor/pygments/lexers/_mapping.py
@@ -93,6 +93,7 @@ LEXERS = {
'ClojureScriptLexer': ('pip._vendor.pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
'CobolFreeformatLexer': ('pip._vendor.pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
'CobolLexer': ('pip._vendor.pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
+ 'CodeQLLexer': ('pip._vendor.pygments.lexers.codeql', 'CodeQL', ('codeql', 'ql'), ('*.ql', '*.qll'), ()),
'CoffeeScriptLexer': ('pip._vendor.pygments.lexers.javascript', 'CoffeeScript', ('coffeescript', 'coffee-script', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
'ColdfusionCFCLexer': ('pip._vendor.pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
'ColdfusionHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
@@ -127,6 +128,7 @@ LEXERS = {
'Dasm16Lexer': ('pip._vendor.pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
'DaxLexer': ('pip._vendor.pygments.lexers.dax', 'Dax', ('dax',), ('*.dax',), ()),
'DebianControlLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()),
+ 'DebianSourcesLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Sources file', ('debian.sources',), ('*.sources',), ()),
'DelphiLexer': ('pip._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
'DesktopLexer': ('pip._vendor.pygments.lexers.configs', 'Desktop file', ('desktop',), ('*.desktop',), ('application/x-desktop',)),
'DevicetreeLexer': ('pip._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)),
@@ -155,9 +157,9 @@ LEXERS = {
'ErbLexer': ('pip._vendor.pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
'ErlangLexer': ('pip._vendor.pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
'ErlangShellLexer': ('pip._vendor.pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
- 'EvoqueHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
+ 'EvoqueHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), (), ('text/html+evoque',)),
'EvoqueLexer': ('pip._vendor.pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
- 'EvoqueXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
+ 'EvoqueXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), (), ('application/xml+evoque',)),
'ExeclineLexer': ('pip._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()),
'EzhilLexer': ('pip._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
'FSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi', '*.fsx'), ('text/x-fsharp',)),
@@ -189,10 +191,12 @@ LEXERS = {
'GenshiTextLexer': ('pip._vendor.pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
'GettextLexer': ('pip._vendor.pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
'GherkinLexer': ('pip._vendor.pygments.lexers.testing', 'Gherkin', ('gherkin', 'cucumber'), ('*.feature',), ('text/x-gherkin',)),
+ 'GleamLexer': ('pip._vendor.pygments.lexers.gleam', 'Gleam', ('gleam',), ('*.gleam',), ('text/x-gleam',)),
'GnuplotLexer': ('pip._vendor.pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
'GoLexer': ('pip._vendor.pygments.lexers.go', 'Go', ('go', 'golang'), ('*.go',), ('text/x-gosrc',)),
'GoloLexer': ('pip._vendor.pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
'GoodDataCLLexer': ('pip._vendor.pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
+ 'GoogleSqlLexer': ('pip._vendor.pygments.lexers.sql', 'GoogleSQL', ('googlesql', 'zetasql'), ('*.googlesql', '*.googlesql.sql'), ('text/x-google-sql', 'text/x-google-sql-aux')),
'GosuLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
'GosuTemplateLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
'GraphQLLexer': ('pip._vendor.pygments.lexers.graphql', 'GraphQL', ('graphql',), ('*.graphql',), ()),
@@ -204,6 +208,7 @@ LEXERS = {
'HamlLexer': ('pip._vendor.pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
'HandlebarsHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
'HandlebarsLexer': ('pip._vendor.pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
+ 'HareLexer': ('pip._vendor.pygments.lexers.hare', 'Hare', ('hare',), ('*.ha',), ('text/x-hare',)),
'HaskellLexer': ('pip._vendor.pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
'HaxeLexer': ('pip._vendor.pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
'HexdumpLexer': ('pip._vendor.pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
@@ -246,6 +251,7 @@ LEXERS = {
'JavascriptUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'Javascript+UL4', ('js+ul4',), ('*.jsul4',), ()),
'JclLexer': ('pip._vendor.pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
'JsgfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
+ 'Json5Lexer': ('pip._vendor.pygments.lexers.json5', 'JSON5', ('json5',), ('*.json5',), ()),
'JsonBareObjectLexer': ('pip._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()),
'JsonLdLexer': ('pip._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
'JsonLexer': ('pip._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', '*.jsonl', '*.ndjson', 'Pipfile.lock'), ('application/json', 'application/json-object', 'application/x-ndjson', 'application/jsonl', 'application/json-seq')),
@@ -303,6 +309,7 @@ LEXERS = {
'MakoJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Mako', ('javascript+mako', 'js+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
'MakoLexer': ('pip._vendor.pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
'MakoXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
+ 'MapleLexer': ('pip._vendor.pygments.lexers.maple', 'Maple', ('maple',), ('*.mpl', '*.mi', '*.mm'), ('text/x-maple',)),
'MaqlLexer': ('pip._vendor.pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
'MarkdownLexer': ('pip._vendor.pygments.lexers.markup', 'Markdown', ('markdown', 'md'), ('*.md', '*.markdown'), ('text/x-markdown',)),
'MaskLexer': ('pip._vendor.pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
@@ -354,6 +361,7 @@ LEXERS = {
'NotmuchLexer': ('pip._vendor.pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
'NuSMVLexer': ('pip._vendor.pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
'NumPyLexer': ('pip._vendor.pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
+ 'NumbaIRLexer': ('pip._vendor.pygments.lexers.numbair', 'Numba_IR', ('numba_ir', 'numbair'), ('*.numba_ir',), ('text/x-numba_ir', 'text/x-numbair')),
'ObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
'ObjectiveCLexer': ('pip._vendor.pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
'ObjectiveCppLexer': ('pip._vendor.pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
@@ -372,6 +380,7 @@ LEXERS = {
'PanLexer': ('pip._vendor.pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
'ParaSailLexer': ('pip._vendor.pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
'PawnLexer': ('pip._vendor.pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
+ 'PddlLexer': ('pip._vendor.pygments.lexers.pddl', 'PDDL', ('pddl',), ('*.pddl',), ()),
'PegLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)),
'Perl6Lexer': ('pip._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')),
'PerlLexer': ('pip._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')),
@@ -407,7 +416,7 @@ LEXERS = {
'Python2Lexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
'Python2TracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
'PythonConsoleLexer': ('pip._vendor.pygments.lexers.python', 'Python console session', ('pycon', 'python-console'), (), ('text/x-python-doctest',)),
- 'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
+ 'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark', 'pyi'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
'PythonTracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
'PythonUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'Python+UL4', ('py+ul4',), ('*.pyul4',), ()),
'QBasicLexer': ('pip._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
@@ -434,6 +443,7 @@ LEXERS = {
'RedLexer': ('pip._vendor.pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
'RedcodeLexer': ('pip._vendor.pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
'RegeditLexer': ('pip._vendor.pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
+ 'RegoLexer': ('pip._vendor.pygments.lexers.rego', 'Rego', ('rego',), ('*.rego',), ('text/x-rego',)),
'ResourceLexer': ('pip._vendor.pygments.lexers.resource', 'ResourceBundle', ('resourcebundle', 'resource'), (), ()),
'RexxLexer': ('pip._vendor.pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
'RhtmlLexer': ('pip._vendor.pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
@@ -501,6 +511,7 @@ LEXERS = {
'TAPLexer': ('pip._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
'TNTLexer': ('pip._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()),
'TOMLLexer': ('pip._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ('application/toml',)),
+ 'TableGenLexer': ('pip._vendor.pygments.lexers.tablegen', 'TableGen', ('tablegen', 'td'), ('*.td',), ()),
'TactLexer': ('pip._vendor.pygments.lexers.tact', 'Tact', ('tact',), ('*.tact',), ()),
'Tads3Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
'TalLexer': ('pip._vendor.pygments.lexers.tal', 'Tal', ('tal', 'uxntal'), ('*.tal',), ('text/x-uxntal',)),
@@ -524,6 +535,7 @@ LEXERS = {
'TodotxtLexer': ('pip._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
'TransactSqlLexer': ('pip._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
'TreetopLexer': ('pip._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
+ 'TsxLexer': ('pip._vendor.pygments.lexers.jsx', 'TSX', ('tsx',), ('*.tsx',), ('text/typescript-tsx',)),
'TurtleLexer': ('pip._vendor.pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
'TwigHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
'TwigLexer': ('pip._vendor.pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
@@ -556,6 +568,7 @@ LEXERS = {
'VimLexer': ('pip._vendor.pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
'VisualPrologGrammarLexer': ('pip._vendor.pygments.lexers.vip', 'Visual Prolog Grammar', ('visualprologgrammar',), ('*.vipgrm',), ()),
'VisualPrologLexer': ('pip._vendor.pygments.lexers.vip', 'Visual Prolog', ('visualprolog',), ('*.pro', '*.cl', '*.i', '*.pack', '*.ph'), ()),
+ 'VueLexer': ('pip._vendor.pygments.lexers.html', 'Vue', ('vue',), ('*.vue',), ()),
'VyperLexer': ('pip._vendor.pygments.lexers.vyper', 'Vyper', ('vyper',), ('*.vy',), ()),
'WDiffLexer': ('pip._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()),
'WatLexer': ('pip._vendor.pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()),
diff --git a/contrib/python/pip/pip/_vendor/pygments/lexers/python.py b/contrib/python/pip/pip/_vendor/pygments/lexers/python.py
index b2d07f20800..1b78829617a 100644
--- a/contrib/python/pip/pip/_vendor/pygments/lexers/python.py
+++ b/contrib/python/pip/pip/_vendor/pygments/lexers/python.py
@@ -4,7 +4,7 @@
Lexers for Python and related languages.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
@@ -33,7 +33,7 @@ class PythonLexer(RegexLexer):
name = 'Python'
url = 'https://www.python.org'
- aliases = ['python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark']
+ aliases = ['python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark', 'pyi']
filenames = [
'*.py',
'*.pyw',
@@ -109,11 +109,11 @@ class PythonLexer(RegexLexer):
(r'\\', Text),
include('keywords'),
include('soft-keywords'),
- (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'funcname'),
+ (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'classname'),
+ (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
'fromimport'),
- (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
'import'),
include('expr'),
],
@@ -329,14 +329,14 @@ class PythonLexer(RegexLexer):
(uni_name, Name.Class, '#pop'),
],
'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'(\s+)(as)(\s+)', bygroups(Whitespace, Keyword, Whitespace)),
(r'\.', Name.Namespace),
(uni_name, Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ (r'(\s*)(,)(\s*)', bygroups(Whitespace, Operator, Whitespace)),
default('#pop') # all else: go back
],
'fromimport': [
- (r'(\s+)(import)\b', bygroups(Text, Keyword.Namespace), '#pop'),
+ (r'(\s+)(import)\b', bygroups(Whitespace, Keyword.Namespace), '#pop'),
(r'\.', Name.Namespace),
# if None occurs here, it's "raise x from None", since None can
# never be a module name
@@ -459,11 +459,11 @@ class Python2Lexer(RegexLexer):
(r'(in|is|and|or|not)\b', Operator.Word),
(r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator),
include('keywords'),
- (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
- (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'funcname'),
+ (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Whitespace), 'classname'),
+ (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
'fromimport'),
- (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
+ (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Whitespace),
'import'),
include('builtins'),
include('magicfuncs'),
@@ -635,6 +635,7 @@ class Python2Lexer(RegexLexer):
def analyse_text(text):
return shebang_matches(text, r'pythonw?2(\.\d)?')
+
class _PythonConsoleLexerBase(RegexLexer):
name = 'Python console session'
aliases = ['pycon', 'python-console']
@@ -671,6 +672,7 @@ class _PythonConsoleLexerBase(RegexLexer):
],
}
+
class PythonConsoleLexer(DelegatingLexer):
"""
For Python console output or doctests, such as:
@@ -719,6 +721,7 @@ class PythonConsoleLexer(DelegatingLexer):
super().__init__(pylexer, _PythonConsoleLexerBase, Other.Code, **options)
super().__init__(tblexer, _ReplaceInnerCode, Other.Traceback, **options)
+
class PythonTracebackLexer(RegexLexer):
"""
For Python 3.x tracebacks, with support for chained exceptions.
@@ -851,16 +854,16 @@ class CythonLexer(RegexLexer):
bygroups(Punctuation, Keyword.Type, Punctuation)),
(r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
(r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
- bygroups(Keyword, Number.Integer, Operator, Name, Operator,
+ bygroups(Keyword, Number.Integer, Operator, Whitespace, Operator,
Name, Punctuation)),
include('keywords'),
- (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
- (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
+ (r'(def|property)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
+ (r'(cp?def)(\s+)', bygroups(Keyword, Whitespace), 'cdef'),
# (should actually start a block with only cdefs)
(r'(cdef)(:)', bygroups(Keyword, Punctuation)),
- (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
- (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
- (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
+ (r'(class|struct)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
+ (r'(from)(\s+)', bygroups(Keyword, Whitespace), 'fromimport'),
+ (r'(c?import)(\s+)', bygroups(Keyword, Whitespace), 'import'),
include('builtins'),
include('backtick'),
('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
@@ -938,9 +941,9 @@ class CythonLexer(RegexLexer):
(r'(public|readonly|extern|api|inline)\b', Keyword.Reserved),
(r'(struct|enum|union|class)\b', Keyword),
(r'([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)',
- bygroups(Name.Function, Text), '#pop'),
+ bygroups(Name.Function, Whitespace), '#pop'),
(r'([a-zA-Z_]\w*)(\s*)(,)',
- bygroups(Name.Function, Text, Punctuation)),
+ bygroups(Name.Function, Whitespace, Punctuation)),
(r'from\b', Keyword, '#pop'),
(r'as\b', Keyword),
(r':', Punctuation, '#pop'),
@@ -952,13 +955,13 @@ class CythonLexer(RegexLexer):
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
- (r'(\s+)(as)(\s+)', bygroups(Text, Keyword, Text)),
+ (r'(\s+)(as)(\s+)', bygroups(Whitespace, Keyword, Whitespace)),
(r'[a-zA-Z_][\w.]*', Name.Namespace),
- (r'(\s*)(,)(\s*)', bygroups(Text, Operator, Text)),
+ (r'(\s*)(,)(\s*)', bygroups(Whitespace, Operator, Whitespace)),
default('#pop') # all else: go back
],
'fromimport': [
- (r'(\s+)(c?import)\b', bygroups(Text, Keyword), '#pop'),
+ (r'(\s+)(c?import)\b', bygroups(Whitespace, Keyword), '#pop'),
(r'[a-zA-Z_.][\w.]*', Name.Namespace),
# ``cdef foo from "header"``, or ``for foo from 0 < i < 10``
default('#pop'),
diff --git a/contrib/python/pip/pip/_vendor/pygments/modeline.py b/contrib/python/pip/pip/_vendor/pygments/modeline.py
index e4d9fe167bd..c310f0edbdc 100644
--- a/contrib/python/pip/pip/_vendor/pygments/modeline.py
+++ b/contrib/python/pip/pip/_vendor/pygments/modeline.py
@@ -4,7 +4,7 @@
A simple modeline parser (based on pymodeline).
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/plugin.py b/contrib/python/pip/pip/_vendor/pygments/plugin.py
index 2e462f2c2f9..498db423849 100644
--- a/contrib/python/pip/pip/_vendor/pygments/plugin.py
+++ b/contrib/python/pip/pip/_vendor/pygments/plugin.py
@@ -29,7 +29,7 @@
yourfilter = yourfilter:YourFilter
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from importlib.metadata import entry_points
diff --git a/contrib/python/pip/pip/_vendor/pygments/regexopt.py b/contrib/python/pip/pip/_vendor/pygments/regexopt.py
index c44eedbf2ad..cc8d2c31b54 100644
--- a/contrib/python/pip/pip/_vendor/pygments/regexopt.py
+++ b/contrib/python/pip/pip/_vendor/pygments/regexopt.py
@@ -5,7 +5,7 @@
An algorithm that generates optimized regexes for matching long lists of
literal strings.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/scanner.py b/contrib/python/pip/pip/_vendor/pygments/scanner.py
index 112da34917e..3c8c8487316 100644
--- a/contrib/python/pip/pip/_vendor/pygments/scanner.py
+++ b/contrib/python/pip/pip/_vendor/pygments/scanner.py
@@ -11,7 +11,7 @@
Have a look at the `DelphiLexer` to get an idea of how to use
this scanner.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
diff --git a/contrib/python/pip/pip/_vendor/pygments/sphinxext.py b/contrib/python/pip/pip/_vendor/pygments/sphinxext.py
index 34077a2aee8..955d9584f8f 100644
--- a/contrib/python/pip/pip/_vendor/pygments/sphinxext.py
+++ b/contrib/python/pip/pip/_vendor/pygments/sphinxext.py
@@ -5,7 +5,7 @@
Sphinx extension to generate automatic documentation of lexers,
formatters and filters.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/style.py b/contrib/python/pip/pip/_vendor/pygments/style.py
index 076e63f831c..be5f8322879 100644
--- a/contrib/python/pip/pip/_vendor/pygments/style.py
+++ b/contrib/python/pip/pip/_vendor/pygments/style.py
@@ -4,7 +4,7 @@
Basic style object.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/styles/__init__.py b/contrib/python/pip/pip/_vendor/pygments/styles/__init__.py
index 712f6e69932..96d53dce0da 100644
--- a/contrib/python/pip/pip/_vendor/pygments/styles/__init__.py
+++ b/contrib/python/pip/pip/_vendor/pygments/styles/__init__.py
@@ -4,7 +4,7 @@
Contains built-in styles.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/token.py b/contrib/python/pip/pip/_vendor/pygments/token.py
index f78018a7aa7..2f3b97e09ac 100644
--- a/contrib/python/pip/pip/_vendor/pygments/token.py
+++ b/contrib/python/pip/pip/_vendor/pygments/token.py
@@ -4,7 +4,7 @@
Basic token types and the standard tokens.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/unistring.py b/contrib/python/pip/pip/_vendor/pygments/unistring.py
index e2c3523e4bb..e3bd2e72738 100644
--- a/contrib/python/pip/pip/_vendor/pygments/unistring.py
+++ b/contrib/python/pip/pip/_vendor/pygments/unistring.py
@@ -7,7 +7,7 @@
Inspired by chartypes_create.py from the MoinMoin project.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/pygments/util.py b/contrib/python/pip/pip/_vendor/pygments/util.py
index 83cf1049253..71c5710ae16 100644
--- a/contrib/python/pip/pip/_vendor/pygments/util.py
+++ b/contrib/python/pip/pip/_vendor/pygments/util.py
@@ -4,7 +4,7 @@
Utility functions.
- :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
+ :copyright: Copyright 2006-2025 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/__init__.py b/contrib/python/pip/pip/_vendor/resolvelib/__init__.py
index d92acc7bedf..c655c597c6f 100644
--- a/contrib/python/pip/pip/_vendor/resolvelib/__init__.py
+++ b/contrib/python/pip/pip/_vendor/resolvelib/__init__.py
@@ -11,12 +11,13 @@ __all__ = [
"ResolutionTooDeep",
]
-__version__ = "1.0.1"
+__version__ = "1.1.0"
-from .providers import AbstractProvider, AbstractResolver
+from .providers import AbstractProvider
from .reporters import BaseReporter
from .resolvers import (
+ AbstractResolver,
InconsistentCandidate,
RequirementsConflicted,
ResolutionError,
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/compat/collections_abc.py b/contrib/python/pip/pip/_vendor/resolvelib/compat/collections_abc.py
deleted file mode 100644
index 1becc5093c5..00000000000
--- a/contrib/python/pip/pip/_vendor/resolvelib/compat/collections_abc.py
+++ /dev/null
@@ -1,6 +0,0 @@
-__all__ = ["Mapping", "Sequence"]
-
-try:
- from collections.abc import Mapping, Sequence
-except ImportError:
- from collections import Mapping, Sequence
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/providers.py b/contrib/python/pip/pip/_vendor/resolvelib/providers.py
index e99d87ee75f..524e3d83272 100644
--- a/contrib/python/pip/pip/_vendor/resolvelib/providers.py
+++ b/contrib/python/pip/pip/_vendor/resolvelib/providers.py
@@ -1,30 +1,58 @@
-class AbstractProvider(object):
+from __future__ import annotations
+
+from typing import (
+ TYPE_CHECKING,
+ Generic,
+ Iterable,
+ Iterator,
+ Mapping,
+ Sequence,
+)
+
+from .structs import CT, KT, RT, Matches, RequirementInformation
+
+if TYPE_CHECKING:
+ from typing import Any, Protocol
+
+ class Preference(Protocol):
+ def __lt__(self, __other: Any) -> bool: ...
+
+
+class AbstractProvider(Generic[RT, CT, KT]):
"""Delegate class to provide the required interface for the resolver."""
- def identify(self, requirement_or_candidate):
- """Given a requirement, return an identifier for it.
+ def identify(self, requirement_or_candidate: RT | CT) -> KT:
+ """Given a requirement or candidate, return an identifier for it.
- This is used to identify a requirement, e.g. whether two requirements
- should have their specifier parts merged.
+ This is used to identify, e.g. whether two requirements
+ should have their specifier parts merged or a candidate matches a
+ requirement via ``find_matches()``.
"""
raise NotImplementedError
def get_preference(
self,
- identifier,
- resolutions,
- candidates,
- information,
- backtrack_causes,
- ):
+ identifier: KT,
+ resolutions: Mapping[KT, CT],
+ candidates: Mapping[KT, Iterator[CT]],
+ information: Mapping[KT, Iterator[RequirementInformation[RT, CT]]],
+ backtrack_causes: Sequence[RequirementInformation[RT, CT]],
+ ) -> Preference:
"""Produce a sort key for given requirement based on preference.
+ As this is a sort key it will be called O(n) times per backtrack
+ step, where n is the number of `identifier`s, if you have a check
+ which is expensive in some sense. E.g. It needs to make O(n) checks
+ per call or takes significant wall clock time, consider using
+ `narrow_requirement_selection` to filter the `identifier`s, which
+ is applied before this sort key is called.
+
The preference is defined as "I think this requirement should be
resolved first". The lower the return value is, the more preferred
this group of arguments is.
:param identifier: An identifier as returned by ``identify()``. This
- identifies the dependency matches which should be returned.
+ identifies the requirement being considered.
:param resolutions: Mapping of candidates currently pinned by the
resolver. Each key is an identifier, and the value is a candidate.
The candidate may conflict with requirements from ``information``.
@@ -32,8 +60,9 @@ class AbstractProvider(object):
Each value is an iterator of candidates.
:param information: Mapping of requirement information of each package.
Each value is an iterator of *requirement information*.
- :param backtrack_causes: Sequence of requirement information that were
- the requirements that caused the resolver to most recently backtrack.
+ :param backtrack_causes: Sequence of *requirement information* that are
+ the requirements that caused the resolver to most recently
+ backtrack.
A *requirement information* instance is a named tuple with two members:
@@ -60,15 +89,21 @@ class AbstractProvider(object):
"""
raise NotImplementedError
- def find_matches(self, identifier, requirements, incompatibilities):
+ def find_matches(
+ self,
+ identifier: KT,
+ requirements: Mapping[KT, Iterator[RT]],
+ incompatibilities: Mapping[KT, Iterator[CT]],
+ ) -> Matches[CT]:
"""Find all possible candidates that satisfy the given constraints.
- :param identifier: An identifier as returned by ``identify()``. This
- identifies the dependency matches of which should be returned.
+ :param identifier: An identifier as returned by ``identify()``. All
+ candidates returned by this method should produce the same
+ identifier.
:param requirements: A mapping of requirements that all returned
candidates must satisfy. Each key is an identifier, and the value
an iterator of requirements for that dependency.
- :param incompatibilities: A mapping of known incompatibilities of
+    :param incompatibilities: A mapping of known incompatible candidates of
each dependency. Each key is an identifier, and the value an
iterator of incompatibilities known to the resolver. All
incompatibilities *must* be excluded from the return value.
@@ -89,7 +124,7 @@ class AbstractProvider(object):
"""
raise NotImplementedError
- def is_satisfied_by(self, requirement, candidate):
+ def is_satisfied_by(self, requirement: RT, candidate: CT) -> bool:
"""Whether the given requirement can be satisfied by a candidate.
The candidate is guaranteed to have been generated from the
@@ -100,7 +135,7 @@ class AbstractProvider(object):
"""
raise NotImplementedError
- def get_dependencies(self, candidate):
+ def get_dependencies(self, candidate: CT) -> Iterable[RT]:
"""Get dependencies of a candidate.
This should return a collection of requirements that `candidate`
@@ -108,26 +143,54 @@ class AbstractProvider(object):
"""
raise NotImplementedError
+ def narrow_requirement_selection(
+ self,
+ identifiers: Iterable[KT],
+ resolutions: Mapping[KT, CT],
+ candidates: Mapping[KT, Iterator[CT]],
+ information: Mapping[KT, Iterator[RequirementInformation[RT, CT]]],
+ backtrack_causes: Sequence[RequirementInformation[RT, CT]],
+ ) -> Iterable[KT]:
+ """
+ An optional method to narrow the selection of requirements being
+ considered during resolution. This method is called O(1) time per
+ backtrack step.
+
+ :param identifiers: An iterable of `identifiers` as returned by
+ ``identify()``. These identify all requirements currently being
+ considered.
+ :param resolutions: A mapping of candidates currently pinned by the
+ resolver. Each key is an identifier, and the value is a candidate
+ that may conflict with requirements from ``information``.
+ :param candidates: A mapping of each dependency's possible candidates.
+ Each value is an iterator of candidates.
+ :param information: A mapping of requirement information for each package.
+ Each value is an iterator of *requirement information*.
+ :param backtrack_causes: A sequence of *requirement information* that are
+ the requirements causing the resolver to most recently
+ backtrack.
-class AbstractResolver(object):
- """The thing that performs the actual resolution work."""
-
- base_exception = Exception
-
- def __init__(self, provider, reporter):
- self.provider = provider
- self.reporter = reporter
-
- def resolve(self, requirements, **kwargs):
- """Take a collection of constraints, spit out the resolution result.
-
- This returns a representation of the final resolution state, with one
- guarenteed attribute ``mapping`` that contains resolved candidates as
- values. The keys are their respective identifiers.
-
- :param requirements: A collection of constraints.
- :param kwargs: Additional keyword arguments that subclasses may accept.
+ A *requirement information* instance is a named tuple with two members:
- :raises: ``self.base_exception`` or its subclass.
+ * ``requirement`` specifies a requirement contributing to the current
+ list of candidates.
+ * ``parent`` specifies the candidate that provides (is depended on for)
+ the requirement, or ``None`` to indicate a root requirement.
+
+ Must return a non-empty subset of `identifiers`, with the default
+ implementation being to return `identifiers` unchanged. Those `identifiers`
+ will then be passed to the sort key `get_preference` to pick the most
+    preferred requirement to attempt to pin, unless `narrow_requirement_selection`
+ returns only 1 requirement, in which case that will be used without
+ calling the sort key `get_preference`.
+
+ This method is designed to be used by the provider to optimize the
+ dependency resolution, e.g. if a check cost is O(m) and it can be done
+ against all identifiers at once then filtering the requirement selection
+ here will cost O(m) but making it part of the sort key in `get_preference`
+ will cost O(m*n), where n is the number of `identifiers`.
+
+ Returns:
+ Iterable[KT]: A non-empty subset of `identifiers`.
"""
- raise NotImplementedError
+ return identifiers
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/reporters.py b/contrib/python/pip/pip/_vendor/resolvelib/reporters.py
index 688b5e10d86..26c9f6e6f92 100644
--- a/contrib/python/pip/pip/_vendor/resolvelib/reporters.py
+++ b/contrib/python/pip/pip/_vendor/resolvelib/reporters.py
@@ -1,26 +1,36 @@
-class BaseReporter(object):
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Collection, Generic
+
+from .structs import CT, KT, RT, RequirementInformation, State
+
+if TYPE_CHECKING:
+ from .resolvers import Criterion
+
+
+class BaseReporter(Generic[RT, CT, KT]):
"""Delegate class to provider progress reporting for the resolver."""
- def starting(self):
+ def starting(self) -> None:
"""Called before the resolution actually starts."""
- def starting_round(self, index):
+ def starting_round(self, index: int) -> None:
"""Called before each round of resolution starts.
The index is zero-based.
"""
- def ending_round(self, index, state):
+ def ending_round(self, index: int, state: State[RT, CT, KT]) -> None:
"""Called before each round of resolution ends.
This is NOT called if the resolution ends at this round. Use `ending`
if you want to report finalization. The index is zero-based.
"""
- def ending(self, state):
+ def ending(self, state: State[RT, CT, KT]) -> None:
"""Called before the resolution ends successfully."""
- def adding_requirement(self, requirement, parent):
+ def adding_requirement(self, requirement: RT, parent: CT | None) -> None:
"""Called when adding a new requirement into the resolve criteria.
:param requirement: The additional requirement to be applied to filter
@@ -30,14 +40,16 @@ class BaseReporter(object):
requirements passed in from ``Resolver.resolve()``.
"""
- def resolving_conflicts(self, causes):
+ def resolving_conflicts(
+ self, causes: Collection[RequirementInformation[RT, CT]]
+ ) -> None:
"""Called when starting to attempt requirement conflict resolution.
:param causes: The information on the collision that caused the backtracking.
"""
- def rejecting_candidate(self, criterion, candidate):
+ def rejecting_candidate(self, criterion: Criterion[RT, CT], candidate: CT) -> None:
"""Called when rejecting a candidate during backtracking."""
- def pinning(self, candidate):
+ def pinning(self, candidate: CT) -> None:
"""Called when adding a candidate to the potential solution."""
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/resolvers/__init__.py b/contrib/python/pip/pip/_vendor/resolvelib/resolvers/__init__.py
new file mode 100644
index 00000000000..7b2c5d597eb
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/resolvelib/resolvers/__init__.py
@@ -0,0 +1,27 @@
+from ..structs import RequirementInformation
+from .abstract import AbstractResolver, Result
+from .criterion import Criterion
+from .exceptions import (
+ InconsistentCandidate,
+ RequirementsConflicted,
+ ResolutionError,
+ ResolutionImpossible,
+ ResolutionTooDeep,
+ ResolverException,
+)
+from .resolution import Resolution, Resolver
+
+__all__ = [
+ "AbstractResolver",
+ "InconsistentCandidate",
+ "Resolver",
+ "Resolution",
+ "RequirementsConflicted",
+ "ResolutionError",
+ "ResolutionImpossible",
+ "ResolutionTooDeep",
+ "RequirementInformation",
+ "ResolverException",
+ "Result",
+ "Criterion",
+]
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/resolvers/abstract.py b/contrib/python/pip/pip/_vendor/resolvelib/resolvers/abstract.py
new file mode 100644
index 00000000000..f9b5a7aa1fa
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/resolvelib/resolvers/abstract.py
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+import collections
+from typing import TYPE_CHECKING, Any, Generic, Iterable, Mapping, NamedTuple
+
+from ..structs import CT, KT, RT, DirectedGraph
+
+if TYPE_CHECKING:
+ from ..providers import AbstractProvider
+ from ..reporters import BaseReporter
+ from .criterion import Criterion
+
+ class Result(NamedTuple, Generic[RT, CT, KT]):
+ mapping: Mapping[KT, CT]
+ graph: DirectedGraph[KT | None]
+ criteria: Mapping[KT, Criterion[RT, CT]]
+
+else:
+ Result = collections.namedtuple("Result", ["mapping", "graph", "criteria"])
+
+
+class AbstractResolver(Generic[RT, CT, KT]):
+ """The thing that performs the actual resolution work."""
+
+ base_exception = Exception
+
+ def __init__(
+ self,
+ provider: AbstractProvider[RT, CT, KT],
+ reporter: BaseReporter[RT, CT, KT],
+ ) -> None:
+ self.provider = provider
+ self.reporter = reporter
+
+ def resolve(self, requirements: Iterable[RT], **kwargs: Any) -> Result[RT, CT, KT]:
+ """Take a collection of constraints, spit out the resolution result.
+
+ This returns a representation of the final resolution state, with one
+    guaranteed attribute ``mapping`` that contains resolved candidates as
+ values. The keys are their respective identifiers.
+
+ :param requirements: A collection of constraints.
+ :param kwargs: Additional keyword arguments that subclasses may accept.
+
+ :raises: ``self.base_exception`` or its subclass.
+ """
+ raise NotImplementedError
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/resolvers/criterion.py b/contrib/python/pip/pip/_vendor/resolvelib/resolvers/criterion.py
new file mode 100644
index 00000000000..ee5019ccd03
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/resolvelib/resolvers/criterion.py
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from typing import Collection, Generic, Iterable, Iterator
+
+from ..structs import CT, RT, RequirementInformation
+
+
+class Criterion(Generic[RT, CT]):
+ """Representation of possible resolution results of a package.
+
+ This holds three attributes:
+
+ * `information` is a collection of `RequirementInformation` pairs.
+ Each pair is a requirement contributing to this criterion, and the
+ candidate that provides the requirement.
+ * `incompatibilities` is a collection of all known not-to-work candidates
+ to exclude from consideration.
+ * `candidates` is a collection containing all possible candidates deducted
+ from the union of contributing requirements and known incompatibilities.
+ It should never be empty, except when the criterion is an attribute of a
+ raised `RequirementsConflicted` (in which case it is always empty).
+
+ .. note::
+ This class is intended to be externally immutable. **Do not** mutate
+ any of its attribute containers.
+ """
+
+ def __init__(
+ self,
+ candidates: Iterable[CT],
+ information: Collection[RequirementInformation[RT, CT]],
+ incompatibilities: Collection[CT],
+ ) -> None:
+ self.candidates = candidates
+ self.information = information
+ self.incompatibilities = incompatibilities
+
+ def __repr__(self) -> str:
+ requirements = ", ".join(
+ f"({req!r}, via={parent!r})" for req, parent in self.information
+ )
+ return f"Criterion({requirements})"
+
+ def iter_requirement(self) -> Iterator[RT]:
+ return (i.requirement for i in self.information)
+
+ def iter_parent(self) -> Iterator[CT | None]:
+ return (i.parent for i in self.information)
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/resolvers/exceptions.py b/contrib/python/pip/pip/_vendor/resolvelib/resolvers/exceptions.py
new file mode 100644
index 00000000000..35e275576f7
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/resolvelib/resolvers/exceptions.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Collection, Generic
+
+from ..structs import CT, RT, RequirementInformation
+
+if TYPE_CHECKING:
+ from .criterion import Criterion
+
+
+class ResolverException(Exception):
+ """A base class for all exceptions raised by this module.
+
+ Exceptions derived by this class should all be handled in this module. Any
+    bubbling past the resolver should be treated as a bug.
+ """
+
+
+class RequirementsConflicted(ResolverException, Generic[RT, CT]):
+ def __init__(self, criterion: Criterion[RT, CT]) -> None:
+ super().__init__(criterion)
+ self.criterion = criterion
+
+ def __str__(self) -> str:
+ return "Requirements conflict: {}".format(
+ ", ".join(repr(r) for r in self.criterion.iter_requirement()),
+ )
+
+
+class InconsistentCandidate(ResolverException, Generic[RT, CT]):
+ def __init__(self, candidate: CT, criterion: Criterion[RT, CT]):
+ super().__init__(candidate, criterion)
+ self.candidate = candidate
+ self.criterion = criterion
+
+ def __str__(self) -> str:
+ return "Provided candidate {!r} does not satisfy {}".format(
+ self.candidate,
+ ", ".join(repr(r) for r in self.criterion.iter_requirement()),
+ )
+
+
+class ResolutionError(ResolverException):
+ pass
+
+
+class ResolutionImpossible(ResolutionError, Generic[RT, CT]):
+ def __init__(self, causes: Collection[RequirementInformation[RT, CT]]):
+ super().__init__(causes)
+ # causes is a list of RequirementInformation objects
+ self.causes = causes
+
+
+class ResolutionTooDeep(ResolutionError):
+ def __init__(self, round_count: int) -> None:
+ super().__init__(round_count)
+ self.round_count = round_count
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/resolvers.py b/contrib/python/pip/pip/_vendor/resolvelib/resolvers/resolution.py
index 2c3d0e306f9..da3c66e2ab7 100644
--- a/contrib/python/pip/pip/_vendor/resolvelib/resolvers.py
+++ b/contrib/python/pip/pip/_vendor/resolvelib/resolvers/resolution.py
@@ -1,127 +1,90 @@
+from __future__ import annotations
+
import collections
import itertools
import operator
-
-from .providers import AbstractResolver
-from .structs import DirectedGraph, IteratorMapping, build_iter_view
-
-RequirementInformation = collections.namedtuple(
- "RequirementInformation", ["requirement", "parent"]
+from typing import TYPE_CHECKING, Collection, Generic, Iterable, Mapping
+
+from ..structs import (
+ CT,
+ KT,
+ RT,
+ DirectedGraph,
+ IterableView,
+ IteratorMapping,
+ RequirementInformation,
+ State,
+ build_iter_view,
+)
+from .abstract import AbstractResolver, Result
+from .criterion import Criterion
+from .exceptions import (
+ InconsistentCandidate,
+ RequirementsConflicted,
+ ResolutionImpossible,
+ ResolutionTooDeep,
+ ResolverException,
)
+if TYPE_CHECKING:
+ from ..providers import AbstractProvider, Preference
+ from ..reporters import BaseReporter
-class ResolverException(Exception):
- """A base class for all exceptions raised by this module.
-
- Exceptions derived by this class should all be handled in this module. Any
- bubbling pass the resolver should be treated as a bug.
- """
-
-
-class RequirementsConflicted(ResolverException):
- def __init__(self, criterion):
- super(RequirementsConflicted, self).__init__(criterion)
- self.criterion = criterion
-
- def __str__(self):
- return "Requirements conflict: {}".format(
- ", ".join(repr(r) for r in self.criterion.iter_requirement()),
- )
-
-
-class InconsistentCandidate(ResolverException):
- def __init__(self, candidate, criterion):
- super(InconsistentCandidate, self).__init__(candidate, criterion)
- self.candidate = candidate
- self.criterion = criterion
-
- def __str__(self):
- return "Provided candidate {!r} does not satisfy {}".format(
- self.candidate,
- ", ".join(repr(r) for r in self.criterion.iter_requirement()),
- )
-
-
-class Criterion(object):
- """Representation of possible resolution results of a package.
-
- This holds three attributes:
-
- * `information` is a collection of `RequirementInformation` pairs.
- Each pair is a requirement contributing to this criterion, and the
- candidate that provides the requirement.
- * `incompatibilities` is a collection of all known not-to-work candidates
- to exclude from consideration.
- * `candidates` is a collection containing all possible candidates deducted
- from the union of contributing requirements and known incompatibilities.
- It should never be empty, except when the criterion is an attribute of a
- raised `RequirementsConflicted` (in which case it is always empty).
-
- .. note::
- This class is intended to be externally immutable. **Do not** mutate
- any of its attribute containers.
- """
-
- def __init__(self, candidates, information, incompatibilities):
- self.candidates = candidates
- self.information = information
- self.incompatibilities = incompatibilities
-
- def __repr__(self):
- requirements = ", ".join(
- "({!r}, via={!r})".format(req, parent)
- for req, parent in self.information
- )
- return "Criterion({})".format(requirements)
-
- def iter_requirement(self):
- return (i.requirement for i in self.information)
-
- def iter_parent(self):
- return (i.parent for i in self.information)
-
-
-class ResolutionError(ResolverException):
- pass
-
-
-class ResolutionImpossible(ResolutionError):
- def __init__(self, causes):
- super(ResolutionImpossible, self).__init__(causes)
- # causes is a list of RequirementInformation objects
- self.causes = causes
+def _build_result(state: State[RT, CT, KT]) -> Result[RT, CT, KT]:
+ mapping = state.mapping
+ all_keys: dict[int, KT | None] = {id(v): k for k, v in mapping.items()}
+ all_keys[id(None)] = None
-class ResolutionTooDeep(ResolutionError):
- def __init__(self, round_count):
- super(ResolutionTooDeep, self).__init__(round_count)
- self.round_count = round_count
+ graph: DirectedGraph[KT | None] = DirectedGraph()
+ graph.add(None) # Sentinel as root dependencies' parent.
+ connected: set[KT | None] = {None}
+ for key, criterion in state.criteria.items():
+ if not _has_route_to_root(state.criteria, key, all_keys, connected):
+ continue
+ if key not in graph:
+ graph.add(key)
+ for p in criterion.iter_parent():
+ try:
+ pkey = all_keys[id(p)]
+ except KeyError:
+ continue
+ if pkey not in graph:
+ graph.add(pkey)
+ graph.connect(pkey, key)
-# Resolution state in a round.
-State = collections.namedtuple("State", "mapping criteria backtrack_causes")
+ return Result(
+ mapping={k: v for k, v in mapping.items() if k in connected},
+ graph=graph,
+ criteria=state.criteria,
+ )
-class Resolution(object):
+class Resolution(Generic[RT, CT, KT]):
"""Stateful resolution object.
This is designed as a one-off object that holds information to kick start
the resolution process, and holds the results afterwards.
"""
- def __init__(self, provider, reporter):
+ def __init__(
+ self,
+ provider: AbstractProvider[RT, CT, KT],
+ reporter: BaseReporter[RT, CT, KT],
+ ) -> None:
self._p = provider
self._r = reporter
- self._states = []
+ self._states: list[State[RT, CT, KT]] = []
@property
- def state(self):
+ def state(self) -> State[RT, CT, KT]:
try:
return self._states[-1]
- except IndexError:
- raise AttributeError("state")
+ except IndexError as e:
+ raise AttributeError("state") from e
- def _push_new_state(self):
+ def _push_new_state(self) -> None:
"""Push a new state into history.
This new state will be used to hold resolution results of the next
@@ -135,7 +98,12 @@ class Resolution(object):
)
self._states.append(state)
- def _add_to_criteria(self, criteria, requirement, parent):
+ def _add_to_criteria(
+ self,
+ criteria: dict[KT, Criterion[RT, CT]],
+ requirement: RT,
+ parent: CT | None,
+ ) -> None:
self._r.adding_requirement(requirement=requirement, parent=parent)
identifier = self._p.identify(requirement_or_candidate=requirement)
@@ -174,7 +142,9 @@ class Resolution(object):
raise RequirementsConflicted(criterion)
criteria[identifier] = criterion
- def _remove_information_from_criteria(self, criteria, parents):
+ def _remove_information_from_criteria(
+ self, criteria: dict[KT, Criterion[RT, CT]], parents: Collection[KT]
+ ) -> None:
"""Remove information from parents of criteria.
Concretely, removes all values from each criterion's ``information``
@@ -199,7 +169,7 @@ class Resolution(object):
criterion.incompatibilities,
)
- def _get_preference(self, name):
+ def _get_preference(self, name: KT) -> Preference:
return self._p.get_preference(
identifier=name,
resolutions=self.state.mapping,
@@ -214,7 +184,9 @@ class Resolution(object):
backtrack_causes=self.state.backtrack_causes,
)
- def _is_current_pin_satisfying(self, name, criterion):
+ def _is_current_pin_satisfying(
+ self, name: KT, criterion: Criterion[RT, CT]
+ ) -> bool:
try:
current_pin = self.state.mapping[name]
except KeyError:
@@ -224,16 +196,16 @@ class Resolution(object):
for r in criterion.iter_requirement()
)
- def _get_updated_criteria(self, candidate):
+ def _get_updated_criteria(self, candidate: CT) -> dict[KT, Criterion[RT, CT]]:
criteria = self.state.criteria.copy()
for requirement in self._p.get_dependencies(candidate=candidate):
self._add_to_criteria(criteria, requirement, parent=candidate)
return criteria
- def _attempt_to_pin_criterion(self, name):
+ def _attempt_to_pin_criterion(self, name: KT) -> list[Criterion[RT, CT]]:
criterion = self.state.criteria[name]
- causes = []
+ causes: list[Criterion[RT, CT]] = []
for candidate in criterion.candidates:
try:
criteria = self._get_updated_criteria(candidate)
@@ -267,7 +239,42 @@ class Resolution(object):
# end, signal for backtracking.
return causes
- def _backjump(self, causes):
+ def _patch_criteria(
+ self, incompatibilities_from_broken: list[tuple[KT, list[CT]]]
+ ) -> bool:
+ # Create a new state from the last known-to-work one, and apply
+ # the previously gathered incompatibility information.
+ for k, incompatibilities in incompatibilities_from_broken:
+ if not incompatibilities:
+ continue
+ try:
+ criterion = self.state.criteria[k]
+ except KeyError:
+ continue
+ matches = self._p.find_matches(
+ identifier=k,
+ requirements=IteratorMapping(
+ self.state.criteria,
+ operator.methodcaller("iter_requirement"),
+ ),
+ incompatibilities=IteratorMapping(
+ self.state.criteria,
+ operator.attrgetter("incompatibilities"),
+ {k: incompatibilities},
+ ),
+ )
+ candidates: IterableView[CT] = build_iter_view(matches)
+ if not candidates:
+ return False
+ incompatibilities.extend(criterion.incompatibilities)
+ self.state.criteria[k] = Criterion(
+ candidates=candidates,
+ information=list(criterion.information),
+ incompatibilities=incompatibilities,
+ )
+ return True
+
+ def _backjump(self, causes: list[RequirementInformation[RT, CT]]) -> bool:
"""Perform backjumping.
When we enter here, the stack is like this::
@@ -298,7 +305,7 @@ class Resolution(object):
the new Z and go back to step 2.
5b. If the incompatibilities apply cleanly, end backtracking.
"""
- incompatible_reqs = itertools.chain(
+ incompatible_reqs: Iterable[CT | RT] = itertools.chain(
(c.parent for c in causes if c.parent is not None),
(c.requirement for c in causes),
)
@@ -307,66 +314,44 @@ class Resolution(object):
# Remove the state that triggered backtracking.
del self._states[-1]
- # Ensure to backtrack to a state that caused the incompatibility
- incompatible_state = False
- while not incompatible_state:
+ # Optimistically backtrack to a state that caused the incompatibility
+ broken_state = self.state
+ while True:
# Retrieve the last candidate pin and known incompatibilities.
try:
broken_state = self._states.pop()
name, candidate = broken_state.mapping.popitem()
except (IndexError, KeyError):
- raise ResolutionImpossible(causes)
+ raise ResolutionImpossible(causes) from None
+
+ # Only backjump if the current broken state is
+ # an incompatible dependency
+ if name not in incompatible_deps:
+ break
+
+ # If the current dependencies and the incompatible dependencies
+ # are overlapping then we have found a cause of the incompatibility
current_dependencies = {
- self._p.identify(d)
- for d in self._p.get_dependencies(candidate)
+ self._p.identify(d) for d in self._p.get_dependencies(candidate)
}
- incompatible_state = not current_dependencies.isdisjoint(
- incompatible_deps
- )
+ if not current_dependencies.isdisjoint(incompatible_deps):
+ break
+
+ # Fallback: We should not backtrack to the point where
+ # broken_state.mapping is empty, so stop backtracking for
+ # a chance for the resolution to recover
+ if not broken_state.mapping:
+ break
incompatibilities_from_broken = [
- (k, list(v.incompatibilities))
- for k, v in broken_state.criteria.items()
+ (k, list(v.incompatibilities)) for k, v in broken_state.criteria.items()
]
# Also mark the newly known incompatibility.
incompatibilities_from_broken.append((name, [candidate]))
- # Create a new state from the last known-to-work one, and apply
- # the previously gathered incompatibility information.
- def _patch_criteria():
- for k, incompatibilities in incompatibilities_from_broken:
- if not incompatibilities:
- continue
- try:
- criterion = self.state.criteria[k]
- except KeyError:
- continue
- matches = self._p.find_matches(
- identifier=k,
- requirements=IteratorMapping(
- self.state.criteria,
- operator.methodcaller("iter_requirement"),
- ),
- incompatibilities=IteratorMapping(
- self.state.criteria,
- operator.attrgetter("incompatibilities"),
- {k: incompatibilities},
- ),
- )
- candidates = build_iter_view(matches)
- if not candidates:
- return False
- incompatibilities.extend(criterion.incompatibilities)
- self.state.criteria[k] = Criterion(
- candidates=candidates,
- information=list(criterion.information),
- incompatibilities=incompatibilities,
- )
- return True
-
self._push_new_state()
- success = _patch_criteria()
+ success = self._patch_criteria(incompatibilities_from_broken)
# It works! Let's work on this new state.
if success:
@@ -378,7 +363,13 @@ class Resolution(object):
# No way to backtrack anymore.
return False
- def resolve(self, requirements, max_rounds):
+ def _extract_causes(
+ self, criteron: list[Criterion[RT, CT]]
+ ) -> list[RequirementInformation[RT, CT]]:
+ """Extract causes from list of criterion and deduplicate"""
+ return list({id(i): i for c in criteron for i in c.information}.values())
+
+ def resolve(self, requirements: Iterable[RT], max_rounds: int) -> State[RT, CT, KT]:
if self._states:
raise RuntimeError("already resolved")
@@ -396,7 +387,7 @@ class Resolution(object):
try:
self._add_to_criteria(self.state.criteria, r, parent=None)
except RequirementsConflicted as e:
- raise ResolutionImpossible(e.criterion.information)
+ raise ResolutionImpossible(e.criterion.information) from e
# The root state is saved as a sentinel so the first ever pin can have
# something to backtrack to if it fails. The root state is basically
@@ -418,16 +409,42 @@ class Resolution(object):
return self.state
# keep track of satisfied names to calculate diff after pinning
- satisfied_names = set(self.state.criteria.keys()) - set(
- unsatisfied_names
- )
+ satisfied_names = set(self.state.criteria.keys()) - set(unsatisfied_names)
+
+ if len(unsatisfied_names) > 1:
+ narrowed_unstatisfied_names = list(
+ self._p.narrow_requirement_selection(
+ identifiers=unsatisfied_names,
+ resolutions=self.state.mapping,
+ candidates=IteratorMapping(
+ self.state.criteria,
+ operator.attrgetter("candidates"),
+ ),
+ information=IteratorMapping(
+ self.state.criteria,
+ operator.attrgetter("information"),
+ ),
+ backtrack_causes=self.state.backtrack_causes,
+ )
+ )
+ else:
+ narrowed_unstatisfied_names = unsatisfied_names
+
+ # If there are no unsatisfied names use unsatisfied names
+ if not narrowed_unstatisfied_names:
+ raise RuntimeError("narrow_requirement_selection returned 0 names")
- # Choose the most preferred unpinned criterion to try.
- name = min(unsatisfied_names, key=self._get_preference)
- failure_causes = self._attempt_to_pin_criterion(name)
+ # If there is only 1 unsatisfied name skip calling self._get_preference
+ if len(narrowed_unstatisfied_names) > 1:
+ # Choose the most preferred unpinned criterion to try.
+ name = min(narrowed_unstatisfied_names, key=self._get_preference)
+ else:
+ name = narrowed_unstatisfied_names[0]
- if failure_causes:
- causes = [i for c in failure_causes for i in c.information]
+ failure_criterion = self._attempt_to_pin_criterion(name)
+
+ if failure_criterion:
+ causes = self._extract_causes(failure_criterion)
# Backjump if pinning fails. The backjump process puts us in
# an unpinned state, so we can work on it in the next round.
self._r.resolving_conflicts(causes=causes)
@@ -457,64 +474,16 @@ class Resolution(object):
raise ResolutionTooDeep(max_rounds)
-def _has_route_to_root(criteria, key, all_keys, connected):
- if key in connected:
- return True
- if key not in criteria:
- return False
- for p in criteria[key].iter_parent():
- try:
- pkey = all_keys[id(p)]
- except KeyError:
- continue
- if pkey in connected:
- connected.add(key)
- return True
- if _has_route_to_root(criteria, pkey, all_keys, connected):
- connected.add(key)
- return True
- return False
-
-
-Result = collections.namedtuple("Result", "mapping graph criteria")
-
-
-def _build_result(state):
- mapping = state.mapping
- all_keys = {id(v): k for k, v in mapping.items()}
- all_keys[id(None)] = None
-
- graph = DirectedGraph()
- graph.add(None) # Sentinel as root dependencies' parent.
-
- connected = {None}
- for key, criterion in state.criteria.items():
- if not _has_route_to_root(state.criteria, key, all_keys, connected):
- continue
- if key not in graph:
- graph.add(key)
- for p in criterion.iter_parent():
- try:
- pkey = all_keys[id(p)]
- except KeyError:
- continue
- if pkey not in graph:
- graph.add(pkey)
- graph.connect(pkey, key)
-
- return Result(
- mapping={k: v for k, v in mapping.items() if k in connected},
- graph=graph,
- criteria=state.criteria,
- )
-
-
-class Resolver(AbstractResolver):
+class Resolver(AbstractResolver[RT, CT, KT]):
"""The thing that performs the actual resolution work."""
base_exception = ResolverException
- def resolve(self, requirements, max_rounds=100):
+ def resolve( # type: ignore[override]
+ self,
+ requirements: Iterable[RT],
+ max_rounds: int = 100,
+ ) -> Result[RT, CT, KT]:
"""Take a collection of constraints, spit out the resolution result.
The return value is a representation to the final resolution result. It
@@ -545,3 +514,28 @@ class Resolver(AbstractResolver):
resolution = Resolution(self.provider, self.reporter)
state = resolution.resolve(requirements, max_rounds=max_rounds)
return _build_result(state)
+
+
+def _has_route_to_root(
+ criteria: Mapping[KT, Criterion[RT, CT]],
+ key: KT | None,
+ all_keys: dict[int, KT | None],
+ connected: set[KT | None],
+) -> bool:
+ if key in connected:
+ return True
+ if key not in criteria:
+ return False
+ assert key is not None
+ for p in criteria[key].iter_parent():
+ try:
+ pkey = all_keys[id(p)]
+ except KeyError:
+ continue
+ if pkey in connected:
+ connected.add(key)
+ return True
+ if _has_route_to_root(criteria, pkey, all_keys, connected):
+ connected.add(key)
+ return True
+ return False
diff --git a/contrib/python/pip/pip/_vendor/resolvelib/structs.py b/contrib/python/pip/pip/_vendor/resolvelib/structs.py
index 359a34f6018..18c74d41548 100644
--- a/contrib/python/pip/pip/_vendor/resolvelib/structs.py
+++ b/contrib/python/pip/pip/_vendor/resolvelib/structs.py
@@ -1,34 +1,73 @@
-import itertools
-
-from .compat import collections_abc
-
+from __future__ import annotations
-class DirectedGraph(object):
+import itertools
+from collections import namedtuple
+from typing import (
+ TYPE_CHECKING,
+ Callable,
+ Generic,
+ Iterable,
+ Iterator,
+ Mapping,
+ NamedTuple,
+ Sequence,
+ TypeVar,
+ Union,
+)
+
+KT = TypeVar("KT") # Identifier.
+RT = TypeVar("RT") # Requirement.
+CT = TypeVar("CT") # Candidate.
+
+Matches = Union[Iterable[CT], Callable[[], Iterable[CT]]]
+
+if TYPE_CHECKING:
+ from .resolvers.criterion import Criterion
+
+ class RequirementInformation(NamedTuple, Generic[RT, CT]):
+ requirement: RT
+ parent: CT | None
+
+ class State(NamedTuple, Generic[RT, CT, KT]):
+ """Resolution state in a round."""
+
+ mapping: dict[KT, CT]
+ criteria: dict[KT, Criterion[RT, CT]]
+ backtrack_causes: list[RequirementInformation[RT, CT]]
+
+else:
+ RequirementInformation = namedtuple(
+ "RequirementInformation", ["requirement", "parent"]
+ )
+ State = namedtuple("State", ["mapping", "criteria", "backtrack_causes"])
+
+
+class DirectedGraph(Generic[KT]):
"""A graph structure with directed edges."""
- def __init__(self):
- self._vertices = set()
- self._forwards = {} # <key> -> Set[<key>]
- self._backwards = {} # <key> -> Set[<key>]
+ def __init__(self) -> None:
+ self._vertices: set[KT] = set()
+ self._forwards: dict[KT, set[KT]] = {} # <key> -> Set[<key>]
+ self._backwards: dict[KT, set[KT]] = {} # <key> -> Set[<key>]
- def __iter__(self):
+ def __iter__(self) -> Iterator[KT]:
return iter(self._vertices)
- def __len__(self):
+ def __len__(self) -> int:
return len(self._vertices)
- def __contains__(self, key):
+ def __contains__(self, key: KT) -> bool:
return key in self._vertices
- def copy(self):
+ def copy(self) -> DirectedGraph[KT]:
"""Return a shallow copy of this graph."""
- other = DirectedGraph()
+ other = type(self)()
other._vertices = set(self._vertices)
other._forwards = {k: set(v) for k, v in self._forwards.items()}
other._backwards = {k: set(v) for k, v in self._backwards.items()}
return other
- def add(self, key):
+ def add(self, key: KT) -> None:
"""Add a new vertex to the graph."""
if key in self._vertices:
raise ValueError("vertex exists")
@@ -36,7 +75,7 @@ class DirectedGraph(object):
self._forwards[key] = set()
self._backwards[key] = set()
- def remove(self, key):
+ def remove(self, key: KT) -> None:
"""Remove a vertex from the graph, disconnecting all edges from/to it."""
self._vertices.remove(key)
for f in self._forwards.pop(key):
@@ -44,10 +83,10 @@ class DirectedGraph(object):
for t in self._backwards.pop(key):
self._forwards[t].remove(key)
- def connected(self, f, t):
+ def connected(self, f: KT, t: KT) -> bool:
return f in self._backwards[t] and t in self._forwards[f]
- def connect(self, f, t):
+ def connect(self, f: KT, t: KT) -> None:
"""Connect two existing vertices.
Nothing happens if the vertices are already connected.
@@ -57,56 +96,59 @@ class DirectedGraph(object):
self._forwards[f].add(t)
self._backwards[t].add(f)
- def iter_edges(self):
+ def iter_edges(self) -> Iterator[tuple[KT, KT]]:
for f, children in self._forwards.items():
for t in children:
yield f, t
- def iter_children(self, key):
+ def iter_children(self, key: KT) -> Iterator[KT]:
return iter(self._forwards[key])
- def iter_parents(self, key):
+ def iter_parents(self, key: KT) -> Iterator[KT]:
return iter(self._backwards[key])
-class IteratorMapping(collections_abc.Mapping):
- def __init__(self, mapping, accessor, appends=None):
+class IteratorMapping(Mapping[KT, Iterator[CT]], Generic[RT, CT, KT]):
+ def __init__(
+ self,
+ mapping: Mapping[KT, RT],
+ accessor: Callable[[RT], Iterable[CT]],
+ appends: Mapping[KT, Iterable[CT]] | None = None,
+ ) -> None:
self._mapping = mapping
self._accessor = accessor
- self._appends = appends or {}
+ self._appends: Mapping[KT, Iterable[CT]] = appends or {}
- def __repr__(self):
+ def __repr__(self) -> str:
return "IteratorMapping({!r}, {!r}, {!r})".format(
self._mapping,
self._accessor,
self._appends,
)
- def __bool__(self):
+ def __bool__(self) -> bool:
return bool(self._mapping or self._appends)
- __nonzero__ = __bool__ # XXX: Python 2.
-
- def __contains__(self, key):
+ def __contains__(self, key: object) -> bool:
return key in self._mapping or key in self._appends
- def __getitem__(self, k):
+ def __getitem__(self, k: KT) -> Iterator[CT]:
try:
v = self._mapping[k]
except KeyError:
return iter(self._appends[k])
return itertools.chain(self._accessor(v), self._appends.get(k, ()))
- def __iter__(self):
+ def __iter__(self) -> Iterator[KT]:
more = (k for k in self._appends if k not in self._mapping)
return itertools.chain(self._mapping, more)
- def __len__(self):
+ def __len__(self) -> int:
more = sum(1 for k in self._appends if k not in self._mapping)
return len(self._mapping) + more
-class _FactoryIterableView(object):
+class _FactoryIterableView(Iterable[RT]):
"""Wrap an iterator factory returned by `find_matches()`.
Calling `iter()` on this class would invoke the underlying iterator
@@ -115,56 +157,53 @@ class _FactoryIterableView(object):
built-in Python sequence types.
"""
- def __init__(self, factory):
+ def __init__(self, factory: Callable[[], Iterable[RT]]) -> None:
self._factory = factory
- self._iterable = None
+ self._iterable: Iterable[RT] | None = None
- def __repr__(self):
- return "{}({})".format(type(self).__name__, list(self))
+ def __repr__(self) -> str:
+ return f"{type(self).__name__}({list(self)})"
- def __bool__(self):
+ def __bool__(self) -> bool:
try:
next(iter(self))
except StopIteration:
return False
return True
- __nonzero__ = __bool__ # XXX: Python 2.
-
- def __iter__(self):
- iterable = (
- self._factory() if self._iterable is None else self._iterable
- )
+ def __iter__(self) -> Iterator[RT]:
+ iterable = self._factory() if self._iterable is None else self._iterable
self._iterable, current = itertools.tee(iterable)
return current
-class _SequenceIterableView(object):
+class _SequenceIterableView(Iterable[RT]):
"""Wrap an iterable returned by find_matches().
This is essentially just a proxy to the underlying sequence that provides
the same interface as `_FactoryIterableView`.
"""
- def __init__(self, sequence):
+ def __init__(self, sequence: Sequence[RT]):
self._sequence = sequence
- def __repr__(self):
- return "{}({})".format(type(self).__name__, self._sequence)
+ def __repr__(self) -> str:
+ return f"{type(self).__name__}({self._sequence})"
- def __bool__(self):
+ def __bool__(self) -> bool:
return bool(self._sequence)
- __nonzero__ = __bool__ # XXX: Python 2.
-
- def __iter__(self):
+ def __iter__(self) -> Iterator[RT]:
return iter(self._sequence)
-def build_iter_view(matches):
+def build_iter_view(matches: Matches[CT]) -> Iterable[CT]:
"""Build an iterable view from the value returned by `find_matches()`."""
if callable(matches):
return _FactoryIterableView(matches)
- if not isinstance(matches, collections_abc.Sequence):
+ if not isinstance(matches, Sequence):
matches = list(matches)
return _SequenceIterableView(matches)
+
+
+IterableView = Iterable
diff --git a/contrib/python/pip/pip/_vendor/rich/console.py b/contrib/python/pip/pip/_vendor/rich/console.py
index 572884542a1..57474835f92 100644
--- a/contrib/python/pip/pip/_vendor/rich/console.py
+++ b/contrib/python/pip/pip/_vendor/rich/console.py
@@ -500,7 +500,7 @@ def group(fit: bool = True) -> Callable[..., Callable[..., Group]]:
"""
def decorator(
- method: Callable[..., Iterable[RenderableType]]
+ method: Callable[..., Iterable[RenderableType]],
) -> Callable[..., Group]:
"""Convert a method that returns an iterable of renderables in to a Group."""
@@ -735,7 +735,9 @@ class Console:
self.get_time = get_time or monotonic
self.style = style
self.no_color = (
- no_color if no_color is not None else "NO_COLOR" in self._environ
+ no_color
+ if no_color is not None
+ else self._environ.get("NO_COLOR", "") != ""
)
self.is_interactive = (
(self.is_terminal and not self.is_dumb_terminal)
@@ -933,11 +935,13 @@ class Console:
Returns:
bool: True if the console writing to a device capable of
- understanding terminal codes, otherwise False.
+ understanding escape sequences, otherwise False.
"""
+ # If dev has explicitly set this value, return it
if self._force_terminal is not None:
return self._force_terminal
+ # Fudge for Idle
if hasattr(sys.stdin, "__module__") and sys.stdin.__module__.startswith(
"idlelib"
):
@@ -948,12 +952,22 @@ class Console:
# return False for Jupyter, which may have FORCE_COLOR set
return False
- # If FORCE_COLOR env var has any value at all, we assume a terminal.
- force_color = self._environ.get("FORCE_COLOR")
- if force_color is not None:
- self._force_terminal = True
+ environ = self._environ
+
+ tty_compatible = environ.get("TTY_COMPATIBLE", "")
+ # 0 indicates device is not tty compatible
+ if tty_compatible == "0":
+ return False
+ # 1 indicates device is tty compatible
+ if tty_compatible == "1":
return True
+ # https://force-color.org/
+ force_color = environ.get("FORCE_COLOR")
+ if force_color is not None:
+ return force_color != ""
+
+ # Any other value defaults to auto detect
isatty: Optional[Callable[[], bool]] = getattr(self.file, "isatty", None)
try:
return False if isatty is None else isatty()
diff --git a/contrib/python/pip/pip/_vendor/rich/default_styles.py b/contrib/python/pip/pip/_vendor/rich/default_styles.py
index 6c0d73231d8..61797bf312c 100644
--- a/contrib/python/pip/pip/_vendor/rich/default_styles.py
+++ b/contrib/python/pip/pip/_vendor/rich/default_styles.py
@@ -120,7 +120,9 @@ DEFAULT_STYLES: Dict[str, Style] = {
"traceback.exc_type": Style(color="bright_red", bold=True),
"traceback.exc_value": Style.null(),
"traceback.offset": Style(color="bright_red", bold=True),
- "traceback.error_range": Style(underline=True, bold=True, dim=False),
+ "traceback.error_range": Style(underline=True, bold=True),
+ "traceback.note": Style(color="green", bold=True),
+ "traceback.group.border": Style(color="magenta"),
"bar.back": Style(color="grey23"),
"bar.complete": Style(color="rgb(249,38,114)"),
"bar.finished": Style(color="rgb(114,156,31)"),
diff --git a/contrib/python/pip/pip/_vendor/rich/diagnose.py b/contrib/python/pip/pip/_vendor/rich/diagnose.py
index ad36183898e..b8b8c4347e8 100644
--- a/contrib/python/pip/pip/_vendor/rich/diagnose.py
+++ b/contrib/python/pip/pip/_vendor/rich/diagnose.py
@@ -15,16 +15,17 @@ def report() -> None: # pragma: no cover
inspect(features)
env_names = (
- "TERM",
- "COLORTERM",
"CLICOLOR",
- "NO_COLOR",
- "TERM_PROGRAM",
+ "COLORTERM",
"COLUMNS",
- "LINES",
+ "JPY_PARENT_PID",
"JUPYTER_COLUMNS",
"JUPYTER_LINES",
- "JPY_PARENT_PID",
+ "LINES",
+ "NO_COLOR",
+ "TERM_PROGRAM",
+ "TERM",
+ "TTY_COMPATIBLE",
"VSCODE_VERBOSE_LOGGING",
)
env = {name: os.getenv(name) for name in env_names}
diff --git a/contrib/python/pip/pip/_vendor/rich/panel.py b/contrib/python/pip/pip/_vendor/rich/panel.py
index 8cfa6f4a243..d411e291533 100644
--- a/contrib/python/pip/pip/_vendor/rich/panel.py
+++ b/contrib/python/pip/pip/_vendor/rich/panel.py
@@ -22,7 +22,7 @@ class Panel(JupyterMixin):
Args:
renderable (RenderableType): A console renderable object.
- box (Box, optional): A Box instance that defines the look of the border (see :ref:`appendix_box`. Defaults to box.ROUNDED.
+ box (Box): A Box instance that defines the look of the border (see :ref:`appendix_box`. Defaults to box.ROUNDED.
title (Optional[TextType], optional): Optional title displayed in panel header. Defaults to None.
title_align (AlignMethod, optional): Alignment of title. Defaults to "center".
subtitle (Optional[TextType], optional): Optional subtitle displayed in panel footer. Defaults to None.
diff --git a/contrib/python/pip/pip/_vendor/rich/style.py b/contrib/python/pip/pip/_vendor/rich/style.py
index 262fd6ecad6..835d06f3eb2 100644
--- a/contrib/python/pip/pip/_vendor/rich/style.py
+++ b/contrib/python/pip/pip/_vendor/rich/style.py
@@ -524,7 +524,7 @@ class Style:
if not word:
raise errors.StyleSyntaxError("color expected after 'on'")
try:
- Color.parse(word) is None
+ Color.parse(word)
except ColorParseError as error:
raise errors.StyleSyntaxError(
f"unable to parse {word!r} as background color; {error}"
diff --git a/contrib/python/pip/pip/_vendor/rich/table.py b/contrib/python/pip/pip/_vendor/rich/table.py
index 654c8555411..2b9e3b5d7e8 100644
--- a/contrib/python/pip/pip/_vendor/rich/table.py
+++ b/contrib/python/pip/pip/_vendor/rich/table.py
@@ -929,7 +929,6 @@ class Table(JupyterMixin):
if __name__ == "__main__": # pragma: no cover
from pip._vendor.rich.console import Console
from pip._vendor.rich.highlighter import ReprHighlighter
- from pip._vendor.rich.table import Table as Table
from ._timer import timer
diff --git a/contrib/python/pip/pip/_vendor/rich/traceback.py b/contrib/python/pip/pip/_vendor/rich/traceback.py
index 28d742b4fd0..f82d06f6d13 100644
--- a/contrib/python/pip/pip/_vendor/rich/traceback.py
+++ b/contrib/python/pip/pip/_vendor/rich/traceback.py
@@ -26,15 +26,22 @@ from pip._vendor.pygments.token import Token
from pip._vendor.pygments.util import ClassNotFound
from . import pretty
-from ._loop import loop_last
+from ._loop import loop_first_last, loop_last
from .columns import Columns
-from .console import Console, ConsoleOptions, ConsoleRenderable, RenderResult, group
+from .console import (
+ Console,
+ ConsoleOptions,
+ ConsoleRenderable,
+ Group,
+ RenderResult,
+ group,
+)
from .constrain import Constrain
from .highlighter import RegexHighlighter, ReprHighlighter
from .panel import Panel
from .scope import render_scope
from .style import Style
-from .syntax import Syntax
+from .syntax import Syntax, SyntaxPosition
from .text import Text
from .theme import Theme
@@ -44,6 +51,34 @@ LOCALS_MAX_LENGTH = 10
LOCALS_MAX_STRING = 80
+def _iter_syntax_lines(
+ start: SyntaxPosition, end: SyntaxPosition
+) -> Iterable[Tuple[int, int, int]]:
+ """Yield start and end positions per line.
+
+ Args:
+ start: Start position.
+ end: End position.
+
+ Returns:
+ Iterable of (LINE, COLUMN1, COLUMN2).
+ """
+
+ line1, column1 = start
+ line2, column2 = end
+
+ if line1 == line2:
+ yield line1, column1, column2
+ else:
+ for first, last, line_no in loop_first_last(range(line1, line2 + 1)):
+ if first:
+ yield line_no, column1, -1
+ elif last:
+ yield line_no, 0, column2
+ else:
+ yield line_no, 0, -1
+
+
def install(
*,
console: Optional[Console] = None,
@@ -100,26 +135,25 @@ def install(
value: BaseException,
traceback: Optional[TracebackType],
) -> None:
- traceback_console.print(
- Traceback.from_exception(
- type_,
- value,
- traceback,
- width=width,
- code_width=code_width,
- extra_lines=extra_lines,
- theme=theme,
- word_wrap=word_wrap,
- show_locals=show_locals,
- locals_max_length=locals_max_length,
- locals_max_string=locals_max_string,
- locals_hide_dunder=locals_hide_dunder,
- locals_hide_sunder=bool(locals_hide_sunder),
- indent_guides=indent_guides,
- suppress=suppress,
- max_frames=max_frames,
- )
+ exception_traceback = Traceback.from_exception(
+ type_,
+ value,
+ traceback,
+ width=width,
+ code_width=code_width,
+ extra_lines=extra_lines,
+ theme=theme,
+ word_wrap=word_wrap,
+ show_locals=show_locals,
+ locals_max_length=locals_max_length,
+ locals_max_string=locals_max_string,
+ locals_hide_dunder=locals_hide_dunder,
+ locals_hide_sunder=bool(locals_hide_sunder),
+ indent_guides=indent_guides,
+ suppress=suppress,
+ max_frames=max_frames,
)
+ traceback_console.print(exception_traceback)
def ipy_excepthook_closure(ip: Any) -> None: # pragma: no cover
tb_data = {} # store information about showtraceback call
@@ -191,6 +225,7 @@ class _SyntaxError:
line: str
lineno: int
msg: str
+ notes: List[str] = field(default_factory=list)
@dataclass
@@ -200,6 +235,9 @@ class Stack:
syntax_error: Optional[_SyntaxError] = None
is_cause: bool = False
frames: List[Frame] = field(default_factory=list)
+ notes: List[str] = field(default_factory=list)
+ is_group: bool = False
+ exceptions: List["Trace"] = field(default_factory=list)
@dataclass
@@ -403,6 +441,8 @@ class Traceback:
from pip._vendor.rich import _IMPORT_CWD
+ notes: List[str] = getattr(exc_value, "__notes__", None) or []
+
def safe_str(_object: Any) -> str:
"""Don't allow exceptions from __str__ to propagate."""
try:
@@ -415,8 +455,25 @@ class Traceback:
exc_type=safe_str(exc_type.__name__),
exc_value=safe_str(exc_value),
is_cause=is_cause,
+ notes=notes,
)
+ if sys.version_info >= (3, 11):
+ if isinstance(exc_value, (BaseExceptionGroup, ExceptionGroup)):
+ stack.is_group = True
+ for exception in exc_value.exceptions:
+ stack.exceptions.append(
+ Traceback.extract(
+ type(exception),
+ exception,
+ exception.__traceback__,
+ show_locals=show_locals,
+ locals_max_length=locals_max_length,
+ locals_hide_dunder=locals_hide_dunder,
+ locals_hide_sunder=locals_hide_sunder,
+ )
+ )
+
if isinstance(exc_value, SyntaxError):
stack.syntax_error = _SyntaxError(
offset=exc_value.offset or 0,
@@ -424,13 +481,14 @@ class Traceback:
lineno=exc_value.lineno or 0,
line=exc_value.text or "",
msg=exc_value.msg,
+ notes=notes,
)
stacks.append(stack)
append = stack.frames.append
def get_locals(
- iter_locals: Iterable[Tuple[str, object]]
+ iter_locals: Iterable[Tuple[str, object]],
) -> Iterable[Tuple[str, object]]:
"""Extract locals from an iterator of key pairs."""
if not (locals_hide_dunder or locals_hide_sunder):
@@ -524,6 +582,7 @@ class Traceback:
break # pragma: no cover
trace = Trace(stacks=stacks)
+
return trace
def __rich_console__(
@@ -556,7 +615,9 @@ class Traceback:
)
highlighter = ReprHighlighter()
- for last, stack in loop_last(reversed(self.trace.stacks)):
+
+ @group()
+ def render_stack(stack: Stack, last: bool) -> RenderResult:
if stack.frames:
stack_renderable: ConsoleRenderable = Panel(
self._render_stack(stack),
@@ -569,6 +630,7 @@ class Traceback:
stack_renderable = Constrain(stack_renderable, self.width)
with console.use_theme(traceback_theme):
yield stack_renderable
+
if stack.syntax_error is not None:
with console.use_theme(traceback_theme):
yield Constrain(
@@ -594,6 +656,24 @@ class Traceback:
else:
yield Text.assemble((f"{stack.exc_type}", "traceback.exc_type"))
+ for note in stack.notes:
+ yield Text.assemble(("[NOTE] ", "traceback.note"), highlighter(note))
+
+ if stack.is_group:
+ for group_no, group_exception in enumerate(stack.exceptions, 1):
+ grouped_exceptions: List[Group] = []
+ for group_last, group_stack in loop_last(group_exception.stacks):
+ grouped_exceptions.append(render_stack(group_stack, group_last))
+ yield ""
+ yield Constrain(
+ Panel(
+ Group(*grouped_exceptions),
+ title=f"Sub-exception #{group_no}",
+ border_style="traceback.group.border",
+ ),
+ self.width,
+ )
+
if not last:
if stack.is_cause:
yield Text.from_markup(
@@ -604,6 +684,9 @@ class Traceback:
"\n[i]During handling of the above exception, another exception occurred:\n",
)
+ for last, stack in loop_last(reversed(self.trace.stacks)):
+ yield render_stack(stack, last)
+
@group()
def _render_syntax_error(self, syntax_error: _SyntaxError) -> RenderResult:
highlighter = ReprHighlighter()
@@ -648,17 +731,6 @@ class Traceback:
path_highlighter = PathHighlighter()
theme = self.theme
- def read_code(filename: str) -> str:
- """Read files, and cache results on filename.
-
- Args:
- filename (str): Filename to read
-
- Returns:
- str: Contents of file
- """
- return "".join(linecache.getlines(filename))
-
def render_locals(frame: Frame) -> Iterable[ConsoleRenderable]:
if frame.locals:
yield render_scope(
@@ -720,7 +792,8 @@ class Traceback:
continue
if not suppressed:
try:
- code = read_code(frame.filename)
+ code_lines = linecache.getlines(frame.filename)
+ code = "".join(code_lines)
if not code:
# code may be an empty string if the file doesn't exist, OR
# if the traceback filename is generated dynamically
@@ -749,12 +822,26 @@ class Traceback:
else:
if frame.last_instruction is not None:
start, end = frame.last_instruction
- syntax.stylize_range(
- style="traceback.error_range",
- start=start,
- end=end,
- style_before=True,
- )
+
+ # Stylize a line at a time
+ # So that indentation isn't underlined (which looks bad)
+ for line1, column1, column2 in _iter_syntax_lines(start, end):
+ try:
+ if column1 == 0:
+ line = code_lines[line1 - 1]
+ column1 = len(line) - len(line.lstrip())
+ if column2 == -1:
+ column2 = len(code_lines[line1 - 1])
+ except IndexError:
+ # Being defensive here
+ # If last_instruction reports a line out-of-bounds, we don't want to crash
+ continue
+
+ syntax.stylize_range(
+ style="traceback.error_range",
+ start=(line1, column1),
+ end=(line1, column2),
+ )
yield (
Columns(
[
diff --git a/contrib/python/pip/pip/_vendor/tomli_w/__init__.py b/contrib/python/pip/pip/_vendor/tomli_w/__init__.py
new file mode 100644
index 00000000000..6349c1f05bc
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/tomli_w/__init__.py
@@ -0,0 +1,4 @@
+__all__ = ("dumps", "dump")
+__version__ = "1.2.0" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT
+
+from pip._vendor.tomli_w._writer import dump, dumps
diff --git a/contrib/python/pip/pip/_vendor/tomli_w/_writer.py b/contrib/python/pip/pip/_vendor/tomli_w/_writer.py
new file mode 100644
index 00000000000..b1acd3f26ba
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/tomli_w/_writer.py
@@ -0,0 +1,229 @@
+from __future__ import annotations
+
+from collections.abc import Mapping
+from datetime import date, datetime, time
+from types import MappingProxyType
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from collections.abc import Generator
+ from decimal import Decimal
+ from typing import IO, Any, Final
+
+ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127))
+ILLEGAL_BASIC_STR_CHARS = frozenset('"\\') | ASCII_CTRL - frozenset("\t")
+BARE_KEY_CHARS = frozenset(
+ "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789" "-_"
+)
+ARRAY_TYPES = (list, tuple)
+MAX_LINE_LENGTH = 100
+
+COMPACT_ESCAPES = MappingProxyType(
+ {
+ "\u0008": "\\b", # backspace
+ "\u000A": "\\n", # linefeed
+ "\u000C": "\\f", # form feed
+ "\u000D": "\\r", # carriage return
+ "\u0022": '\\"', # quote
+ "\u005C": "\\\\", # backslash
+ }
+)
+
+
+class Context:
+ def __init__(self, allow_multiline: bool, indent: int):
+ if indent < 0:
+ raise ValueError("Indent width must be non-negative")
+ self.allow_multiline: Final = allow_multiline
+ # cache rendered inline tables (mapping from object id to rendered inline table)
+ self.inline_table_cache: Final[dict[int, str]] = {}
+ self.indent_str: Final = " " * indent
+
+
+def dump(
+ obj: Mapping[str, Any],
+ fp: IO[bytes],
+ /,
+ *,
+ multiline_strings: bool = False,
+ indent: int = 4,
+) -> None:
+ ctx = Context(multiline_strings, indent)
+ for chunk in gen_table_chunks(obj, ctx, name=""):
+ fp.write(chunk.encode())
+
+
+def dumps(
+ obj: Mapping[str, Any], /, *, multiline_strings: bool = False, indent: int = 4
+) -> str:
+ ctx = Context(multiline_strings, indent)
+ return "".join(gen_table_chunks(obj, ctx, name=""))
+
+
+def gen_table_chunks(
+ table: Mapping[str, Any],
+ ctx: Context,
+ *,
+ name: str,
+ inside_aot: bool = False,
+) -> Generator[str, None, None]:
+ yielded = False
+ literals = []
+ tables: list[tuple[str, Any, bool]] = [] # => [(key, value, inside_aot)]
+ for k, v in table.items():
+ if isinstance(v, Mapping):
+ tables.append((k, v, False))
+ elif is_aot(v) and not all(is_suitable_inline_table(t, ctx) for t in v):
+ tables.extend((k, t, True) for t in v)
+ else:
+ literals.append((k, v))
+
+ if inside_aot or name and (literals or not tables):
+ yielded = True
+ yield f"[[{name}]]\n" if inside_aot else f"[{name}]\n"
+
+ if literals:
+ yielded = True
+ for k, v in literals:
+ yield f"{format_key_part(k)} = {format_literal(v, ctx)}\n"
+
+ for k, v, in_aot in tables:
+ if yielded:
+ yield "\n"
+ else:
+ yielded = True
+ key_part = format_key_part(k)
+ display_name = f"{name}.{key_part}" if name else key_part
+ yield from gen_table_chunks(v, ctx, name=display_name, inside_aot=in_aot)
+
+
+def format_literal(obj: object, ctx: Context, *, nest_level: int = 0) -> str:
+ if isinstance(obj, bool):
+ return "true" if obj else "false"
+ if isinstance(obj, (int, float, date, datetime)):
+ return str(obj)
+ if isinstance(obj, time):
+ if obj.tzinfo:
+ raise ValueError("TOML does not support offset times")
+ return str(obj)
+ if isinstance(obj, str):
+ return format_string(obj, allow_multiline=ctx.allow_multiline)
+ if isinstance(obj, ARRAY_TYPES):
+ return format_inline_array(obj, ctx, nest_level)
+ if isinstance(obj, Mapping):
+ return format_inline_table(obj, ctx)
+
+ # Lazy import to improve module import time
+ from decimal import Decimal
+
+ if isinstance(obj, Decimal):
+ return format_decimal(obj)
+ raise TypeError(
+ f"Object of type '{type(obj).__qualname__}' is not TOML serializable"
+ )
+
+
+def format_decimal(obj: Decimal) -> str:
+ if obj.is_nan():
+ return "nan"
+ if obj.is_infinite():
+ return "-inf" if obj.is_signed() else "inf"
+ dec_str = str(obj).lower()
+ return dec_str if "." in dec_str or "e" in dec_str else dec_str + ".0"
+
+
+def format_inline_table(obj: Mapping, ctx: Context) -> str:
+ # check cache first
+ obj_id = id(obj)
+ if obj_id in ctx.inline_table_cache:
+ return ctx.inline_table_cache[obj_id]
+
+ if not obj:
+ rendered = "{}"
+ else:
+ rendered = (
+ "{ "
+ + ", ".join(
+ f"{format_key_part(k)} = {format_literal(v, ctx)}"
+ for k, v in obj.items()
+ )
+ + " }"
+ )
+ ctx.inline_table_cache[obj_id] = rendered
+ return rendered
+
+
+def format_inline_array(obj: tuple | list, ctx: Context, nest_level: int) -> str:
+ if not obj:
+ return "[]"
+ item_indent = ctx.indent_str * (1 + nest_level)
+ closing_bracket_indent = ctx.indent_str * nest_level
+ return (
+ "[\n"
+ + ",\n".join(
+ item_indent + format_literal(item, ctx, nest_level=nest_level + 1)
+ for item in obj
+ )
+ + f",\n{closing_bracket_indent}]"
+ )
+
+
+def format_key_part(part: str) -> str:
+ try:
+ only_bare_key_chars = BARE_KEY_CHARS.issuperset(part)
+ except TypeError:
+ raise TypeError(
+ f"Invalid mapping key '{part}' of type '{type(part).__qualname__}'."
+ " A string is required."
+ ) from None
+
+ if part and only_bare_key_chars:
+ return part
+ return format_string(part, allow_multiline=False)
+
+
+def format_string(s: str, *, allow_multiline: bool) -> str:
+ do_multiline = allow_multiline and "\n" in s
+ if do_multiline:
+ result = '"""\n'
+ s = s.replace("\r\n", "\n")
+ else:
+ result = '"'
+
+ pos = seq_start = 0
+ while True:
+ try:
+ char = s[pos]
+ except IndexError:
+ result += s[seq_start:pos]
+ if do_multiline:
+ return result + '"""'
+ return result + '"'
+ if char in ILLEGAL_BASIC_STR_CHARS:
+ result += s[seq_start:pos]
+ if char in COMPACT_ESCAPES:
+ if do_multiline and char == "\n":
+ result += "\n"
+ else:
+ result += COMPACT_ESCAPES[char]
+ else:
+ result += "\\u" + hex(ord(char))[2:].rjust(4, "0")
+ seq_start = pos + 1
+ pos += 1
+
+
+def is_aot(obj: Any) -> bool:
+ """Decides if an object behaves as an array of tables (i.e. a nonempty list
+ of dicts)."""
+ return bool(
+ isinstance(obj, ARRAY_TYPES)
+ and obj
+ and all(isinstance(v, Mapping) for v in obj)
+ )
+
+
+def is_suitable_inline_table(obj: Mapping, ctx: Context) -> bool:
+ """Use heuristics to decide if the inline-style representation is a good
+ choice for a given table."""
+ rendered_inline = f"{ctx.indent_str}{format_inline_table(obj, ctx)},"
+ return len(rendered_inline) <= MAX_LINE_LENGTH and "\n" not in rendered_inline
diff --git a/contrib/python/pip/pip/_vendor/tomli_w/py.typed b/contrib/python/pip/pip/_vendor/tomli_w/py.typed
new file mode 100644
index 00000000000..7632ecf7754
--- /dev/null
+++ b/contrib/python/pip/pip/_vendor/tomli_w/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561
diff --git a/contrib/python/pip/pip/_vendor/truststore/__init__.py b/contrib/python/pip/pip/_vendor/truststore/__init__.py
index e468bf8cebd..cdff8143feb 100644
--- a/contrib/python/pip/pip/_vendor/truststore/__init__.py
+++ b/contrib/python/pip/pip/_vendor/truststore/__init__.py
@@ -7,7 +7,7 @@ if _sys.version_info < (3, 10):
# Detect Python runtimes which don't implement SSLObject.get_unverified_chain() API
# This API only became public in Python 3.13 but was available in CPython and PyPy since 3.10.
-if _sys.version_info < (3, 13):
+if _sys.version_info < (3, 13) and _sys.implementation.name not in ("cpython", "pypy"):
try:
import ssl as _ssl
except ImportError:
@@ -33,4 +33,4 @@ from ._api import SSLContext, extract_from_ssl, inject_into_ssl # noqa: E402
del _api, _sys # type: ignore[name-defined] # noqa: F821
__all__ = ["SSLContext", "inject_into_ssl", "extract_from_ssl"]
-__version__ = "0.10.0"
+__version__ = "0.10.1"
diff --git a/contrib/python/pip/pip/_vendor/truststore/_api.py b/contrib/python/pip/pip/_vendor/truststore/_api.py
index aeb023af756..2c0ce196a36 100644
--- a/contrib/python/pip/pip/_vendor/truststore/_api.py
+++ b/contrib/python/pip/pip/_vendor/truststore/_api.py
@@ -5,7 +5,7 @@ import ssl
import sys
import typing
-import _ssl # type: ignore[import-not-found]
+import _ssl
from ._ssl_constants import (
_original_SSLContext,
@@ -43,6 +43,23 @@ def inject_into_ssl() -> None:
except ImportError:
pass
+ # requests starting with 2.32.0 added a preloaded SSL context to improve concurrent performance;
+ # this unfortunately leads to a RecursionError, which can be avoided by patching the preloaded SSL context with
+ # the truststore patched instance
+ # also see https://github.com/psf/requests/pull/6667
+ try:
+ from pip._vendor.requests import adapters as requests_adapters
+
+ preloaded_context = getattr(requests_adapters, "_preloaded_ssl_context", None)
+ if preloaded_context is not None:
+ setattr(
+ requests_adapters,
+ "_preloaded_ssl_context",
+ SSLContext(ssl.PROTOCOL_TLS_CLIENT),
+ )
+ except ImportError:
+ pass
+
def extract_from_ssl() -> None:
"""Restores the :class:`ssl.SSLContext` class to its original state"""
diff --git a/contrib/python/pip/pip/_vendor/typing_extensions.py b/contrib/python/pip/pip/_vendor/typing_extensions.py
index e429384e76a..da8126b5bf6 100644
--- a/contrib/python/pip/pip/_vendor/typing_extensions.py
+++ b/contrib/python/pip/pip/_vendor/typing_extensions.py
@@ -1,9 +1,12 @@
import abc
+import builtins
import collections
import collections.abc
import contextlib
+import enum
import functools
import inspect
+import keyword
import operator
import sys
import types as _types
@@ -62,8 +65,11 @@ __all__ = [
'dataclass_transform',
'deprecated',
'Doc',
+ 'evaluate_forward_ref',
'get_overloads',
'final',
+ 'Format',
+ 'get_annotations',
'get_args',
'get_origin',
'get_original_bases',
@@ -83,6 +89,7 @@ __all__ = [
'Text',
'TypeAlias',
'TypeAliasType',
+ 'TypeForm',
'TypeGuard',
'TypeIs',
'TYPE_CHECKING',
@@ -91,6 +98,8 @@ __all__ = [
'ReadOnly',
'Required',
'NotRequired',
+ 'NoDefault',
+ 'NoExtraItems',
# Pure aliases, have always been in typing
'AbstractSet',
@@ -117,7 +126,6 @@ __all__ = [
'MutableMapping',
'MutableSequence',
'MutableSet',
- 'NoDefault',
'Optional',
'Pattern',
'Reversible',
@@ -138,6 +146,9 @@ PEP_560 = True
GenericMeta = type
_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
+# Added with bpo-45166 to 3.10.1+ and some 3.9 versions
+_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__
+
# The functions below are modified copies of typing internal helpers.
# They are needed by _ProtocolMeta and they provide support for PEP 646.
@@ -867,6 +878,63 @@ def _ensure_subclassable(mro_entries):
return inner
+_NEEDS_SINGLETONMETA = (
+ not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems")
+)
+
+if _NEEDS_SINGLETONMETA:
+ class SingletonMeta(type):
+ def __setattr__(cls, attr, value):
+ # TypeError is consistent with the behavior of NoneType
+ raise TypeError(
+ f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
+ )
+
+
+if hasattr(typing, "NoDefault"):
+ NoDefault = typing.NoDefault
+else:
+ class NoDefaultType(metaclass=SingletonMeta):
+ """The type of the NoDefault singleton."""
+
+ __slots__ = ()
+
+ def __new__(cls):
+ return globals().get("NoDefault") or object.__new__(cls)
+
+ def __repr__(self):
+ return "typing_extensions.NoDefault"
+
+ def __reduce__(self):
+ return "NoDefault"
+
+ NoDefault = NoDefaultType()
+ del NoDefaultType
+
+if hasattr(typing, "NoExtraItems"):
+ NoExtraItems = typing.NoExtraItems
+else:
+ class NoExtraItemsType(metaclass=SingletonMeta):
+ """The type of the NoExtraItems singleton."""
+
+ __slots__ = ()
+
+ def __new__(cls):
+ return globals().get("NoExtraItems") or object.__new__(cls)
+
+ def __repr__(self):
+ return "typing_extensions.NoExtraItems"
+
+ def __reduce__(self):
+ return "NoExtraItems"
+
+ NoExtraItems = NoExtraItemsType()
+ del NoExtraItemsType
+
+if _NEEDS_SINGLETONMETA:
+ del SingletonMeta
+
+
# Update this to something like >=3.13.0b1 if and when
# PEP 728 is implemented in CPython
_PEP_728_IMPLEMENTED = False
@@ -913,7 +981,9 @@ else:
break
class _TypedDictMeta(type):
- def __new__(cls, name, bases, ns, *, total=True, closed=False):
+
+ def __new__(cls, name, bases, ns, *, total=True, closed=None,
+ extra_items=NoExtraItems):
"""Create new typed dict class object.
This method is called when TypedDict is subclassed,
@@ -925,6 +995,8 @@ else:
if type(base) is not _TypedDictMeta and base is not typing.Generic:
raise TypeError('cannot inherit from both a TypedDict type '
'and a non-TypedDict base class')
+ if closed is not None and extra_items is not NoExtraItems:
+ raise TypeError(f"Cannot combine closed={closed!r} and extra_items")
if any(issubclass(b, typing.Generic) for b in bases):
generic_base = (typing.Generic,)
@@ -964,7 +1036,7 @@ else:
optional_keys = set()
readonly_keys = set()
mutable_keys = set()
- extra_items_type = None
+ extra_items_type = extra_items
for base in bases:
base_dict = base.__dict__
@@ -974,13 +1046,12 @@ else:
optional_keys.update(base_dict.get('__optional_keys__', ()))
readonly_keys.update(base_dict.get('__readonly_keys__', ()))
mutable_keys.update(base_dict.get('__mutable_keys__', ()))
- base_extra_items_type = base_dict.get('__extra_items__', None)
- if base_extra_items_type is not None:
- extra_items_type = base_extra_items_type
- if closed and extra_items_type is None:
- extra_items_type = Never
- if closed and "__extra_items__" in own_annotations:
+ # This was specified in an earlier version of PEP 728. Support
+ # is retained for backwards compatibility, but only for Python
+ # 3.13 and lower.
+ if (closed and sys.version_info < (3, 14)
+ and "__extra_items__" in own_annotations):
annotation_type = own_annotations.pop("__extra_items__")
qualifiers = set(_get_typeddict_qualifiers(annotation_type))
if Required in qualifiers:
@@ -1019,8 +1090,7 @@ else:
tp_dict.__optional_keys__ = frozenset(optional_keys)
tp_dict.__readonly_keys__ = frozenset(readonly_keys)
tp_dict.__mutable_keys__ = frozenset(mutable_keys)
- if not hasattr(tp_dict, '__total__'):
- tp_dict.__total__ = total
+ tp_dict.__total__ = total
tp_dict.__closed__ = closed
tp_dict.__extra_items__ = extra_items_type
return tp_dict
@@ -1036,7 +1106,16 @@ else:
_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
@_ensure_subclassable(lambda bases: (_TypedDict,))
- def TypedDict(typename, fields=_marker, /, *, total=True, closed=False, **kwargs):
+ def TypedDict(
+ typename,
+ fields=_marker,
+ /,
+ *,
+ total=True,
+ closed=None,
+ extra_items=NoExtraItems,
+ **kwargs
+ ):
"""A simple typed namespace. At runtime it is equivalent to a plain dict.
TypedDict creates a dictionary type such that a type checker will expect all
@@ -1096,9 +1175,14 @@ else:
"using the functional syntax, pass an empty dictionary, e.g. "
) + example + "."
warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
- if closed is not False and closed is not True:
+ # Support a field called "closed"
+ if closed is not False and closed is not True and closed is not None:
kwargs["closed"] = closed
- closed = False
+ closed = None
+ # Or "extra_items"
+ if extra_items is not NoExtraItems:
+ kwargs["extra_items"] = extra_items
+ extra_items = NoExtraItems
fields = kwargs
elif kwargs:
raise TypeError("TypedDict takes either a dict or keyword arguments,"
@@ -1120,7 +1204,8 @@ else:
# Setting correct module is necessary to make typed dict classes pickleable.
ns['__module__'] = module
- td = _TypedDictMeta(typename, (), ns, total=total, closed=closed)
+ td = _TypedDictMeta(typename, (), ns, total=total, closed=closed,
+ extra_items=extra_items)
td.__orig_bases__ = (TypedDict,)
return td
@@ -1232,10 +1317,90 @@ else: # <=3.13
)
else: # 3.8
hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
+ if sys.version_info < (3, 11):
+ _clean_optional(obj, hint, globalns, localns)
+ if sys.version_info < (3, 9):
+ # In 3.8 eval_type does not flatten Optional[ForwardRef] correctly
+ # This will recreate and and cache Unions.
+ hint = {
+ k: (t
+ if get_origin(t) != Union
+ else Union[t.__args__])
+ for k, t in hint.items()
+ }
if include_extras:
return hint
return {k: _strip_extras(t) for k, t in hint.items()}
+ _NoneType = type(None)
+
+ def _could_be_inserted_optional(t):
+ """detects Union[..., None] pattern"""
+ # 3.8+ compatible checking before _UnionGenericAlias
+ if get_origin(t) is not Union:
+ return False
+ # Assume if last argument is not None they are user defined
+ if t.__args__[-1] is not _NoneType:
+ return False
+ return True
+
+ # < 3.11
+ def _clean_optional(obj, hints, globalns=None, localns=None):
+ # reverts injected Union[..., None] cases from typing.get_type_hints
+ # when a None default value is used.
+ # see https://github.com/python/typing_extensions/issues/310
+ if not hints or isinstance(obj, type):
+ return
+ defaults = typing._get_defaults(obj) # avoid accessing __annotations___
+ if not defaults:
+ return
+ original_hints = obj.__annotations__
+ for name, value in hints.items():
+ # Not a Union[..., None] or replacement conditions not fullfilled
+ if (not _could_be_inserted_optional(value)
+ or name not in defaults
+ or defaults[name] is not None
+ ):
+ continue
+ original_value = original_hints[name]
+ # value=NoneType should have caused a skip above but check for safety
+ if original_value is None:
+ original_value = _NoneType
+ # Forward reference
+ if isinstance(original_value, str):
+ if globalns is None:
+ if isinstance(obj, _types.ModuleType):
+ globalns = obj.__dict__
+ else:
+ nsobj = obj
+ # Find globalns for the unwrapped object.
+ while hasattr(nsobj, '__wrapped__'):
+ nsobj = nsobj.__wrapped__
+ globalns = getattr(nsobj, '__globals__', {})
+ if localns is None:
+ localns = globalns
+ elif localns is None:
+ localns = globalns
+ if sys.version_info < (3, 9):
+ original_value = ForwardRef(original_value)
+ else:
+ original_value = ForwardRef(
+ original_value,
+ is_argument=not isinstance(obj, _types.ModuleType)
+ )
+ original_evaluated = typing._eval_type(original_value, globalns, localns)
+ if sys.version_info < (3, 9) and get_origin(original_evaluated) is Union:
+ # Union[str, None, "str"] is not reduced to Union[str, None]
+ original_evaluated = Union[original_evaluated.__args__]
+ # Compare if values differ. Note that even if equal
+ # value might be cached by typing._tp_cache contrary to original_evaluated
+ if original_evaluated != value or (
+ # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias
+ hasattr(_types, "UnionType")
+ and isinstance(original_evaluated, _types.UnionType)
+ and not isinstance(value, _types.UnionType)
+ ):
+ hints[name] = original_evaluated
# Python 3.9+ has PEP 593 (Annotated)
if hasattr(typing, 'Annotated'):
@@ -1443,34 +1608,6 @@ else:
)
-if hasattr(typing, "NoDefault"):
- NoDefault = typing.NoDefault
-else:
- class NoDefaultTypeMeta(type):
- def __setattr__(cls, attr, value):
- # TypeError is consistent with the behavior of NoneType
- raise TypeError(
- f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
- )
-
- class NoDefaultType(metaclass=NoDefaultTypeMeta):
- """The type of the NoDefault singleton."""
-
- __slots__ = ()
-
- def __new__(cls):
- return globals().get("NoDefault") or object.__new__(cls)
-
- def __repr__(self):
- return "typing_extensions.NoDefault"
-
- def __reduce__(self):
- return "NoDefault"
-
- NoDefault = NoDefaultType()
- del NoDefaultType, NoDefaultTypeMeta
-
-
def _set_default(type_param, default):
type_param.has_default = lambda: default is not NoDefault
type_param.__default__ = default
@@ -1761,6 +1898,23 @@ else:
# 3.8-3.9
if not hasattr(typing, 'Concatenate'):
# Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+
+ # 3.9.0-1
+ if not hasattr(typing, '_type_convert'):
+ def _type_convert(arg, module=None, *, allow_special_forms=False):
+ """For converting None to type(None), and strings to ForwardRef."""
+ if arg is None:
+ return type(None)
+ if isinstance(arg, str):
+ if sys.version_info <= (3, 9, 6):
+ return ForwardRef(arg)
+ if sys.version_info <= (3, 9, 7):
+ return ForwardRef(arg, module=module)
+ return ForwardRef(arg, module=module, is_class=allow_special_forms)
+ return arg
+ else:
+ _type_convert = typing._type_convert
+
class _ConcatenateGenericAlias(list):
# Trick Generic into looking into this for __parameters__.
@@ -1792,27 +1946,171 @@ if not hasattr(typing, 'Concatenate'):
tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
)
+ # 3.8; needed for typing._subst_tvars
+ # 3.9 used by __getitem__ below
+ def copy_with(self, params):
+ if isinstance(params[-1], _ConcatenateGenericAlias):
+ params = (*params[:-1], *params[-1].__args__)
+ elif isinstance(params[-1], (list, tuple)):
+ return (*params[:-1], *params[-1])
+ elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))):
+ raise TypeError("The last parameter to Concatenate should be a "
+ "ParamSpec variable or ellipsis.")
+ return self.__class__(self.__origin__, params)
+
+ # 3.9; accessed during GenericAlias.__getitem__ when substituting
+ def __getitem__(self, args):
+ if self.__origin__ in (Generic, Protocol):
+ # Can't subscript Generic[...] or Protocol[...].
+ raise TypeError(f"Cannot subscript already-subscripted {self}")
+ if not self.__parameters__:
+ raise TypeError(f"{self} is not a generic class")
+
+ if not isinstance(args, tuple):
+ args = (args,)
+ args = _unpack_args(*(_type_convert(p) for p in args))
+ params = self.__parameters__
+ for param in params:
+ prepare = getattr(param, "__typing_prepare_subst__", None)
+ if prepare is not None:
+ args = prepare(self, args)
+ # 3.8 - 3.9 & typing.ParamSpec
+ elif isinstance(param, ParamSpec):
+ i = params.index(param)
+ if (
+ i == len(args)
+ and getattr(param, '__default__', NoDefault) is not NoDefault
+ ):
+ args = [*args, param.__default__]
+ if i >= len(args):
+ raise TypeError(f"Too few arguments for {self}")
+ # Special case for Z[[int, str, bool]] == Z[int, str, bool]
+ if len(params) == 1 and not _is_param_expr(args[0]):
+ assert i == 0
+ args = (args,)
+ elif (
+ isinstance(args[i], list)
+ # 3.8 - 3.9
+ # This class inherits from list do not convert
+ and not isinstance(args[i], _ConcatenateGenericAlias)
+ ):
+ args = (*args[:i], tuple(args[i]), *args[i + 1:])
-# 3.8-3.9
+ alen = len(args)
+ plen = len(params)
+ if alen != plen:
+ raise TypeError(
+ f"Too {'many' if alen > plen else 'few'} arguments for {self};"
+ f" actual {alen}, expected {plen}"
+ )
+
+ subst = dict(zip(self.__parameters__, args))
+ # determine new args
+ new_args = []
+ for arg in self.__args__:
+ if isinstance(arg, type):
+ new_args.append(arg)
+ continue
+ if isinstance(arg, TypeVar):
+ arg = subst[arg]
+ if (
+ (isinstance(arg, typing._GenericAlias) and _is_unpack(arg))
+ or (
+ hasattr(_types, "GenericAlias")
+ and isinstance(arg, _types.GenericAlias)
+ and getattr(arg, "__unpacked__", False)
+ )
+ ):
+ raise TypeError(f"{arg} is not valid as type argument")
+
+ elif isinstance(arg,
+ typing._GenericAlias
+ if not hasattr(_types, "GenericAlias") else
+ (typing._GenericAlias, _types.GenericAlias)
+ ):
+ subparams = arg.__parameters__
+ if subparams:
+ subargs = tuple(subst[x] for x in subparams)
+ arg = arg[subargs]
+ new_args.append(arg)
+ return self.copy_with(tuple(new_args))
+
+# 3.10+
+else:
+ _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
+
+ # 3.10
+ if sys.version_info < (3, 11):
+
+ class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True):
+ # needed for checks in collections.abc.Callable to accept this class
+ __module__ = "typing"
+
+ def copy_with(self, params):
+ if isinstance(params[-1], (list, tuple)):
+ return (*params[:-1], *params[-1])
+ if isinstance(params[-1], typing._ConcatenateGenericAlias):
+ params = (*params[:-1], *params[-1].__args__)
+ elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)):
+ raise TypeError("The last parameter to Concatenate should be a "
+ "ParamSpec variable or ellipsis.")
+ return super(typing._ConcatenateGenericAlias, self).copy_with(params)
+
+ def __getitem__(self, args):
+ value = super().__getitem__(args)
+ if isinstance(value, tuple) and any(_is_unpack(t) for t in value):
+ return tuple(_unpack_args(*(n for n in value)))
+ return value
+
+
+# 3.8-3.9.2
+class _EllipsisDummy: ...
+
+
+# 3.8-3.10
+def _create_concatenate_alias(origin, parameters):
+ if parameters[-1] is ... and sys.version_info < (3, 9, 2):
+ # Hack: Arguments must be types, replace it with one.
+ parameters = (*parameters[:-1], _EllipsisDummy)
+ if sys.version_info >= (3, 10, 3):
+ concatenate = _ConcatenateGenericAlias(origin, parameters,
+ _typevar_types=(TypeVar, ParamSpec),
+ _paramspec_tvars=True)
+ else:
+ concatenate = _ConcatenateGenericAlias(origin, parameters)
+ if parameters[-1] is not _EllipsisDummy:
+ return concatenate
+ # Remove dummy again
+ concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ...
+ for p in concatenate.__args__)
+ if sys.version_info < (3, 10):
+ # backport needs __args__ adjustment only
+ return concatenate
+ concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__
+ if p is not _EllipsisDummy)
+ return concatenate
+
+
+# 3.8-3.10
@typing._tp_cache
def _concatenate_getitem(self, parameters):
if parameters == ():
raise TypeError("Cannot take a Concatenate of no types.")
if not isinstance(parameters, tuple):
parameters = (parameters,)
- if not isinstance(parameters[-1], ParamSpec):
+ if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)):
raise TypeError("The last parameter to Concatenate should be a "
- "ParamSpec variable.")
+ "ParamSpec variable or ellipsis.")
msg = "Concatenate[arg, ...]: each arg must be a type."
- parameters = tuple(typing._type_check(p, msg) for p in parameters)
- return _ConcatenateGenericAlias(self, parameters)
+ parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]),
+ parameters[-1])
+ return _create_concatenate_alias(self, parameters)
-# 3.10+
-if hasattr(typing, 'Concatenate'):
+# 3.11+; Concatenate does not accept ellipsis in 3.10
+if sys.version_info >= (3, 11):
Concatenate = typing.Concatenate
- _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
-# 3.9
+# 3.9-3.10
elif sys.version_info[:2] >= (3, 9):
@_ExtensionsSpecialForm
def Concatenate(self, parameters):
@@ -1976,7 +2274,7 @@ elif sys.version_info[:2] >= (3, 9):
1. The return value is a boolean.
2. If the return value is ``True``, the type of its argument
- is the intersection of the type inside ``TypeGuard`` and the argument's
+ is the intersection of the type inside ``TypeIs`` and the argument's
previously known type.
For example::
@@ -2024,7 +2322,7 @@ else:
1. The return value is a boolean.
2. If the return value is ``True``, the type of its argument
- is the intersection of the type inside ``TypeGuard`` and the argument's
+ is the intersection of the type inside ``TypeIs`` and the argument's
previously known type.
For example::
@@ -2042,6 +2340,69 @@ else:
PEP 742 (Narrowing types with TypeIs).
""")
+# 3.14+?
+if hasattr(typing, 'TypeForm'):
+ TypeForm = typing.TypeForm
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+ class _TypeFormForm(_ExtensionsSpecialForm, _root=True):
+ # TypeForm(X) is equivalent to X but indicates to the type checker
+ # that the object is a TypeForm.
+ def __call__(self, obj, /):
+ return obj
+
+ @_TypeFormForm
+ def TypeForm(self, parameters):
+ """A special form representing the value that results from the evaluation
+ of a type expression. This value encodes the information supplied in the
+ type expression, and it represents the type described by that type expression.
+
+ When used in a type expression, TypeForm describes a set of type form objects.
+ It accepts a single type argument, which must be a valid type expression.
+ ``TypeForm[T]`` describes the set of all type form objects that represent
+ the type T or types that are assignable to T.
+
+ Usage:
+
+ def cast[T](typ: TypeForm[T], value: Any) -> T: ...
+
+ reveal_type(cast(int, "x")) # int
+
+ See PEP 747 for more information.
+ """
+ item = typing._type_check(parameters, f'{self} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+# 3.8
+else:
+ class _TypeFormForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type')
+ return typing._GenericAlias(self, (item,))
+
+ def __call__(self, obj, /):
+ return obj
+
+ TypeForm = _TypeFormForm(
+ 'TypeForm',
+ doc="""A special form representing the value that results from the evaluation
+ of a type expression. This value encodes the information supplied in the
+ type expression, and it represents the type described by that type expression.
+
+ When used in a type expression, TypeForm describes a set of type form objects.
+ It accepts a single type argument, which must be a valid type expression.
+ ``TypeForm[T]`` describes the set of all type form objects that represent
+ the type T or types that are assignable to T.
+
+ Usage:
+
+ def cast[T](typ: TypeForm[T], value: Any) -> T: ...
+
+ reveal_type(cast(int, "x")) # int
+
+ See PEP 747 for more information.
+ """)
+
# Vendored from cpython typing._SpecialFrom
class _SpecialForm(typing._Final, _root=True):
@@ -2344,7 +2705,9 @@ elif sys.version_info[:2] >= (3, 9): # 3.9+
self.__doc__ = _UNPACK_DOC
class _UnpackAlias(typing._GenericAlias, _root=True):
- __class__ = typing.TypeVar
+ if sys.version_info < (3, 11):
+ # needed for compatibility with Generic[Unpack[Ts]]
+ __class__ = typing.TypeVar
@property
def __typing_unpacked_tuple_args__(self):
@@ -2357,6 +2720,17 @@ elif sys.version_info[:2] >= (3, 9): # 3.9+
return arg.__args__
return None
+ @property
+ def __typing_is_unpacked_typevartuple__(self):
+ assert self.__origin__ is Unpack
+ assert len(self.__args__) == 1
+ return isinstance(self.__args__[0], TypeVarTuple)
+
+ def __getitem__(self, args):
+ if self.__typing_is_unpacked_typevartuple__:
+ return args
+ return super().__getitem__(args)
+
@_UnpackSpecialForm
def Unpack(self, parameters):
item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
@@ -2369,6 +2743,28 @@ else: # 3.8
class _UnpackAlias(typing._GenericAlias, _root=True):
__class__ = typing.TypeVar
+ @property
+ def __typing_unpacked_tuple_args__(self):
+ assert self.__origin__ is Unpack
+ assert len(self.__args__) == 1
+ arg, = self.__args__
+ if isinstance(arg, typing._GenericAlias):
+ if arg.__origin__ is not tuple:
+ raise TypeError("Unpack[...] must be used with a tuple type")
+ return arg.__args__
+ return None
+
+ @property
+ def __typing_is_unpacked_typevartuple__(self):
+ assert self.__origin__ is Unpack
+ assert len(self.__args__) == 1
+ return isinstance(self.__args__[0], TypeVarTuple)
+
+ def __getitem__(self, args):
+ if self.__typing_is_unpacked_typevartuple__:
+ return args
+ return super().__getitem__(args)
+
class _UnpackForm(_ExtensionsSpecialForm, _root=True):
def __getitem__(self, parameters):
item = typing._type_check(parameters,
@@ -2381,21 +2777,22 @@ else: # 3.8
return isinstance(obj, _UnpackAlias)
+def _unpack_args(*args):
+ newargs = []
+ for arg in args:
+ subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+ if subargs is not None and (not (subargs and subargs[-1] is ...)):
+ newargs.extend(subargs)
+ else:
+ newargs.append(arg)
+ return newargs
+
+
if _PEP_696_IMPLEMENTED:
from typing import TypeVarTuple
elif hasattr(typing, "TypeVarTuple"): # 3.11+
- def _unpack_args(*args):
- newargs = []
- for arg in args:
- subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
- if subargs is not None and not (subargs and subargs[-1] is ...):
- newargs.extend(subargs)
- else:
- newargs.append(arg)
- return newargs
-
# Add default parameter - PEP 696
class TypeVarTuple(metaclass=_TypeVarLikeMeta):
"""Type variable tuple."""
@@ -2726,7 +3123,8 @@ else: # <=3.11
return arg
-if hasattr(warnings, "deprecated"):
+# Python 3.13.3+ contains a fix for the wrapped __new__
+if sys.version_info >= (3, 13, 3):
deprecated = warnings.deprecated
else:
_T = typing.TypeVar("_T")
@@ -2806,7 +3204,7 @@ else:
original_new = arg.__new__
@functools.wraps(original_new)
- def __new__(cls, *args, **kwargs):
+ def __new__(cls, /, *args, **kwargs):
if cls is arg:
warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
if original_new is not object.__new__:
@@ -2845,13 +3243,21 @@ else:
__init_subclass__.__deprecated__ = msg
return arg
elif callable(arg):
+ import asyncio.coroutines
import functools
+ import inspect
@functools.wraps(arg)
def wrapper(*args, **kwargs):
warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
return arg(*args, **kwargs)
+ if asyncio.coroutines.iscoroutinefunction(arg):
+ if sys.version_info >= (3, 12):
+ wrapper = inspect.markcoroutinefunction(wrapper)
+ else:
+ wrapper._is_coroutine = asyncio.coroutines._is_coroutine
+
arg.__deprecated__ = wrapper.__deprecated__ = msg
return wrapper
else:
@@ -2860,6 +3266,24 @@ else:
f"a class or callable, not {arg!r}"
)
+if sys.version_info < (3, 10):
+ def _is_param_expr(arg):
+ return arg is ... or isinstance(
+ arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias)
+ )
+else:
+ def _is_param_expr(arg):
+ return arg is ... or isinstance(
+ arg,
+ (
+ tuple,
+ list,
+ ParamSpec,
+ _ConcatenateGenericAlias,
+ typing._ConcatenateGenericAlias,
+ ),
+ )
+
# We have to do some monkey patching to deal with the dual nature of
# Unpack/TypeVarTuple:
@@ -2874,6 +3298,17 @@ if not hasattr(typing, "TypeVarTuple"):
This gives a nice error message in case of count mismatch.
"""
+ # If substituting a single ParamSpec with multiple arguments
+ # we do not check the count
+ if (inspect.isclass(cls) and issubclass(cls, typing.Generic)
+ and len(cls.__parameters__) == 1
+ and isinstance(cls.__parameters__[0], ParamSpec)
+ and parameters
+ and not _is_param_expr(parameters[0])
+ ):
+ # Generic modifies parameters variable, but here we cannot do this
+ return
+
if not elen:
raise TypeError(f"{cls} is not a generic class")
if elen is _marker:
@@ -3007,7 +3442,10 @@ if hasattr(typing, '_collect_type_vars'):
for t in types:
if _is_unpacked_typevartuple(t):
type_var_tuple_encountered = True
- elif isinstance(t, typevar_types) and t not in tvars:
+ elif (
+ isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias)
+ and t not in tvars
+ ):
if enforce_default_ordering:
has_default = getattr(t, '__default__', NoDefault) is not NoDefault
if has_default:
@@ -3022,6 +3460,13 @@ if hasattr(typing, '_collect_type_vars'):
tvars.append(t)
if _should_collect_from_parameters(t):
tvars.extend([t for t in t.__parameters__ if t not in tvars])
+ elif isinstance(t, tuple):
+ # Collect nested type_vars
+ # tuple wrapped by _prepare_paramspec_params(cls, params)
+ for x in t:
+ for collected in _collect_type_vars([x]):
+ if collected not in tvars:
+ tvars.append(collected)
return tuple(tvars)
typing._collect_type_vars = _collect_type_vars
@@ -3379,17 +3824,62 @@ else:
return typing.Union[other, self]
-if hasattr(typing, "TypeAliasType"):
+if sys.version_info >= (3, 14):
TypeAliasType = typing.TypeAliasType
+# 3.8-3.13
else:
- def _is_unionable(obj):
- """Corresponds to is_unionable() in unionobject.c in CPython."""
- return obj is None or isinstance(obj, (
- type,
- _types.GenericAlias,
- _types.UnionType,
- TypeAliasType,
- ))
+ if sys.version_info >= (3, 12):
+ # 3.12-3.14
+ def _is_unionable(obj):
+ """Corresponds to is_unionable() in unionobject.c in CPython."""
+ return obj is None or isinstance(obj, (
+ type,
+ _types.GenericAlias,
+ _types.UnionType,
+ typing.TypeAliasType,
+ TypeAliasType,
+ ))
+ else:
+ # 3.8-3.11
+ def _is_unionable(obj):
+ """Corresponds to is_unionable() in unionobject.c in CPython."""
+ return obj is None or isinstance(obj, (
+ type,
+ _types.GenericAlias,
+ _types.UnionType,
+ TypeAliasType,
+ ))
+
+ if sys.version_info < (3, 10):
+ # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582,
+ # so that we emulate the behaviour of `types.GenericAlias`
+ # on the latest versions of CPython
+ _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({
+ "__class__",
+ "__bases__",
+ "__origin__",
+ "__args__",
+ "__unpacked__",
+ "__parameters__",
+ "__typing_unpacked_tuple_args__",
+ "__mro_entries__",
+ "__reduce_ex__",
+ "__reduce__",
+ "__copy__",
+ "__deepcopy__",
+ })
+
+ class _TypeAliasGenericAlias(typing._GenericAlias, _root=True):
+ def __getattr__(self, attr):
+ if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS:
+ return object.__getattr__(self, attr)
+ return getattr(self.__origin__, attr)
+
+ if sys.version_info < (3, 9):
+ def __getitem__(self, item):
+ result = super().__getitem__(item)
+ result.__class__ = type(self)
+ return result
class TypeAliasType:
"""Create named, parameterized type aliases.
@@ -3422,11 +3912,29 @@ else:
def __init__(self, name: str, value, *, type_params=()):
if not isinstance(name, str):
raise TypeError("TypeAliasType name must be a string")
+ if not isinstance(type_params, tuple):
+ raise TypeError("type_params must be a tuple")
self.__value__ = value
self.__type_params__ = type_params
+ default_value_encountered = False
parameters = []
for type_param in type_params:
+ if (
+ not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec))
+ # 3.8-3.11
+ # Unpack Backport passes isinstance(type_param, TypeVar)
+ or _is_unpack(type_param)
+ ):
+ raise TypeError(f"Expected a type param, got {type_param!r}")
+ has_default = (
+ getattr(type_param, '__default__', NoDefault) is not NoDefault
+ )
+ if default_value_encountered and not has_default:
+ raise TypeError(f"non-default type parameter '{type_param!r}'"
+ " follows default type parameter")
+ if has_default:
+ default_value_encountered = True
if isinstance(type_param, TypeVarTuple):
parameters.extend(type_param)
else:
@@ -3463,16 +3971,49 @@ else:
def __repr__(self) -> str:
return self.__name__
+ if sys.version_info < (3, 11):
+ def _check_single_param(self, param, recursion=0):
+ # Allow [], [int], [int, str], [int, ...], [int, T]
+ if param is ...:
+ return ...
+ if param is None:
+ return None
+ # Note in <= 3.9 _ConcatenateGenericAlias inherits from list
+ if isinstance(param, list) and recursion == 0:
+ return [self._check_single_param(arg, recursion+1)
+ for arg in param]
+ return typing._type_check(
+ param, f'Subscripting {self.__name__} requires a type.'
+ )
+
+ def _check_parameters(self, parameters):
+ if sys.version_info < (3, 11):
+ return tuple(
+ self._check_single_param(item)
+ for item in parameters
+ )
+ return tuple(typing._type_check(
+ item, f'Subscripting {self.__name__} requires a type.'
+ )
+ for item in parameters
+ )
+
def __getitem__(self, parameters):
+ if not self.__type_params__:
+ raise TypeError("Only generic type aliases are subscriptable")
if not isinstance(parameters, tuple):
parameters = (parameters,)
- parameters = [
- typing._type_check(
- item, f'Subscripting {self.__name__} requires a type.'
- )
- for item in parameters
- ]
- return typing._GenericAlias(self, tuple(parameters))
+ # Using 3.9 here will create problems with Concatenate
+ if sys.version_info >= (3, 10):
+ return _types.GenericAlias(self, parameters)
+ type_vars = _collect_type_vars(parameters)
+ parameters = self._check_parameters(parameters)
+ alias = _TypeAliasGenericAlias(self, parameters)
+ # alias.__parameters__ is not complete if Concatenate is present
+ # as it is converted to a list from which no parameters are extracted.
+ if alias.__parameters__ != type_vars:
+ alias.__parameters__ = type_vars
+ return alias
def __reduce__(self):
return self.__name__
@@ -3599,6 +4140,408 @@ if _CapsuleType is not None:
__all__.append("CapsuleType")
+# Using this convoluted approach so that this keeps working
+# whether we end up using PEP 649 as written, PEP 749, or
+# some other variation: in any case, inspect.get_annotations
+# will continue to exist and will gain a `format` parameter.
+_PEP_649_OR_749_IMPLEMENTED = (
+ hasattr(inspect, 'get_annotations')
+ and inspect.get_annotations.__kwdefaults__ is not None
+ and "format" in inspect.get_annotations.__kwdefaults__
+)
+
+
+class Format(enum.IntEnum):
+ VALUE = 1
+ FORWARDREF = 2
+ STRING = 3
+
+
+if _PEP_649_OR_749_IMPLEMENTED:
+ get_annotations = inspect.get_annotations
+else:
+ def get_annotations(obj, *, globals=None, locals=None, eval_str=False,
+ format=Format.VALUE):
+ """Compute the annotations dict for an object.
+
+ obj may be a callable, class, or module.
+ Passing in an object of any other type raises TypeError.
+
+ Returns a dict. get_annotations() returns a new dict every time
+ it's called; calling it twice on the same object will return two
+ different but equivalent dicts.
+
+ This is a backport of `inspect.get_annotations`, which has been
+ in the standard library since Python 3.10. See the standard library
+ documentation for more:
+
+ https://docs.python.org/3/library/inspect.html#inspect.get_annotations
+
+ This backport adds the *format* argument introduced by PEP 649. The
+ three formats supported are:
+ * VALUE: the annotations are returned as-is. This is the default and
+ it is compatible with the behavior on previous Python versions.
+ * FORWARDREF: return annotations as-is if possible, but replace any
+ undefined names with ForwardRef objects. The implementation proposed by
+ PEP 649 relies on language changes that cannot be backported; the
+ typing-extensions implementation simply returns the same result as VALUE.
+ * STRING: return annotations as strings, in a format close to the original
+ source. Again, this behavior cannot be replicated directly in a backport.
+ As an approximation, typing-extensions retrieves the annotations under
+ VALUE semantics and then stringifies them.
+
+ The purpose of this backport is to allow users who would like to use
+ FORWARDREF or STRING semantics once PEP 649 is implemented, but who also
+ want to support earlier Python versions, to simply write:
+
+ typing_extensions.get_annotations(obj, format=Format.FORWARDREF)
+
+ """
+ format = Format(format)
+
+ if eval_str and format is not Format.VALUE:
+ raise ValueError("eval_str=True is only supported with format=Format.VALUE")
+
+ if isinstance(obj, type):
+ # class
+ obj_dict = getattr(obj, '__dict__', None)
+ if obj_dict and hasattr(obj_dict, 'get'):
+ ann = obj_dict.get('__annotations__', None)
+ if isinstance(ann, _types.GetSetDescriptorType):
+ ann = None
+ else:
+ ann = None
+
+ obj_globals = None
+ module_name = getattr(obj, '__module__', None)
+ if module_name:
+ module = sys.modules.get(module_name, None)
+ if module:
+ obj_globals = getattr(module, '__dict__', None)
+ obj_locals = dict(vars(obj))
+ unwrap = obj
+ elif isinstance(obj, _types.ModuleType):
+ # module
+ ann = getattr(obj, '__annotations__', None)
+ obj_globals = obj.__dict__
+ obj_locals = None
+ unwrap = None
+ elif callable(obj):
+ # this includes types.Function, types.BuiltinFunctionType,
+ # types.BuiltinMethodType, functools.partial, functools.singledispatch,
+ # "class funclike" from Lib/test/test_inspect... on and on it goes.
+ ann = getattr(obj, '__annotations__', None)
+ obj_globals = getattr(obj, '__globals__', None)
+ obj_locals = None
+ unwrap = obj
+ elif hasattr(obj, '__annotations__'):
+ ann = obj.__annotations__
+ obj_globals = obj_locals = unwrap = None
+ else:
+ raise TypeError(f"{obj!r} is not a module, class, or callable.")
+
+ if ann is None:
+ return {}
+
+ if not isinstance(ann, dict):
+ raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None")
+
+ if not ann:
+ return {}
+
+ if not eval_str:
+ if format is Format.STRING:
+ return {
+ key: value if isinstance(value, str) else typing._type_repr(value)
+ for key, value in ann.items()
+ }
+ return dict(ann)
+
+ if unwrap is not None:
+ while True:
+ if hasattr(unwrap, '__wrapped__'):
+ unwrap = unwrap.__wrapped__
+ continue
+ if isinstance(unwrap, functools.partial):
+ unwrap = unwrap.func
+ continue
+ break
+ if hasattr(unwrap, "__globals__"):
+ obj_globals = unwrap.__globals__
+
+ if globals is None:
+ globals = obj_globals
+ if locals is None:
+ locals = obj_locals or {}
+
+ # "Inject" type parameters into the local namespace
+ # (unless they are shadowed by assignments *in* the local namespace),
+ # as a way of emulating annotation scopes when calling `eval()`
+ if type_params := getattr(obj, "__type_params__", ()):
+ locals = {param.__name__: param for param in type_params} | locals
+
+ return_value = {key:
+ value if not isinstance(value, str) else eval(value, globals, locals)
+ for key, value in ann.items() }
+ return return_value
+
+
+if hasattr(typing, "evaluate_forward_ref"):
+ evaluate_forward_ref = typing.evaluate_forward_ref
+else:
+ # Implements annotationlib.ForwardRef.evaluate
+ def _eval_with_owner(
+ forward_ref, *, owner=None, globals=None, locals=None, type_params=None
+ ):
+ if forward_ref.__forward_evaluated__:
+ return forward_ref.__forward_value__
+ if getattr(forward_ref, "__cell__", None) is not None:
+ try:
+ value = forward_ref.__cell__.cell_contents
+ except ValueError:
+ pass
+ else:
+ forward_ref.__forward_evaluated__ = True
+ forward_ref.__forward_value__ = value
+ return value
+ if owner is None:
+ owner = getattr(forward_ref, "__owner__", None)
+
+ if (
+ globals is None
+ and getattr(forward_ref, "__forward_module__", None) is not None
+ ):
+ globals = getattr(
+ sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None
+ )
+ if globals is None:
+ globals = getattr(forward_ref, "__globals__", None)
+ if globals is None:
+ if isinstance(owner, type):
+ module_name = getattr(owner, "__module__", None)
+ if module_name:
+ module = sys.modules.get(module_name, None)
+ if module:
+ globals = getattr(module, "__dict__", None)
+ elif isinstance(owner, _types.ModuleType):
+ globals = getattr(owner, "__dict__", None)
+ elif callable(owner):
+ globals = getattr(owner, "__globals__", None)
+
+ # If we pass None to eval() below, the globals of this module are used.
+ if globals is None:
+ globals = {}
+
+ if locals is None:
+ locals = {}
+ if isinstance(owner, type):
+ locals.update(vars(owner))
+
+ if type_params is None and owner is not None:
+ # "Inject" type parameters into the local namespace
+ # (unless they are shadowed by assignments *in* the local namespace),
+ # as a way of emulating annotation scopes when calling `eval()`
+ type_params = getattr(owner, "__type_params__", None)
+
+ # type parameters require some special handling,
+ # as they exist in their own scope
+ # but `eval()` does not have a dedicated parameter for that scope.
+ # For classes, names in type parameter scopes should override
+ # names in the global scope (which here are called `localns`!),
+ # but should in turn be overridden by names in the class scope
+ # (which here are called `globalns`!)
+ if type_params is not None:
+ globals = dict(globals)
+ locals = dict(locals)
+ for param in type_params:
+ param_name = param.__name__
+ if (
+ _FORWARD_REF_HAS_CLASS and not forward_ref.__forward_is_class__
+ ) or param_name not in globals:
+ globals[param_name] = param
+ locals.pop(param_name, None)
+
+ arg = forward_ref.__forward_arg__
+ if arg.isidentifier() and not keyword.iskeyword(arg):
+ if arg in locals:
+ value = locals[arg]
+ elif arg in globals:
+ value = globals[arg]
+ elif hasattr(builtins, arg):
+ return getattr(builtins, arg)
+ else:
+ raise NameError(arg)
+ else:
+ code = forward_ref.__forward_code__
+ value = eval(code, globals, locals)
+ forward_ref.__forward_evaluated__ = True
+ forward_ref.__forward_value__ = value
+ return value
+
+ def _lax_type_check(
+ value, msg, is_argument=True, *, module=None, allow_special_forms=False
+ ):
+ """
+ A lax Python 3.11+ like version of typing._type_check
+ """
+ if hasattr(typing, "_type_convert"):
+ if (
+ sys.version_info >= (3, 10, 3)
+ or (3, 9, 10) < sys.version_info[:3] < (3, 10)
+ ):
+ # allow_special_forms introduced later cpython/#30926 (bpo-46539)
+ type_ = typing._type_convert(
+ value,
+ module=module,
+ allow_special_forms=allow_special_forms,
+ )
+ # module was added with bpo-41249 before is_class (bpo-46539)
+ elif "__forward_module__" in typing.ForwardRef.__slots__:
+ type_ = typing._type_convert(value, module=module)
+ else:
+ type_ = typing._type_convert(value)
+ else:
+ if value is None:
+ return type(None)
+ if isinstance(value, str):
+ return ForwardRef(value)
+ type_ = value
+ invalid_generic_forms = (Generic, Protocol)
+ if not allow_special_forms:
+ invalid_generic_forms += (ClassVar,)
+ if is_argument:
+ invalid_generic_forms += (Final,)
+ if (
+ isinstance(type_, typing._GenericAlias)
+ and get_origin(type_) in invalid_generic_forms
+ ):
+ raise TypeError(f"{type_} is not valid as type argument") from None
+ if type_ in (Any, LiteralString, NoReturn, Never, Self, TypeAlias):
+ return type_
+ if allow_special_forms and type_ in (ClassVar, Final):
+ return type_
+ if (
+ isinstance(type_, (_SpecialForm, typing._SpecialForm))
+ or type_ in (Generic, Protocol)
+ ):
+ raise TypeError(f"Plain {type_} is not valid as type argument") from None
+ if type(type_) is tuple: # lax version with tuple instead of callable
+ raise TypeError(f"{msg} Got {type_!r:.100}.")
+ return type_
+
+ def evaluate_forward_ref(
+ forward_ref,
+ *,
+ owner=None,
+ globals=None,
+ locals=None,
+ type_params=None,
+ format=Format.VALUE,
+ _recursive_guard=frozenset(),
+ ):
+ """Evaluate a forward reference as a type hint.
+
+ This is similar to calling the ForwardRef.evaluate() method,
+ but unlike that method, evaluate_forward_ref() also:
+
+ * Recursively evaluates forward references nested within the type hint.
+ * Rejects certain objects that are not valid type hints.
+ * Replaces type hints that evaluate to None with types.NoneType.
+ * Supports the *FORWARDREF* and *STRING* formats.
+
+ *forward_ref* must be an instance of ForwardRef. *owner*, if given,
+ should be the object that holds the annotations that the forward reference
+ derived from, such as a module, class object, or function. It is used to
+ infer the namespaces to use for looking up names. *globals* and *locals*
+ can also be explicitly given to provide the global and local namespaces.
+ *type_params* is a tuple of type parameters that are in scope when
+ evaluating the forward reference. This parameter must be provided (though
+ it may be an empty tuple) if *owner* is not given and the forward reference
+ does not already have an owner set. *format* specifies the format of the
+ annotation and is a member of the annotationlib.Format enum.
+
+ """
+ if format == Format.STRING:
+ return forward_ref.__forward_arg__
+ if forward_ref.__forward_arg__ in _recursive_guard:
+ return forward_ref
+
+ # Evaluate the forward reference
+ try:
+ value = _eval_with_owner(
+ forward_ref,
+ owner=owner,
+ globals=globals,
+ locals=locals,
+ type_params=type_params,
+ )
+ except NameError:
+ if format == Format.FORWARDREF:
+ return forward_ref
+ else:
+ raise
+
+ msg = "Forward references must evaluate to types."
+ if not _FORWARD_REF_HAS_CLASS:
+ allow_special_forms = not forward_ref.__forward_is_argument__
+ else:
+ allow_special_forms = forward_ref.__forward_is_class__
+ type_ = _lax_type_check(
+ value,
+ msg,
+ is_argument=forward_ref.__forward_is_argument__,
+ allow_special_forms=allow_special_forms,
+ )
+
+ # Recursively evaluate the type
+ if isinstance(type_, ForwardRef):
+ if getattr(type_, "__forward_module__", True) is not None:
+ globals = None
+ return evaluate_forward_ref(
+ type_,
+ globals=globals,
+ locals=locals,
+ type_params=type_params, owner=owner,
+ _recursive_guard=_recursive_guard, format=format
+ )
+ if sys.version_info < (3, 12, 5) and type_params:
+ # Make use of type_params
+ locals = dict(locals) if locals else {}
+ for tvar in type_params:
+ if tvar.__name__ not in locals: # lets not overwrite something present
+ locals[tvar.__name__] = tvar
+ if sys.version_info < (3, 9):
+ return typing._eval_type(
+ type_,
+ globals,
+ locals,
+ )
+ if sys.version_info < (3, 12, 5):
+ return typing._eval_type(
+ type_,
+ globals,
+ locals,
+ recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
+ )
+ if sys.version_info < (3, 14):
+ return typing._eval_type(
+ type_,
+ globals,
+ locals,
+ type_params,
+ recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
+ )
+ return typing._eval_type(
+ type_,
+ globals,
+ locals,
+ type_params,
+ recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
+ format=format,
+ owner=owner,
+ )
+
+
# Aliases for items that have always been in typing.
# Explicitly assign these (rather than using `from typing import *` at the top),
# so that we get a CI error if one of these is deleted from typing.py
diff --git a/contrib/python/pip/pip/_vendor/vendor.txt b/contrib/python/pip/pip/_vendor/vendor.txt
index f04a9c1e73c..283d57f5f34 100644
--- a/contrib/python/pip/pip/_vendor/vendor.txt
+++ b/contrib/python/pip/pip/_vendor/vendor.txt
@@ -1,18 +1,20 @@
-CacheControl==0.14.1
+CacheControl==0.14.2
distlib==0.3.9
distro==1.9.0
msgpack==1.1.0
-packaging==24.2
-platformdirs==4.3.6
+packaging==25.0
+platformdirs==4.3.7
pyproject-hooks==1.2.0
requests==2.32.3
- certifi==2024.8.30
+ certifi==2025.1.31
idna==3.10
urllib3==1.26.20
-rich==13.9.4
- pygments==2.18.0
- typing_extensions==4.12.2
-resolvelib==1.0.1
+rich==14.0.0
+ pygments==2.19.1
+ typing_extensions==4.13.2
+resolvelib==1.1.0
setuptools==70.3.0
tomli==2.2.1
-truststore==0.10.0
+tomli-w==1.2.0
+truststore==0.10.1
+dependency-groups==1.3.0
diff --git a/contrib/python/pip/ya.make b/contrib/python/pip/ya.make
index 76afcad6d3f..195518f4556 100644
--- a/contrib/python/pip/ya.make
+++ b/contrib/python/pip/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(25.0.1)
+VERSION(25.1)
LICENSE(MIT)
@@ -10,6 +10,7 @@ NO_LINT()
NO_CHECK_IMPORTS(
pip.__pip-runner__
+ pip._internal.cli.progress_bars
pip._internal.locations._distutils
pip._vendor.*
)
@@ -49,6 +50,7 @@ PY_SRCS(
pip/_internal/commands/inspect.py
pip/_internal/commands/install.py
pip/_internal/commands/list.py
+ pip/_internal/commands/lock.py
pip/_internal/commands/search.py
pip/_internal/commands/show.py
pip/_internal/commands/uninstall.py
@@ -84,6 +86,7 @@ PY_SRCS(
pip/_internal/models/index.py
pip/_internal/models/installation_report.py
pip/_internal/models/link.py
+ pip/_internal/models/pylock.py
pip/_internal/models/scheme.py
pip/_internal/models/search_scope.py
pip/_internal/models/selection_prefs.py
@@ -115,6 +118,7 @@ PY_SRCS(
pip/_internal/pyproject.py
pip/_internal/req/__init__.py
pip/_internal/req/constructors.py
+ pip/_internal/req/req_dependency_group.py
pip/_internal/req/req_file.py
pip/_internal/req/req_install.py
pip/_internal/req/req_set.py
@@ -182,6 +186,12 @@ PY_SRCS(
pip/_vendor/certifi/__init__.py
pip/_vendor/certifi/__main__.py
pip/_vendor/certifi/core.py
+ pip/_vendor/dependency_groups/__init__.py
+ pip/_vendor/dependency_groups/__main__.py
+ pip/_vendor/dependency_groups/_implementation.py
+ pip/_vendor/dependency_groups/_lint_dependency_groups.py
+ pip/_vendor/dependency_groups/_pip_wrapper.py
+ pip/_vendor/dependency_groups/_toml_compat.py
pip/_vendor/distlib/__init__.py
pip/_vendor/distlib/compat.py
pip/_vendor/distlib/database.py
@@ -237,25 +247,12 @@ PY_SRCS(
pip/_vendor/platformdirs/windows.py
pip/_vendor/pygments/__init__.py
pip/_vendor/pygments/__main__.py
- pip/_vendor/pygments/cmdline.py
pip/_vendor/pygments/console.py
pip/_vendor/pygments/filter.py
pip/_vendor/pygments/filters/__init__.py
pip/_vendor/pygments/formatter.py
pip/_vendor/pygments/formatters/__init__.py
pip/_vendor/pygments/formatters/_mapping.py
- pip/_vendor/pygments/formatters/bbcode.py
- pip/_vendor/pygments/formatters/groff.py
- pip/_vendor/pygments/formatters/html.py
- pip/_vendor/pygments/formatters/img.py
- pip/_vendor/pygments/formatters/irc.py
- pip/_vendor/pygments/formatters/latex.py
- pip/_vendor/pygments/formatters/other.py
- pip/_vendor/pygments/formatters/pangomarkup.py
- pip/_vendor/pygments/formatters/rtf.py
- pip/_vendor/pygments/formatters/svg.py
- pip/_vendor/pygments/formatters/terminal.py
- pip/_vendor/pygments/formatters/terminal256.py
pip/_vendor/pygments/lexer.py
pip/_vendor/pygments/lexers/__init__.py
pip/_vendor/pygments/lexers/_mapping.py
@@ -294,11 +291,13 @@ PY_SRCS(
pip/_vendor/requests/structures.py
pip/_vendor/requests/utils.py
pip/_vendor/resolvelib/__init__.py
- pip/_vendor/resolvelib/compat/__init__.py
- pip/_vendor/resolvelib/compat/collections_abc.py
pip/_vendor/resolvelib/providers.py
pip/_vendor/resolvelib/reporters.py
- pip/_vendor/resolvelib/resolvers.py
+ pip/_vendor/resolvelib/resolvers/__init__.py
+ pip/_vendor/resolvelib/resolvers/abstract.py
+ pip/_vendor/resolvelib/resolvers/criterion.py
+ pip/_vendor/resolvelib/resolvers/exceptions.py
+ pip/_vendor/resolvelib/resolvers/resolution.py
pip/_vendor/resolvelib/structs.py
pip/_vendor/rich/__init__.py
pip/_vendor/rich/__main__.py
@@ -381,6 +380,8 @@ PY_SRCS(
pip/_vendor/tomli/_parser.py
pip/_vendor/tomli/_re.py
pip/_vendor/tomli/_types.py
+ pip/_vendor/tomli_w/__init__.py
+ pip/_vendor/tomli_w/_writer.py
pip/_vendor/truststore/__init__.py
pip/_vendor/truststore/_api.py
pip/_vendor/truststore/_macos.py
@@ -437,6 +438,7 @@ RESOURCE_FILES(
pip/_vendor/cachecontrol/py.typed
pip/_vendor/certifi/cacert.pem
pip/_vendor/certifi/py.typed
+ pip/_vendor/dependency_groups/py.typed
pip/_vendor/distro/py.typed
pip/_vendor/idna/py.typed
pip/_vendor/packaging/py.typed
@@ -445,6 +447,7 @@ RESOURCE_FILES(
pip/_vendor/resolvelib/py.typed
pip/_vendor/rich/py.typed
pip/_vendor/tomli/py.typed
+ pip/_vendor/tomli_w/py.typed
pip/_vendor/truststore/py.typed
pip/_vendor/vendor.txt
pip/py.typed