Diffstat (limited to 'contrib')
-rw-r--r--  contrib/libs/croaring/.yandex_meta/override.nix  4
-rw-r--r--  contrib/libs/croaring/include/roaring/roaring_version.h  4
-rw-r--r--  contrib/libs/croaring/src/roaring.c  2
-rw-r--r--  contrib/libs/croaring/src/roaring64.c  4
-rw-r--r--  contrib/libs/croaring/ya.make  4
-rw-r--r--  contrib/libs/cxxsupp/libcxx/include/complex  3
-rw-r--r--  contrib/libs/cxxsupp/libcxx/patches/38-complex.patch  14
-rw-r--r--  contrib/libs/expat/.yandex_meta/devtools.copyrights.report  33
-rw-r--r--  contrib/libs/expat/.yandex_meta/licenses.list.txt  6
-rw-r--r--  contrib/libs/expat/.yandex_meta/override.nix  4
-rw-r--r--  contrib/libs/expat/Changes  85
-rw-r--r--  contrib/libs/expat/README.md  17
-rw-r--r--  contrib/libs/expat/expat.h  6
-rw-r--r--  contrib/libs/expat/expat_config.h  6
-rw-r--r--  contrib/libs/expat/lib/xmlparse.c  572
-rw-r--r--  contrib/libs/expat/ya.make  4
-rw-r--r--  contrib/python/multidict/.dist-info/METADATA  9
-rw-r--r--  contrib/python/multidict/multidict/__init__.py  14
-rw-r--r--  contrib/python/multidict/multidict/__init__.pyi  152
-rw-r--r--  contrib/python/multidict/multidict/_abc.py  77
-rw-r--r--  contrib/python/multidict/multidict/_multidict.c  85
-rw-r--r--  contrib/python/multidict/multidict/_multidict_base.py  72
-rw-r--r--  contrib/python/multidict/multidict/_multidict_py.py  487
-rw-r--r--  contrib/python/multidict/multidict/_multilib/defs.h  4
-rw-r--r--  contrib/python/multidict/multidict/_multilib/istr.h  4
-rw-r--r--  contrib/python/multidict/multidict/_multilib/pair_list.h  60
-rw-r--r--  contrib/python/multidict/multidict/_multilib/pythoncapi_compat.h  1142
-rw-r--r--  contrib/python/multidict/tests/conftest.py  68
-rw-r--r--  contrib/python/multidict/tests/gen_pickles.py  8
-rw-r--r--  contrib/python/multidict/tests/test_abc.py  86
-rw-r--r--  contrib/python/multidict/tests/test_copy.py  16
-rw-r--r--  contrib/python/multidict/tests/test_guard.py  10
-rw-r--r--  contrib/python/multidict/tests/test_istr.py  2
-rw-r--r--  contrib/python/multidict/tests/test_multidict.py  252
-rw-r--r--  contrib/python/multidict/tests/test_multidict_benchmarks.py  391
-rw-r--r--  contrib/python/multidict/tests/test_mutable_multidict.py  124
-rw-r--r--  contrib/python/multidict/tests/test_pickle.py  21
-rw-r--r--  contrib/python/multidict/tests/test_types.py  52
-rw-r--r--  contrib/python/multidict/tests/test_update.py  44
-rw-r--r--  contrib/python/multidict/tests/test_version.py  91
-rw-r--r--  contrib/python/multidict/ya.make  3
-rw-r--r--  contrib/python/responses/py3/.dist-info/METADATA  3
-rw-r--r--  contrib/python/responses/py3/ya.make  2
-rw-r--r--  contrib/restricted/fast_float/.yandex_meta/override.nix  4
-rw-r--r--  contrib/restricted/fast_float/README.md  6
-rw-r--r--  contrib/restricted/fast_float/include/fast_float/float_common.h  2
-rw-r--r--  contrib/restricted/fast_float/ya.make  4
-rw-r--r--  contrib/tools/cython/.yandex_meta/__init__.py  1
-rw-r--r--  contrib/tools/cython/a.yaml  26
49 files changed, 2952 insertions, 1138 deletions
diff --git a/contrib/libs/croaring/.yandex_meta/override.nix b/contrib/libs/croaring/.yandex_meta/override.nix
index 616330eb5a..2c7a84ab8f 100644
--- a/contrib/libs/croaring/.yandex_meta/override.nix
+++ b/contrib/libs/croaring/.yandex_meta/override.nix
@@ -1,12 +1,12 @@
pkgs: attrs: with pkgs; with attrs; rec {
pname = "croaring";
- version = "4.3.0";
+ version = "4.3.1";
src = fetchFromGitHub {
owner = "RoaringBitmap";
repo = "CRoaring";
rev = "v${version}";
- hash = "sha256-Se/m+qcYwZu1Bp5F2dcWacHYe4awX7EclB1iChTBkYE=";
+ hash = "sha256-c4o8AMCtDGLChXxJKJyxkWhuYu7axqLb2ce8IOGk920=";
};
patches = [];
diff --git a/contrib/libs/croaring/include/roaring/roaring_version.h b/contrib/libs/croaring/include/roaring/roaring_version.h
index a6c5b01b41..5d5014b42e 100644
--- a/contrib/libs/croaring/include/roaring/roaring_version.h
+++ b/contrib/libs/croaring/include/roaring/roaring_version.h
@@ -2,11 +2,11 @@
// /include/roaring/roaring_version.h automatically generated by release.py, do not change by hand
#ifndef ROARING_INCLUDE_ROARING_VERSION
#define ROARING_INCLUDE_ROARING_VERSION
-#define ROARING_VERSION "4.3.0"
+#define ROARING_VERSION "4.3.1"
enum {
ROARING_VERSION_MAJOR = 4,
ROARING_VERSION_MINOR = 3,
- ROARING_VERSION_REVISION = 0
+ ROARING_VERSION_REVISION = 1
};
#endif // ROARING_INCLUDE_ROARING_VERSION
// clang-format on
\ No newline at end of file
diff --git a/contrib/libs/croaring/src/roaring.c b/contrib/libs/croaring/src/roaring.c
index 8f6b5a4f37..2363afa48f 100644
--- a/contrib/libs/croaring/src/roaring.c
+++ b/contrib/libs/croaring/src/roaring.c
@@ -614,7 +614,7 @@ void roaring_bitmap_remove(roaring_bitmap_t *r, uint32_t val) {
ra_set_container_at_index(&r->high_low_container, i, container2,
newtypecode);
}
- if (container_get_cardinality(container2, newtypecode) != 0) {
+ if (container_nonzero_cardinality(container2, newtypecode)) {
ra_set_container_at_index(&r->high_low_container, i, container2,
newtypecode);
} else {
diff --git a/contrib/libs/croaring/src/roaring64.c b/contrib/libs/croaring/src/roaring64.c
index bc65e8b0e5..de1009ea2c 100644
--- a/contrib/libs/croaring/src/roaring64.c
+++ b/contrib/libs/croaring/src/roaring64.c
@@ -697,6 +697,7 @@ static inline bool containerptr_roaring64_bitmap_remove(roaring64_bitmap_t *r,
container_free(container2, typecode2);
bool erased = art_erase(&r->art, high48, (art_val_t *)leaf);
assert(erased);
+ remove_container(r, *leaf);
return true;
}
return false;
@@ -2419,6 +2420,9 @@ roaring64_bitmap_t *roaring64_bitmap_frozen_view(const char *buf,
if (buf == NULL) {
return NULL;
}
+ if ((uintptr_t)buf % CROARING_BITSET_ALIGNMENT != 0) {
+ return NULL;
+ }
roaring64_bitmap_t *r = roaring64_bitmap_create();
diff --git a/contrib/libs/croaring/ya.make b/contrib/libs/croaring/ya.make
index 90f16b7b3d..557df6b83b 100644
--- a/contrib/libs/croaring/ya.make
+++ b/contrib/libs/croaring/ya.make
@@ -10,9 +10,9 @@ LICENSE(
LICENSE_TEXTS(.yandex_meta/licenses.list.txt)
-VERSION(4.3.0)
+VERSION(4.3.1)
-ORIGINAL_SOURCE(https://github.com/RoaringBitmap/CRoaring/archive/v4.3.0.tar.gz)
+ORIGINAL_SOURCE(https://github.com/RoaringBitmap/CRoaring/archive/v4.3.1.tar.gz)
ADDINCL(
GLOBAL contrib/libs/croaring/include
diff --git a/contrib/libs/cxxsupp/libcxx/include/complex b/contrib/libs/cxxsupp/libcxx/include/complex
index 91cf3f4d50..bfe61c506e 100644
--- a/contrib/libs/cxxsupp/libcxx/include/complex
+++ b/contrib/libs/cxxsupp/libcxx/include/complex
@@ -1282,8 +1282,7 @@ _LIBCPP_HIDE_FROM_ABI complex<_Tp> acos(const complex<_Tp>& __x) {
}
if (std::__constexpr_isinf(__x.imag()))
return complex<_Tp>(__pi / _Tp(2), -__x.imag());
- // Somehow isnan can be a macro, so we use __constexpr_isnan
- if (__x.real() == 0 && (__x.imag() == 0 || std::__constexpr_isnan(__x.imag())))
+ if (__x.real() == 0 && (__x.imag() == 0 || std::isnan(__x.imag())))
return complex<_Tp>(__pi / _Tp(2), -__x.imag());
complex<_Tp> __z = std::log(__x + std::sqrt(std::__sqr(__x) - _Tp(1)));
if (std::signbit(__x.imag()))
diff --git a/contrib/libs/cxxsupp/libcxx/patches/38-complex.patch b/contrib/libs/cxxsupp/libcxx/patches/38-complex.patch
deleted file mode 100644
index 6856a5f4b2..0000000000
--- a/contrib/libs/cxxsupp/libcxx/patches/38-complex.patch
+++ /dev/null
@@ -1,14 +0,0 @@
-diff --git a/include/complex b/include/complex
-index bfe61c5..91cf3f4 100644
---- a/include/complex
-+++ b/include/complex
-@@ -1282,7 +1282,8 @@ _LIBCPP_HIDE_FROM_ABI complex<_Tp> acos(const complex<_Tp>& __x) {
- }
- if (std::__constexpr_isinf(__x.imag()))
- return complex<_Tp>(__pi / _Tp(2), -__x.imag());
-- if (__x.real() == 0 && (__x.imag() == 0 || std::isnan(__x.imag())))
-+ // Somehow isnan can be a macro, so we use __constexpr_isnan
-+ if (__x.real() == 0 && (__x.imag() == 0 || std::__constexpr_isnan(__x.imag())))
- return complex<_Tp>(__pi / _Tp(2), -__x.imag());
- complex<_Tp> __z = std::log(__x + std::sqrt(std::__sqr(__x) - _Tp(1)));
- if (std::signbit(__x.imag()))
diff --git a/contrib/libs/expat/.yandex_meta/devtools.copyrights.report b/contrib/libs/expat/.yandex_meta/devtools.copyrights.report
index b67c3a92e5..57cb69b65e 100644
--- a/contrib/libs/expat/.yandex_meta/devtools.copyrights.report
+++ b/contrib/libs/expat/.yandex_meta/devtools.copyrights.report
@@ -174,16 +174,6 @@ BELONGS ya.make
expat.h [9:22]
lib/internal.h [28:36]
-KEEP COPYRIGHT_SERVICE_LABEL 3781ecbe791ef15dc4cdefd436071b60
-BELONGS ya.make
- Note: matched license text is too long. Read it in the source files.
- Scancode info:
- Original SPDX id: COPYRIGHT_SERVICE_LABEL
- Score : 100.00
- Match type : COPYRIGHT
- Files with this license:
- lib/xmlparse.c [9:44]
-
KEEP COPYRIGHT_SERVICE_LABEL 387a03e23bfe968e0bc1919b0ef65164
BELONGS ya.make
Note: matched license text is too long. Read it in the source files.
@@ -342,6 +332,27 @@ BELONGS ya.make
Files with this license:
lib/xmlparse.c [9:44]
+KEEP COPYRIGHT_SERVICE_LABEL 610290a5946ae0c99de2de99646ba086
+BELONGS ya.make
+ Note: matched license text is too long. Read it in the source files.
+ Scancode info:
+ Original SPDX id: COPYRIGHT_SERVICE_LABEL
+ Score : 100.00
+ Match type : COPYRIGHT
+ Files with this license:
+ expat.h [9:22]
+ lib/xmlparse.c [9:44]
+
+KEEP COPYRIGHT_SERVICE_LABEL 61052a80fd00eeac5fb41a5ab5fdeff7
+BELONGS ya.make
+ Note: matched license text is too long. Read it in the source files.
+ Scancode info:
+ Original SPDX id: COPYRIGHT_SERVICE_LABEL
+ Score : 100.00
+ Match type : COPYRIGHT
+ Files with this license:
+ lib/xmlparse.c [9:44]
+
KEEP COPYRIGHT_SERVICE_LABEL 6451d5e490271b354ad3b567c7a03423
BELONGS ya.make
Note: matched license text is too long. Read it in the source files.
@@ -791,9 +802,7 @@ BELONGS ya.make
Score : 100.00
Match type : COPYRIGHT
Files with this license:
- expat.h [9:22]
lib/internal.h [28:36]
- lib/xmlparse.c [9:44]
lib/xmltok.c [9:27]
lib/xmltok.h [9:15]
diff --git a/contrib/libs/expat/.yandex_meta/licenses.list.txt b/contrib/libs/expat/.yandex_meta/licenses.list.txt
index e23955221e..34ffa1ea4e 100644
--- a/contrib/libs/expat/.yandex_meta/licenses.list.txt
+++ b/contrib/libs/expat/.yandex_meta/licenses.list.txt
@@ -21,7 +21,7 @@
Copyright (c) 2000-2005 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
Copyright (c) 2001-2002 Greg Stein <gstein@users.sourceforge.net>
Copyright (c) 2002-2016 Karl Waclawek <karl@waclawek.net>
- Copyright (c) 2016-2024 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2016-2025 Sebastian Pipping <sebastian@pipping.org>
Copyright (c) 2016 Cristian Rodríguez <crrodriguez@opensuse.org>
Copyright (c) 2016 Thomas Beutlich <tc@tbeu.de>
Copyright (c) 2017 Rhodri James <rhodri@wildebeest.org.uk>
@@ -40,7 +40,7 @@
Copyright (c) 2002-2016 Karl Waclawek <karl@waclawek.net>
Copyright (c) 2005-2009 Steven Solie <steven@solie.ca>
Copyright (c) 2016 Eric Rahm <erahm@mozilla.com>
- Copyright (c) 2016-2024 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2016-2025 Sebastian Pipping <sebastian@pipping.org>
Copyright (c) 2016 Gaurav <g.gupta@samsung.com>
Copyright (c) 2016 Thomas Beutlich <tc@tbeu.de>
Copyright (c) 2016 Gustavo Grieco <gustavo.grieco@imag.fr>
@@ -66,7 +66,7 @@
Copyright (c) 2022 Sean McBride <sean@rogue-research.com>
Copyright (c) 2023 Owain Davies <owaind@bath.edu>
Copyright (c) 2023-2024 Sony Corporation / Snild Dolkow <snild@sony.com>
- Copyright (c) 2024 Berkay Eren Ürün <berkay.ueruen@siemens.com>
+ Copyright (c) 2024-2025 Berkay Eren Ürün <berkay.ueruen@siemens.com>
Copyright (c) 2024 Hanno Böck <hanno@gentoo.org>
Licensed under the MIT license:
diff --git a/contrib/libs/expat/.yandex_meta/override.nix b/contrib/libs/expat/.yandex_meta/override.nix
index a7493ed82e..01e7ca5b3c 100644
--- a/contrib/libs/expat/.yandex_meta/override.nix
+++ b/contrib/libs/expat/.yandex_meta/override.nix
@@ -1,12 +1,12 @@
pkgs: attrs: with pkgs; with attrs; rec {
- version = "2.6.4";
+ version = "2.7.0";
versionTag = "R_${lib.replaceStrings ["."] ["_"] version}";
src = fetchFromGitHub {
owner = "libexpat";
repo = "libexpat";
rev = "${versionTag}";
- hash = "sha256-ek8/3c8bKG+z7fIM+QCNsH7eoVGAt7z3bXBHZ3QjlS8=";
+ hash = "sha256-5is+ZwHM+tmKaVzDgO20wCJKJafnwxxRjNMDsv2qnYY=";
};
nativeBuildInputs = [ autoreconfHook ];
diff --git a/contrib/libs/expat/Changes b/contrib/libs/expat/Changes
index aa19f70ae2..1f5ba0a028 100644
--- a/contrib/libs/expat/Changes
+++ b/contrib/libs/expat/Changes
@@ -11,16 +11,23 @@
!! The following topics need *additional skilled C developers* to progress !!
!! in a timely manner or at all (loosely ordered by descending priority): !!
!! !!
-!! - <blink>fixing a complex non-public security issue</blink>, !!
!! - teaming up on researching and fixing future security reports and !!
!! ClusterFuzz findings with few-days-max response times in communication !!
!! in order to (1) have a sound fix ready before the end of a 90 days !!
!! grace period and (2) in a sustainable manner, !!
+!! - helping CPython Expat bindings with supporting Expat's billion laughs !!
+!! attack protection API (https://github.com/python/cpython/issues/90949): !!
+!! - XML_SetBillionLaughsAttackProtectionActivationThreshold !!
+!! - XML_SetBillionLaughsAttackProtectionMaximumAmplification !!
+!! - helping Perl's XML::Parser Expat bindings with supporting Expat's !!
+!! security API (https://github.com/cpan-authors/XML-Parser/issues/102): !!
+!! - XML_SetBillionLaughsAttackProtectionActivationThreshold !!
+!! - XML_SetBillionLaughsAttackProtectionMaximumAmplification !!
+!! - XML_SetReparseDeferralEnabled !!
!! - implementing and auto-testing XML 1.0r5 support !!
!! (needs discussion before pull requests), !!
!! - smart ideas on fixing the Autotools CMake files generation issue !!
!! without breaking CI (needs discussion before pull requests), !!
-!! - the Windows binaries topic (needs requirements engineering first), !!
!! - pushing migration from `int` to `size_t` further !!
!! including edge-cases test coverage (needs discussion before anything). !!
!! !!
@@ -30,6 +37,78 @@
!! THANK YOU! Sebastian Pipping -- Berlin, 2024-03-09 !!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+Release 2.7.0 Thu March 13 2025
+ Security fixes:
+ #893 #973 CVE-2024-8176 -- Fix crash from chaining a large number
+ of entities caused by stack overflow by resolving use of
+ recursion, for all three uses of entities:
+ - general entities in character data ("<e>&g1;</e>")
+ - general entities in attribute values ("<e k1='&g1;'/>")
+ - parameter entities ("%p1;")
+ Known impact is (reliable and easy) denial of service:
+ CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H/E:H/RL:O/RC:C
+ (Base Score: 7.5, Temporal Score: 7.2)
+ Please note that a layer of compression around XML can
+ significantly reduce the minimum attack payload size.
+
+ Other changes:
+ #935 #937 Autotools: Make generated CMake files look for
+ libexpat.@SO_MAJOR@.dylib on macOS
+ #925 Autotools: Sync CMake templates with CMake 3.29
+ #945 #962 #966 CMake: Drop support for CMake <3.13
+ #942 CMake: Small fuzzing related improvements
+ #921 docs: Add missing documentation of error code
+ XML_ERROR_NOT_STARTED that was introduced with 2.6.4
+ #941 docs: Document need for C++11 compiler for use from C++
+ #959 tests/benchmark: Fix a (harmless) TOCTTOU
+ #944 Windows: Fix installer target location of file xmlwf.xml
+ for CMake
+ #953 Windows: Address warning -Wunknown-warning-option
+ about -Wno-pedantic-ms-format from LLVM MinGW
+ #971 Address Cppcheck warnings
+ #969 #970 Mass-migrate links from http:// to https://
+ #947 #958 ..
+ #974 #975 Document changes since the previous release
+ #974 #975 Version info bumped from 11:0:10 (libexpat*.so.1.10.0)
+ to 11:1:10 (libexpat*.so.1.10.1); see https://verbump.de/
+ for what these numbers do
+
+ Infrastructure:
+ #926 tests: Increase robustness
+ #927 #932 ..
+ #930 #933 tests: Increase test coverage
+ #617 #950 ..
+ #951 #952 ..
+ #954 #955 .. Fuzzing: Add new fuzzer "xml_lpm_fuzzer" based on
+ #961 Google's libprotobuf-mutator ("LPM")
+ #957 Fuzzing|CI: Start producing fuzzing code coverage reports
+ #936 CI: Pass -q -q for LCOV >=2.1 in coverage.sh
+ #942 CI: Small fuzzing related improvements
+ #139 #203 ..
+ #791 #946 CI: Make GitHub Actions build using MSVC on Windows and
+ produce 32bit and 64bit Windows binaries
+ #956 CI: Get off of about-to-be-removed Ubuntu 20.04
+ #960 #964 CI: Start uploading to Coverity Scan for static analysis
+ #972 CI: Stop loading DTD from the internet to address flaky CI
+ #971 CI: Adapt to breaking changes in Cppcheck
+
+ Special thanks to:
+ Alexander Gieringer
+ Berkay Eren Ürün
+ Hanno Böck
+ Jann Horn
+ Mark Brand
+ Sebastian Andrzej Siewior
+ Snild Dolkow
+ Thomas Pröll
+ Tomas Korbar
+ valord577
+ and
+ Google Project Zero
+ Linutronix
+ Red Hat
+ Siemens
+
Release 2.6.4 Wed November 6 2024
Security fixes:
#915 CVE-2024-50602 -- Fix crash within function XML_ResumeParser
@@ -46,6 +125,8 @@ Release 2.6.4 Wed November 6 2024
#904 tests: Resolve duplicate handler
#317 #918 tests: Improve tests on doctype closing (ex CVE-2019-15903)
#914 Fix signedness of format strings
+ #915 For use from C++, expat.h started requiring C++11 due to
+ use of C99 features
#919 #920 Version info bumped from 10:3:9 (libexpat*.so.1.9.3)
to 11:0:10 (libexpat*.so.1.10.0); see https://verbump.de/
for what these numbers do
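The changelog above asks the CPython and Perl XML::Parser bindings to expose Expat's billion-laughs protection API. The following is a minimal usage sketch in C, not part of this diff; it assumes an Expat build with DTD/general-entity support, and the threshold values and sample document are arbitrary:

```c
/* Hedged sketch only: enabling Expat's billion-laughs protection
 * (the API named in the changelog above). Thresholds are arbitrary. */
#include <stdio.h>
#include <string.h>
#include <expat.h>

int main(void) {
  XML_Parser parser = XML_ParserCreate(NULL);
  if (parser == NULL)
    return 1;

  /* Cap entity expansion at 50x the direct input size ... */
  XML_SetBillionLaughsAttackProtectionMaximumAmplification(parser, 50.0f);
  /* ... but only start enforcing once 1 MiB of output has been produced. */
  XML_SetBillionLaughsAttackProtectionActivationThreshold(parser, 1024 * 1024);

  const char doc[] = "<?xml version='1.0'?><root/>";
  enum XML_Status status = XML_Parse(parser, doc, (int)strlen(doc), /*isFinal=*/1);
  if (status == XML_STATUS_ERROR)
    fprintf(stderr, "parse error: %s\n",
            XML_ErrorString(XML_GetErrorCode(parser)));

  XML_ParserFree(parser);
  return status == XML_STATUS_OK ? 0 : 1;
}
```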
diff --git a/contrib/libs/expat/README.md b/contrib/libs/expat/README.md
index 23d26dad2b..04db829909 100644
--- a/contrib/libs/expat/README.md
+++ b/contrib/libs/expat/README.md
@@ -11,7 +11,7 @@
> at the top of the `Changes` file.
-# Expat, Release 2.6.4
+# Expat, Release 2.7.0
This is Expat, a C99 library for parsing
[XML 1.0 Fourth Edition](https://www.w3.org/TR/2006/REC-xml-20060816/), started by
@@ -22,9 +22,9 @@ are called when the parser discovers the associated structures in the
document being parsed. A start tag is an example of the kind of
structures for which you may register handlers.
-Expat supports the following compilers:
+Expat supports the following C99 compilers:
-- GNU GCC >=4.5
+- GNU GCC >=4.5 (for use from C) or GNU GCC >=4.8.1 (for use from C++)
- LLVM Clang >=3.5
- Microsoft Visual Studio >=16.0/2019 (rolling `${today} minus 5 years`)
@@ -52,7 +52,7 @@ This approach leverages CMake's own [module `FindEXPAT`](https://cmake.org/cmake
Notice the *uppercase* `EXPAT` in the following example:
```cmake
-cmake_minimum_required(VERSION 3.0) # or 3.10, see below
+cmake_minimum_required(VERSION 3.10)
project(hello VERSION 1.0.0)
@@ -62,12 +62,7 @@ add_executable(hello
hello.c
)
-# a) for CMake >=3.10 (see CMake's FindEXPAT docs)
target_link_libraries(hello PUBLIC EXPAT::EXPAT)
-
-# b) for CMake >=3.0
-target_include_directories(hello PRIVATE ${EXPAT_INCLUDE_DIRS})
-target_link_libraries(hello PUBLIC ${EXPAT_LIBRARIES})
```
### b) `find_package` with Config Mode
@@ -85,7 +80,7 @@ or
Notice the *lowercase* `expat` in the following example:
```cmake
-cmake_minimum_required(VERSION 3.0)
+cmake_minimum_required(VERSION 3.10)
project(hello VERSION 1.0.0)
@@ -295,7 +290,7 @@ EXPAT_ENABLE_INSTALL:BOOL=ON
// Use /MT flag (static CRT) when compiling in MSVC
EXPAT_MSVC_STATIC_CRT:BOOL=OFF
-// Build fuzzers via ossfuzz for the expat library
+// Build fuzzers via OSS-Fuzz for the expat library
EXPAT_OSSFUZZ_BUILD:BOOL=OFF
// Build a shared expat library
diff --git a/contrib/libs/expat/expat.h b/contrib/libs/expat/expat.h
index 523b37d8d5..192cfd3f07 100644
--- a/contrib/libs/expat/expat.h
+++ b/contrib/libs/expat/expat.h
@@ -11,7 +11,7 @@
Copyright (c) 2000-2005 Fred L. Drake, Jr. <fdrake@users.sourceforge.net>
Copyright (c) 2001-2002 Greg Stein <gstein@users.sourceforge.net>
Copyright (c) 2002-2016 Karl Waclawek <karl@waclawek.net>
- Copyright (c) 2016-2024 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2016-2025 Sebastian Pipping <sebastian@pipping.org>
Copyright (c) 2016 Cristian Rodríguez <crrodriguez@opensuse.org>
Copyright (c) 2016 Thomas Beutlich <tc@tbeu.de>
Copyright (c) 2017 Rhodri James <rhodri@wildebeest.org.uk>
@@ -1067,8 +1067,8 @@ XML_SetReparseDeferralEnabled(XML_Parser parser, XML_Bool enabled);
See https://semver.org
*/
#define XML_MAJOR_VERSION 2
-#define XML_MINOR_VERSION 6
-#define XML_MICRO_VERSION 4
+#define XML_MINOR_VERSION 7
+#define XML_MICRO_VERSION 0
#ifdef __cplusplus
}
diff --git a/contrib/libs/expat/expat_config.h b/contrib/libs/expat/expat_config.h
index 48d7b064fa..4bb2d079bd 100644
--- a/contrib/libs/expat/expat_config.h
+++ b/contrib/libs/expat/expat_config.h
@@ -83,7 +83,7 @@
#define PACKAGE_NAME "expat"
/* Define to the full name and version of this package. */
-#define PACKAGE_STRING "expat 2.6.4"
+#define PACKAGE_STRING "expat 2.7.0"
/* Define to the one symbol short name of this package. */
#define PACKAGE_TARNAME "expat"
@@ -92,7 +92,7 @@
#define PACKAGE_URL ""
/* Define to the version of this package. */
-#define PACKAGE_VERSION "2.6.4"
+#define PACKAGE_VERSION "2.7.0"
/* Define to 1 if all of the C90 standard headers exist (not just the ones
required in a freestanding environment). This macro is provided for
@@ -100,7 +100,7 @@
#define STDC_HEADERS 1
/* Version number of package */
-#define VERSION "2.6.4"
+#define VERSION "2.7.0"
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
diff --git a/contrib/libs/expat/lib/xmlparse.c b/contrib/libs/expat/lib/xmlparse.c
index e3e46da03b..1dca1c0397 100644
--- a/contrib/libs/expat/lib/xmlparse.c
+++ b/contrib/libs/expat/lib/xmlparse.c
@@ -1,4 +1,4 @@
-/* c5625880f4bf417c1463deee4eb92d86ff413f802048621c57e25fe483eb59e4 (2.6.4+)
+/* 7d6840a33c250b74adb0ba295d6ec818dccebebaffc8c3ed27d0b29c28adbeb3 (2.7.0+)
__ __ _
___\ \/ /_ __ __ _| |_
/ _ \\ /| '_ \ / _` | __|
@@ -13,7 +13,7 @@
Copyright (c) 2002-2016 Karl Waclawek <karl@waclawek.net>
Copyright (c) 2005-2009 Steven Solie <steven@solie.ca>
Copyright (c) 2016 Eric Rahm <erahm@mozilla.com>
- Copyright (c) 2016-2024 Sebastian Pipping <sebastian@pipping.org>
+ Copyright (c) 2016-2025 Sebastian Pipping <sebastian@pipping.org>
Copyright (c) 2016 Gaurav <g.gupta@samsung.com>
Copyright (c) 2016 Thomas Beutlich <tc@tbeu.de>
Copyright (c) 2016 Gustavo Grieco <gustavo.grieco@imag.fr>
@@ -39,7 +39,7 @@
Copyright (c) 2022 Sean McBride <sean@rogue-research.com>
Copyright (c) 2023 Owain Davies <owaind@bath.edu>
Copyright (c) 2023-2024 Sony Corporation / Snild Dolkow <snild@sony.com>
- Copyright (c) 2024 Berkay Eren Ürün <berkay.ueruen@siemens.com>
+ Copyright (c) 2024-2025 Berkay Eren Ürün <berkay.ueruen@siemens.com>
Copyright (c) 2024 Hanno Böck <hanno@gentoo.org>
Licensed under the MIT license:
@@ -325,6 +325,10 @@ typedef struct {
const XML_Char *publicId;
const XML_Char *notation;
XML_Bool open;
+ XML_Bool hasMore; /* true if entity has not been completely processed */
+ /* An entity can be open while being already completely processed (hasMore ==
+ XML_FALSE). The reason is the delayed closing of entities until their inner
+ entities are processed and closed */
XML_Bool is_param;
XML_Bool is_internal; /* true if declared in internal subset outside PE */
} ENTITY;
@@ -415,6 +419,12 @@ typedef struct {
int *scaffIndex;
} DTD;
+enum EntityType {
+ ENTITY_INTERNAL,
+ ENTITY_ATTRIBUTE,
+ ENTITY_VALUE,
+};
+
typedef struct open_internal_entity {
const char *internalEventPtr;
const char *internalEventEndPtr;
@@ -422,6 +432,7 @@ typedef struct open_internal_entity {
ENTITY *entity;
int startTagLevel;
XML_Bool betweenDecl; /* WFC: PE Between Declarations */
+ enum EntityType type;
} OPEN_INTERNAL_ENTITY;
enum XML_Account {
@@ -481,8 +492,8 @@ static enum XML_Error doProlog(XML_Parser parser, const ENCODING *enc,
const char *next, const char **nextPtr,
XML_Bool haveMore, XML_Bool allowClosingDoctype,
enum XML_Account account);
-static enum XML_Error processInternalEntity(XML_Parser parser, ENTITY *entity,
- XML_Bool betweenDecl);
+static enum XML_Error processEntity(XML_Parser parser, ENTITY *entity,
+ XML_Bool betweenDecl, enum EntityType type);
static enum XML_Error doContent(XML_Parser parser, int startTagLevel,
const ENCODING *enc, const char *start,
const char *end, const char **endPtr,
@@ -513,18 +524,22 @@ static enum XML_Error storeAttributeValue(XML_Parser parser,
const char *ptr, const char *end,
STRING_POOL *pool,
enum XML_Account account);
-static enum XML_Error appendAttributeValue(XML_Parser parser,
- const ENCODING *enc,
- XML_Bool isCdata, const char *ptr,
- const char *end, STRING_POOL *pool,
- enum XML_Account account);
+static enum XML_Error
+appendAttributeValue(XML_Parser parser, const ENCODING *enc, XML_Bool isCdata,
+ const char *ptr, const char *end, STRING_POOL *pool,
+ enum XML_Account account, const char **nextPtr);
static ATTRIBUTE_ID *getAttributeId(XML_Parser parser, const ENCODING *enc,
const char *start, const char *end);
static int setElementTypePrefix(XML_Parser parser, ELEMENT_TYPE *elementType);
#if XML_GE == 1
static enum XML_Error storeEntityValue(XML_Parser parser, const ENCODING *enc,
const char *start, const char *end,
- enum XML_Account account);
+ enum XML_Account account,
+ const char **nextPtr);
+static enum XML_Error callStoreEntityValue(XML_Parser parser,
+ const ENCODING *enc,
+ const char *start, const char *end,
+ enum XML_Account account);
#else
static enum XML_Error storeSelfEntityValue(XML_Parser parser, ENTITY *entity);
#endif
@@ -709,6 +724,10 @@ struct XML_ParserStruct {
const char *m_positionPtr;
OPEN_INTERNAL_ENTITY *m_openInternalEntities;
OPEN_INTERNAL_ENTITY *m_freeInternalEntities;
+ OPEN_INTERNAL_ENTITY *m_openAttributeEntities;
+ OPEN_INTERNAL_ENTITY *m_freeAttributeEntities;
+ OPEN_INTERNAL_ENTITY *m_openValueEntities;
+ OPEN_INTERNAL_ENTITY *m_freeValueEntities;
XML_Bool m_defaultExpandInternalEntities;
int m_tagLevel;
ENTITY *m_declEntity;
@@ -756,6 +775,7 @@ struct XML_ParserStruct {
ACCOUNTING m_accounting;
ENTITY_STATS m_entity_stats;
#endif
+ XML_Bool m_reenter;
};
#define MALLOC(parser, s) (parser->m_mem.malloc_fcn((s)))
@@ -1028,7 +1048,29 @@ callProcessor(XML_Parser parser, const char *start, const char *end,
#if defined(XML_TESTING)
g_bytesScanned += (unsigned)have_now;
#endif
- const enum XML_Error ret = parser->m_processor(parser, start, end, endPtr);
+ // Run in a loop to eliminate dangerous recursion depths
+ enum XML_Error ret;
+ *endPtr = start;
+ while (1) {
+ // Use endPtr as the new start in each iteration, since it will
+ // be set to the next start point by m_processor.
+ ret = parser->m_processor(parser, *endPtr, end, endPtr);
+
+ // Make parsing status (and in particular XML_SUSPENDED) take
+ // precedence over re-enter flag when they disagree
+ if (parser->m_parsingStatus.parsing != XML_PARSING) {
+ parser->m_reenter = XML_FALSE;
+ }
+
+ if (! parser->m_reenter) {
+ break;
+ }
+
+ parser->m_reenter = XML_FALSE;
+ if (ret != XML_ERROR_NONE)
+ return ret;
+ }
+
if (ret == XML_ERROR_NONE) {
// if we consumed nothing, remember what we had on this parse attempt.
if (*endPtr == start) {
@@ -1139,6 +1181,8 @@ parserCreate(const XML_Char *encodingName,
parser->m_freeBindingList = NULL;
parser->m_freeTagList = NULL;
parser->m_freeInternalEntities = NULL;
+ parser->m_freeAttributeEntities = NULL;
+ parser->m_freeValueEntities = NULL;
parser->m_groupSize = 0;
parser->m_groupConnector = NULL;
@@ -1241,6 +1285,8 @@ parserInit(XML_Parser parser, const XML_Char *encodingName) {
parser->m_eventEndPtr = NULL;
parser->m_positionPtr = NULL;
parser->m_openInternalEntities = NULL;
+ parser->m_openAttributeEntities = NULL;
+ parser->m_openValueEntities = NULL;
parser->m_defaultExpandInternalEntities = XML_TRUE;
parser->m_tagLevel = 0;
parser->m_tagStack = NULL;
@@ -1251,6 +1297,8 @@ parserInit(XML_Parser parser, const XML_Char *encodingName) {
parser->m_unknownEncodingData = NULL;
parser->m_parentParser = NULL;
parser->m_parsingStatus.parsing = XML_INITIALIZED;
+ // Reentry can only be triggered inside m_processor calls
+ parser->m_reenter = XML_FALSE;
#ifdef XML_DTD
parser->m_isParamEntity = XML_FALSE;
parser->m_useForeignDTD = XML_FALSE;
@@ -1310,6 +1358,24 @@ XML_ParserReset(XML_Parser parser, const XML_Char *encodingName) {
openEntity->next = parser->m_freeInternalEntities;
parser->m_freeInternalEntities = openEntity;
}
+ /* move m_openAttributeEntities to m_freeAttributeEntities (i.e. same task but
+ * for attributes) */
+ openEntityList = parser->m_openAttributeEntities;
+ while (openEntityList) {
+ OPEN_INTERNAL_ENTITY *openEntity = openEntityList;
+ openEntityList = openEntity->next;
+ openEntity->next = parser->m_freeAttributeEntities;
+ parser->m_freeAttributeEntities = openEntity;
+ }
+ /* move m_openValueEntities to m_freeValueEntities (i.e. same task but
+ * for value entities) */
+ openEntityList = parser->m_openValueEntities;
+ while (openEntityList) {
+ OPEN_INTERNAL_ENTITY *openEntity = openEntityList;
+ openEntityList = openEntity->next;
+ openEntity->next = parser->m_freeValueEntities;
+ parser->m_freeValueEntities = openEntity;
+ }
moveToFreeBindingList(parser, parser->m_inheritedBindings);
FREE(parser, parser->m_unknownEncodingMem);
if (parser->m_unknownEncodingRelease)
@@ -1323,6 +1389,19 @@ XML_ParserReset(XML_Parser parser, const XML_Char *encodingName) {
return XML_TRUE;
}
+static XML_Bool
+parserBusy(XML_Parser parser) {
+ switch (parser->m_parsingStatus.parsing) {
+ case XML_PARSING:
+ case XML_SUSPENDED:
+ return XML_TRUE;
+ case XML_INITIALIZED:
+ case XML_FINISHED:
+ default:
+ return XML_FALSE;
+ }
+}
+
enum XML_Status XMLCALL
XML_SetEncoding(XML_Parser parser, const XML_Char *encodingName) {
if (parser == NULL)
@@ -1331,8 +1410,7 @@ XML_SetEncoding(XML_Parser parser, const XML_Char *encodingName) {
XXX There's no way for the caller to determine which of the
XXX possible error cases caused the XML_STATUS_ERROR return.
*/
- if (parser->m_parsingStatus.parsing == XML_PARSING
- || parser->m_parsingStatus.parsing == XML_SUSPENDED)
+ if (parserBusy(parser))
return XML_STATUS_ERROR;
/* Get rid of any previous encoding name */
@@ -1569,7 +1647,34 @@ XML_ParserFree(XML_Parser parser) {
entityList = entityList->next;
FREE(parser, openEntity);
}
-
+ /* free m_openAttributeEntities and m_freeAttributeEntities */
+ entityList = parser->m_openAttributeEntities;
+ for (;;) {
+ OPEN_INTERNAL_ENTITY *openEntity;
+ if (entityList == NULL) {
+ if (parser->m_freeAttributeEntities == NULL)
+ break;
+ entityList = parser->m_freeAttributeEntities;
+ parser->m_freeAttributeEntities = NULL;
+ }
+ openEntity = entityList;
+ entityList = entityList->next;
+ FREE(parser, openEntity);
+ }
+ /* free m_openValueEntities and m_freeValueEntities */
+ entityList = parser->m_openValueEntities;
+ for (;;) {
+ OPEN_INTERNAL_ENTITY *openEntity;
+ if (entityList == NULL) {
+ if (parser->m_freeValueEntities == NULL)
+ break;
+ entityList = parser->m_freeValueEntities;
+ parser->m_freeValueEntities = NULL;
+ }
+ openEntity = entityList;
+ entityList = entityList->next;
+ FREE(parser, openEntity);
+ }
destroyBindings(parser->m_freeBindingList, parser);
destroyBindings(parser->m_inheritedBindings, parser);
poolDestroy(&parser->m_tempPool);
@@ -1611,8 +1716,7 @@ XML_UseForeignDTD(XML_Parser parser, XML_Bool useDTD) {
return XML_ERROR_INVALID_ARGUMENT;
#ifdef XML_DTD
/* block after XML_Parse()/XML_ParseBuffer() has been called */
- if (parser->m_parsingStatus.parsing == XML_PARSING
- || parser->m_parsingStatus.parsing == XML_SUSPENDED)
+ if (parserBusy(parser))
return XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING;
parser->m_useForeignDTD = useDTD;
return XML_ERROR_NONE;
@@ -1627,8 +1731,7 @@ XML_SetReturnNSTriplet(XML_Parser parser, int do_nst) {
if (parser == NULL)
return;
/* block after XML_Parse()/XML_ParseBuffer() has been called */
- if (parser->m_parsingStatus.parsing == XML_PARSING
- || parser->m_parsingStatus.parsing == XML_SUSPENDED)
+ if (parserBusy(parser))
return;
parser->m_ns_triplets = do_nst ? XML_TRUE : XML_FALSE;
}
@@ -1897,8 +2000,7 @@ XML_SetParamEntityParsing(XML_Parser parser,
if (parser == NULL)
return 0;
/* block after XML_Parse()/XML_ParseBuffer() has been called */
- if (parser->m_parsingStatus.parsing == XML_PARSING
- || parser->m_parsingStatus.parsing == XML_SUSPENDED)
+ if (parserBusy(parser))
return 0;
#ifdef XML_DTD
parser->m_paramEntityParsing = peParsing;
@@ -1915,8 +2017,7 @@ XML_SetHashSalt(XML_Parser parser, unsigned long hash_salt) {
if (parser->m_parentParser)
return XML_SetHashSalt(parser->m_parentParser, hash_salt);
/* block after XML_Parse()/XML_ParseBuffer() has been called */
- if (parser->m_parsingStatus.parsing == XML_PARSING
- || parser->m_parsingStatus.parsing == XML_SUSPENDED)
+ if (parserBusy(parser))
return 0;
parser->m_hash_secret_salt = hash_salt;
return 1;
@@ -2230,6 +2331,11 @@ XML_GetBuffer(XML_Parser parser, int len) {
return parser->m_bufferEnd;
}
+static void
+triggerReenter(XML_Parser parser) {
+ parser->m_reenter = XML_TRUE;
+}
+
enum XML_Status XMLCALL
XML_StopParser(XML_Parser parser, XML_Bool resumable) {
if (parser == NULL)
@@ -2704,8 +2810,9 @@ static enum XML_Error PTRCALL
contentProcessor(XML_Parser parser, const char *start, const char *end,
const char **endPtr) {
enum XML_Error result = doContent(
- parser, 0, parser->m_encoding, start, end, endPtr,
- (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_ACCOUNT_DIRECT);
+ parser, parser->m_parentParser ? 1 : 0, parser->m_encoding, start, end,
+ endPtr, (XML_Bool)! parser->m_parsingStatus.finalBuffer,
+ XML_ACCOUNT_DIRECT);
if (result == XML_ERROR_NONE) {
if (! storeRawNames(parser))
return XML_ERROR_NO_MEMORY;
@@ -2793,6 +2900,11 @@ externalEntityInitProcessor3(XML_Parser parser, const char *start,
return XML_ERROR_NONE;
case XML_FINISHED:
return XML_ERROR_ABORTED;
+ case XML_PARSING:
+ if (parser->m_reenter) {
+ return XML_ERROR_UNEXPECTED_STATE; // LCOV_EXCL_LINE
+ }
+ /* Fall through */
default:
start = next;
}
@@ -2966,7 +3078,7 @@ doContent(XML_Parser parser, int startTagLevel, const ENCODING *enc,
reportDefault(parser, enc, s, next);
break;
}
- result = processInternalEntity(parser, entity, XML_FALSE);
+ result = processEntity(parser, entity, XML_FALSE, ENTITY_INTERNAL);
if (result != XML_ERROR_NONE)
return result;
} else if (parser->m_externalEntityRefHandler) {
@@ -3092,7 +3204,9 @@ doContent(XML_Parser parser, int startTagLevel, const ENCODING *enc,
}
if ((parser->m_tagLevel == 0)
&& (parser->m_parsingStatus.parsing != XML_FINISHED)) {
- if (parser->m_parsingStatus.parsing == XML_SUSPENDED)
+ if (parser->m_parsingStatus.parsing == XML_SUSPENDED
+ || (parser->m_parsingStatus.parsing == XML_PARSING
+ && parser->m_reenter))
parser->m_processor = epilogProcessor;
else
return epilogProcessor(parser, next, end, nextPtr);
@@ -3153,7 +3267,9 @@ doContent(XML_Parser parser, int startTagLevel, const ENCODING *enc,
}
if ((parser->m_tagLevel == 0)
&& (parser->m_parsingStatus.parsing != XML_FINISHED)) {
- if (parser->m_parsingStatus.parsing == XML_SUSPENDED)
+ if (parser->m_parsingStatus.parsing == XML_SUSPENDED
+ || (parser->m_parsingStatus.parsing == XML_PARSING
+ && parser->m_reenter))
parser->m_processor = epilogProcessor;
else
return epilogProcessor(parser, next, end, nextPtr);
@@ -3293,6 +3409,12 @@ doContent(XML_Parser parser, int startTagLevel, const ENCODING *enc,
return XML_ERROR_NONE;
case XML_FINISHED:
return XML_ERROR_ABORTED;
+ case XML_PARSING:
+ if (parser->m_reenter) {
+ *nextPtr = next;
+ return XML_ERROR_NONE;
+ }
+ /* Fall through */
default:;
}
}
@@ -4217,6 +4339,11 @@ doCdataSection(XML_Parser parser, const ENCODING *enc, const char **startPtr,
return XML_ERROR_NONE;
case XML_FINISHED:
return XML_ERROR_ABORTED;
+ case XML_PARSING:
+ if (parser->m_reenter) {
+ return XML_ERROR_UNEXPECTED_STATE; // LCOV_EXCL_LINE
+ }
+ /* Fall through */
default:;
}
}
@@ -4549,7 +4676,7 @@ entityValueInitProcessor(XML_Parser parser, const char *s, const char *end,
}
/* found end of entity value - can store it now */
return storeEntityValue(parser, parser->m_encoding, s, end,
- XML_ACCOUNT_DIRECT);
+ XML_ACCOUNT_DIRECT, NULL);
} else if (tok == XML_TOK_XML_DECL) {
enum XML_Error result;
result = processXmlDecl(parser, 0, start, next);
@@ -4676,7 +4803,7 @@ entityValueProcessor(XML_Parser parser, const char *s, const char *end,
break;
}
/* found end of entity value - can store it now */
- return storeEntityValue(parser, enc, s, end, XML_ACCOUNT_DIRECT);
+ return storeEntityValue(parser, enc, s, end, XML_ACCOUNT_DIRECT, NULL);
}
start = next;
}
@@ -5119,9 +5246,9 @@ doProlog(XML_Parser parser, const ENCODING *enc, const char *s, const char *end,
#if XML_GE == 1
// This will store the given replacement text in
// parser->m_declEntity->textPtr.
- enum XML_Error result
- = storeEntityValue(parser, enc, s + enc->minBytesPerChar,
- next - enc->minBytesPerChar, XML_ACCOUNT_NONE);
+ enum XML_Error result = callStoreEntityValue(
+ parser, enc, s + enc->minBytesPerChar, next - enc->minBytesPerChar,
+ XML_ACCOUNT_NONE);
if (parser->m_declEntity) {
parser->m_declEntity->textPtr = poolStart(&dtd->entityValuePool);
parser->m_declEntity->textLen
@@ -5546,7 +5673,7 @@ doProlog(XML_Parser parser, const ENCODING *enc, const char *s, const char *end,
enum XML_Error result;
XML_Bool betweenDecl
= (role == XML_ROLE_PARAM_ENTITY_REF ? XML_TRUE : XML_FALSE);
- result = processInternalEntity(parser, entity, betweenDecl);
+ result = processEntity(parser, entity, betweenDecl, ENTITY_INTERNAL);
if (result != XML_ERROR_NONE)
return result;
handleDefault = XML_FALSE;
@@ -5751,6 +5878,12 @@ doProlog(XML_Parser parser, const ENCODING *enc, const char *s, const char *end,
return XML_ERROR_NONE;
case XML_FINISHED:
return XML_ERROR_ABORTED;
+ case XML_PARSING:
+ if (parser->m_reenter) {
+ *nextPtr = next;
+ return XML_ERROR_NONE;
+ }
+ /* Fall through */
default:
s = next;
tok = XmlPrologTok(enc, s, end, &next);
@@ -5825,21 +5958,49 @@ epilogProcessor(XML_Parser parser, const char *s, const char *end,
return XML_ERROR_NONE;
case XML_FINISHED:
return XML_ERROR_ABORTED;
+ case XML_PARSING:
+ if (parser->m_reenter) {
+ return XML_ERROR_UNEXPECTED_STATE; // LCOV_EXCL_LINE
+ }
+ /* Fall through */
default:;
}
}
}
static enum XML_Error
-processInternalEntity(XML_Parser parser, ENTITY *entity, XML_Bool betweenDecl) {
- const char *textStart, *textEnd;
- const char *next;
- enum XML_Error result;
- OPEN_INTERNAL_ENTITY *openEntity;
+processEntity(XML_Parser parser, ENTITY *entity, XML_Bool betweenDecl,
+ enum EntityType type) {
+ OPEN_INTERNAL_ENTITY *openEntity, **openEntityList, **freeEntityList;
+ switch (type) {
+ case ENTITY_INTERNAL:
+ parser->m_processor = internalEntityProcessor;
+ openEntityList = &parser->m_openInternalEntities;
+ freeEntityList = &parser->m_freeInternalEntities;
+ break;
+ case ENTITY_ATTRIBUTE:
+ openEntityList = &parser->m_openAttributeEntities;
+ freeEntityList = &parser->m_freeAttributeEntities;
+ break;
+ case ENTITY_VALUE:
+ openEntityList = &parser->m_openValueEntities;
+ freeEntityList = &parser->m_freeValueEntities;
+ break;
+ /* default case serves merely as a safety net in case of a
+ * wrong entityType. Therefore we exclude the following lines
+ * from the test coverage.
+ *
+ * LCOV_EXCL_START
+ */
+ default:
+ // Should not reach here
+ assert(0);
+ /* LCOV_EXCL_STOP */
+ }
- if (parser->m_freeInternalEntities) {
- openEntity = parser->m_freeInternalEntities;
- parser->m_freeInternalEntities = openEntity->next;
+ if (*freeEntityList) {
+ openEntity = *freeEntityList;
+ *freeEntityList = openEntity->next;
} else {
openEntity
= (OPEN_INTERNAL_ENTITY *)MALLOC(parser, sizeof(OPEN_INTERNAL_ENTITY));
@@ -5847,55 +6008,34 @@ processInternalEntity(XML_Parser parser, ENTITY *entity, XML_Bool betweenDecl) {
return XML_ERROR_NO_MEMORY;
}
entity->open = XML_TRUE;
+ entity->hasMore = XML_TRUE;
#if XML_GE == 1
entityTrackingOnOpen(parser, entity, __LINE__);
#endif
entity->processed = 0;
- openEntity->next = parser->m_openInternalEntities;
- parser->m_openInternalEntities = openEntity;
+ openEntity->next = *openEntityList;
+ *openEntityList = openEntity;
openEntity->entity = entity;
+ openEntity->type = type;
openEntity->startTagLevel = parser->m_tagLevel;
openEntity->betweenDecl = betweenDecl;
openEntity->internalEventPtr = NULL;
openEntity->internalEventEndPtr = NULL;
- textStart = (const char *)entity->textPtr;
- textEnd = (const char *)(entity->textPtr + entity->textLen);
- /* Set a safe default value in case 'next' does not get set */
- next = textStart;
-
- if (entity->is_param) {
- int tok
- = XmlPrologTok(parser->m_internalEncoding, textStart, textEnd, &next);
- result = doProlog(parser, parser->m_internalEncoding, textStart, textEnd,
- tok, next, &next, XML_FALSE, XML_FALSE,
- XML_ACCOUNT_ENTITY_EXPANSION);
- } else {
- result = doContent(parser, parser->m_tagLevel, parser->m_internalEncoding,
- textStart, textEnd, &next, XML_FALSE,
- XML_ACCOUNT_ENTITY_EXPANSION);
- }
- if (result == XML_ERROR_NONE) {
- if (textEnd != next && parser->m_parsingStatus.parsing == XML_SUSPENDED) {
- entity->processed = (int)(next - textStart);
- parser->m_processor = internalEntityProcessor;
- } else if (parser->m_openInternalEntities->entity == entity) {
-#if XML_GE == 1
- entityTrackingOnClose(parser, entity, __LINE__);
-#endif /* XML_GE == 1 */
- entity->open = XML_FALSE;
- parser->m_openInternalEntities = openEntity->next;
- /* put openEntity back in list of free instances */
- openEntity->next = parser->m_freeInternalEntities;
- parser->m_freeInternalEntities = openEntity;
- }
+ // Only internal entities make use of the reenter flag
+ // therefore no need to set it for other entity types
+ if (type == ENTITY_INTERNAL) {
+ triggerReenter(parser);
}
- return result;
+ return XML_ERROR_NONE;
}
static enum XML_Error PTRCALL
internalEntityProcessor(XML_Parser parser, const char *s, const char *end,
const char **nextPtr) {
+ UNUSED_P(s);
+ UNUSED_P(end);
+ UNUSED_P(nextPtr);
ENTITY *entity;
const char *textStart, *textEnd;
const char *next;
@@ -5905,68 +6045,67 @@ internalEntityProcessor(XML_Parser parser, const char *s, const char *end,
return XML_ERROR_UNEXPECTED_STATE;
entity = openEntity->entity;
- textStart = ((const char *)entity->textPtr) + entity->processed;
- textEnd = (const char *)(entity->textPtr + entity->textLen);
- /* Set a safe default value in case 'next' does not get set */
- next = textStart;
-
- if (entity->is_param) {
- int tok
- = XmlPrologTok(parser->m_internalEncoding, textStart, textEnd, &next);
- result = doProlog(parser, parser->m_internalEncoding, textStart, textEnd,
- tok, next, &next, XML_FALSE, XML_TRUE,
- XML_ACCOUNT_ENTITY_EXPANSION);
- } else {
- result = doContent(parser, openEntity->startTagLevel,
- parser->m_internalEncoding, textStart, textEnd, &next,
- XML_FALSE, XML_ACCOUNT_ENTITY_EXPANSION);
- }
- if (result != XML_ERROR_NONE)
- return result;
+ // This will return early
+ if (entity->hasMore) {
+ textStart = ((const char *)entity->textPtr) + entity->processed;
+ textEnd = (const char *)(entity->textPtr + entity->textLen);
+ /* Set a safe default value in case 'next' does not get set */
+ next = textStart;
+
+ if (entity->is_param) {
+ int tok
+ = XmlPrologTok(parser->m_internalEncoding, textStart, textEnd, &next);
+ result = doProlog(parser, parser->m_internalEncoding, textStart, textEnd,
+ tok, next, &next, XML_FALSE, XML_FALSE,
+ XML_ACCOUNT_ENTITY_EXPANSION);
+ } else {
+ result = doContent(parser, openEntity->startTagLevel,
+ parser->m_internalEncoding, textStart, textEnd, &next,
+ XML_FALSE, XML_ACCOUNT_ENTITY_EXPANSION);
+ }
+
+ if (result != XML_ERROR_NONE)
+ return result;
+ // Check if entity is complete, if not, mark down how much of it is
+ // processed
+ if (textEnd != next
+ && (parser->m_parsingStatus.parsing == XML_SUSPENDED
+ || (parser->m_parsingStatus.parsing == XML_PARSING
+ && parser->m_reenter))) {
+ entity->processed = (int)(next - (const char *)entity->textPtr);
+ return result;
+ }
- if (textEnd != next && parser->m_parsingStatus.parsing == XML_SUSPENDED) {
- entity->processed = (int)(next - (const char *)entity->textPtr);
+ // Entity is complete. We cannot close it here since we need to first
+ // process its possible inner entities (which are added to the
+ // m_openInternalEntities during doProlog or doContent calls above)
+ entity->hasMore = XML_FALSE;
+ triggerReenter(parser);
return result;
- }
+ } // End of entity processing, "if" block will return here
+ // Remove fully processed openEntity from open entity list.
#if XML_GE == 1
entityTrackingOnClose(parser, entity, __LINE__);
#endif
+ // openEntity is m_openInternalEntities' head, as we set it at the start of
+ // this function and we skipped doProlog and doContent calls with hasMore set
+ // to false. This means we can directly remove the head of
+ // m_openInternalEntities
+ assert(parser->m_openInternalEntities == openEntity);
entity->open = XML_FALSE;
- parser->m_openInternalEntities = openEntity->next;
+ parser->m_openInternalEntities = parser->m_openInternalEntities->next;
+
/* put openEntity back in list of free instances */
openEntity->next = parser->m_freeInternalEntities;
parser->m_freeInternalEntities = openEntity;
- // If there are more open entities we want to stop right here and have the
- // upcoming call to XML_ResumeParser continue with entity content, or it would
- // be ignored altogether.
- if (parser->m_openInternalEntities != NULL
- && parser->m_parsingStatus.parsing == XML_SUSPENDED) {
- return XML_ERROR_NONE;
- }
-
- if (entity->is_param) {
- int tok;
- parser->m_processor = prologProcessor;
- tok = XmlPrologTok(parser->m_encoding, s, end, &next);
- return doProlog(parser, parser->m_encoding, s, end, tok, next, nextPtr,
- (XML_Bool)! parser->m_parsingStatus.finalBuffer, XML_TRUE,
- XML_ACCOUNT_DIRECT);
- } else {
- parser->m_processor = contentProcessor;
- /* see externalEntityContentProcessor vs contentProcessor */
- result = doContent(parser, parser->m_parentParser ? 1 : 0,
- parser->m_encoding, s, end, nextPtr,
- (XML_Bool)! parser->m_parsingStatus.finalBuffer,
- XML_ACCOUNT_DIRECT);
- if (result == XML_ERROR_NONE) {
- if (! storeRawNames(parser))
- return XML_ERROR_NO_MEMORY;
- }
- return result;
+ if (parser->m_openInternalEntities == NULL) {
+ parser->m_processor = entity->is_param ? prologProcessor : contentProcessor;
}
+ triggerReenter(parser);
+ return XML_ERROR_NONE;
}
static enum XML_Error PTRCALL
@@ -5982,8 +6121,70 @@ static enum XML_Error
storeAttributeValue(XML_Parser parser, const ENCODING *enc, XML_Bool isCdata,
const char *ptr, const char *end, STRING_POOL *pool,
enum XML_Account account) {
- enum XML_Error result
- = appendAttributeValue(parser, enc, isCdata, ptr, end, pool, account);
+ const char *next = ptr;
+ enum XML_Error result = XML_ERROR_NONE;
+
+ while (1) {
+ if (! parser->m_openAttributeEntities) {
+ result = appendAttributeValue(parser, enc, isCdata, next, end, pool,
+ account, &next);
+ } else {
+ OPEN_INTERNAL_ENTITY *const openEntity = parser->m_openAttributeEntities;
+ if (! openEntity)
+ return XML_ERROR_UNEXPECTED_STATE;
+
+ ENTITY *const entity = openEntity->entity;
+ const char *const textStart
+ = ((const char *)entity->textPtr) + entity->processed;
+ const char *const textEnd
+ = (const char *)(entity->textPtr + entity->textLen);
+ /* Set a safe default value in case 'next' does not get set */
+ const char *nextInEntity = textStart;
+ if (entity->hasMore) {
+ result = appendAttributeValue(
+ parser, parser->m_internalEncoding, isCdata, textStart, textEnd,
+ pool, XML_ACCOUNT_ENTITY_EXPANSION, &nextInEntity);
+ if (result != XML_ERROR_NONE)
+ break;
+ // Check if entity is complete, if not, mark down how much of it is
+ // processed. A XML_SUSPENDED check here is not required as
+ // appendAttributeValue will never suspend the parser.
+ if (textEnd != nextInEntity) {
+ entity->processed
+ = (int)(nextInEntity - (const char *)entity->textPtr);
+ continue;
+ }
+
+ // Entity is complete. We cannot close it here since we need to first
+ // process its possible inner entities (which are added to the
+ // m_openAttributeEntities during appendAttributeValue)
+ entity->hasMore = XML_FALSE;
+ continue;
+ } // End of entity processing, "if" block skips the rest
+
+ // Remove fully processed openEntity from open entity list.
+#if XML_GE == 1
+ entityTrackingOnClose(parser, entity, __LINE__);
+#endif
+ // openEntity is m_openAttributeEntities' head, since we set it at the
+ // start of this function and because we skipped appendAttributeValue call
+ // with hasMore set to false. This means we can directly remove the head
+ // of m_openAttributeEntities
+ assert(parser->m_openAttributeEntities == openEntity);
+ entity->open = XML_FALSE;
+ parser->m_openAttributeEntities = parser->m_openAttributeEntities->next;
+
+ /* put openEntity back in list of free instances */
+ openEntity->next = parser->m_freeAttributeEntities;
+ parser->m_freeAttributeEntities = openEntity;
+ }
+
+ // Break if an error occurred or there is nothing left to process
+ if (result || (parser->m_openAttributeEntities == NULL && end == next)) {
+ break;
+ }
+ }
+
if (result)
return result;
if (! isCdata && poolLength(pool) && poolLastChar(pool) == 0x20)
@@ -5996,7 +6197,7 @@ storeAttributeValue(XML_Parser parser, const ENCODING *enc, XML_Bool isCdata,
static enum XML_Error
appendAttributeValue(XML_Parser parser, const ENCODING *enc, XML_Bool isCdata,
const char *ptr, const char *end, STRING_POOL *pool,
- enum XML_Account account) {
+ enum XML_Account account, const char **nextPtr) {
DTD *const dtd = parser->m_dtd; /* save one level of indirection */
#ifndef XML_DTD
UNUSED_P(account);
@@ -6014,6 +6215,9 @@ appendAttributeValue(XML_Parser parser, const ENCODING *enc, XML_Bool isCdata,
#endif
switch (tok) {
case XML_TOK_NONE:
+ if (nextPtr) {
+ *nextPtr = next;
+ }
return XML_ERROR_NONE;
case XML_TOK_INVALID:
if (enc == parser->m_encoding)
@@ -6154,21 +6358,11 @@ appendAttributeValue(XML_Parser parser, const ENCODING *enc, XML_Bool isCdata,
return XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF;
} else {
enum XML_Error result;
- const XML_Char *textEnd = entity->textPtr + entity->textLen;
- entity->open = XML_TRUE;
-#if XML_GE == 1
- entityTrackingOnOpen(parser, entity, __LINE__);
-#endif
- result = appendAttributeValue(parser, parser->m_internalEncoding,
- isCdata, (const char *)entity->textPtr,
- (const char *)textEnd, pool,
- XML_ACCOUNT_ENTITY_EXPANSION);
-#if XML_GE == 1
- entityTrackingOnClose(parser, entity, __LINE__);
-#endif
- entity->open = XML_FALSE;
- if (result)
- return result;
+ result = processEntity(parser, entity, XML_FALSE, ENTITY_ATTRIBUTE);
+ if ((result == XML_ERROR_NONE) && (nextPtr != NULL)) {
+ *nextPtr = next;
+ }
+ return result;
}
} break;
default:
@@ -6197,7 +6391,7 @@ appendAttributeValue(XML_Parser parser, const ENCODING *enc, XML_Bool isCdata,
static enum XML_Error
storeEntityValue(XML_Parser parser, const ENCODING *enc,
const char *entityTextPtr, const char *entityTextEnd,
- enum XML_Account account) {
+ enum XML_Account account, const char **nextPtr) {
DTD *const dtd = parser->m_dtd; /* save one level of indirection */
STRING_POOL *pool = &(dtd->entityValuePool);
enum XML_Error result = XML_ERROR_NONE;
@@ -6215,8 +6409,9 @@ storeEntityValue(XML_Parser parser, const ENCODING *enc,
return XML_ERROR_NO_MEMORY;
}
+ const char *next;
for (;;) {
- const char *next
+ next
= entityTextPtr; /* XmlEntityValueTok doesn't always set the last arg */
int tok = XmlEntityValueTok(enc, entityTextPtr, entityTextEnd, &next);
@@ -6278,16 +6473,8 @@ storeEntityValue(XML_Parser parser, const ENCODING *enc,
} else
dtd->keepProcessing = dtd->standalone;
} else {
- entity->open = XML_TRUE;
- entityTrackingOnOpen(parser, entity, __LINE__);
- result = storeEntityValue(
- parser, parser->m_internalEncoding, (const char *)entity->textPtr,
- (const char *)(entity->textPtr + entity->textLen),
- XML_ACCOUNT_ENTITY_EXPANSION);
- entityTrackingOnClose(parser, entity, __LINE__);
- entity->open = XML_FALSE;
- if (result)
- goto endEntityValue;
+ result = processEntity(parser, entity, XML_FALSE, ENTITY_VALUE);
+ goto endEntityValue;
}
break;
}
@@ -6375,6 +6562,81 @@ endEntityValue:
# ifdef XML_DTD
parser->m_prologState.inEntityValue = oldInEntityValue;
# endif /* XML_DTD */
+ // If 'nextPtr' is given, it should be updated during the processing
+ if (nextPtr != NULL) {
+ *nextPtr = next;
+ }
+ return result;
+}
+
+static enum XML_Error
+callStoreEntityValue(XML_Parser parser, const ENCODING *enc,
+ const char *entityTextPtr, const char *entityTextEnd,
+ enum XML_Account account) {
+ const char *next = entityTextPtr;
+ enum XML_Error result = XML_ERROR_NONE;
+ while (1) {
+ if (! parser->m_openValueEntities) {
+ result
+ = storeEntityValue(parser, enc, next, entityTextEnd, account, &next);
+ } else {
+ OPEN_INTERNAL_ENTITY *const openEntity = parser->m_openValueEntities;
+ if (! openEntity)
+ return XML_ERROR_UNEXPECTED_STATE;
+
+ ENTITY *const entity = openEntity->entity;
+ const char *const textStart
+ = ((const char *)entity->textPtr) + entity->processed;
+ const char *const textEnd
+ = (const char *)(entity->textPtr + entity->textLen);
+ /* Set a safe default value in case 'next' does not get set */
+ const char *nextInEntity = textStart;
+ if (entity->hasMore) {
+ result = storeEntityValue(parser, parser->m_internalEncoding, textStart,
+ textEnd, XML_ACCOUNT_ENTITY_EXPANSION,
+ &nextInEntity);
+ if (result != XML_ERROR_NONE)
+ break;
+ // Check if entity is complete, if not, mark down how much of it is
+ // processed. A XML_SUSPENDED check here is not required as
+ // appendAttributeValue will never suspend the parser.
+ if (textEnd != nextInEntity) {
+ entity->processed
+ = (int)(nextInEntity - (const char *)entity->textPtr);
+ continue;
+ }
+
+ // Entity is complete. We cannot close it here since we need to first
+ // process its possible inner entities (which are added to the
+ // m_openValueEntities during storeEntityValue)
+ entity->hasMore = XML_FALSE;
+ continue;
+ } // End of entity processing, "if" block skips the rest
+
+ // Remove fully processed openEntity from open entity list.
+# if XML_GE == 1
+ entityTrackingOnClose(parser, entity, __LINE__);
+# endif
+ // openEntity is m_openValueEntities' head, since we set it at the
+ // start of this function and because we skipped storeEntityValue call
+ // with hasMore set to false. This means we can directly remove the head
+ // of m_openValueEntities
+ assert(parser->m_openValueEntities == openEntity);
+ entity->open = XML_FALSE;
+ parser->m_openValueEntities = parser->m_openValueEntities->next;
+
+ /* put openEntity back in list of free instances */
+ openEntity->next = parser->m_freeValueEntities;
+ parser->m_freeValueEntities = openEntity;
+ }
+
+ // Break if an error occurred or there is nothing left to process
+ if (result
+ || (parser->m_openValueEntities == NULL && entityTextEnd == next)) {
+ break;
+ }
+ }
+
return result;
}
@@ -8542,11 +8804,13 @@ unsignedCharToPrintable(unsigned char c) {
return "\\xFE";
case 255:
return "\\xFF";
+ // LCOV_EXCL_START
default:
assert(0); /* never gets here */
return "dead code";
}
assert(0); /* never gets here */
+ // LCOV_EXCL_STOP
}
#endif /* XML_GE == 1 */
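The xmlparse.c hunks above remove recursion from entity expansion: processors now set a re-enter flag (triggerReenter) and return, and the loop added to callProcessor re-invokes them. The sketch below is illustrative only and is not Expat code; every name in it is invented. It shows the generic loop-plus-flag pattern under the assumption that each processor call does a bounded amount of work:

```c
/* Illustrative sketch only, NOT Expat code; all names are made up.
 * A step function requests re-entry instead of recursing, and a driver
 * loop keeps calling it, so nesting depth never grows the C stack. */
#include <stdbool.h>
#include <stdio.h>

struct parser {
  int pending_entities; /* stands in for the stack of open entities */
  bool reenter;         /* set by the step function to request another pass */
};

/* One bounded unit of work. */
static int process_once(struct parser *p) {
  if (p->pending_entities > 0) {
    p->pending_entities--; /* "expand" one entity */
    p->reenter = true;     /* ask the driver to call us again */
  }
  return 0; /* 0 == no error */
}

/* Driver loop: re-invokes the processor while it asks to be re-entered. */
static int drive(struct parser *p) {
  int err;
  for (;;) {
    err = process_once(p);
    if (!p->reenter)
      break;
    p->reenter = false;
    if (err != 0)
      return err;
  }
  return err;
}

int main(void) {
  /* A nesting depth this large could overflow the stack if handled by
   * recursion, but the loop runs in constant stack space. */
  struct parser p = {1000000, false};
  printf("done, err=%d\n", drive(&p));
  return 0;
}
```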
diff --git a/contrib/libs/expat/ya.make b/contrib/libs/expat/ya.make
index 489ae45db1..b41c7311e2 100644
--- a/contrib/libs/expat/ya.make
+++ b/contrib/libs/expat/ya.make
@@ -10,9 +10,9 @@ LICENSE(
LICENSE_TEXTS(.yandex_meta/licenses.list.txt)
-VERSION(2.6.4)
+VERSION(2.7.0)
-ORIGINAL_SOURCE(https://github.com/libexpat/libexpat/archive/R_2_6_4.tar.gz)
+ORIGINAL_SOURCE(https://github.com/libexpat/libexpat/archive/R_2_7_0.tar.gz)
ADDINCL(
contrib/libs/expat
diff --git a/contrib/python/multidict/.dist-info/METADATA b/contrib/python/multidict/.dist-info/METADATA
index 93f85177b9..b5c6dad90b 100644
--- a/contrib/python/multidict/.dist-info/METADATA
+++ b/contrib/python/multidict/.dist-info/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.2
Name: multidict
-Version: 6.1.0
+Version: 6.2.0
Summary: multidict implementation
Home-page: https://github.com/aio-libs/multidict
Author: Andrew Svetlov
@@ -20,16 +20,15 @@ Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
-Requires-Python: >=3.8
+Requires-Python: >=3.9
Description-Content-Type: text/x-rst
License-File: LICENSE
-Requires-Dist: typing-extensions >=4.1.0 ; python_version < "3.11"
+Requires-Dist: typing-extensions>=4.1.0; python_version < "3.11"
=========
multidict
diff --git a/contrib/python/multidict/multidict/__init__.py b/contrib/python/multidict/multidict/__init__.py
index 25ddca41e9..b6b532a1f2 100644
--- a/contrib/python/multidict/multidict/__init__.py
+++ b/contrib/python/multidict/multidict/__init__.py
@@ -5,6 +5,8 @@ multidict. It behaves mostly like a dict but it can have
several values for the same key.
"""
+from typing import TYPE_CHECKING
+
from ._abc import MultiMapping, MutableMultiMapping
from ._compat import USE_EXTENSIONS
@@ -20,13 +22,11 @@ __all__ = (
"getversion",
)
-__version__ = "6.1.0"
+__version__ = "6.2.0"
-try:
- if not USE_EXTENSIONS:
- raise ImportError
- from ._multidict import (
+if TYPE_CHECKING or not USE_EXTENSIONS:
+ from ._multidict_py import (
CIMultiDict,
CIMultiDictProxy,
MultiDict,
@@ -34,8 +34,8 @@ try:
getversion,
istr,
)
-except ImportError: # pragma: no cover
- from ._multidict_py import (
+else:
+ from ._multidict import (
CIMultiDict,
CIMultiDictProxy,
MultiDict,
diff --git a/contrib/python/multidict/multidict/__init__.pyi b/contrib/python/multidict/multidict/__init__.pyi
deleted file mode 100644
index 0940340f81..0000000000
--- a/contrib/python/multidict/multidict/__init__.pyi
+++ /dev/null
@@ -1,152 +0,0 @@
-import abc
-from typing import (
- Generic,
- Iterable,
- Iterator,
- Mapping,
- MutableMapping,
- TypeVar,
- overload,
-)
-
-class istr(str): ...
-
-upstr = istr
-
-_S = str | istr
-
-_T = TypeVar("_T")
-
-_T_co = TypeVar("_T_co", covariant=True)
-
-_D = TypeVar("_D")
-
-class MultiMapping(Mapping[_S, _T_co]):
- @overload
- @abc.abstractmethod
- def getall(self, key: _S) -> list[_T_co]: ...
- @overload
- @abc.abstractmethod
- def getall(self, key: _S, default: _D) -> list[_T_co] | _D: ...
- @overload
- @abc.abstractmethod
- def getone(self, key: _S) -> _T_co: ...
- @overload
- @abc.abstractmethod
- def getone(self, key: _S, default: _D) -> _T_co | _D: ...
-
-_Arg = (
- Mapping[str, _T]
- | Mapping[istr, _T]
- | dict[str, _T]
- | dict[istr, _T]
- | MultiMapping[_T]
- | Iterable[tuple[str, _T]]
- | Iterable[tuple[istr, _T]]
-)
-
-class MutableMultiMapping(MultiMapping[_T], MutableMapping[_S, _T], Generic[_T]):
- @abc.abstractmethod
- def add(self, key: _S, value: _T) -> None: ...
- @abc.abstractmethod
- def extend(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ...
- @overload
- @abc.abstractmethod
- def popone(self, key: _S) -> _T: ...
- @overload
- @abc.abstractmethod
- def popone(self, key: _S, default: _D) -> _T | _D: ...
- @overload
- @abc.abstractmethod
- def popall(self, key: _S) -> list[_T]: ...
- @overload
- @abc.abstractmethod
- def popall(self, key: _S, default: _D) -> list[_T] | _D: ...
-
-class MultiDict(MutableMultiMapping[_T], Generic[_T]):
- def __init__(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ...
- def copy(self) -> MultiDict[_T]: ...
- def __getitem__(self, k: _S) -> _T: ...
- def __setitem__(self, k: _S, v: _T) -> None: ...
- def __delitem__(self, v: _S) -> None: ...
- def __iter__(self) -> Iterator[_S]: ...
- def __len__(self) -> int: ...
- @overload
- def getall(self, key: _S) -> list[_T]: ...
- @overload
- def getall(self, key: _S, default: _D) -> list[_T] | _D: ...
- @overload
- def getone(self, key: _S) -> _T: ...
- @overload
- def getone(self, key: _S, default: _D) -> _T | _D: ...
- def add(self, key: _S, value: _T) -> None: ...
- def extend(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ...
- @overload
- def popone(self, key: _S) -> _T: ...
- @overload
- def popone(self, key: _S, default: _D) -> _T | _D: ...
- @overload
- def popall(self, key: _S) -> list[_T]: ...
- @overload
- def popall(self, key: _S, default: _D) -> list[_T] | _D: ...
-
-class CIMultiDict(MutableMultiMapping[_T], Generic[_T]):
- def __init__(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ...
- def copy(self) -> CIMultiDict[_T]: ...
- def __getitem__(self, k: _S) -> _T: ...
- def __setitem__(self, k: _S, v: _T) -> None: ...
- def __delitem__(self, v: _S) -> None: ...
- def __iter__(self) -> Iterator[_S]: ...
- def __len__(self) -> int: ...
- @overload
- def getall(self, key: _S) -> list[_T]: ...
- @overload
- def getall(self, key: _S, default: _D) -> list[_T] | _D: ...
- @overload
- def getone(self, key: _S) -> _T: ...
- @overload
- def getone(self, key: _S, default: _D) -> _T | _D: ...
- def add(self, key: _S, value: _T) -> None: ...
- def extend(self, arg: _Arg[_T] = ..., **kwargs: _T) -> None: ...
- @overload
- def popone(self, key: _S) -> _T: ...
- @overload
- def popone(self, key: _S, default: _D) -> _T | _D: ...
- @overload
- def popall(self, key: _S) -> list[_T]: ...
- @overload
- def popall(self, key: _S, default: _D) -> list[_T] | _D: ...
-
-class MultiDictProxy(MultiMapping[_T], Generic[_T]):
- def __init__(self, arg: MultiMapping[_T] | MutableMultiMapping[_T]) -> None: ...
- def copy(self) -> MultiDict[_T]: ...
- def __getitem__(self, k: _S) -> _T: ...
- def __iter__(self) -> Iterator[_S]: ...
- def __len__(self) -> int: ...
- @overload
- def getall(self, key: _S) -> list[_T]: ...
- @overload
- def getall(self, key: _S, default: _D) -> list[_T] | _D: ...
- @overload
- def getone(self, key: _S) -> _T: ...
- @overload
- def getone(self, key: _S, default: _D) -> _T | _D: ...
-
-class CIMultiDictProxy(MultiMapping[_T], Generic[_T]):
- def __init__(self, arg: MultiMapping[_T] | MutableMultiMapping[_T]) -> None: ...
- def __getitem__(self, k: _S) -> _T: ...
- def __iter__(self) -> Iterator[_S]: ...
- def __len__(self) -> int: ...
- @overload
- def getall(self, key: _S) -> list[_T]: ...
- @overload
- def getall(self, key: _S, default: _D) -> list[_T] | _D: ...
- @overload
- def getone(self, key: _S) -> _T: ...
- @overload
- def getone(self, key: _S, default: _D) -> _T | _D: ...
- def copy(self) -> CIMultiDict[_T]: ...
-
-def getversion(
- md: MultiDict[_T] | CIMultiDict[_T] | MultiDictProxy[_T] | CIMultiDictProxy[_T],
-) -> int: ...
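
The stub above is dropped because the annotations now live in the runtime modules, with __init__.py (earlier in this diff) importing from _multidict_py whenever TYPE_CHECKING is true. A minimal sketch, assuming multidict 6.2 is installed, showing that the two code paths expose the same surface:

    # Sketch: the package-level import may resolve to the C extension, while
    # _multidict_py is the fully annotated pure-Python implementation.
    from multidict import MultiDict
    from multidict._multidict_py import MultiDict as PyMultiDict

    for cls in (MultiDict, PyMultiDict):
        md = cls()
        md.add("key", 1)
        md.add("key", 2)
        assert md.getall("key") == [1, 2]
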
diff --git a/contrib/python/multidict/multidict/_abc.py b/contrib/python/multidict/multidict/_abc.py
index 0603cdd244..ff0e2a6976 100644
--- a/contrib/python/multidict/multidict/_abc.py
+++ b/contrib/python/multidict/multidict/_abc.py
@@ -1,48 +1,69 @@
import abc
-import sys
-import types
-from collections.abc import Mapping, MutableMapping
+from collections.abc import Iterable, Mapping, MutableMapping
+from typing import TYPE_CHECKING, Protocol, TypeVar, Union, overload
+if TYPE_CHECKING:
+ from ._multidict_py import istr
+else:
+ istr = str
-class _TypingMeta(abc.ABCMeta):
- # A fake metaclass to satisfy typing deps in runtime
- # basically MultiMapping[str] and other generic-like type instantiations
- # are emulated.
- # Note: real type hints are provided by __init__.pyi stub file
- if sys.version_info >= (3, 9):
+_V = TypeVar("_V")
+_V_co = TypeVar("_V_co", covariant=True)
+_T = TypeVar("_T")
- def __getitem__(self, key):
- return types.GenericAlias(self, key)
- else:
+class SupportsKeys(Protocol[_V_co]):
+ def keys(self) -> Iterable[str]: ...
+ def __getitem__(self, key: str, /) -> _V_co: ...
- def __getitem__(self, key):
- return self
+class SupportsIKeys(Protocol[_V_co]):
+ def keys(self) -> Iterable[istr]: ...
+ def __getitem__(self, key: istr, /) -> _V_co: ...
-class MultiMapping(Mapping, metaclass=_TypingMeta):
+
+MDArg = Union[SupportsKeys[_V], SupportsIKeys[_V], Iterable[tuple[str, _V]], None]
+
+
+class MultiMapping(Mapping[str, _V_co]):
+ @overload
+ def getall(self, key: str) -> list[_V_co]: ...
+ @overload
+ def getall(self, key: str, default: _T) -> Union[list[_V_co], _T]: ...
@abc.abstractmethod
- def getall(self, key, default=None):
- raise KeyError
+ def getall(self, key: str, default: _T = ...) -> Union[list[_V_co], _T]:
+ """Return all values for key."""
+ @overload
+ def getone(self, key: str) -> _V_co: ...
+ @overload
+ def getone(self, key: str, default: _T) -> Union[_V_co, _T]: ...
@abc.abstractmethod
- def getone(self, key, default=None):
- raise KeyError
+ def getone(self, key: str, default: _T = ...) -> Union[_V_co, _T]:
+ """Return first value for key."""
-class MutableMultiMapping(MultiMapping, MutableMapping):
+class MutableMultiMapping(MultiMapping[_V], MutableMapping[str, _V]):
@abc.abstractmethod
- def add(self, key, value):
- raise NotImplementedError
+ def add(self, key: str, value: _V) -> None:
+ """Add value to list."""
@abc.abstractmethod
- def extend(self, *args, **kwargs):
- raise NotImplementedError
+ def extend(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None:
+ """Add everything from arg and kwargs to the mapping."""
+ @overload
+ def popone(self, key: str) -> _V: ...
+ @overload
+ def popone(self, key: str, default: _T) -> Union[_V, _T]: ...
@abc.abstractmethod
- def popone(self, key, default=None):
- raise KeyError
+ def popone(self, key: str, default: _T = ...) -> Union[_V, _T]:
+ """Remove specified key and return the corresponding value."""
+ @overload
+ def popall(self, key: str) -> list[_V]: ...
+ @overload
+ def popall(self, key: str, default: _T) -> Union[list[_V], _T]: ...
@abc.abstractmethod
- def popall(self, key, default=None):
- raise KeyError
+ def popall(self, key: str, default: _T = ...) -> Union[list[_V], _T]:
+ """Remove all occurrences of key and return the list of corresponding values."""
diff --git a/contrib/python/multidict/multidict/_multidict.c b/contrib/python/multidict/multidict/_multidict.c
index 60864953b1..ebb1949f0a 100644
--- a/contrib/python/multidict/multidict/_multidict.c
+++ b/contrib/python/multidict/multidict/_multidict.c
@@ -1,6 +1,8 @@
#include "Python.h"
#include "structmember.h"
+#include "_multilib/pythoncapi_compat.h"
+
// Include order important
#include "_multilib/defs.h"
#include "_multilib/istr.h"
@@ -9,7 +11,7 @@
#include "_multilib/iter.h"
#include "_multilib/views.h"
-#if PY_MAJOR_VERSION < 3 || PY_MINOR_VERSION < 12
+#if PY_MINOR_VERSION < 12
#ifndef _PyArg_UnpackKeywords
#define FASTCALL_OLD
#endif
@@ -19,14 +21,13 @@
static PyObject *collections_abc_mapping;
static PyObject *collections_abc_mut_mapping;
static PyObject *collections_abc_mut_multi_mapping;
+static PyObject *repr_func;
static PyTypeObject multidict_type;
static PyTypeObject cimultidict_type;
static PyTypeObject multidict_proxy_type;
static PyTypeObject cimultidict_proxy_type;
-static PyObject *repr_func;
-
#define MultiDict_CheckExact(o) (Py_TYPE(o) == &multidict_type)
#define CIMultiDict_CheckExact(o) (Py_TYPE(o) == &cimultidict_type)
#define MultiDictProxy_CheckExact(o) (Py_TYPE(o) == &multidict_proxy_type)
@@ -155,13 +156,17 @@ _multidict_append_items_seq(MultiDictObject *self, PyObject *arg,
Py_INCREF(value);
}
else if (PyList_CheckExact(item)) {
- if (PyList_GET_SIZE(item) != 2) {
+ if (PyList_Size(item) != 2) {
+ goto invalid_type;
+ }
+ key = PyList_GetItemRef(item, 0);
+ if (key == NULL) {
+ goto invalid_type;
+ }
+ value = PyList_GetItemRef(item, 1);
+ if (value == NULL) {
goto invalid_type;
}
- key = PyList_GET_ITEM(item, 0);
- Py_INCREF(key);
- value = PyList_GET_ITEM(item, 1);
- Py_INCREF(value);
}
else if (PySequence_Check(item)) {
if (PySequence_Size(item) != 2) {
@@ -339,8 +344,8 @@ _multidict_extend(MultiDictObject *self, PyObject *args, PyObject *kwds,
if (args && PyObject_Length(args) > 1) {
PyErr_Format(
PyExc_TypeError,
- "%s takes at most 1 positional argument (%zd given)",
- name, PyObject_Length(args), NULL
+ "%s takes from 1 to 2 positional arguments but %zd were given",
+ name, PyObject_Length(args) + 1, NULL
);
return -1;
}
@@ -769,21 +774,13 @@ static inline void
multidict_tp_dealloc(MultiDictObject *self)
{
PyObject_GC_UnTrack(self);
-#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 9
Py_TRASHCAN_BEGIN(self, multidict_tp_dealloc)
-#else
- Py_TRASHCAN_SAFE_BEGIN(self);
-#endif
if (self->weaklist != NULL) {
PyObject_ClearWeakRefs((PyObject *)self);
};
pair_list_dealloc(&self->pairs);
Py_TYPE(self)->tp_free((PyObject *)self);
-#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 9
Py_TRASHCAN_END // there should be no code after this
-#else
- Py_TRASHCAN_SAFE_END(self);
-#endif
}
static inline int
@@ -1230,16 +1227,7 @@ PyDoc_STRVAR(multidict_update_doc,
"Update the dictionary from *other*, overwriting existing keys.");
-#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 9
#define multidict_class_getitem Py_GenericAlias
-#else
-static inline PyObject *
-multidict_class_getitem(PyObject *self, PyObject *arg)
-{
- Py_INCREF(self);
- return self;
-}
-#endif
PyDoc_STRVAR(sizeof__doc__,
@@ -1941,9 +1929,7 @@ getversion(PyObject *self, PyObject *md)
static inline void
module_free(void *m)
{
-#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 9
Py_CLEAR(multidict_str_lower);
-#endif
Py_CLEAR(collections_abc_mapping);
Py_CLEAR(collections_abc_mut_mapping);
Py_CLEAR(collections_abc_mut_multi_mapping);
@@ -1972,29 +1958,14 @@ static PyModuleDef multidict_module = {
PyMODINIT_FUNC
PyInit__multidict(void)
{
-#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 9
multidict_str_lower = PyUnicode_InternFromString("lower");
if (multidict_str_lower == NULL) {
goto fail;
}
-#endif
PyObject *module = NULL,
*reg_func_call_result = NULL;
-#define WITH_MOD(NAME) \
- Py_CLEAR(module); \
- module = PyImport_ImportModule(NAME); \
- if (module == NULL) { \
- goto fail; \
- }
-
-#define GET_MOD_ATTR(VAR, NAME) \
- VAR = PyObject_GetAttrString(module, NAME); \
- if (VAR == NULL) { \
- goto fail; \
- }
-
if (multidict_views_init() < 0) {
goto fail;
}
@@ -2015,18 +1986,31 @@ PyInit__multidict(void)
goto fail;
}
+#define WITH_MOD(NAME) \
+ Py_CLEAR(module); \
+ module = PyImport_ImportModule(NAME); \
+ if (module == NULL) { \
+ goto fail; \
+ }
+
+#define GET_MOD_ATTR(VAR, NAME) \
+ VAR = PyObject_GetAttrString(module, NAME); \
+ if (VAR == NULL) { \
+ goto fail; \
+ }
+
WITH_MOD("collections.abc");
GET_MOD_ATTR(collections_abc_mapping, "Mapping");
WITH_MOD("multidict._abc");
GET_MOD_ATTR(collections_abc_mut_mapping, "MultiMapping");
-
- WITH_MOD("multidict._abc");
GET_MOD_ATTR(collections_abc_mut_multi_mapping, "MutableMultiMapping");
WITH_MOD("multidict._multidict_base");
GET_MOD_ATTR(repr_func, "_mdrepr");
+ Py_CLEAR(module);
+
/* Register in _abc mappings (CI)MultiDict and (CI)MultiDictProxy */
reg_func_call_result = PyObject_CallMethod(
collections_abc_mut_mapping,
@@ -2070,6 +2054,13 @@ PyInit__multidict(void)
/* Instantiate this module */
module = PyModule_Create(&multidict_module);
+ if (module == NULL) {
+ goto fail;
+ }
+
+#ifdef Py_GIL_DISABLED
+ PyUnstable_Module_SetGIL(module, Py_MOD_GIL_NOT_USED);
+#endif
Py_INCREF(&istr_type);
if (PyModule_AddObject(
@@ -2109,9 +2100,7 @@ PyInit__multidict(void)
return module;
fail:
-#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 9
Py_XDECREF(multidict_str_lower);
-#endif
Py_XDECREF(collections_abc_mapping);
Py_XDECREF(collections_abc_mut_mapping);
Py_XDECREF(collections_abc_mut_multi_mapping);
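
The reworded TypeError above matches the phrasing Python itself produces for the pure-Python signature (def __init__(self, arg=None, /, **kwargs)), so both implementations now complain in the same style when given more than one positional argument. A quick sketch, assuming multidict is installed; the exact message prefix differs slightly between the C and pure-Python versions:

    # Sketch: two positional arguments are rejected with the new wording.
    from multidict import MultiDict

    try:
        MultiDict({"a": 1}, {"b": 2})
    except TypeError as exc:
        print(exc)  # ... takes from 1 to 2 positional arguments but 3 were given
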
diff --git a/contrib/python/multidict/multidict/_multidict_base.py b/contrib/python/multidict/multidict/_multidict_base.py
index de2f762a5c..df0d70097a 100644
--- a/contrib/python/multidict/multidict/_multidict_base.py
+++ b/contrib/python/multidict/multidict/_multidict_base.py
@@ -1,5 +1,19 @@
import sys
-from collections.abc import ItemsView, Iterable, KeysView, Set, ValuesView
+from collections.abc import (
+ Container,
+ ItemsView,
+ Iterable,
+ KeysView,
+ Mapping,
+ Set,
+ ValuesView,
+)
+from typing import Literal, Union
+
+if sys.version_info >= (3, 10):
+ from types import NotImplementedType
+else:
+ from typing import Any as NotImplementedType
if sys.version_info >= (3, 11):
from typing import assert_never
@@ -7,26 +21,28 @@ else:
from typing_extensions import assert_never
-def _abc_itemsview_register(view_cls):
+def _abc_itemsview_register(view_cls: type[object]) -> None:
ItemsView.register(view_cls)
-def _abc_keysview_register(view_cls):
+def _abc_keysview_register(view_cls: type[object]) -> None:
KeysView.register(view_cls)
-def _abc_valuesview_register(view_cls):
+def _abc_valuesview_register(view_cls: type[object]) -> None:
ValuesView.register(view_cls)
-def _viewbaseset_richcmp(view, other, op):
+def _viewbaseset_richcmp(
+ view: set[object], other: object, op: Literal[0, 1, 2, 3, 4, 5]
+) -> Union[bool, NotImplementedType]:
if op == 0: # <
if not isinstance(other, Set):
- return NotImplemented
+ return NotImplemented # type: ignore[no-any-return]
return len(view) < len(other) and view <= other
elif op == 1: # <=
if not isinstance(other, Set):
- return NotImplemented
+ return NotImplemented # type: ignore[no-any-return]
if len(view) > len(other):
return False
for elem in view:
@@ -35,17 +51,17 @@ def _viewbaseset_richcmp(view, other, op):
return True
elif op == 2: # ==
if not isinstance(other, Set):
- return NotImplemented
+ return NotImplemented # type: ignore[no-any-return]
return len(view) == len(other) and view <= other
elif op == 3: # !=
return not view == other
elif op == 4: # >
if not isinstance(other, Set):
- return NotImplemented
+ return NotImplemented # type: ignore[no-any-return]
return len(view) > len(other) and view >= other
elif op == 5: # >=
if not isinstance(other, Set):
- return NotImplemented
+ return NotImplemented # type: ignore[no-any-return]
if len(view) < len(other):
return False
for elem in other:
@@ -56,9 +72,11 @@ def _viewbaseset_richcmp(view, other, op):
assert_never(op)
-def _viewbaseset_and(view, other):
+def _viewbaseset_and(
+ view: set[object], other: object
+) -> Union[set[object], NotImplementedType]:
if not isinstance(other, Iterable):
- return NotImplemented
+ return NotImplemented # type: ignore[no-any-return]
if isinstance(view, Set):
view = set(iter(view))
if isinstance(other, Set):
@@ -68,9 +86,11 @@ def _viewbaseset_and(view, other):
return view & other
-def _viewbaseset_or(view, other):
+def _viewbaseset_or(
+ view: set[object], other: object
+) -> Union[set[object], NotImplementedType]:
if not isinstance(other, Iterable):
- return NotImplemented
+ return NotImplemented # type: ignore[no-any-return]
if isinstance(view, Set):
view = set(iter(view))
if isinstance(other, Set):
@@ -80,9 +100,11 @@ def _viewbaseset_or(view, other):
return view | other
-def _viewbaseset_sub(view, other):
+def _viewbaseset_sub(
+ view: set[object], other: object
+) -> Union[set[object], NotImplementedType]:
if not isinstance(other, Iterable):
- return NotImplemented
+ return NotImplemented # type: ignore[no-any-return]
if isinstance(view, Set):
view = set(iter(view))
if isinstance(other, Set):
@@ -92,9 +114,11 @@ def _viewbaseset_sub(view, other):
return view - other
-def _viewbaseset_xor(view, other):
+def _viewbaseset_xor(
+ view: set[object], other: object
+) -> Union[set[object], NotImplementedType]:
if not isinstance(other, Iterable):
- return NotImplemented
+ return NotImplemented # type: ignore[no-any-return]
if isinstance(view, Set):
view = set(iter(view))
if isinstance(other, Set):
@@ -104,7 +128,7 @@ def _viewbaseset_xor(view, other):
return view ^ other
-def _itemsview_isdisjoint(view, other):
+def _itemsview_isdisjoint(view: Container[object], other: Iterable[object]) -> bool:
"Return True if two sets have a null intersection."
for v in other:
if v in view:
@@ -112,7 +136,7 @@ def _itemsview_isdisjoint(view, other):
return True
-def _itemsview_repr(view):
+def _itemsview_repr(view: Iterable[tuple[object, object]]) -> str:
lst = []
for k, v in view:
lst.append("{!r}: {!r}".format(k, v))
@@ -120,7 +144,7 @@ def _itemsview_repr(view):
return "{}({})".format(view.__class__.__name__, body)
-def _keysview_isdisjoint(view, other):
+def _keysview_isdisjoint(view: Container[object], other: Iterable[object]) -> bool:
"Return True if two sets have a null intersection."
for k in other:
if k in view:
@@ -128,7 +152,7 @@ def _keysview_isdisjoint(view, other):
return True
-def _keysview_repr(view):
+def _keysview_repr(view: Iterable[object]) -> str:
lst = []
for k in view:
lst.append("{!r}".format(k))
@@ -136,7 +160,7 @@ def _keysview_repr(view):
return "{}({})".format(view.__class__.__name__, body)
-def _valuesview_repr(view):
+def _valuesview_repr(view: Iterable[object]) -> str:
lst = []
for v in view:
lst.append("{!r}".format(v))
@@ -144,7 +168,7 @@ def _valuesview_repr(view):
return "{}({})".format(view.__class__.__name__, body)
-def _mdrepr(md):
+def _mdrepr(md: Mapping[object, object]) -> str:
lst = []
for k, v in md.items():
lst.append("'{}': {!r}".format(k, v))
diff --git a/contrib/python/multidict/multidict/_multidict_py.py b/contrib/python/multidict/multidict/_multidict_py.py
index 79c45aa19c..b8ecb8b962 100644
--- a/contrib/python/multidict/multidict/_multidict_py.py
+++ b/contrib/python/multidict/multidict/_multidict_py.py
@@ -1,47 +1,56 @@
+import enum
import sys
-import types
from array import array
-from collections import abc
-
-from ._abc import MultiMapping, MutableMultiMapping
-
-_marker = object()
-
-if sys.version_info >= (3, 9):
- GenericAlias = types.GenericAlias
+from collections.abc import (
+ Callable,
+ ItemsView,
+ Iterable,
+ Iterator,
+ KeysView,
+ Mapping,
+ ValuesView,
+)
+from typing import (
+ TYPE_CHECKING,
+ Generic,
+ NoReturn,
+ TypeVar,
+ Union,
+ cast,
+ overload,
+)
+
+from ._abc import MDArg, MultiMapping, MutableMultiMapping, SupportsKeys
+
+if sys.version_info >= (3, 11):
+ from typing import Self
else:
-
- def GenericAlias(cls):
- return cls
+ from typing_extensions import Self
class istr(str):
-
"""Case insensitive str."""
__is_istr__ = True
-upstr = istr # for relaxing backward compatibility problems
-
-
-def getversion(md):
- if not isinstance(md, _Base):
- raise TypeError("Parameter should be multidict or proxy")
- return md._impl._version
+_V = TypeVar("_V")
+_T = TypeVar("_T")
+_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
+sentinel = _SENTINEL.sentinel
_version = array("Q", [0])
-class _Impl:
+class _Impl(Generic[_V]):
__slots__ = ("_items", "_version")
- def __init__(self):
- self._items = []
+ def __init__(self) -> None:
+ self._items: list[tuple[str, str, _V]] = []
self.incr_version()
- def incr_version(self):
+ def incr_version(self) -> None:
global _version
v = _version
v[0] += 1
@@ -49,25 +58,138 @@ class _Impl:
if sys.implementation.name != "pypy":
- def __sizeof__(self):
+ def __sizeof__(self) -> int:
return object.__sizeof__(self) + sys.getsizeof(self._items)
-class _Base:
- def _title(self, key):
+class _Iter(Generic[_T]):
+ __slots__ = ("_size", "_iter")
+
+ def __init__(self, size: int, iterator: Iterator[_T]):
+ self._size = size
+ self._iter = iterator
+
+ def __iter__(self) -> Self:
+ return self
+
+ def __next__(self) -> _T:
+ return next(self._iter)
+
+ def __length_hint__(self) -> int:
+ return self._size
+
+
+class _ViewBase(Generic[_V]):
+ def __init__(self, impl: _Impl[_V]):
+ self._impl = impl
+
+ def __len__(self) -> int:
+ return len(self._impl._items)
+
+
+class _ItemsView(_ViewBase[_V], ItemsView[str, _V]):
+ def __contains__(self, item: object) -> bool:
+ if not isinstance(item, (tuple, list)) or len(item) != 2:
+ return False
+ for i, k, v in self._impl._items:
+ if item[0] == k and item[1] == v:
+ return True
+ return False
+
+ def __iter__(self) -> _Iter[tuple[str, _V]]:
+ return _Iter(len(self), self._iter(self._impl._version))
+
+ def _iter(self, version: int) -> Iterator[tuple[str, _V]]:
+ for i, k, v in self._impl._items:
+ if version != self._impl._version:
+ raise RuntimeError("Dictionary changed during iteration")
+ yield k, v
+
+ def __repr__(self) -> str:
+ lst = []
+ for item in self._impl._items:
+ lst.append("{!r}: {!r}".format(item[1], item[2]))
+ body = ", ".join(lst)
+ return "{}({})".format(self.__class__.__name__, body)
+
+
+class _ValuesView(_ViewBase[_V], ValuesView[_V]):
+ def __contains__(self, value: object) -> bool:
+ for item in self._impl._items:
+ if item[2] == value:
+ return True
+ return False
+
+ def __iter__(self) -> _Iter[_V]:
+ return _Iter(len(self), self._iter(self._impl._version))
+
+ def _iter(self, version: int) -> Iterator[_V]:
+ for item in self._impl._items:
+ if version != self._impl._version:
+ raise RuntimeError("Dictionary changed during iteration")
+ yield item[2]
+
+ def __repr__(self) -> str:
+ lst = []
+ for item in self._impl._items:
+ lst.append("{!r}".format(item[2]))
+ body = ", ".join(lst)
+ return "{}({})".format(self.__class__.__name__, body)
+
+
+class _KeysView(_ViewBase[_V], KeysView[str]):
+ def __contains__(self, key: object) -> bool:
+ for item in self._impl._items:
+ if item[1] == key:
+ return True
+ return False
+
+ def __iter__(self) -> _Iter[str]:
+ return _Iter(len(self), self._iter(self._impl._version))
+
+ def _iter(self, version: int) -> Iterator[str]:
+ for item in self._impl._items:
+ if version != self._impl._version:
+ raise RuntimeError("Dictionary changed during iteration")
+ yield item[1]
+
+ def __repr__(self) -> str:
+ lst = []
+ for item in self._impl._items:
+ lst.append("{!r}".format(item[1]))
+ body = ", ".join(lst)
+ return "{}({})".format(self.__class__.__name__, body)
+
+
+class _Base(MultiMapping[_V]):
+ _impl: _Impl[_V]
+
+ def _title(self, key: str) -> str:
return key
- def getall(self, key, default=_marker):
+ @overload
+ def getall(self, key: str) -> list[_V]: ...
+ @overload
+ def getall(self, key: str, default: _T) -> Union[list[_V], _T]: ...
+ def getall(
+ self, key: str, default: Union[_T, _SENTINEL] = sentinel
+ ) -> Union[list[_V], _T]:
"""Return a list of all values matching the key."""
identity = self._title(key)
res = [v for i, k, v in self._impl._items if i == identity]
if res:
return res
- if not res and default is not _marker:
+ if not res and default is not sentinel:
return default
raise KeyError("Key not found: %r" % key)
- def getone(self, key, default=_marker):
+ @overload
+ def getone(self, key: str) -> _V: ...
+ @overload
+ def getone(self, key: str, default: _T) -> Union[_V, _T]: ...
+ def getone(
+ self, key: str, default: Union[_T, _SENTINEL] = sentinel
+ ) -> Union[_V, _T]:
"""Get first value matching the key.
Raises KeyError if the key is not found and no default is provided.
@@ -76,42 +198,46 @@ class _Base:
for i, k, v in self._impl._items:
if i == identity:
return v
- if default is not _marker:
+ if default is not sentinel:
return default
raise KeyError("Key not found: %r" % key)
# Mapping interface #
- def __getitem__(self, key):
+ def __getitem__(self, key: str) -> _V:
return self.getone(key)
- def get(self, key, default=None):
+ @overload
+ def get(self, key: str, /) -> Union[_V, None]: ...
+ @overload
+ def get(self, key: str, /, default: _T) -> Union[_V, _T]: ...
+ def get(self, key: str, default: Union[_T, None] = None) -> Union[_V, _T, None]:
"""Get first value matching the key.
If the key is not found, returns the default (or None if no default is provided)
"""
return self.getone(key, default)
- def __iter__(self):
+ def __iter__(self) -> Iterator[str]:
return iter(self.keys())
- def __len__(self):
+ def __len__(self) -> int:
return len(self._impl._items)
- def keys(self):
+ def keys(self) -> KeysView[str]:
"""Return a new view of the dictionary's keys."""
return _KeysView(self._impl)
- def items(self):
+ def items(self) -> ItemsView[str, _V]:
"""Return a new view of the dictionary's items *(key, value) pairs)."""
return _ItemsView(self._impl)
- def values(self):
+ def values(self) -> _ValuesView[_V]:
"""Return a new view of the dictionary's values."""
return _ValuesView(self._impl)
- def __eq__(self, other):
- if not isinstance(other, abc.Mapping):
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Mapping):
return NotImplemented
if isinstance(other, _Base):
lft = self._impl._items
@@ -125,124 +251,83 @@ class _Base:
if len(self._impl._items) != len(other):
return False
for k, v in self.items():
- nv = other.get(k, _marker)
+ nv = other.get(k, sentinel)
if v != nv:
return False
return True
- def __contains__(self, key):
+ def __contains__(self, key: object) -> bool:
+ if not isinstance(key, str):
+ return False
identity = self._title(key)
for i, k, v in self._impl._items:
if i == identity:
return True
return False
- def __repr__(self):
+ def __repr__(self) -> str:
body = ", ".join("'{}': {!r}".format(k, v) for k, v in self.items())
return "<{}({})>".format(self.__class__.__name__, body)
- __class_getitem__ = classmethod(GenericAlias)
-
-
-class MultiDictProxy(_Base, MultiMapping):
- """Read-only proxy for MultiDict instance."""
-
- def __init__(self, arg):
- if not isinstance(arg, (MultiDict, MultiDictProxy)):
- raise TypeError(
- "ctor requires MultiDict or MultiDictProxy instance"
- ", not {}".format(type(arg))
- )
-
- self._impl = arg._impl
- def __reduce__(self):
- raise TypeError("can't pickle {} objects".format(self.__class__.__name__))
-
- def copy(self):
- """Return a copy of itself."""
- return MultiDict(self.items())
-
-
-class CIMultiDictProxy(MultiDictProxy):
- """Read-only proxy for CIMultiDict instance."""
-
- def __init__(self, arg):
- if not isinstance(arg, (CIMultiDict, CIMultiDictProxy)):
- raise TypeError(
- "ctor requires CIMultiDict or CIMultiDictProxy instance"
- ", not {}".format(type(arg))
- )
-
- self._impl = arg._impl
-
- def _title(self, key):
- return key.title()
-
- def copy(self):
- """Return a copy of itself."""
- return CIMultiDict(self.items())
-
-
-class MultiDict(_Base, MutableMultiMapping):
+class MultiDict(_Base[_V], MutableMultiMapping[_V]):
"""Dictionary with the support for duplicate keys."""
- def __init__(self, *args, **kwargs):
+ def __init__(self, arg: MDArg[_V] = None, /, **kwargs: _V):
self._impl = _Impl()
- self._extend(args, kwargs, self.__class__.__name__, self._extend_items)
+ self._extend(arg, kwargs, self.__class__.__name__, self._extend_items)
if sys.implementation.name != "pypy":
- def __sizeof__(self):
+ def __sizeof__(self) -> int:
return object.__sizeof__(self) + sys.getsizeof(self._impl)
- def __reduce__(self):
+ def __reduce__(self) -> tuple[type[Self], tuple[list[tuple[str, _V]]]]:
return (self.__class__, (list(self.items()),))
- def _title(self, key):
+ def _title(self, key: str) -> str:
return key
- def _key(self, key):
+ def _key(self, key: str) -> str:
if isinstance(key, str):
return key
else:
- raise TypeError(
- "MultiDict keys should be either str " "or subclasses of str"
- )
+ raise TypeError("MultiDict keys should be either str or subclasses of str")
- def add(self, key, value):
+ def add(self, key: str, value: _V) -> None:
identity = self._title(key)
self._impl._items.append((identity, self._key(key), value))
self._impl.incr_version()
- def copy(self):
+ def copy(self) -> Self:
"""Return a copy of itself."""
cls = self.__class__
return cls(self.items())
__copy__ = copy
- def extend(self, *args, **kwargs):
+ def extend(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None:
"""Extend current MultiDict with more values.
This method must be used instead of update.
"""
- self._extend(args, kwargs, "extend", self._extend_items)
-
- def _extend(self, args, kwargs, name, method):
- if len(args) > 1:
- raise TypeError(
- "{} takes at most 1 positional argument"
- " ({} given)".format(name, len(args))
- )
- if args:
- arg = args[0]
- if isinstance(args[0], (MultiDict, MultiDictProxy)) and not kwargs:
+ self._extend(arg, kwargs, "extend", self._extend_items)
+
+ def _extend(
+ self,
+ arg: MDArg[_V],
+ kwargs: Mapping[str, _V],
+ name: str,
+ method: Callable[[list[tuple[str, str, _V]]], None],
+ ) -> None:
+ if arg:
+ if isinstance(arg, (MultiDict, MultiDictProxy)) and not kwargs:
items = arg._impl._items
else:
- if hasattr(arg, "items"):
- arg = arg.items()
+ if hasattr(arg, "keys"):
+ arg = cast(SupportsKeys[_V], arg)
+ arg = [(k, arg[k]) for k in arg.keys()]
if kwargs:
arg = list(arg)
arg.extend(list(kwargs.items()))
@@ -264,21 +349,21 @@ class MultiDict(_Base, MutableMultiMapping):
]
)
- def _extend_items(self, items):
+ def _extend_items(self, items: Iterable[tuple[str, str, _V]]) -> None:
for identity, key, value in items:
self.add(key, value)
- def clear(self):
+ def clear(self) -> None:
"""Remove all items from MultiDict."""
self._impl._items.clear()
self._impl.incr_version()
# Mapping interface #
- def __setitem__(self, key, value):
+ def __setitem__(self, key: str, value: _V) -> None:
self._replace(key, value)
- def __delitem__(self, key):
+ def __delitem__(self, key: str) -> None:
identity = self._title(key)
items = self._impl._items
found = False
@@ -291,16 +376,28 @@ class MultiDict(_Base, MutableMultiMapping):
else:
self._impl.incr_version()
- def setdefault(self, key, default=None):
+ @overload
+ def setdefault(
+ self: "MultiDict[Union[_T, None]]", key: str, default: None = None
+ ) -> Union[_T, None]: ...
+ @overload
+ def setdefault(self, key: str, default: _V) -> _V: ...
+ def setdefault(self, key: str, default: Union[_V, None] = None) -> Union[_V, None]: # type: ignore[misc]
"""Return value for key, set value to default if key is not present."""
identity = self._title(key)
for i, k, v in self._impl._items:
if i == identity:
return v
- self.add(key, default)
+ self.add(key, default) # type: ignore[arg-type]
return default
- def popone(self, key, default=_marker):
+ @overload
+ def popone(self, key: str) -> _V: ...
+ @overload
+ def popone(self, key: str, default: _T) -> Union[_V, _T]: ...
+ def popone(
+ self, key: str, default: Union[_T, _SENTINEL] = sentinel
+ ) -> Union[_V, _T]:
"""Remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise
@@ -314,14 +411,22 @@ class MultiDict(_Base, MutableMultiMapping):
del self._impl._items[i]
self._impl.incr_version()
return value
- if default is _marker:
+ if default is sentinel:
raise KeyError(key)
else:
return default
- pop = popone # type: ignore
-
- def popall(self, key, default=_marker):
+ # Type checkers will inherit the signature for pop() if we don't confuse them here.
+ if not TYPE_CHECKING:
+ pop = popone
+
+ @overload
+ def popall(self, key: str) -> list[_V]: ...
+ @overload
+ def popall(self, key: str, default: _T) -> Union[list[_V], _T]: ...
+ def popall(
+ self, key: str, default: Union[_T, _SENTINEL] = sentinel
+ ) -> Union[list[_V], _T]:
"""Remove all occurrences of key and return the list of corresponding
values.
@@ -340,7 +445,7 @@ class MultiDict(_Base, MutableMultiMapping):
self._impl.incr_version()
found = True
if not found:
- if default is _marker:
+ if default is sentinel:
raise KeyError(key)
else:
return default
@@ -348,7 +453,7 @@ class MultiDict(_Base, MutableMultiMapping):
ret.reverse()
return ret
- def popitem(self):
+ def popitem(self) -> tuple[str, _V]:
"""Remove and return an arbitrary (key, value) pair."""
if self._impl._items:
i = self._impl._items.pop(0)
@@ -357,14 +462,14 @@ class MultiDict(_Base, MutableMultiMapping):
else:
raise KeyError("empty multidict")
- def update(self, *args, **kwargs):
+ def update(self, arg: MDArg[_V] = None, /, **kwargs: _V) -> None:
"""Update the dictionary from *other*, overwriting existing keys."""
- self._extend(args, kwargs, "update", self._update_items)
+ self._extend(arg, kwargs, "update", self._update_items)
- def _update_items(self, items):
+ def _update_items(self, items: list[tuple[str, str, _V]]) -> None:
if not items:
return
- used_keys = {}
+ used_keys: dict[str, int] = {}
for identity, key, value in items:
start = used_keys.get(identity, 0)
for i in range(start, len(self._impl._items)):
@@ -393,7 +498,7 @@ class MultiDict(_Base, MutableMultiMapping):
self._impl.incr_version()
- def _replace(self, key, value):
+ def _replace(self, key: str, value: _V) -> None:
key = self._key(key)
identity = self._title(key)
items = self._impl._items
@@ -412,7 +517,8 @@ class MultiDict(_Base, MutableMultiMapping):
return
# remove all tail items
- i = rgt + 1
+ # Mypy bug: https://github.com/python/mypy/issues/14209
+ i = rgt + 1 # type: ignore[possibly-undefined]
while i < len(items):
item = items[i]
if item[0] == identity:
@@ -421,107 +527,54 @@ class MultiDict(_Base, MutableMultiMapping):
i += 1
-class CIMultiDict(MultiDict):
+class CIMultiDict(MultiDict[_V]):
"""Dictionary with the support for duplicate case-insensitive keys."""
- def _title(self, key):
+ def _title(self, key: str) -> str:
return key.title()
-class _Iter:
- __slots__ = ("_size", "_iter")
-
- def __init__(self, size, iterator):
- self._size = size
- self._iter = iterator
-
- def __iter__(self):
- return self
-
- def __next__(self):
- return next(self._iter)
-
- def __length_hint__(self):
- return self._size
-
-
-class _ViewBase:
- def __init__(self, impl):
- self._impl = impl
-
- def __len__(self):
- return len(self._impl._items)
-
-
-class _ItemsView(_ViewBase, abc.ItemsView):
- def __contains__(self, item):
- assert isinstance(item, tuple) or isinstance(item, list)
- assert len(item) == 2
- for i, k, v in self._impl._items:
- if item[0] == k and item[1] == v:
- return True
- return False
-
- def __iter__(self):
- return _Iter(len(self), self._iter(self._impl._version))
+class MultiDictProxy(_Base[_V]):
+ """Read-only proxy for MultiDict instance."""
- def _iter(self, version):
- for i, k, v in self._impl._items:
- if version != self._impl._version:
- raise RuntimeError("Dictionary changed during iteration")
- yield k, v
+ def __init__(self, arg: Union[MultiDict[_V], "MultiDictProxy[_V]"]):
+ if not isinstance(arg, (MultiDict, MultiDictProxy)):
+ raise TypeError(
+ "ctor requires MultiDict or MultiDictProxy instance"
+ ", not {}".format(type(arg))
+ )
- def __repr__(self):
- lst = []
- for item in self._impl._items:
- lst.append("{!r}: {!r}".format(item[1], item[2]))
- body = ", ".join(lst)
- return "{}({})".format(self.__class__.__name__, body)
+ self._impl = arg._impl
+ def __reduce__(self) -> NoReturn:
+ raise TypeError("can't pickle {} objects".format(self.__class__.__name__))
-class _ValuesView(_ViewBase, abc.ValuesView):
- def __contains__(self, value):
- for item in self._impl._items:
- if item[2] == value:
- return True
- return False
+ def copy(self) -> MultiDict[_V]:
+ """Return a copy of itself."""
+ return MultiDict(self.items())
- def __iter__(self):
- return _Iter(len(self), self._iter(self._impl._version))
- def _iter(self, version):
- for item in self._impl._items:
- if version != self._impl._version:
- raise RuntimeError("Dictionary changed during iteration")
- yield item[2]
+class CIMultiDictProxy(MultiDictProxy[_V]):
+ """Read-only proxy for CIMultiDict instance."""
- def __repr__(self):
- lst = []
- for item in self._impl._items:
- lst.append("{!r}".format(item[2]))
- body = ", ".join(lst)
- return "{}({})".format(self.__class__.__name__, body)
+ def __init__(self, arg: Union[MultiDict[_V], MultiDictProxy[_V]]):
+ if not isinstance(arg, (CIMultiDict, CIMultiDictProxy)):
+ raise TypeError(
+ "ctor requires CIMultiDict or CIMultiDictProxy instance"
+ ", not {}".format(type(arg))
+ )
+ self._impl = arg._impl
-class _KeysView(_ViewBase, abc.KeysView):
- def __contains__(self, key):
- for item in self._impl._items:
- if item[1] == key:
- return True
- return False
+ def _title(self, key: str) -> str:
+ return key.title()
- def __iter__(self):
- return _Iter(len(self), self._iter(self._impl._version))
+ def copy(self) -> CIMultiDict[_V]:
+ """Return a copy of itself."""
+ return CIMultiDict(self.items())
- def _iter(self, version):
- for item in self._impl._items:
- if version != self._impl._version:
- raise RuntimeError("Dictionary changed during iteration")
- yield item[1]
- def __repr__(self):
- lst = []
- for item in self._impl._items:
- lst.append("{!r}".format(item[1]))
- body = ", ".join(lst)
- return "{}({})".format(self.__class__.__name__, body)
+def getversion(md: Union[MultiDict[object], MultiDictProxy[object]]) -> int:
+ if not isinstance(md, _Base):
+ raise TypeError("Parameter should be multidict or proxy")
+ return md._impl._version
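
getversion() now sits after the concrete classes it introspects, but its contract is unchanged: every mutation of the underlying multidict bumps the version, and proxies report the version of the dict they wrap. A minimal sketch, assuming multidict is installed:

    # Sketch: the version observed through a read-only proxy changes on mutation.
    from multidict import MultiDict, MultiDictProxy, getversion

    md = MultiDict(a=1)
    proxy = MultiDictProxy(md)
    before = getversion(proxy)

    md.add("a", 2)                       # mutate via the writable dict
    assert getversion(proxy) != before   # the proxy sees the new version
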
diff --git a/contrib/python/multidict/multidict/_multilib/defs.h b/contrib/python/multidict/multidict/_multilib/defs.h
index 55c21074dd..51a6639c42 100644
--- a/contrib/python/multidict/multidict/_multilib/defs.h
+++ b/contrib/python/multidict/multidict/_multilib/defs.h
@@ -5,11 +5,7 @@
extern "C" {
#endif
-#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 9
static PyObject *multidict_str_lower = NULL;
-#else
-_Py_IDENTIFIER(lower);
-#endif
/* We link this module statically for convenience. If compiled as a shared
library instead, some compilers don't allow addresses of Python objects
diff --git a/contrib/python/multidict/multidict/_multilib/istr.h b/contrib/python/multidict/multidict/_multilib/istr.h
index 61dc61aec6..8454f78b88 100644
--- a/contrib/python/multidict/multidict/_multilib/istr.h
+++ b/contrib/python/multidict/multidict/_multilib/istr.h
@@ -43,11 +43,7 @@ istr_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
if (!ret) {
goto fail;
}
-#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 9
s = PyObject_CallMethodNoArgs(ret, multidict_str_lower);
-#else
- s =_PyObject_CallMethodId(ret, &PyId_lower, NULL);
-#endif
if (!s) {
goto fail;
}
diff --git a/contrib/python/multidict/multidict/_multilib/pair_list.h b/contrib/python/multidict/multidict/_multilib/pair_list.h
index 15291d46a8..b23150dfad 100644
--- a/contrib/python/multidict/multidict/_multilib/pair_list.h
+++ b/contrib/python/multidict/multidict/_multilib/pair_list.h
@@ -31,11 +31,7 @@ The embedded buffer intention is to fit the vast majority of possible
HTTP headers into the buffer without allocating an extra memory block.
*/
-#if (PY_VERSION_HEX < 0x03080000)
-#define EMBEDDED_CAPACITY 28
-#else
#define EMBEDDED_CAPACITY 29
-#endif
typedef struct pair_list {
Py_ssize_t capacity;
@@ -110,11 +106,7 @@ ci_key_to_str(PyObject *key)
return ret;
}
if (PyUnicode_Check(key)) {
-#if PY_VERSION_HEX < 0x03090000
- return _PyObject_CallMethodId(key, &PyId_lower, NULL);
-#else
return PyObject_CallMethodNoArgs(key, multidict_str_lower);
-#endif
}
PyErr_SetString(PyExc_TypeError,
"CIMultiDict keys should be either str "
@@ -497,6 +489,10 @@ pair_list_contains(pair_list_t *list, PyObject *key)
PyObject *identity = NULL;
int tmp;
+ if (!PyUnicode_Check(key)) {
+ return 0;
+ }
+
ident = pair_list_calc_identity(list, key);
if (ident == NULL) {
goto fail;
@@ -916,13 +912,18 @@ _pair_list_post_update(pair_list_t *list, PyObject* used_keys, Py_ssize_t pos)
for (; pos < list->size; pos++) {
pair = pair_list_get(list, pos);
- tmp = PyDict_GetItem(used_keys, pair->identity);
- if (tmp == NULL) {
+ int status = PyDict_GetItemRef(used_keys, pair->identity, &tmp);
+ if (status == -1) {
+ // exception set
+ return -1;
+ }
+ else if (status == 0) {
// not found
continue;
}
num = PyLong_AsSsize_t(tmp);
+ Py_DECREF(tmp);
if (num == -1) {
if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_RuntimeError, "invalid internal state");
@@ -955,12 +956,18 @@ _pair_list_update(pair_list_t *list, PyObject *key,
int found;
int ident_cmp_res;
- item = PyDict_GetItem(used_keys, identity);
- if (item == NULL) {
+ int status = PyDict_GetItemRef(used_keys, identity, &item);
+ if (status == -1) {
+ // exception set
+ return -1;
+ }
+ else if (status == 0) {
+ // not found
pos = 0;
}
else {
pos = PyLong_AsSsize_t(item);
+ Py_DECREF(item);
if (pos == -1) {
if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_RuntimeError, "invalid internal state");
@@ -1087,18 +1094,28 @@ pair_list_update_from_seq(pair_list_t *list, PyObject *seq)
}
// Convert item to sequence, and verify length 2.
+#ifdef Py_GIL_DISABLED
+ if (!PySequence_Check(item)) {
+#else
fast = PySequence_Fast(item, "");
if (fast == NULL) {
if (PyErr_ExceptionMatches(PyExc_TypeError)) {
+#endif
PyErr_Format(PyExc_TypeError,
"multidict cannot convert sequence element #%zd"
" to a sequence",
i);
+#ifndef Py_GIL_DISABLED
}
+#endif
goto fail_1;
}
+#ifdef Py_GIL_DISABLED
+ n = PySequence_Size(item);
+#else
n = PySequence_Fast_GET_SIZE(fast);
+#endif
if (n != 2) {
PyErr_Format(PyExc_ValueError,
"multidict update sequence element #%zd "
@@ -1107,10 +1124,27 @@ pair_list_update_from_seq(pair_list_t *list, PyObject *seq)
goto fail_1;
}
+#ifdef Py_GIL_DISABLED
+ key = PySequence_ITEM(item, 0);
+ if (key == NULL) {
+ PyErr_Format(PyExc_ValueError,
+ "multidict update sequence element #%zd's "
+ "key could not be fetched", i);
+ goto fail_1;
+ }
+ value = PySequence_ITEM(item, 1);
+ if (value == NULL) {
+ PyErr_Format(PyExc_ValueError,
+ "multidict update sequence element #%zd's "
+ "value could not be fetched", i);
+ goto fail_1;
+ }
+#else
key = PySequence_Fast_GET_ITEM(fast, 0);
value = PySequence_Fast_GET_ITEM(fast, 1);
Py_INCREF(key);
Py_INCREF(value);
+#endif
identity = pair_list_calc_identity(list, key);
if (identity == NULL) {
@@ -1128,7 +1162,9 @@ pair_list_update_from_seq(pair_list_t *list, PyObject *seq)
Py_DECREF(key);
Py_DECREF(value);
+#ifndef Py_GIL_DISABLED
Py_DECREF(fast);
+#endif
Py_DECREF(item);
Py_DECREF(identity);
}
diff --git a/contrib/python/multidict/multidict/_multilib/pythoncapi_compat.h b/contrib/python/multidict/multidict/_multilib/pythoncapi_compat.h
new file mode 100644
index 0000000000..971981993b
--- /dev/null
+++ b/contrib/python/multidict/multidict/_multilib/pythoncapi_compat.h
@@ -0,0 +1,1142 @@
+// Header file providing new C API functions to old Python versions.
+//
+// File distributed under the Zero Clause BSD (0BSD) license.
+// Copyright Contributors to the pythoncapi_compat project.
+//
+// Homepage:
+// https://github.com/python/pythoncapi_compat
+//
+// Latest version:
+// https://raw.githubusercontent.com/python/pythoncapi_compat/master/pythoncapi_compat.h
+//
+// The vendored version comes from commit:
+// https://raw.githubusercontent.com/python/pythoncapi-compat/2d18aecd7b2f549d38a13e27b682ea4966f37bd8/pythoncapi_compat.h
+//
+// SPDX-License-Identifier: 0BSD
+
+#ifndef PYTHONCAPI_COMPAT
+#define PYTHONCAPI_COMPAT
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <Python.h>
+
+// Python 3.11.0b4 added PyFrame_GetBack() to Python.h
+#if PY_VERSION_HEX < 0x030b00B4 && !defined(PYPY_VERSION)
+# include "frameobject.h" // PyFrameObject, PyFrame_GetBack()
+#endif
+
+
+#ifndef _Py_CAST
+# define _Py_CAST(type, expr) ((type)(expr))
+#endif
+
+// Static inline functions should use _Py_NULL rather than using NULL directly
+// to prevent C++ compiler warnings. On C23 and newer and on C++11 and newer,
+// _Py_NULL is defined as nullptr.
+#if (defined (__STDC_VERSION__) && __STDC_VERSION__ > 201710L) \
+ || (defined(__cplusplus) && __cplusplus >= 201103)
+# define _Py_NULL nullptr
+#else
+# define _Py_NULL NULL
+#endif
+
+// Cast argument to PyObject* type.
+#ifndef _PyObject_CAST
+# define _PyObject_CAST(op) _Py_CAST(PyObject*, op)
+#endif
+
+
+// bpo-42262 added Py_NewRef() to Python 3.10.0a3
+#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_NewRef)
+static inline PyObject* _Py_NewRef(PyObject *obj)
+{
+ Py_INCREF(obj);
+ return obj;
+}
+#define Py_NewRef(obj) _Py_NewRef(_PyObject_CAST(obj))
+#endif
+
+
+// bpo-42262 added Py_XNewRef() to Python 3.10.0a3
+#if PY_VERSION_HEX < 0x030A00A3 && !defined(Py_XNewRef)
+static inline PyObject* _Py_XNewRef(PyObject *obj)
+{
+ Py_XINCREF(obj);
+ return obj;
+}
+#define Py_XNewRef(obj) _Py_XNewRef(_PyObject_CAST(obj))
+#endif
+
+
+// bpo-43753 added Py_Is(), Py_IsNone(), Py_IsTrue() and Py_IsFalse()
+// to Python 3.10.0b1.
+#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_Is)
+# define Py_Is(x, y) ((x) == (y))
+#endif
+#if PY_VERSION_HEX < 0x030A00B1 && !defined(Py_IsNone)
+# define Py_IsNone(x) Py_Is(x, Py_None)
+#endif
+#if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsTrue)
+# define Py_IsTrue(x) Py_Is(x, Py_True)
+#endif
+#if (PY_VERSION_HEX < 0x030A00B1 || defined(PYPY_VERSION)) && !defined(Py_IsFalse)
+# define Py_IsFalse(x) Py_Is(x, Py_False)
+#endif
+
+
+#if defined(PYPY_VERSION)
+static inline PyCodeObject* PyFrame_GetCode(PyFrameObject *frame)
+{
+ assert(frame != _Py_NULL);
+ assert(frame->f_code != _Py_NULL);
+ return _Py_CAST(PyCodeObject*, Py_NewRef(frame->f_code));
+}
+#endif
+
+static inline PyCodeObject* _PyFrame_GetCodeBorrow(PyFrameObject *frame)
+{
+ PyCodeObject *code = PyFrame_GetCode(frame);
+ Py_DECREF(code);
+ return code;
+}
+
+#if !defined(PYPY_VERSION)
+static inline PyFrameObject* _PyFrame_GetBackBorrow(PyFrameObject *frame)
+{
+ PyFrameObject *back = PyFrame_GetBack(frame);
+ Py_XDECREF(back);
+ return back;
+}
+#endif
+
+
+// bpo-40421 added PyFrame_GetLocals() to Python 3.11.0a7
+#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION)
+static inline PyObject* PyFrame_GetLocals(PyFrameObject *frame)
+{
+ if (PyFrame_FastToLocalsWithError(frame) < 0) {
+ return NULL;
+ }
+ return Py_NewRef(frame->f_locals);
+}
+#endif
+
+
+// bpo-40421 added PyFrame_GetGlobals() to Python 3.11.0a7
+#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION)
+static inline PyObject* PyFrame_GetGlobals(PyFrameObject *frame)
+{
+ return Py_NewRef(frame->f_globals);
+}
+#endif
+
+
+// bpo-40421 added PyFrame_GetBuiltins() to Python 3.11.0a7
+#if PY_VERSION_HEX < 0x030B00A7 && !defined(PYPY_VERSION)
+static inline PyObject* PyFrame_GetBuiltins(PyFrameObject *frame)
+{
+ return Py_NewRef(frame->f_builtins);
+}
+#endif
+
+
+// bpo-40421 added PyFrame_GetLasti() to Python 3.11.0b1
+#if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION)
+static inline int PyFrame_GetLasti(PyFrameObject *frame)
+{
+#if PY_VERSION_HEX >= 0x030A00A7
+ // bpo-27129: Since Python 3.10.0a7, f_lasti is an instruction offset,
+ // not a bytes offset anymore. Python uses 16-bit "wordcode" (2 bytes)
+ // instructions.
+ if (frame->f_lasti < 0) {
+ return -1;
+ }
+ return frame->f_lasti * 2;
+#else
+ return frame->f_lasti;
+#endif
+}
+#endif
+
+
+// gh-91248 added PyFrame_GetVar() to Python 3.12.0a2
+#if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION)
+static inline PyObject* PyFrame_GetVar(PyFrameObject *frame, PyObject *name)
+{
+ PyObject *locals, *value;
+
+ locals = PyFrame_GetLocals(frame);
+ if (locals == NULL) {
+ return NULL;
+ }
+ value = PyDict_GetItemWithError(locals, name);
+ Py_DECREF(locals);
+
+ if (value == NULL) {
+ if (PyErr_Occurred()) {
+ return NULL;
+ }
+ PyErr_Format(PyExc_NameError, "variable %R does not exist", name);
+ return NULL;
+ }
+ return Py_NewRef(value);
+}
+#endif
+
+
+// gh-91248 added PyFrame_GetVarString() to Python 3.12.0a2
+#if PY_VERSION_HEX < 0x030C00A2 && !defined(PYPY_VERSION)
+static inline PyObject*
+PyFrame_GetVarString(PyFrameObject *frame, const char *name)
+{
+ PyObject *name_obj, *value;
+ name_obj = PyUnicode_FromString(name);
+ if (name_obj == NULL) {
+ return NULL;
+ }
+ value = PyFrame_GetVar(frame, name_obj);
+ Py_DECREF(name_obj);
+ return value;
+}
+#endif
+
+
+#if defined(PYPY_VERSION)
+static inline PyInterpreterState *
+PyThreadState_GetInterpreter(PyThreadState *tstate)
+{
+ assert(tstate != _Py_NULL);
+ return tstate->interp;
+}
+#endif
+
+#if !defined(PYPY_VERSION)
+static inline PyFrameObject*
+_PyThreadState_GetFrameBorrow(PyThreadState *tstate)
+{
+ PyFrameObject *frame = PyThreadState_GetFrame(tstate);
+ Py_XDECREF(frame);
+ return frame;
+}
+#endif
+
+
+#if defined(PYPY_VERSION)
+static inline PyInterpreterState* PyInterpreterState_Get(void)
+{
+ PyThreadState *tstate;
+ PyInterpreterState *interp;
+
+ tstate = PyThreadState_GET();
+ if (tstate == _Py_NULL) {
+ Py_FatalError("GIL released (tstate is NULL)");
+ }
+ interp = tstate->interp;
+ if (interp == _Py_NULL) {
+ Py_FatalError("no current interpreter");
+ }
+ return interp;
+}
+#endif
+
+// bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2
+#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION)
+static inline void PyThreadState_EnterTracing(PyThreadState *tstate)
+{
+ tstate->tracing++;
+#if PY_VERSION_HEX >= 0x030A00A1
+ tstate->cframe->use_tracing = 0;
+#else
+ tstate->use_tracing = 0;
+#endif
+}
+#endif
+
+// bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2
+#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION)
+static inline void PyThreadState_LeaveTracing(PyThreadState *tstate)
+{
+ int use_tracing = (tstate->c_tracefunc != _Py_NULL
+ || tstate->c_profilefunc != _Py_NULL);
+ tstate->tracing--;
+#if PY_VERSION_HEX >= 0x030A00A1
+ tstate->cframe->use_tracing = use_tracing;
+#else
+ tstate->use_tracing = use_tracing;
+#endif
+}
+#endif
+
+
+// bpo-1635741 added PyModule_AddObjectRef() to Python 3.10.0a3
+#if PY_VERSION_HEX < 0x030A00A3
+static inline int
+PyModule_AddObjectRef(PyObject *module, const char *name, PyObject *value)
+{
+ int res;
+
+ if (!value && !PyErr_Occurred()) {
+ // PyModule_AddObject() raises TypeError in this case
+ PyErr_SetString(PyExc_SystemError,
+ "PyModule_AddObjectRef() must be called "
+ "with an exception raised if value is NULL");
+ return -1;
+ }
+
+ Py_XINCREF(value);
+ res = PyModule_AddObject(module, name, value);
+ if (res < 0) {
+ Py_XDECREF(value);
+ }
+ return res;
+}
+#endif
+
+
+// bpo-46906 added PyFloat_Pack2() and PyFloat_Unpack2() to Python 3.11a7.
+// Python 3.11a2 moved _PyFloat_Pack2() and _PyFloat_Unpack2() to the internal
+// C API: Python 3.11a2-3.11a6 versions are not supported.
+#if PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION)
+static inline int PyFloat_Pack2(double x, char *p, int le)
+{ return _PyFloat_Pack2(x, (unsigned char*)p, le); }
+
+static inline double PyFloat_Unpack2(const char *p, int le)
+{ return _PyFloat_Unpack2((const unsigned char *)p, le); }
+#endif
+
+
+// bpo-46906 added PyFloat_Pack4(), PyFloat_Pack8(), PyFloat_Unpack4() and
+// PyFloat_Unpack8() to Python 3.11a7.
+// Python 3.11a2 moved _PyFloat_Pack4(), _PyFloat_Pack8(), _PyFloat_Unpack4()
+// and _PyFloat_Unpack8() to the internal C API: Python 3.11a2-3.11a6 versions
+// are not supported.
+#if PY_VERSION_HEX <= 0x030B00A1 && !defined(PYPY_VERSION)
+static inline int PyFloat_Pack4(double x, char *p, int le)
+{ return _PyFloat_Pack4(x, (unsigned char*)p, le); }
+
+static inline int PyFloat_Pack8(double x, char *p, int le)
+{ return _PyFloat_Pack8(x, (unsigned char*)p, le); }
+
+static inline double PyFloat_Unpack4(const char *p, int le)
+{ return _PyFloat_Unpack4((const unsigned char *)p, le); }
+
+static inline double PyFloat_Unpack8(const char *p, int le)
+{ return _PyFloat_Unpack8((const unsigned char *)p, le); }
+#endif
+
+
+// gh-92154 added PyCode_GetCode() to Python 3.11.0b1
+#if PY_VERSION_HEX < 0x030B00B1 && !defined(PYPY_VERSION)
+static inline PyObject* PyCode_GetCode(PyCodeObject *code)
+{
+ return Py_NewRef(code->co_code);
+}
+#endif
+
+
+// gh-95008 added PyCode_GetVarnames() to Python 3.11.0rc1
+#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION)
+static inline PyObject* PyCode_GetVarnames(PyCodeObject *code)
+{
+ return Py_NewRef(code->co_varnames);
+}
+#endif
+
+// gh-95008 added PyCode_GetFreevars() to Python 3.11.0rc1
+#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION)
+static inline PyObject* PyCode_GetFreevars(PyCodeObject *code)
+{
+ return Py_NewRef(code->co_freevars);
+}
+#endif
+
+// gh-95008 added PyCode_GetCellvars() to Python 3.11.0rc1
+#if PY_VERSION_HEX < 0x030B00C1 && !defined(PYPY_VERSION)
+static inline PyObject* PyCode_GetCellvars(PyCodeObject *code)
+{
+ return Py_NewRef(code->co_cellvars);
+}
+#endif
+
+
+// gh-105922 added PyImport_AddModuleRef() to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A0
+static inline PyObject* PyImport_AddModuleRef(const char *name)
+{
+ return Py_XNewRef(PyImport_AddModule(name));
+}
+#endif
+
+
+// gh-105927 added PyWeakref_GetRef() to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D0000
+static inline int PyWeakref_GetRef(PyObject *ref, PyObject **pobj)
+{
+ PyObject *obj;
+ if (ref != NULL && !PyWeakref_Check(ref)) {
+ *pobj = NULL;
+ PyErr_SetString(PyExc_TypeError, "expected a weakref");
+ return -1;
+ }
+ obj = PyWeakref_GetObject(ref);
+ if (obj == NULL) {
+ // SystemError if ref is NULL
+ *pobj = NULL;
+ return -1;
+ }
+ if (obj == Py_None) {
+ *pobj = NULL;
+ return 0;
+ }
+ *pobj = Py_NewRef(obj);
+ return (*pobj != NULL);
+}
+#endif
+
+
+// gh-106521 added PyObject_GetOptionalAttr() and
+// PyObject_GetOptionalAttrString() to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A1
+static inline int
+PyObject_GetOptionalAttr(PyObject *obj, PyObject *attr_name, PyObject **result)
+{
+ return _PyObject_LookupAttr(obj, attr_name, result);
+}
+
+static inline int
+PyObject_GetOptionalAttrString(PyObject *obj, const char *attr_name, PyObject **result)
+{
+ PyObject *name_obj;
+ int rc;
+ name_obj = PyUnicode_FromString(attr_name);
+ if (name_obj == NULL) {
+ *result = NULL;
+ return -1;
+ }
+ rc = PyObject_GetOptionalAttr(obj, name_obj, result);
+ Py_DECREF(name_obj);
+ return rc;
+}
+#endif
+
+
+// gh-106307 added PyObject_GetOptionalAttr() and
+// PyMapping_GetOptionalItemString() to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A1
+static inline int
+PyMapping_GetOptionalItem(PyObject *obj, PyObject *key, PyObject **result)
+{
+ *result = PyObject_GetItem(obj, key);
+ if (*result) {
+ return 1;
+ }
+ if (!PyErr_ExceptionMatches(PyExc_KeyError)) {
+ return -1;
+ }
+ PyErr_Clear();
+ return 0;
+}
+
+static inline int
+PyMapping_GetOptionalItemString(PyObject *obj, const char *key, PyObject **result)
+{
+ PyObject *key_obj;
+ int rc;
+ key_obj = PyUnicode_FromString(key);
+ if (key_obj == NULL) {
+ *result = NULL;
+ return -1;
+ }
+ rc = PyMapping_GetOptionalItem(obj, key_obj, result);
+ Py_DECREF(key_obj);
+ return rc;
+}
+#endif
+
+// gh-108511 added PyMapping_HasKeyWithError() and
+// PyMapping_HasKeyStringWithError() to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A1
+static inline int
+PyMapping_HasKeyWithError(PyObject *obj, PyObject *key)
+{
+ PyObject *res;
+ int rc = PyMapping_GetOptionalItem(obj, key, &res);
+ Py_XDECREF(res);
+ return rc;
+}
+
+static inline int
+PyMapping_HasKeyStringWithError(PyObject *obj, const char *key)
+{
+ PyObject *res;
+ int rc = PyMapping_GetOptionalItemString(obj, key, &res);
+ Py_XDECREF(res);
+ return rc;
+}
+#endif
+
+
+// gh-108511 added PyObject_HasAttrWithError() and
+// PyObject_HasAttrStringWithError() to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A1
+static inline int
+PyObject_HasAttrWithError(PyObject *obj, PyObject *attr)
+{
+ PyObject *res;
+ int rc = PyObject_GetOptionalAttr(obj, attr, &res);
+ Py_XDECREF(res);
+ return rc;
+}
+
+static inline int
+PyObject_HasAttrStringWithError(PyObject *obj, const char *attr)
+{
+ PyObject *res;
+ int rc = PyObject_GetOptionalAttrString(obj, attr, &res);
+ Py_XDECREF(res);
+ return rc;
+}
+#endif
+
+
+// gh-106004 added PyDict_GetItemRef() and PyDict_GetItemStringRef()
+// to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A1
+static inline int
+PyDict_GetItemRef(PyObject *mp, PyObject *key, PyObject **result)
+{
+ PyObject *item = PyDict_GetItemWithError(mp, key);
+ if (item != NULL) {
+ *result = Py_NewRef(item);
+ return 1; // found
+ }
+ if (!PyErr_Occurred()) {
+ *result = NULL;
+ return 0; // not found
+ }
+ *result = NULL;
+ return -1;
+}
+
+static inline int
+PyDict_GetItemStringRef(PyObject *mp, const char *key, PyObject **result)
+{
+ int res;
+ PyObject *key_obj = PyUnicode_FromString(key);
+ if (key_obj == NULL) {
+ *result = NULL;
+ return -1;
+ }
+ res = PyDict_GetItemRef(mp, key_obj, result);
+ Py_DECREF(key_obj);
+ return res;
+}
+#endif
+
+
+// gh-106307 added PyModule_Add() to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A1
+static inline int
+PyModule_Add(PyObject *mod, const char *name, PyObject *value)
+{
+ int res = PyModule_AddObjectRef(mod, name, value);
+ Py_XDECREF(value);
+ return res;
+}
+#endif
+
+
+// gh-108014 added Py_IsFinalizing() to Python 3.13.0a1
+// bpo-1856 added _Py_Finalizing to Python 3.2.1b1.
+// _Py_IsFinalizing() was added to PyPy 7.3.0.
+#if (PY_VERSION_HEX < 0x030D00A1) \
+ && (!defined(PYPY_VERSION_NUM) || PYPY_VERSION_NUM >= 0x7030000)
+static inline int Py_IsFinalizing(void)
+{
+ return _Py_IsFinalizing();
+}
+#endif
+
+
+// gh-108323 added PyDict_ContainsString() to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A1
+static inline int PyDict_ContainsString(PyObject *op, const char *key)
+{
+ PyObject *key_obj = PyUnicode_FromString(key);
+ if (key_obj == NULL) {
+ return -1;
+ }
+ int res = PyDict_Contains(op, key_obj);
+ Py_DECREF(key_obj);
+ return res;
+}
+#endif
+
+
+// gh-108445 added PyLong_AsInt() to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A1
+static inline int PyLong_AsInt(PyObject *obj)
+{
+#ifdef PYPY_VERSION
+ long value = PyLong_AsLong(obj);
+ if (value == -1 && PyErr_Occurred()) {
+ return -1;
+ }
+ if (value < (long)INT_MIN || (long)INT_MAX < value) {
+ PyErr_SetString(PyExc_OverflowError,
+ "Python int too large to convert to C int");
+ return -1;
+ }
+ return (int)value;
+#else
+ return _PyLong_AsInt(obj);
+#endif
+}
+#endif
+
+
+// gh-107073 added PyObject_VisitManagedDict() to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A1
+static inline int
+PyObject_VisitManagedDict(PyObject *obj, visitproc visit, void *arg)
+{
+ PyObject **dict = _PyObject_GetDictPtr(obj);
+ if (*dict == NULL) {
+ return -1;
+ }
+ Py_VISIT(*dict);
+ return 0;
+}
+
+static inline void
+PyObject_ClearManagedDict(PyObject *obj)
+{
+ PyObject **dict = _PyObject_GetDictPtr(obj);
+ if (*dict == NULL) {
+ return;
+ }
+ Py_CLEAR(*dict);
+}
+#endif
+
+// gh-108867 added PyThreadState_GetUnchecked() to Python 3.13.0a1.
+#if PY_VERSION_HEX < 0x030D00A1
+static inline PyThreadState*
+PyThreadState_GetUnchecked(void)
+{
+ return _PyThreadState_UncheckedGet();
+}
+#endif
+
+// gh-110289 added PyUnicode_EqualToUTF8() and PyUnicode_EqualToUTF8AndSize()
+// to Python 3.13.0a1
+#if PY_VERSION_HEX < 0x030D00A1
+static inline int
+PyUnicode_EqualToUTF8AndSize(PyObject *unicode, const char *str, Py_ssize_t str_len)
+{
+ Py_ssize_t len;
+ const void *utf8;
+ PyObject *exc_type, *exc_value, *exc_tb;
+ int res;
+
+ // API cannot report errors so save/restore the exception
+ PyErr_Fetch(&exc_type, &exc_value, &exc_tb);
+
+ if (PyUnicode_IS_ASCII(unicode)) {
+ utf8 = PyUnicode_DATA(unicode);
+ len = PyUnicode_GET_LENGTH(unicode);
+ }
+ else {
+ utf8 = PyUnicode_AsUTF8AndSize(unicode, &len);
+ if (utf8 == NULL) {
+ // Memory allocation failure. The API cannot report error,
+ // so ignore the exception and return 0.
+ res = 0;
+ goto done;
+ }
+ }
+
+ if (len != str_len) {
+ res = 0;
+ goto done;
+ }
+ res = (memcmp(utf8, str, (size_t)len) == 0);
+
+done:
+ PyErr_Restore(exc_type, exc_value, exc_tb);
+ return res;
+}
+
+static inline int
+PyUnicode_EqualToUTF8(PyObject *unicode, const char *str)
+{
+ return PyUnicode_EqualToUTF8AndSize(unicode, str, (Py_ssize_t)strlen(str));
+}
+#endif
+
+
+// gh-111138 added PyList_Extend() and PyList_Clear() to Python 3.13.0a2
+#if PY_VERSION_HEX < 0x030D00A2
+static inline int
+PyList_Extend(PyObject *list, PyObject *iterable)
+{
+ return PyList_SetSlice(list, PY_SSIZE_T_MAX, PY_SSIZE_T_MAX, iterable);
+}
+
+static inline int
+PyList_Clear(PyObject *list)
+{
+ return PyList_SetSlice(list, 0, PY_SSIZE_T_MAX, NULL);
+}
+#endif
+
+// gh-111262 added PyDict_Pop() and PyDict_PopString() to Python 3.13.0a2
+#if PY_VERSION_HEX < 0x030D00A2
+static inline int
+PyDict_Pop(PyObject *dict, PyObject *key, PyObject **result)
+{
+ PyObject *value;
+
+ if (!PyDict_Check(dict)) {
+ PyErr_BadInternalCall();
+ if (result) {
+ *result = NULL;
+ }
+ return -1;
+ }
+
+ // Python 3.13.0a1 removed _PyDict_Pop().
+#if defined(PYPY_VERSION) || PY_VERSION_HEX >= 0x030D0000
+ value = PyObject_CallMethod(dict, "pop", "O", key);
+#else
+ value = _PyDict_Pop(dict, key, NULL);
+#endif
+ if (value == NULL) {
+ if (result) {
+ *result = NULL;
+ }
+ if (PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_KeyError)) {
+ return -1;
+ }
+ PyErr_Clear();
+ return 0;
+ }
+ if (result) {
+ *result = value;
+ }
+ else {
+ Py_DECREF(value);
+ }
+ return 1;
+}
+
+static inline int
+PyDict_PopString(PyObject *dict, const char *key, PyObject **result)
+{
+ PyObject *key_obj = PyUnicode_FromString(key);
+ if (key_obj == NULL) {
+ if (result != NULL) {
+ *result = NULL;
+ }
+ return -1;
+ }
+
+ int res = PyDict_Pop(dict, key_obj, result);
+ Py_DECREF(key_obj);
+ return res;
+}
+#endif
+
+
+// gh-111545 added Py_HashPointer() to Python 3.13.0a3
+#if PY_VERSION_HEX < 0x030D00A3
+static inline Py_hash_t Py_HashPointer(const void *ptr)
+{
+#if !defined(PYPY_VERSION)
+ return _Py_HashPointer(ptr);
+#else
+ return _Py_HashPointer(_Py_CAST(void*, ptr));
+#endif
+}
+#endif
+
+
+// Python 3.13a4 added a PyTime API.
+#if PY_VERSION_HEX < 0x030D00A4
+typedef _PyTime_t PyTime_t;
+#define PyTime_MIN _PyTime_MIN
+#define PyTime_MAX _PyTime_MAX
+
+static inline double PyTime_AsSecondsDouble(PyTime_t t)
+{ return _PyTime_AsSecondsDouble(t); }
+
+static inline int PyTime_Monotonic(PyTime_t *result)
+{ return _PyTime_GetMonotonicClockWithInfo(result, NULL); }
+
+static inline int PyTime_Time(PyTime_t *result)
+{ return _PyTime_GetSystemClockWithInfo(result, NULL); }
+
+static inline int PyTime_PerfCounter(PyTime_t *result)
+{
+#if !defined(PYPY_VERSION)
+ return _PyTime_GetPerfCounterWithInfo(result, NULL);
+#else
+ // Call time.perf_counter_ns() and convert Python int object to PyTime_t.
+ // Cache time.perf_counter_ns() function for best performance.
+ static PyObject *func = NULL;
+ if (func == NULL) {
+ PyObject *mod = PyImport_ImportModule("time");
+ if (mod == NULL) {
+ return -1;
+ }
+
+ func = PyObject_GetAttrString(mod, "perf_counter_ns");
+ Py_DECREF(mod);
+ if (func == NULL) {
+ return -1;
+ }
+ }
+
+ PyObject *res = PyObject_CallNoArgs(func);
+ if (res == NULL) {
+ return -1;
+ }
+ long long value = PyLong_AsLongLong(res);
+ Py_DECREF(res);
+
+ if (value == -1 && PyErr_Occurred()) {
+ return -1;
+ }
+
+ Py_BUILD_ASSERT(sizeof(value) >= sizeof(PyTime_t));
+ *result = (PyTime_t)value;
+ return 0;
+#endif
+}
+
+#endif
+
+// gh-111389 added hash constants to Python 3.13.0a5. These constants were
+// added first as private macros to Python 3.4.0b1 and PyPy 7.3.9.
+#if (!defined(PyHASH_BITS) \
+ && (!defined(PYPY_VERSION) \
+ || (defined(PYPY_VERSION) && PYPY_VERSION_NUM >= 0x07090000)))
+# define PyHASH_BITS _PyHASH_BITS
+# define PyHASH_MODULUS _PyHASH_MODULUS
+# define PyHASH_INF _PyHASH_INF
+# define PyHASH_IMAG _PyHASH_IMAG
+#endif
+
+
+// gh-115754 added Py_GetConstant() and Py_GetConstantBorrowed()
+// to Python 3.13.0a6
+#if PY_VERSION_HEX < 0x030D00A6 && !defined(Py_CONSTANT_NONE)
+
+#define Py_CONSTANT_NONE 0
+#define Py_CONSTANT_FALSE 1
+#define Py_CONSTANT_TRUE 2
+#define Py_CONSTANT_ELLIPSIS 3
+#define Py_CONSTANT_NOT_IMPLEMENTED 4
+#define Py_CONSTANT_ZERO 5
+#define Py_CONSTANT_ONE 6
+#define Py_CONSTANT_EMPTY_STR 7
+#define Py_CONSTANT_EMPTY_BYTES 8
+#define Py_CONSTANT_EMPTY_TUPLE 9
+
+static inline PyObject* Py_GetConstant(unsigned int constant_id)
+{
+ static PyObject* constants[Py_CONSTANT_EMPTY_TUPLE + 1] = {NULL};
+
+ if (constants[Py_CONSTANT_NONE] == NULL) {
+ constants[Py_CONSTANT_NONE] = Py_None;
+ constants[Py_CONSTANT_FALSE] = Py_False;
+ constants[Py_CONSTANT_TRUE] = Py_True;
+ constants[Py_CONSTANT_ELLIPSIS] = Py_Ellipsis;
+ constants[Py_CONSTANT_NOT_IMPLEMENTED] = Py_NotImplemented;
+
+ constants[Py_CONSTANT_ZERO] = PyLong_FromLong(0);
+ if (constants[Py_CONSTANT_ZERO] == NULL) {
+ goto fatal_error;
+ }
+
+ constants[Py_CONSTANT_ONE] = PyLong_FromLong(1);
+ if (constants[Py_CONSTANT_ONE] == NULL) {
+ goto fatal_error;
+ }
+
+ constants[Py_CONSTANT_EMPTY_STR] = PyUnicode_FromStringAndSize("", 0);
+ if (constants[Py_CONSTANT_EMPTY_STR] == NULL) {
+ goto fatal_error;
+ }
+
+ constants[Py_CONSTANT_EMPTY_BYTES] = PyBytes_FromStringAndSize("", 0);
+ if (constants[Py_CONSTANT_EMPTY_BYTES] == NULL) {
+ goto fatal_error;
+ }
+
+ constants[Py_CONSTANT_EMPTY_TUPLE] = PyTuple_New(0);
+ if (constants[Py_CONSTANT_EMPTY_TUPLE] == NULL) {
+ goto fatal_error;
+ }
+ // goto dance to avoid compiler warnings about Py_FatalError()
+ goto init_done;
+
+fatal_error:
+ // This case should never happen
+ Py_FatalError("Py_GetConstant() failed to get constants");
+ }
+
+init_done:
+ if (constant_id <= Py_CONSTANT_EMPTY_TUPLE) {
+ return Py_NewRef(constants[constant_id]);
+ }
+ else {
+ PyErr_BadInternalCall();
+ return NULL;
+ }
+}
+
+static inline PyObject* Py_GetConstantBorrowed(unsigned int constant_id)
+{
+ PyObject *obj = Py_GetConstant(constant_id);
+ Py_XDECREF(obj);
+ return obj;
+}
+#endif
+
+
+// gh-114329 added PyList_GetItemRef() to Python 3.13.0a4
+#if PY_VERSION_HEX < 0x030D00A4
+static inline PyObject *
+PyList_GetItemRef(PyObject *op, Py_ssize_t index)
+{
+ PyObject *item = PyList_GetItem(op, index);
+ Py_XINCREF(item);
+ return item;
+}
+#endif
+
+
+// gh-112066 added PyDict_SetDefaultRef() to Python 3.13.0a4
+#if PY_VERSION_HEX < 0x030D00A4
+static inline int
+PyDict_SetDefaultRef(PyObject *d, PyObject *key, PyObject *default_value,
+ PyObject **result)
+{
+ PyObject *value;
+ if (PyDict_GetItemRef(d, key, &value) < 0) {
+ // get error
+ if (result) {
+ *result = NULL;
+ }
+ return -1;
+ }
+ if (value != NULL) {
+ // present
+ if (result) {
+ *result = value;
+ }
+ else {
+ Py_DECREF(value);
+ }
+ return 1;
+ }
+
+ // missing: set the item
+ if (PyDict_SetItem(d, key, default_value) < 0) {
+ // set error
+ if (result) {
+ *result = NULL;
+ }
+ return -1;
+ }
+ if (result) {
+ *result = Py_NewRef(default_value);
+ }
+ return 0;
+}
+#endif
+
+#if PY_VERSION_HEX < 0x030D00B3
+# define Py_BEGIN_CRITICAL_SECTION(op) {
+# define Py_END_CRITICAL_SECTION() }
+# define Py_BEGIN_CRITICAL_SECTION2(a, b) {
+# define Py_END_CRITICAL_SECTION2() }
+#endif
+
+#if PY_VERSION_HEX < 0x030E0000 && !defined(PYPY_VERSION)
+typedef struct PyUnicodeWriter PyUnicodeWriter;
+
+static inline void PyUnicodeWriter_Discard(PyUnicodeWriter *writer)
+{
+ _PyUnicodeWriter_Dealloc((_PyUnicodeWriter*)writer);
+ PyMem_Free(writer);
+}
+
+static inline PyUnicodeWriter* PyUnicodeWriter_Create(Py_ssize_t length)
+{
+ if (length < 0) {
+ PyErr_SetString(PyExc_ValueError,
+ "length must be positive");
+ return NULL;
+ }
+
+ const size_t size = sizeof(_PyUnicodeWriter);
+ PyUnicodeWriter *pub_writer = (PyUnicodeWriter *)PyMem_Malloc(size);
+ if (pub_writer == _Py_NULL) {
+ PyErr_NoMemory();
+ return _Py_NULL;
+ }
+ _PyUnicodeWriter *writer = (_PyUnicodeWriter *)pub_writer;
+
+ _PyUnicodeWriter_Init(writer);
+ if (_PyUnicodeWriter_Prepare(writer, length, 127) < 0) {
+ PyUnicodeWriter_Discard(pub_writer);
+ return NULL;
+ }
+ writer->overallocate = 1;
+ return pub_writer;
+}
+
+static inline PyObject* PyUnicodeWriter_Finish(PyUnicodeWriter *writer)
+{
+ PyObject *str = _PyUnicodeWriter_Finish((_PyUnicodeWriter*)writer);
+ assert(((_PyUnicodeWriter*)writer)->buffer == NULL);
+ PyMem_Free(writer);
+ return str;
+}
+
+static inline int
+PyUnicodeWriter_WriteChar(PyUnicodeWriter *writer, Py_UCS4 ch)
+{
+ if (ch > 0x10ffff) {
+ PyErr_SetString(PyExc_ValueError,
+ "character must be in range(0x110000)");
+ return -1;
+ }
+
+ return _PyUnicodeWriter_WriteChar((_PyUnicodeWriter*)writer, ch);
+}
+
+static inline int
+PyUnicodeWriter_WriteStr(PyUnicodeWriter *writer, PyObject *obj)
+{
+ PyObject *str = PyObject_Str(obj);
+ if (str == NULL) {
+ return -1;
+ }
+
+ int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
+ Py_DECREF(str);
+ return res;
+}
+
+static inline int
+PyUnicodeWriter_WriteRepr(PyUnicodeWriter *writer, PyObject *obj)
+{
+ PyObject *str = PyObject_Repr(obj);
+ if (str == NULL) {
+ return -1;
+ }
+
+ int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
+ Py_DECREF(str);
+ return res;
+}
+
+static inline int
+PyUnicodeWriter_WriteUTF8(PyUnicodeWriter *writer,
+ const char *str, Py_ssize_t size)
+{
+ if (size < 0) {
+ size = (Py_ssize_t)strlen(str);
+ }
+
+ PyObject *str_obj = PyUnicode_FromStringAndSize(str, size);
+ if (str_obj == _Py_NULL) {
+ return -1;
+ }
+
+ int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj);
+ Py_DECREF(str_obj);
+ return res;
+}
+
+static inline int
+PyUnicodeWriter_WriteWideChar(PyUnicodeWriter *writer,
+ const wchar_t *str, Py_ssize_t size)
+{
+ if (size < 0) {
+ size = (Py_ssize_t)wcslen(str);
+ }
+
+ PyObject *str_obj = PyUnicode_FromWideChar(str, size);
+ if (str_obj == _Py_NULL) {
+ return -1;
+ }
+
+ int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str_obj);
+ Py_DECREF(str_obj);
+ return res;
+}
+
+static inline int
+PyUnicodeWriter_WriteSubstring(PyUnicodeWriter *writer, PyObject *str,
+ Py_ssize_t start, Py_ssize_t end)
+{
+ if (!PyUnicode_Check(str)) {
+ PyErr_Format(PyExc_TypeError, "expect str, not %T", str);
+ return -1;
+ }
+ if (start < 0 || start > end) {
+ PyErr_Format(PyExc_ValueError, "invalid start argument");
+ return -1;
+ }
+ if (end > PyUnicode_GET_LENGTH(str)) {
+ PyErr_Format(PyExc_ValueError, "invalid end argument");
+ return -1;
+ }
+
+ return _PyUnicodeWriter_WriteSubstring((_PyUnicodeWriter*)writer, str,
+ start, end);
+}
+
+static inline int
+PyUnicodeWriter_Format(PyUnicodeWriter *writer, const char *format, ...)
+{
+ va_list vargs;
+ va_start(vargs, format);
+ PyObject *str = PyUnicode_FromFormatV(format, vargs);
+ va_end(vargs);
+ if (str == _Py_NULL) {
+ return -1;
+ }
+
+ int res = _PyUnicodeWriter_WriteStr((_PyUnicodeWriter*)writer, str);
+ Py_DECREF(str);
+ return res;
+}
+#endif // PY_VERSION_HEX < 0x030E0000
+
+// gh-116560 added PyLong_GetSign() to Python 3.14.0a0
+#if PY_VERSION_HEX < 0x030E00A0
+static inline int PyLong_GetSign(PyObject *obj, int *sign)
+{
+ if (!PyLong_Check(obj)) {
+ PyErr_Format(PyExc_TypeError, "expect int, got %s", Py_TYPE(obj)->tp_name);
+ return -1;
+ }
+
+ *sign = _PyLong_Sign(obj);
+ return 0;
+}
+#endif
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif // PYTHONCAPI_COMPAT
diff --git a/contrib/python/multidict/tests/conftest.py b/contrib/python/multidict/tests/conftest.py
index 0d003950cd..a37f58f2d1 100644
--- a/contrib/python/multidict/tests/conftest.py
+++ b/contrib/python/multidict/tests/conftest.py
@@ -3,26 +3,22 @@ from __future__ import annotations
import argparse
import pickle
from dataclasses import dataclass
+from functools import cached_property
from importlib import import_module
-from sys import version_info as _version_info
from types import ModuleType
-from typing import Callable, Type
-
-try:
- from functools import cached_property # Python 3.8+
-except ImportError:
- from functools import lru_cache as _lru_cache
-
- def cached_property(func):
- return property(_lru_cache()(func))
-
+from typing import Callable, Type, Union
import pytest
-from multidict import MultiMapping, MutableMultiMapping
+from multidict import (
+ CIMultiDict,
+ MultiDict,
+ MultiDictProxy,
+ MultiMapping,
+ MutableMultiMapping,
+)
C_EXT_MARK = pytest.mark.c_extension
-PY_38_AND_BELOW = _version_info < (3, 9)
@dataclass(frozen=True)
@@ -51,7 +47,7 @@ class MultidictImplementation:
importable_module = "_multidict_py" if self.is_pure_python else "_multidict"
return import_module(f"multidict.{importable_module}")
- def __str__(self):
+ def __str__(self) -> str:
"""Render the implementation facade instance as a string."""
return f"{self.tag}-module"
@@ -69,7 +65,7 @@ class MultidictImplementation:
)
def multidict_implementation(request: pytest.FixtureRequest) -> MultidictImplementation:
"""Return a multidict variant facade."""
- return request.param
+ return request.param # type: ignore[no-any-return]
@pytest.fixture(scope="session")
@@ -87,7 +83,7 @@ def multidict_module(
)
def any_multidict_class_name(request: pytest.FixtureRequest) -> str:
"""Return a class name of a mutable multidict implementation."""
- return request.param
+ return request.param # type: ignore[no-any-return]
@pytest.fixture(scope="session")
@@ -96,29 +92,29 @@ def any_multidict_class(
multidict_module: ModuleType,
) -> Type[MutableMultiMapping[str]]:
"""Return a class object of a mutable multidict implementation."""
- return getattr(multidict_module, any_multidict_class_name)
+ return getattr(multidict_module, any_multidict_class_name) # type: ignore[no-any-return]
@pytest.fixture(scope="session")
def case_sensitive_multidict_class(
multidict_module: ModuleType,
-) -> Type[MutableMultiMapping[str]]:
+) -> Type[MultiDict[str]]:
"""Return a case-sensitive mutable multidict class."""
- return multidict_module.MultiDict
+ return multidict_module.MultiDict # type: ignore[no-any-return]
@pytest.fixture(scope="session")
def case_insensitive_multidict_class(
multidict_module: ModuleType,
-) -> Type[MutableMultiMapping[str]]:
+) -> Type[CIMultiDict[str]]:
"""Return a case-insensitive mutable multidict class."""
- return multidict_module.CIMultiDict
+ return multidict_module.CIMultiDict # type: ignore[no-any-return]
@pytest.fixture(scope="session")
def case_insensitive_str_class(multidict_module: ModuleType) -> Type[str]:
"""Return a case-insensitive string class."""
- return multidict_module.istr
+ return multidict_module.istr # type: ignore[no-any-return]
@pytest.fixture(scope="session")
@@ -133,7 +129,7 @@ def any_multidict_proxy_class(
multidict_module: ModuleType,
) -> Type[MultiMapping[str]]:
"""Return an immutable multidict implementation class object."""
- return getattr(multidict_module, any_multidict_proxy_class_name)
+ return getattr(multidict_module, any_multidict_proxy_class_name) # type: ignore[no-any-return]
@pytest.fixture(scope="session")
@@ -141,7 +137,7 @@ def case_sensitive_multidict_proxy_class(
multidict_module: ModuleType,
) -> Type[MutableMultiMapping[str]]:
"""Return a case-sensitive immutable multidict class."""
- return multidict_module.MultiDictProxy
+ return multidict_module.MultiDictProxy # type: ignore[no-any-return]
@pytest.fixture(scope="session")
@@ -149,13 +145,15 @@ def case_insensitive_multidict_proxy_class(
multidict_module: ModuleType,
) -> Type[MutableMultiMapping[str]]:
"""Return a case-insensitive immutable multidict class."""
- return multidict_module.CIMultiDictProxy
+ return multidict_module.CIMultiDictProxy # type: ignore[no-any-return]
@pytest.fixture(scope="session")
-def multidict_getversion_callable(multidict_module: ModuleType) -> Callable:
+def multidict_getversion_callable(
+ multidict_module: ModuleType,
+) -> Callable[[Union[MultiDict[object], MultiDictProxy[object]]], int]:
"""Return a ``getversion()`` function for current implementation."""
- return multidict_module.getversion
+ return multidict_module.getversion # type: ignore[no-any-return]
def pytest_addoption(
@@ -171,20 +169,12 @@ def pytest_addoption(
parser.addoption(
"--c-extensions", # disabled with `--no-c-extensions`
- action="store_true" if PY_38_AND_BELOW else argparse.BooleanOptionalAction,
+ action=argparse.BooleanOptionalAction,
default=True,
dest="c_extensions",
help="Test C-extensions (on by default)",
)
- if PY_38_AND_BELOW:
- parser.addoption(
- "--no-c-extensions",
- action="store_false",
- dest="c_extensions",
- help="Skip testing C-extensions (on by default)",
- )
-
def pytest_collection_modifyitems(
session: pytest.Session,
@@ -197,8 +187,8 @@ def pytest_collection_modifyitems(
if test_c_extensions:
return
- selected_tests = []
- deselected_tests = []
+ selected_tests: list[pytest.Item] = []
+ deselected_tests: list[pytest.Item] = []
for item in items:
c_ext = item.get_closest_marker(C_EXT_MARK.name) is not None
@@ -218,7 +208,7 @@ def pytest_configure(config: pytest.Config) -> None:
)
-def pytest_generate_tests(metafunc):
+def pytest_generate_tests(metafunc: pytest.Metafunc) -> None:
if "pickle_protocol" in metafunc.fixturenames:
metafunc.parametrize(
"pickle_protocol", list(range(pickle.HIGHEST_PROTOCOL + 1)), scope="session"
diff --git a/contrib/python/multidict/tests/gen_pickles.py b/contrib/python/multidict/tests/gen_pickles.py
index 4e0d268bed..72f41b7565 100644
--- a/contrib/python/multidict/tests/gen_pickles.py
+++ b/contrib/python/multidict/tests/gen_pickles.py
@@ -1,18 +1,22 @@
import pickle
from importlib import import_module
from pathlib import Path
+from typing import Union
+
+from multidict import CIMultiDict, MultiDict
TESTS_DIR = Path(__file__).parent.resolve()
+_MD_Classes = Union[type[MultiDict[int]], type[CIMultiDict[int]]]
-def write(tag, cls, proto):
+def write(tag: str, cls: _MD_Classes, proto: int) -> None:
d = cls([("a", 1), ("a", 2)])
file_basename = f"{cls.__name__.lower()}-{tag}"
with (TESTS_DIR / f"{file_basename}.pickle.{proto}").open("wb") as f:
pickle.dump(d, f, proto)
-def generate():
+def generate() -> None:
_impl_map = {
"c-extension": "_multidict",
"pure-python": "_multidict_py",
diff --git a/contrib/python/multidict/tests/test_abc.py b/contrib/python/multidict/tests/test_abc.py
index e18ad83f82..611d0fa8c3 100644
--- a/contrib/python/multidict/tests/test_abc.py
+++ b/contrib/python/multidict/tests/test_abc.py
@@ -1,94 +1,32 @@
from collections.abc import Mapping, MutableMapping
-import pytest
+from multidict import (
+ MultiDict,
+ MultiDictProxy,
+ MultiMapping,
+ MutableMultiMapping,
+)
-from multidict import MultiMapping, MutableMultiMapping
-
-def test_abc_inheritance():
+def test_abc_inheritance() -> None:
assert issubclass(MultiMapping, Mapping)
assert not issubclass(MultiMapping, MutableMapping)
assert issubclass(MutableMultiMapping, Mapping)
assert issubclass(MutableMultiMapping, MutableMapping)
-class A(MultiMapping):
- def __getitem__(self, key):
- pass
-
- def __iter__(self):
- pass
-
- def __len__(self):
- pass
-
- def getall(self, key, default=None):
- super().getall(key, default)
-
- def getone(self, key, default=None):
- super().getone(key, default)
-
-
-def test_abc_getall():
- with pytest.raises(KeyError):
- A().getall("key")
-
-
-def test_abc_getone():
- with pytest.raises(KeyError):
- A().getone("key")
-
-
-class B(A, MutableMultiMapping):
- def __setitem__(self, key, value):
- pass
-
- def __delitem__(self, key):
- pass
-
- def add(self, key, value):
- super().add(key, value)
-
- def extend(self, *args, **kwargs):
- super().extend(*args, **kwargs)
-
- def popall(self, key, default=None):
- super().popall(key, default)
-
- def popone(self, key, default=None):
- super().popone(key, default)
-
-
-def test_abc_add():
- with pytest.raises(NotImplementedError):
- B().add("key", "val")
-
-
-def test_abc_extend():
- with pytest.raises(NotImplementedError):
- B().extend()
-
-
-def test_abc_popone():
- with pytest.raises(KeyError):
- B().popone("key")
-
-
-def test_abc_popall():
- with pytest.raises(KeyError):
- B().popall("key")
-
-
-def test_multidict_inheritance(any_multidict_class):
+def test_multidict_inheritance(any_multidict_class: type[MultiDict[str]]) -> None:
assert issubclass(any_multidict_class, MultiMapping)
assert issubclass(any_multidict_class, MutableMultiMapping)
-def test_proxy_inheritance(any_multidict_proxy_class):
+def test_proxy_inheritance(
+ any_multidict_proxy_class: type[MultiDictProxy[str]],
+) -> None:
assert issubclass(any_multidict_proxy_class, MultiMapping)
assert not issubclass(any_multidict_proxy_class, MutableMultiMapping)
-def test_generic_type_in_runtime():
+def test_generic_type_in_runtime() -> None:
MultiMapping[str]
MutableMultiMapping[str]
diff --git a/contrib/python/multidict/tests/test_copy.py b/contrib/python/multidict/tests/test_copy.py
index cd926cdc1d..deff64db37 100644
--- a/contrib/python/multidict/tests/test_copy.py
+++ b/contrib/python/multidict/tests/test_copy.py
@@ -1,7 +1,13 @@
import copy
+from typing import Union
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
-def test_copy(any_multidict_class):
+_MD_Classes = Union[type[MultiDict[int]], type[CIMultiDict[int]]]
+_MDP_Classes = Union[type[MultiDictProxy[int]], type[CIMultiDictProxy[int]]]
+
+
+def test_copy(any_multidict_class: _MD_Classes) -> None:
d = any_multidict_class()
d["foo"] = 6
d2 = d.copy()
@@ -10,7 +16,9 @@ def test_copy(any_multidict_class):
assert d2["foo"] == 7
-def test_copy_proxy(any_multidict_class, any_multidict_proxy_class):
+def test_copy_proxy(
+ any_multidict_class: _MD_Classes, any_multidict_proxy_class: _MDP_Classes
+) -> None:
d = any_multidict_class()
d["foo"] = 6
p = any_multidict_proxy_class(d)
@@ -21,7 +29,7 @@ def test_copy_proxy(any_multidict_class, any_multidict_proxy_class):
assert d2["foo"] == 7
-def test_copy_std_copy(any_multidict_class):
+def test_copy_std_copy(any_multidict_class: _MD_Classes) -> None:
d = any_multidict_class()
d["foo"] = 6
d2 = copy.copy(d)
@@ -30,7 +38,7 @@ def test_copy_std_copy(any_multidict_class):
assert d2["foo"] == 7
-def test_ci_multidict_clone(any_multidict_class):
+def test_ci_multidict_clone(any_multidict_class: _MD_Classes) -> None:
d = any_multidict_class(foo=6)
d2 = any_multidict_class(d)
d2["foo"] = 7
diff --git a/contrib/python/multidict/tests/test_guard.py b/contrib/python/multidict/tests/test_guard.py
index 225da67c8d..c877fbf803 100644
--- a/contrib/python/multidict/tests/test_guard.py
+++ b/contrib/python/multidict/tests/test_guard.py
@@ -1,12 +1,10 @@
-from typing import Type
-
import pytest
-from multidict import MultiMapping
+from multidict import MultiDict
def test_guard_items(
- case_sensitive_multidict_class: Type[MultiMapping[str]],
+ case_sensitive_multidict_class: type[MultiDict[str]],
) -> None:
md = case_sensitive_multidict_class({"a": "b"})
it = iter(md.items())
@@ -16,7 +14,7 @@ def test_guard_items(
def test_guard_keys(
- case_sensitive_multidict_class: Type[MultiMapping[str]],
+ case_sensitive_multidict_class: type[MultiDict[str]],
) -> None:
md = case_sensitive_multidict_class({"a": "b"})
it = iter(md.keys())
@@ -26,7 +24,7 @@ def test_guard_keys(
def test_guard_values(
- case_sensitive_multidict_class: Type[MultiMapping[str]],
+ case_sensitive_multidict_class: type[MultiDict[str]],
) -> None:
md = case_sensitive_multidict_class({"a": "b"})
it = iter(md.values())
diff --git a/contrib/python/multidict/tests/test_istr.py b/contrib/python/multidict/tests/test_istr.py
index 1918153532..101f5fe8e5 100644
--- a/contrib/python/multidict/tests/test_istr.py
+++ b/contrib/python/multidict/tests/test_istr.py
@@ -71,4 +71,4 @@ def test_leak(create_istrs: Callable[[], None]) -> None:
gc.collect()
cnt2 = len(gc.get_objects())
- assert abs(cnt - cnt2) < 10 # on PyPy these numbers are not equal
+ assert abs(cnt - cnt2) < 50 # on PyPy these numbers are not equal
diff --git a/contrib/python/multidict/tests/test_multidict.py b/contrib/python/multidict/tests/test_multidict.py
index bcfa699c15..d144130a41 100644
--- a/contrib/python/multidict/tests/test_multidict.py
+++ b/contrib/python/multidict/tests/test_multidict.py
@@ -5,27 +5,20 @@ import operator
import sys
import weakref
from collections import deque
-from collections.abc import Mapping
+from collections.abc import Callable, Iterable, Iterator, KeysView, Mapping
from types import ModuleType
-from typing import (
- Callable,
- Dict,
- Iterable,
- Iterator,
- KeysView,
- List,
- Mapping,
- Set,
- Tuple,
- Type,
- Union,
- cast,
-)
+from typing import Union, cast
import pytest
import multidict
-from multidict import CIMultiDict, MultiDict, MultiMapping, MutableMultiMapping
+from multidict import (
+ CIMultiDict,
+ MultiDict,
+ MultiDictProxy,
+ MultiMapping,
+ MutableMultiMapping,
+)
def chained_callable(
@@ -71,7 +64,7 @@ def cls( # type: ignore[misc]
def test_exposed_names(any_multidict_class_name: str) -> None:
- assert any_multidict_class_name in multidict.__all__ # type: ignore[attr-defined]
+ assert any_multidict_class_name in multidict.__all__
@pytest.mark.parametrize(
@@ -86,8 +79,8 @@ def test_exposed_names(any_multidict_class_name: str) -> None:
indirect=["cls"],
)
def test__iter__types(
- cls: Type[MultiDict[Union[str, int]]],
- key_cls: Type[object],
+ cls: type[MultiDict[Union[str, int]]],
+ key_cls: type[str],
) -> None:
d = cls([("key", "one"), ("key2", "two"), ("key", 3)])
for i in d:
@@ -95,26 +88,26 @@ def test__iter__types(
def test_proxy_copy(
- any_multidict_class: Type[MutableMultiMapping[str]],
- any_multidict_proxy_class: Type[MultiMapping[str]],
+ any_multidict_class: type[MultiDict[str]],
+ any_multidict_proxy_class: type[MultiDictProxy[str]],
) -> None:
d1 = any_multidict_class(key="value", a="b")
p1 = any_multidict_proxy_class(d1)
- d2 = p1.copy() # type: ignore[attr-defined]
+ d2 = p1.copy()
assert d1 == d2
assert d1 is not d2
def test_multidict_subclassing(
- any_multidict_class: Type[MutableMultiMapping[str]],
+ any_multidict_class: type[MultiDict[str]],
) -> None:
class DummyMultidict(any_multidict_class): # type: ignore[valid-type,misc]
pass
def test_multidict_proxy_subclassing(
- any_multidict_proxy_class: Type[MultiMapping[str]],
+ any_multidict_proxy_class: type[MultiDictProxy[str]],
) -> None:
class DummyMultidictProxy(
any_multidict_proxy_class, # type: ignore[valid-type,misc]
@@ -123,7 +116,7 @@ def test_multidict_proxy_subclassing(
class BaseMultiDictTest:
- def test_instantiate__empty(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_instantiate__empty(self, cls: type[MutableMultiMapping[str]]) -> None:
d = cls()
empty: Mapping[str, str] = {}
assert d == empty
@@ -133,14 +126,14 @@ class BaseMultiDictTest:
assert list(d.items()) == []
assert cls() != list() # type: ignore[comparison-overlap]
- with pytest.raises(TypeError, match=r"(2 given)"):
+ with pytest.raises(TypeError, match=r"3 were given"):
cls(("key1", "value1"), ("key2", "value2")) # type: ignore[call-arg] # noqa: E501
@pytest.mark.parametrize("arg0", ([("key", "value1")], {"key": "value1"}))
def test_instantiate__from_arg0(
self,
- cls: Type[MutableMultiMapping[str]],
- arg0: Union[List[Tuple[str, str]], Dict[str, str]],
+ cls: type[MultiDict[str]],
+ arg0: Union[list[tuple[str, str]], dict[str, str]],
) -> None:
d = cls(arg0)
@@ -152,7 +145,7 @@ class BaseMultiDictTest:
def test_instantiate__with_kwargs(
self,
- cls: Type[MutableMultiMapping[str]],
+ cls: type[MultiDict[str]],
) -> None:
d = cls([("key", "value1")], key2="value2")
@@ -163,7 +156,7 @@ class BaseMultiDictTest:
assert sorted(d.items()) == [("key", "value1"), ("key2", "value2")]
def test_instantiate__from_generator(
- self, cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]]
) -> None:
d = cls((str(i), i) for i in range(2))
@@ -175,7 +168,7 @@ class BaseMultiDictTest:
def test_instantiate__from_list_of_lists(
self,
- cls: Type[MutableMultiMapping[str]],
+ cls: type[MutableMultiMapping[str]],
) -> None:
# Should work at runtime, but won't type check.
d = cls([["key", "value1"]]) # type: ignore[call-arg]
@@ -183,7 +176,7 @@ class BaseMultiDictTest:
def test_instantiate__from_list_of_custom_pairs(
self,
- cls: Type[MutableMultiMapping[str]],
+ cls: type[MultiDict[str]],
) -> None:
class Pair:
def __len__(self) -> int:
@@ -193,10 +186,10 @@ class BaseMultiDictTest:
return ("key", "value1")[pos]
# Works at runtime, but won't type check.
- d = cls([Pair()])
+ d = cls([Pair()]) # type: ignore[list-item]
assert d == {"key": "value1"}
- def test_getone(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_getone(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")], key="value2")
assert d.getone("key") == "value1"
@@ -210,25 +203,42 @@ class BaseMultiDictTest:
assert d.getone("key2", "default") == "default"
- def test_call_with_kwargs(self, cls: Type[MultiDict[str]]) -> None:
+ def test_call_with_kwargs(self, cls: type[MultiDict[str]]) -> None:
d = cls([("present", "value")])
assert d.getall(default="missing", key="notfound") == "missing"
def test__iter__(
self,
cls: Union[
- Type[MultiDict[Union[str, int]]],
- Type[CIMultiDict[Union[str, int]]],
+ type[MultiDict[Union[str, int]]],
+ type[CIMultiDict[Union[str, int]]],
],
) -> None:
d = cls([("key", "one"), ("key2", "two"), ("key", 3)])
assert list(d) == ["key", "key2", "key"]
+ def test__contains(
+ self,
+ cls: Union[
+ type[MultiDict[Union[str, int]]],
+ type[CIMultiDict[Union[str, int]]],
+ ],
+ ) -> None:
+ d = cls([("key", "one"), ("key2", "two"), ("key", 3)])
+
+ assert list(d) == ["key", "key2", "key"]
+
+ assert "key" in d
+ assert "key2" in d
+
+ assert "foo" not in d
+ assert 42 not in d # type: ignore[comparison-overlap]
+
def test_keys__contains(
self,
cls: Union[
- Type[MultiDict[Union[str, int]]],
- Type[CIMultiDict[Union[str, int]]],
+ type[MultiDict[Union[str, int]]],
+ type[CIMultiDict[Union[str, int]]],
],
) -> None:
d = cls([("key", "one"), ("key2", "two"), ("key", 3)])
@@ -239,12 +249,13 @@ class BaseMultiDictTest:
assert "key2" in d.keys()
assert "foo" not in d.keys()
+ assert 42 not in d.keys() # type: ignore[comparison-overlap]
def test_values__contains(
self,
cls: Union[
- Type[MultiDict[Union[str, int]]],
- Type[CIMultiDict[Union[str, int]]],
+ type[MultiDict[Union[str, int]]],
+ type[CIMultiDict[Union[str, int]]],
],
) -> None:
d = cls([("key", "one"), ("key", "two"), ("key", 3)])
@@ -260,8 +271,8 @@ class BaseMultiDictTest:
def test_items__contains(
self,
cls: Union[
- Type[MultiDict[Union[str, int]]],
- Type[CIMultiDict[Union[str, int]]],
+ type[MultiDict[Union[str, int]]],
+ type[CIMultiDict[Union[str, int]]],
],
) -> None:
d = cls([("key", "one"), ("key", "two"), ("key", 3)])
@@ -273,15 +284,17 @@ class BaseMultiDictTest:
assert ("key", 3) in d.items()
assert ("foo", "bar") not in d.items()
+ assert (42, 3) not in d.items() # type: ignore[comparison-overlap]
+ assert 42 not in d.items() # type: ignore[comparison-overlap]
def test_cannot_create_from_unaccepted(
self,
- cls: Type[MutableMultiMapping[str]],
+ cls: type[MutableMultiMapping[str]],
) -> None:
with pytest.raises(TypeError):
cls([(1, 2, 3)]) # type: ignore[call-arg]
- def test_keys_is_set_less(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_keys_is_set_less(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert d.keys() < {"key", "key2"}
@@ -297,8 +310,8 @@ class BaseMultiDictTest:
)
def test_keys_is_set_less_equal(
self,
- cls: Type[MutableMultiMapping[str]],
- contents: List[Tuple[str, str]],
+ cls: type[MultiDict[str]],
+ contents: list[tuple[str, str]],
expected: bool,
) -> None:
d = cls(contents)
@@ -306,12 +319,17 @@ class BaseMultiDictTest:
result = d.keys() <= {"key", "key2"}
assert result is expected
- def test_keys_is_set_equal(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_keys_is_set_equal(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert d.keys() == {"key"}
- def test_keys_is_set_greater(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_items_is_set_equal(self, cls: type[MultiDict[str]]) -> None:
+ d = cls([("key", "value1")])
+
+ assert d.items() == {("key", "value1")}
+
+ def test_keys_is_set_greater(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1"), ("key2", "value2")])
assert d.keys() > {"key"}
@@ -326,16 +344,14 @@ class BaseMultiDictTest:
),
)
def test_keys_is_set_greater_equal(
- self, cls: Type[MutableMultiMapping[str]], set_: Set[str], expected: bool
+ self, cls: type[MultiDict[str]], set_: set[str], expected: bool
) -> None:
d = cls([("key", "value1"), ("key2", "value2")])
result = d.keys() >= set_
assert result is expected
- def test_keys_less_than_not_implemented(
- self, cls: Type[MutableMultiMapping[str]]
- ) -> None:
+ def test_keys_less_than_not_implemented(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
sentinel_operation_result = object()
@@ -348,7 +364,7 @@ class BaseMultiDictTest:
assert (d.keys() < RightOperand()) is sentinel_operation_result
def test_keys_less_than_or_equal_not_implemented(
- self, cls: Type[MutableMultiMapping[str]]
+ self, cls: type[MultiDict[str]]
) -> None:
d = cls([("key", "value1")])
@@ -361,9 +377,7 @@ class BaseMultiDictTest:
assert (d.keys() <= RightOperand()) is sentinel_operation_result
- def test_keys_greater_than_not_implemented(
- self, cls: Type[MutableMultiMapping[str]]
- ) -> None:
+ def test_keys_greater_than_not_implemented(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
sentinel_operation_result = object()
@@ -376,7 +390,7 @@ class BaseMultiDictTest:
assert (d.keys() > RightOperand()) is sentinel_operation_result
def test_keys_greater_than_or_equal_not_implemented(
- self, cls: Type[MutableMultiMapping[str]]
+ self, cls: type[MultiDict[str]]
) -> None:
d = cls([("key", "value1")])
@@ -389,30 +403,28 @@ class BaseMultiDictTest:
assert (d.keys() >= RightOperand()) is sentinel_operation_result
- def test_keys_is_set_not_equal(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_keys_is_set_not_equal(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert d.keys() != {"key2"}
- def test_keys_not_equal_unrelated_type(
- self, cls: Type[MutableMultiMapping[str]]
- ) -> None:
+ def test_keys_not_equal_unrelated_type(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
- assert d.keys() != "other"
+ assert d.keys() != "other" # type: ignore[comparison-overlap]
- def test_eq(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_eq(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert {"key": "value1"} == d
- def test_eq2(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_eq2(self, cls: type[MultiDict[str]]) -> None:
d1 = cls([("key", "value1")])
d2 = cls([("key2", "value1")])
assert d1 != d2
- def test_eq3(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_eq3(self, cls: type[MultiDict[str]]) -> None:
d1 = cls([("key", "value1")])
d2 = cls()
@@ -420,7 +432,7 @@ class BaseMultiDictTest:
def test_eq_other_mapping_contains_more_keys(
self,
- cls: Type[MutableMultiMapping[str]],
+ cls: type[MultiDict[str]],
) -> None:
d1 = cls(foo="bar")
d2 = dict(foo="bar", bar="baz")
@@ -428,7 +440,7 @@ class BaseMultiDictTest:
assert d1 != d2
def test_eq_bad_mapping_len(
- self, cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]]
+ self, cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]]
) -> None:
class BadMapping(Mapping[str, int]):
def __getitem__(self, key: str) -> int:
@@ -437,8 +449,8 @@ class BaseMultiDictTest:
def __iter__(self) -> Iterator[str]:
yield "a" # pragma: no cover # `len()` fails earlier
- def __len__(self) -> int: # type: ignore[return]
- 1 / 0
+ def __len__(self) -> int:
+ return 1 // 0
d1 = cls(a=1)
d2 = BadMapping()
@@ -447,11 +459,11 @@ class BaseMultiDictTest:
def test_eq_bad_mapping_getitem(
self,
- cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]],
+ cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]],
) -> None:
class BadMapping(Mapping[str, int]):
- def __getitem__(self, key: str) -> int: # type: ignore[return]
- 1 / 0
+ def __getitem__(self, key: str) -> int:
+ return 1 // 0
def __iter__(self) -> Iterator[str]:
yield "a" # pragma: no cover # foreign objects no iterated
@@ -464,24 +476,22 @@ class BaseMultiDictTest:
with pytest.raises(ZeroDivisionError):
d1 == d2
- def test_ne(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_ne(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert d != {"key": "another_value"}
- def test_and(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_and(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert {"key"} == d.keys() & {"key", "key2"}
- def test_and2(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_and2(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert {"key"} == {"key", "key2"} & d.keys()
- def test_bitwise_and_not_implemented(
- self, cls: Type[MutableMultiMapping[str]]
- ) -> None:
+ def test_bitwise_and_not_implemented(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
sentinel_operation_result = object()
@@ -493,26 +503,22 @@ class BaseMultiDictTest:
assert d.keys() & RightOperand() is sentinel_operation_result
- def test_bitwise_and_iterable_not_set(
- self, cls: Type[MutableMultiMapping[str]]
- ) -> None:
+ def test_bitwise_and_iterable_not_set(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert {"key"} == d.keys() & ["key", "key2"]
- def test_or(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_or(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert {"key", "key2"} == d.keys() | {"key2"}
- def test_or2(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_or2(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert {"key", "key2"} == {"key2"} | d.keys()
- def test_bitwise_or_not_implemented(
- self, cls: Type[MutableMultiMapping[str]]
- ) -> None:
+ def test_bitwise_or_not_implemented(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
sentinel_operation_result = object()
@@ -524,24 +530,22 @@ class BaseMultiDictTest:
assert d.keys() | RightOperand() is sentinel_operation_result
- def test_bitwise_or_iterable_not_set(
- self, cls: Type[MutableMultiMapping[str]]
- ) -> None:
+ def test_bitwise_or_iterable_not_set(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")])
assert {"key", "key2"} == d.keys() | ["key2"]
- def test_sub(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_sub(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1"), ("key2", "value2")])
assert {"key"} == d.keys() - {"key2"}
- def test_sub2(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_sub2(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1"), ("key2", "value2")])
assert {"key3"} == {"key", "key2", "key3"} - d.keys()
- def test_sub_not_implemented(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_sub_not_implemented(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1"), ("key2", "value2")])
sentinel_operation_result = object()
@@ -553,22 +557,22 @@ class BaseMultiDictTest:
assert d.keys() - RightOperand() is sentinel_operation_result
- def test_sub_iterable_not_set(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_sub_iterable_not_set(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1"), ("key2", "value2")])
assert {"key"} == d.keys() - ["key2"]
- def test_xor(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_xor(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1"), ("key2", "value2")])
assert {"key", "key3"} == d.keys() ^ {"key2", "key3"}
- def test_xor2(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_xor2(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1"), ("key2", "value2")])
assert {"key", "key3"} == {"key2", "key3"} ^ d.keys()
- def test_xor_not_implemented(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_xor_not_implemented(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1"), ("key2", "value2")])
sentinel_operation_result = object()
@@ -580,7 +584,7 @@ class BaseMultiDictTest:
assert d.keys() ^ RightOperand() is sentinel_operation_result
- def test_xor_iterable_not_set(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_xor_iterable_not_set(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1"), ("key2", "value2")])
assert {"key", "key3"} == d.keys() ^ ["key2", "key3"]
@@ -590,13 +594,13 @@ class BaseMultiDictTest:
(("key2", "v", True), ("key", "value1", False)),
)
def test_isdisjoint(
- self, cls: Type[MutableMultiMapping[str]], key: str, value: str, expected: bool
+ self, cls: type[MultiDict[str]], key: str, value: str, expected: bool
) -> None:
d = cls([("key", "value1")])
assert d.items().isdisjoint({(key, value)}) is expected
assert d.keys().isdisjoint({key}) is expected
- def test_repr_aiohttp_issue_410(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_repr_aiohttp_issue_410(self, cls: type[MutableMultiMapping[str]]) -> None:
d = cls()
try:
@@ -614,9 +618,9 @@ class BaseMultiDictTest:
@pytest.mark.parametrize("other", ({"other"},))
def test_op_issue_aiohttp_issue_410(
self,
- cls: Type[MutableMultiMapping[str]],
+ cls: type[MultiDict[str]],
op: Callable[[object, object], object],
- other: Set[str],
+ other: set[str],
) -> None:
d = cls([("key", "value")])
@@ -628,7 +632,7 @@ class BaseMultiDictTest:
assert sys.exc_info()[1] == e # noqa: PT017
- def test_weakref(self, cls: Type[MutableMultiMapping[str]]) -> None:
+ def test_weakref(self, cls: type[MutableMultiMapping[str]]) -> None:
called = False
def cb(wr: object) -> None:
@@ -644,7 +648,7 @@ class BaseMultiDictTest:
def test_iter_length_hint_keys(
self,
- cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]],
+ cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]],
) -> None:
md = cls(a=1, b=2)
it = iter(md.keys())
@@ -652,7 +656,7 @@ class BaseMultiDictTest:
def test_iter_length_hint_items(
self,
- cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]],
+ cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]],
) -> None:
md = cls(a=1, b=2)
it = iter(md.items())
@@ -660,15 +664,15 @@ class BaseMultiDictTest:
def test_iter_length_hint_values(
self,
- cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]],
+ cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]],
) -> None:
md = cls(a=1, b=2)
it = iter(md.values())
- assert it.__length_hint__() == 2 # type: ignore[attr-defined]
+ assert it.__length_hint__() == 2
def test_ctor_list_arg_and_kwds(
self,
- cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]],
+ cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]],
) -> None:
arg = [("a", 1)]
obj = cls(arg, b=2)
@@ -677,7 +681,7 @@ class BaseMultiDictTest:
def test_ctor_tuple_arg_and_kwds(
self,
- cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]],
+ cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]],
) -> None:
arg = (("a", 1),)
obj = cls(arg, b=2)
@@ -686,7 +690,7 @@ class BaseMultiDictTest:
def test_ctor_deque_arg_and_kwds(
self,
- cls: Union[Type[MultiDict[int]], Type[CIMultiDict[int]]],
+ cls: Union[type[MultiDict[int]], type[CIMultiDict[int]]],
) -> None:
arg = deque([("a", 1)])
obj = cls(arg, b=2)
@@ -709,7 +713,7 @@ class TestMultiDict(BaseMultiDictTest):
"""Make a case-sensitive multidict class/proxy constructor."""
return chained_callable(multidict_module, request.param)
- def test__repr__(self, cls: Type[MultiDict[str]]) -> None:
+ def test__repr__(self, cls: type[MultiDict[str]]) -> None:
d = cls()
_cls = type(d)
@@ -719,7 +723,7 @@ class TestMultiDict(BaseMultiDictTest):
assert str(d) == "<%s('key': 'one', 'key': 'two')>" % _cls.__name__
- def test_getall(self, cls: Type[MultiDict[str]]) -> None:
+ def test_getall(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")], key="value2")
assert d != {"key": "value1"}
@@ -735,27 +739,27 @@ class TestMultiDict(BaseMultiDictTest):
def test_preserve_stable_ordering(
self,
- cls: Type[MultiDict[Union[str, int]]],
+ cls: type[MultiDict[Union[str, int]]],
) -> None:
d = cls([("a", 1), ("b", "2"), ("a", 3)])
s = "&".join("{}={}".format(k, v) for k, v in d.items())
assert s == "a=1&b=2&a=3"
- def test_get(self, cls: Type[MultiDict[int]]) -> None:
+ def test_get(self, cls: type[MultiDict[int]]) -> None:
d = cls([("a", 1), ("a", 2)])
assert d["a"] == 1
- def test_items__repr__(self, cls: Type[MultiDict[str]]) -> None:
+ def test_items__repr__(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")], key="value2")
expected = "_ItemsView('key': 'value1', 'key': 'value2')"
assert repr(d.items()) == expected
- def test_keys__repr__(self, cls: Type[MultiDict[str]]) -> None:
+ def test_keys__repr__(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")], key="value2")
assert repr(d.keys()) == "_KeysView('key', 'key')"
- def test_values__repr__(self, cls: Type[MultiDict[str]]) -> None:
+ def test_values__repr__(self, cls: type[MultiDict[str]]) -> None:
d = cls([("key", "value1")], key="value2")
assert repr(d.values()) == "_ValuesView('value1', 'value2')"
@@ -775,7 +779,7 @@ class TestCIMultiDict(BaseMultiDictTest):
"""Make a case-insensitive multidict class/proxy constructor."""
return chained_callable(multidict_module, request.param)
- def test_basics(self, cls: Type[CIMultiDict[str]]) -> None:
+ def test_basics(self, cls: type[CIMultiDict[str]]) -> None:
d = cls([("KEY", "value1")], KEY="value2")
assert d.getone("key") == "value1"
@@ -789,7 +793,7 @@ class TestCIMultiDict(BaseMultiDictTest):
with pytest.raises(KeyError, match="key2"):
d.getone("key2")
- def test_getall(self, cls: Type[CIMultiDict[str]]) -> None:
+ def test_getall(self, cls: type[CIMultiDict[str]]) -> None:
d = cls([("KEY", "value1")], KEY="value2")
assert not d == {"KEY": "value1"}
@@ -800,26 +804,26 @@ class TestCIMultiDict(BaseMultiDictTest):
with pytest.raises(KeyError, match="some_key"):
d.getall("some_key")
- def test_get(self, cls: Type[CIMultiDict[int]]) -> None:
+ def test_get(self, cls: type[CIMultiDict[int]]) -> None:
d = cls([("A", 1), ("a", 2)])
assert 1 == d["a"]
- def test__repr__(self, cls: Type[CIMultiDict[str]]) -> None:
+ def test__repr__(self, cls: type[CIMultiDict[str]]) -> None:
d = cls([("KEY", "value1")], key="value2")
_cls = type(d)
expected = "<%s('KEY': 'value1', 'key': 'value2')>" % _cls.__name__
assert str(d) == expected
- def test_items__repr__(self, cls: Type[CIMultiDict[str]]) -> None:
+ def test_items__repr__(self, cls: type[CIMultiDict[str]]) -> None:
d = cls([("KEY", "value1")], key="value2")
expected = "_ItemsView('KEY': 'value1', 'key': 'value2')"
assert repr(d.items()) == expected
- def test_keys__repr__(self, cls: Type[CIMultiDict[str]]) -> None:
+ def test_keys__repr__(self, cls: type[CIMultiDict[str]]) -> None:
d = cls([("KEY", "value1")], key="value2")
assert repr(d.keys()) == "_KeysView('KEY', 'key')"
- def test_values__repr__(self, cls: Type[CIMultiDict[str]]) -> None:
+ def test_values__repr__(self, cls: type[CIMultiDict[str]]) -> None:
d = cls([("KEY", "value1")], key="value2")
assert repr(d.values()) == "_ValuesView('value1', 'value2')"
diff --git a/contrib/python/multidict/tests/test_multidict_benchmarks.py b/contrib/python/multidict/tests/test_multidict_benchmarks.py
new file mode 100644
index 0000000000..e6a538f3cc
--- /dev/null
+++ b/contrib/python/multidict/tests/test_multidict_benchmarks.py
@@ -0,0 +1,391 @@
+"""codspeed benchmarks for multidict."""
+
+from typing import Dict, Union
+
+from pytest_codspeed import BenchmarkFixture
+
+from multidict import CIMultiDict, MultiDict, istr
+
+# Note that this benchmark should not be refactored to use pytest.mark.parametrize
+# since each benchmark name should be unique.
+
+_SENTINEL = object()
+
+
+def test_multidict_insert_str(benchmark: BenchmarkFixture) -> None:
+ md: MultiDict[str] = MultiDict()
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md[i] = i
+
+
+def test_cimultidict_insert_str(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[str] = CIMultiDict()
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md[i] = i
+
+
+def test_cimultidict_insert_istr(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[istr] = CIMultiDict()
+ items = [istr(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md[i] = i
+
+
+def test_multidict_add_str(benchmark: BenchmarkFixture) -> None:
+ md: MultiDict[str] = MultiDict()
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.add(i, i)
+
+
+def test_cimultidict_add_str(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[str] = CIMultiDict()
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.add(i, i)
+
+
+def test_cimultidict_add_istr(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[istr] = CIMultiDict()
+ items = [istr(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.add(i, i)
+
+
+def test_multidict_pop_str(benchmark: BenchmarkFixture) -> None:
+ md_base: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ md = md_base.copy()
+ for i in items:
+ md.pop(i)
+
+
+def test_cimultidict_pop_str(benchmark: BenchmarkFixture) -> None:
+ md_base: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ md = md_base.copy()
+ for i in items:
+ md.pop(i)
+
+
+def test_cimultidict_pop_istr(benchmark: BenchmarkFixture) -> None:
+ md_base: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100))
+ items = [istr(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ md = md_base.copy()
+ for i in items:
+ md.pop(i)
+
+
+def test_multidict_popitem_str(benchmark: BenchmarkFixture) -> None:
+ md_base: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100))
+
+ @benchmark
+ def _run() -> None:
+ md = md_base.copy()
+ for _ in range(100):
+ md.popitem()
+
+
+def test_cimultidict_popitem_str(benchmark: BenchmarkFixture) -> None:
+    md_base: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+
+ @benchmark
+ def _run() -> None:
+ md = md_base.copy()
+ for _ in range(100):
+ md.popitem()
+
+
+def test_multidict_clear_str(benchmark: BenchmarkFixture) -> None:
+ md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100))
+
+ @benchmark
+ def _run() -> None:
+ md.clear()
+
+
+def test_cimultidict_clear_str(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+
+ @benchmark
+ def _run() -> None:
+ md.clear()
+
+
+def test_multidict_update_str(benchmark: BenchmarkFixture) -> None:
+ md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100))
+ items = {str(i): str(i) for i in range(100, 200)}
+
+ @benchmark
+ def _run() -> None:
+ md.update(items)
+
+
+def test_cimultidict_update_str(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+ items = {str(i): str(i) for i in range(100, 200)}
+
+ @benchmark
+ def _run() -> None:
+ md.update(items)
+
+
+def test_cimultidict_update_istr(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100))
+ items: Dict[Union[str, istr], istr] = {istr(i): istr(i) for i in range(100, 200)}
+
+ @benchmark
+ def _run() -> None:
+ md.update(items)
+
+
+def test_multidict_extend_str(benchmark: BenchmarkFixture) -> None:
+    md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100))
+ items = {str(i): str(i) for i in range(200)}
+
+ @benchmark
+ def _run() -> None:
+ md.extend(items)
+
+
+def test_cimultidict_extend_str(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+ items = {str(i): str(i) for i in range(200)}
+
+ @benchmark
+ def _run() -> None:
+ md.extend(items)
+
+
+def test_cimultidict_extend_istr(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100))
+ items = {istr(i): istr(i) for i in range(200)}
+
+ @benchmark
+ def _run() -> None:
+ md.extend(items)
+
+
+def test_multidict_delitem_str(benchmark: BenchmarkFixture) -> None:
+ md_base: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ md = md_base.copy()
+ for i in items:
+ del md[i]
+
+
+def test_cimultidict_delitem_str(benchmark: BenchmarkFixture) -> None:
+ md_base: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ md = md_base.copy()
+ for i in items:
+ del md[i]
+
+
+def test_cimultidict_delitem_istr(benchmark: BenchmarkFixture) -> None:
+ md_base: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100))
+ items = [istr(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ md = md_base.copy()
+ for i in items:
+ del md[i]
+
+
+def test_multidict_getall_str_hit(benchmark: BenchmarkFixture) -> None:
+ md: MultiDict[str] = MultiDict(("all", str(i)) for i in range(100))
+
+ @benchmark
+ def _run() -> None:
+ md.getall("all")
+
+
+def test_cimultidict_getall_str_hit(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[str] = CIMultiDict(("all", str(i)) for i in range(100))
+
+ @benchmark
+ def _run() -> None:
+ md.getall("all")
+
+
+def test_cimultidict_getall_istr_hit(benchmark: BenchmarkFixture) -> None:
+ all_istr = istr("all")
+ md: CIMultiDict[istr] = CIMultiDict((all_istr, istr(i)) for i in range(100))
+
+ @benchmark
+ def _run() -> None:
+ md.getall(all_istr)
+
+
+def test_multidict_fetch(benchmark: BenchmarkFixture) -> None:
+ md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md[i]
+
+
+def test_cimultidict_fetch_str(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md[i]
+
+
+def test_cimultidict_fetch_istr(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100))
+ items = [istr(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md[i]
+
+
+def test_multidict_get_hit(benchmark: BenchmarkFixture) -> None:
+ md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.get(i)
+
+
+def test_multidict_get_miss(benchmark: BenchmarkFixture) -> None:
+ md: MultiDict[str] = MultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100, 200)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.get(i)
+
+
+def test_cimultidict_get_hit(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.get(i)
+
+
+def test_cimultidict_get_miss(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100, 200)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.get(i)
+
+
+def test_cimultidict_get_istr_hit(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100))
+ items = [istr(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.get(i)
+
+
+def test_cimultidict_get_istr_miss(benchmark: BenchmarkFixture) -> None:
+ md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100))
+ items = [istr(i) for i in range(100, 200)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.get(i)
+
+
+def test_cimultidict_get_hit_with_default(
+ benchmark: BenchmarkFixture,
+) -> None:
+ md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.get(i, _SENTINEL)
+
+
+def test_cimultidict_get_miss_with_default(
+ benchmark: BenchmarkFixture,
+) -> None:
+ md: CIMultiDict[str] = CIMultiDict((str(i), str(i)) for i in range(100))
+ items = [str(i) for i in range(100, 200)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.get(i, _SENTINEL)
+
+
+def test_cimultidict_get_istr_hit_with_default(
+ benchmark: BenchmarkFixture,
+) -> None:
+ md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100))
+ items = [istr(i) for i in range(100)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.get(i, _SENTINEL)
+
+
+def test_cimultidict_get_istr_with_default_miss(
+ benchmark: BenchmarkFixture,
+) -> None:
+ md: CIMultiDict[istr] = CIMultiDict((istr(i), istr(i)) for i in range(100))
+ items = [istr(i) for i in range(100, 200)]
+
+ @benchmark
+ def _run() -> None:
+ for i in items:
+ md.get(i, _SENTINEL)
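
The benchmark module above repeats one pytest-benchmark idiom: fixture data is built once in the test body, and only the inner _run callable decorated with @benchmark is timed; destructive operations (pop, popitem, delitem) copy a prebuilt base mapping inside _run so every timed round starts from identical state. A minimal sketch of that idiom, assuming pytest-benchmark is installed and its BenchmarkFixture type is importable from pytest_benchmark.fixture (the bench_multidict_add name is illustrative, not part of the diff):

from multidict import MultiDict
from pytest_benchmark.fixture import BenchmarkFixture

def bench_multidict_add(benchmark: BenchmarkFixture) -> None:
    items = [str(i) for i in range(100)]  # built once, outside the timed callable
    md: MultiDict[str] = MultiDict()

    @benchmark  # pytest-benchmark invokes _run repeatedly and records the timings
    def _run() -> None:
        for i in items:
            md.add(i, i)
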
diff --git a/contrib/python/multidict/tests/test_mutable_multidict.py b/contrib/python/multidict/tests/test_mutable_multidict.py
index 3cacec25af..45f1cdf5f6 100644
--- a/contrib/python/multidict/tests/test_mutable_multidict.py
+++ b/contrib/python/multidict/tests/test_mutable_multidict.py
@@ -1,16 +1,16 @@
import string
import sys
-from typing import Type
+from typing import Union
import pytest
-from multidict import MultiMapping, MutableMultiMapping
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDictProxy, istr
class TestMutableMultiDict:
def test_copy(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d1 = case_sensitive_multidict_class(key="value", a="b")
@@ -20,7 +20,7 @@ class TestMutableMultiDict:
def test__repr__(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
assert str(d) == "<%s()>" % case_sensitive_multidict_class.__name__
@@ -35,7 +35,7 @@ class TestMutableMultiDict:
def test_getall(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class([("key", "value1")], key="value2")
assert len(d) == 2
@@ -50,7 +50,7 @@ class TestMutableMultiDict:
def test_add(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
@@ -73,7 +73,7 @@ class TestMutableMultiDict:
def test_extend(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[Union[str, int]]],
) -> None:
d = case_sensitive_multidict_class()
assert d == {}
@@ -101,12 +101,12 @@ class TestMutableMultiDict:
assert 6 == len(d)
with pytest.raises(TypeError):
- d.extend("foo", "bar")
+ d.extend("foo", "bar") # type: ignore[arg-type, call-arg]
def test_extend_from_proxy(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
- case_sensitive_multidict_proxy_class: Type[MultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
+ case_sensitive_multidict_proxy_class: type[MultiDictProxy[str]],
) -> None:
d = case_sensitive_multidict_class([("a", "a"), ("b", "b")])
proxy = case_sensitive_multidict_proxy_class(d)
@@ -118,7 +118,7 @@ class TestMutableMultiDict:
def test_clear(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class([("key", "one")], key="two", foo="bar")
@@ -128,7 +128,7 @@ class TestMutableMultiDict:
def test_del(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class([("key", "one"), ("key", "two")], foo="bar")
assert list(d.keys()) == ["key", "key", "foo"]
@@ -142,7 +142,7 @@ class TestMutableMultiDict:
def test_set_default(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class([("key", "one"), ("key", "two")], foo="bar")
assert "one" == d.setdefault("key", "three")
@@ -152,7 +152,7 @@ class TestMutableMultiDict:
def test_popitem(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
d.add("key", "val1")
@@ -163,7 +163,7 @@ class TestMutableMultiDict:
def test_popitem_empty_multidict(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
@@ -172,7 +172,7 @@ class TestMutableMultiDict:
def test_pop(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
d.add("key", "val1")
@@ -183,7 +183,7 @@ class TestMutableMultiDict:
def test_pop2(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
d.add("key", "val1")
@@ -195,7 +195,7 @@ class TestMutableMultiDict:
def test_pop_default(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class(other="val")
@@ -204,7 +204,7 @@ class TestMutableMultiDict:
def test_pop_raises(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class(other="val")
@@ -215,7 +215,7 @@ class TestMutableMultiDict:
def test_replacement_order(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
d.add("key1", "val1")
@@ -231,16 +231,16 @@ class TestMutableMultiDict:
def test_nonstr_key(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
with pytest.raises(TypeError):
- d[1] = "val"
+ d[1] = "val" # type: ignore[index]
def test_istr_key(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
- case_insensitive_str_class: Type[str],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
+ case_insensitive_str_class: type[str],
) -> None:
d = case_sensitive_multidict_class()
d[case_insensitive_str_class("1")] = "val"
@@ -248,7 +248,7 @@ class TestMutableMultiDict:
def test_str_derived_key(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
class A(str):
pass
@@ -259,8 +259,8 @@ class TestMutableMultiDict:
def test_istr_key_add(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
- case_insensitive_str_class: Type[str],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
+ case_insensitive_str_class: type[str],
) -> None:
d = case_sensitive_multidict_class()
d.add(case_insensitive_str_class("1"), "val")
@@ -268,7 +268,7 @@ class TestMutableMultiDict:
def test_str_derived_key_add(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
class A(str):
pass
@@ -279,7 +279,7 @@ class TestMutableMultiDict:
def test_popall(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
d.add("key1", "val1")
@@ -291,14 +291,14 @@ class TestMutableMultiDict:
def test_popall_default(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
assert "val" == d.popall("key", "val")
def test_popall_key_error(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_sensitive_multidict_class()
with pytest.raises(KeyError, match="key"):
@@ -306,7 +306,7 @@ class TestMutableMultiDict:
def test_large_multidict_resizing(
self,
- case_sensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_sensitive_multidict_class: type[CIMultiDict[int]],
) -> None:
SIZE = 1024
d = case_sensitive_multidict_class()
@@ -322,7 +322,7 @@ class TestMutableMultiDict:
class TestCIMutableMultiDict:
def test_getall(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class([("KEY", "value1")], KEY="value2")
@@ -336,7 +336,7 @@ class TestCIMutableMultiDict:
def test_ctor(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class(k1="v1")
assert "v1" == d["K1"]
@@ -344,7 +344,7 @@ class TestCIMutableMultiDict:
def test_setitem(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class()
d["k1"] = "v1"
@@ -353,7 +353,7 @@ class TestCIMutableMultiDict:
def test_delitem(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class()
d["k1"] = "v1"
@@ -363,7 +363,7 @@ class TestCIMutableMultiDict:
def test_copy(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d1 = case_insensitive_multidict_class(key="KEY", a="b")
@@ -374,7 +374,7 @@ class TestCIMutableMultiDict:
def test__repr__(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class()
assert str(d) == "<%s()>" % case_insensitive_multidict_class.__name__
@@ -389,7 +389,7 @@ class TestCIMutableMultiDict:
def test_add(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class()
@@ -421,7 +421,7 @@ class TestCIMutableMultiDict:
def test_extend(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[Union[str, int]]],
) -> None:
d = case_insensitive_multidict_class()
assert d == {}
@@ -450,12 +450,12 @@ class TestCIMutableMultiDict:
assert 6 == len(d)
with pytest.raises(TypeError):
- d.extend("foo", "bar")
+ d.extend("foo", "bar") # type: ignore[arg-type, call-arg]
def test_extend_from_proxy(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
- case_insensitive_multidict_proxy_class: Type[MultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
+ case_insensitive_multidict_proxy_class: type[CIMultiDictProxy[str]],
) -> None:
d = case_insensitive_multidict_class([("a", "a"), ("b", "b")])
proxy = case_insensitive_multidict_proxy_class(d)
@@ -467,7 +467,7 @@ class TestCIMutableMultiDict:
def test_clear(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class([("KEY", "one")], key="two", foo="bar")
@@ -477,7 +477,7 @@ class TestCIMutableMultiDict:
def test_del(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class(
[("KEY", "one"), ("key", "two")],
@@ -493,7 +493,7 @@ class TestCIMutableMultiDict:
def test_set_default(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class(
[("KEY", "one"), ("key", "two")],
@@ -507,7 +507,7 @@ class TestCIMutableMultiDict:
def test_popitem(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class()
d.add("KEY", "val1")
@@ -520,7 +520,7 @@ class TestCIMutableMultiDict:
def test_popitem_empty_multidict(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class()
@@ -529,7 +529,7 @@ class TestCIMutableMultiDict:
def test_pop(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class()
d.add("KEY", "val1")
@@ -540,7 +540,7 @@ class TestCIMutableMultiDict:
def test_pop_lowercase(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class()
d.add("KEY", "val1")
@@ -551,7 +551,7 @@ class TestCIMutableMultiDict:
def test_pop_default(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class(OTHER="val")
@@ -560,7 +560,7 @@ class TestCIMutableMultiDict:
def test_pop_raises(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d = case_insensitive_multidict_class(OTHER="val")
@@ -571,8 +571,8 @@ class TestCIMutableMultiDict:
def test_extend_with_istr(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
- case_insensitive_str_class: Type[str],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
+ case_insensitive_str_class: type[istr],
) -> None:
us = case_insensitive_str_class("aBc")
d = case_insensitive_multidict_class()
@@ -582,8 +582,8 @@ class TestCIMutableMultiDict:
def test_copy_istr(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
- case_insensitive_str_class: Type[str],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
+ case_insensitive_str_class: type[istr],
) -> None:
d = case_insensitive_multidict_class({case_insensitive_str_class("Foo"): "bar"})
d2 = d.copy()
@@ -591,7 +591,7 @@ class TestCIMutableMultiDict:
def test_eq(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
d1 = case_insensitive_multidict_class(Key="val")
d2 = case_insensitive_multidict_class(KEY="val")
@@ -604,7 +604,7 @@ class TestCIMutableMultiDict:
)
def test_sizeof(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
md = case_insensitive_multidict_class()
s1 = sys.getsizeof(md)
@@ -621,14 +621,14 @@ class TestCIMutableMultiDict:
)
def test_min_sizeof(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
md = case_insensitive_multidict_class()
assert sys.getsizeof(md) < 1024
def test_issue_620_items(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
# https://github.com/aio-libs/multidict/issues/620
d = case_insensitive_multidict_class({"a": "123, 456", "b": "789"})
@@ -639,7 +639,7 @@ class TestCIMutableMultiDict:
def test_issue_620_keys(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
# https://github.com/aio-libs/multidict/issues/620
d = case_insensitive_multidict_class({"a": "123, 456", "b": "789"})
@@ -650,7 +650,7 @@ class TestCIMutableMultiDict:
def test_issue_620_values(
self,
- case_insensitive_multidict_class: Type[MutableMultiMapping[str]],
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
) -> None:
# https://github.com/aio-libs/multidict/issues/620
d = case_insensitive_multidict_class({"a": "123, 456", "b": "789"})
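
The annotation changes in this file all follow one pattern: fixture parameters move from typing.Type[MutableMultiMapping[str]] to the builtin-generic form type[CIMultiDict[str]], and calls that deliberately pass bad arguments gain narrow "type: ignore[...]" comments instead of loosening the signatures. A hedged sketch of how a typed test reads after the change (test_add_typed is an illustrative name; the add/getall behaviour mirrors the assertions above):

from multidict import CIMultiDict

def test_add_typed(case_sensitive_multidict_class: type[CIMultiDict[str]]) -> None:
    d = case_sensitive_multidict_class()
    d.add("key", "one")
    d.add("key", "two")
    # duplicate keys are kept in insertion order, as the tests above assert
    assert d.getall("key") == ["one", "two"]
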
diff --git a/contrib/python/multidict/tests/test_pickle.py b/contrib/python/multidict/tests/test_pickle.py
index 48adea13f0..3159ea45c6 100644
--- a/contrib/python/multidict/tests/test_pickle.py
+++ b/contrib/python/multidict/tests/test_pickle.py
@@ -1,13 +1,21 @@
import pickle
from pathlib import Path
+from typing import TYPE_CHECKING
import pytest
+from multidict import MultiDict, MultiDictProxy
+
+if TYPE_CHECKING:
+ from conftest import MultidictImplementation
+
import yatest.common as yc
here = Path(yc.source_path(__file__)).resolve().parent
-def test_pickle(any_multidict_class, pickle_protocol):
+def test_pickle(
+ any_multidict_class: type[MultiDict[int]], pickle_protocol: int
+) -> None:
d = any_multidict_class([("a", 1), ("a", 2)])
pbytes = pickle.dumps(d, pickle_protocol)
obj = pickle.loads(pbytes)
@@ -15,14 +23,21 @@ def test_pickle(any_multidict_class, pickle_protocol):
assert isinstance(obj, any_multidict_class)
-def test_pickle_proxy(any_multidict_class, any_multidict_proxy_class):
+def test_pickle_proxy(
+ any_multidict_class: type[MultiDict[int]],
+ any_multidict_proxy_class: type[MultiDictProxy[int]],
+) -> None:
d = any_multidict_class([("a", 1), ("a", 2)])
proxy = any_multidict_proxy_class(d)
with pytest.raises(TypeError):
pickle.dumps(proxy)
-def test_load_from_file(any_multidict_class, multidict_implementation, pickle_protocol):
+def test_load_from_file(
+ any_multidict_class: type[MultiDict[int]],
+ multidict_implementation: "MultidictImplementation",
+ pickle_protocol: int,
+) -> None:
multidict_class_name = any_multidict_class.__name__
pickle_file_basename = "-".join(
(
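
Annotations aside, the pickle tests exercise two behaviours: a MultiDict holding duplicate keys round-trips through pickle unchanged, while a read-only MultiDictProxy refuses to pickle with TypeError. A short hedged illustration of both:

import pickle

from multidict import MultiDict, MultiDictProxy

d: MultiDict[int] = MultiDict([("a", 1), ("a", 2)])
assert pickle.loads(pickle.dumps(d)) == d  # duplicate keys survive the round trip

try:
    pickle.dumps(MultiDictProxy(d))  # proxies are rejected, as test_pickle_proxy expects
except TypeError:
    pass
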
diff --git a/contrib/python/multidict/tests/test_types.py b/contrib/python/multidict/tests/test_types.py
index ceaa391e37..6339006b68 100644
--- a/contrib/python/multidict/tests/test_types.py
+++ b/contrib/python/multidict/tests/test_types.py
@@ -1,52 +1,57 @@
-import sys
import types
import pytest
-def test_proxies(multidict_module):
+def test_proxies(multidict_module: types.ModuleType) -> None:
assert issubclass(
multidict_module.CIMultiDictProxy,
multidict_module.MultiDictProxy,
)
-def test_dicts(multidict_module):
+def test_dicts(multidict_module: types.ModuleType) -> None:
assert issubclass(multidict_module.CIMultiDict, multidict_module.MultiDict)
-def test_proxy_not_inherited_from_dict(multidict_module):
+def test_proxy_not_inherited_from_dict(multidict_module: types.ModuleType) -> None:
assert not issubclass(multidict_module.MultiDictProxy, multidict_module.MultiDict)
-def test_dict_not_inherited_from_proxy(multidict_module):
+def test_dict_not_inherited_from_proxy(multidict_module: types.ModuleType) -> None:
assert not issubclass(multidict_module.MultiDict, multidict_module.MultiDictProxy)
-def test_multidict_proxy_copy_type(multidict_module):
+def test_multidict_proxy_copy_type(multidict_module: types.ModuleType) -> None:
d = multidict_module.MultiDict(key="val")
p = multidict_module.MultiDictProxy(d)
assert isinstance(p.copy(), multidict_module.MultiDict)
-def test_cimultidict_proxy_copy_type(multidict_module):
+def test_cimultidict_proxy_copy_type(multidict_module: types.ModuleType) -> None:
d = multidict_module.CIMultiDict(key="val")
p = multidict_module.CIMultiDictProxy(d)
assert isinstance(p.copy(), multidict_module.CIMultiDict)
-def test_create_multidict_proxy_from_nonmultidict(multidict_module):
+def test_create_multidict_proxy_from_nonmultidict(
+ multidict_module: types.ModuleType,
+) -> None:
with pytest.raises(TypeError):
multidict_module.MultiDictProxy({})
-def test_create_multidict_proxy_from_cimultidict(multidict_module):
+def test_create_multidict_proxy_from_cimultidict(
+ multidict_module: types.ModuleType,
+) -> None:
d = multidict_module.CIMultiDict(key="val")
p = multidict_module.MultiDictProxy(d)
assert p == d
-def test_create_multidict_proxy_from_multidict_proxy_from_mdict(multidict_module):
+def test_create_multidict_proxy_from_multidict_proxy_from_mdict(
+ multidict_module: types.ModuleType,
+) -> None:
d = multidict_module.MultiDict(key="val")
p = multidict_module.MultiDictProxy(d)
assert p == d
@@ -54,7 +59,9 @@ def test_create_multidict_proxy_from_multidict_proxy_from_mdict(multidict_module
assert p2 == p
-def test_create_cimultidict_proxy_from_cimultidict_proxy_from_ci(multidict_module):
+def test_create_cimultidict_proxy_from_cimultidict_proxy_from_ci(
+ multidict_module: types.ModuleType,
+) -> None:
d = multidict_module.CIMultiDict(key="val")
p = multidict_module.CIMultiDictProxy(d)
assert p == d
@@ -62,7 +69,9 @@ def test_create_cimultidict_proxy_from_cimultidict_proxy_from_ci(multidict_modul
assert p2 == p
-def test_create_cimultidict_proxy_from_nonmultidict(multidict_module):
+def test_create_cimultidict_proxy_from_nonmultidict(
+ multidict_module: types.ModuleType,
+) -> None:
with pytest.raises(
TypeError,
match=(
@@ -73,7 +82,9 @@ def test_create_cimultidict_proxy_from_nonmultidict(multidict_module):
multidict_module.CIMultiDictProxy({})
-def test_create_ci_multidict_proxy_from_multidict(multidict_module):
+def test_create_ci_multidict_proxy_from_multidict(
+ multidict_module: types.ModuleType,
+) -> None:
d = multidict_module.MultiDict(key="val")
with pytest.raises(
TypeError,
@@ -85,20 +96,7 @@ def test_create_ci_multidict_proxy_from_multidict(multidict_module):
multidict_module.CIMultiDictProxy(d)
-@pytest.mark.skipif(
- sys.version_info >= (3, 9), reason="Python 3.9 uses GenericAlias which is different"
-)
-def test_generic_exists(multidict_module) -> None:
- assert multidict_module.MultiDict[int] is multidict_module.MultiDict
- assert multidict_module.MultiDictProxy[int] is multidict_module.MultiDictProxy
- assert multidict_module.CIMultiDict[int] is multidict_module.CIMultiDict
- assert multidict_module.CIMultiDictProxy[int] is multidict_module.CIMultiDictProxy
-
-
-@pytest.mark.skipif(
- sys.version_info < (3, 9), reason="Python 3.9 is required for GenericAlias"
-)
-def test_generic_alias(multidict_module) -> None:
+def test_generic_alias(multidict_module: types.ModuleType) -> None:
assert multidict_module.MultiDict[int] == types.GenericAlias(
multidict_module.MultiDict, (int,)
)
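
Dropping the version-gated test_generic_exists leaves a single check: on the supported interpreters, subscripting the multidict classes yields a real types.GenericAlias rather than returning the class itself. The same assertion outside the fixture machinery (a hedged one-liner, not taken verbatim from the diff):

import types

from multidict import MultiDict

assert MultiDict[int] == types.GenericAlias(MultiDict, (int,))
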
diff --git a/contrib/python/multidict/tests/test_update.py b/contrib/python/multidict/tests/test_update.py
index f455327857..46ab30a08b 100644
--- a/contrib/python/multidict/tests/test_update.py
+++ b/contrib/python/multidict/tests/test_update.py
@@ -1,10 +1,12 @@
from collections import deque
-from typing import Type
+from typing import Union
-from multidict import MultiMapping
+from multidict import CIMultiDict, MultiDict
+_MD_Classes = Union[type[MultiDict[int]], type[CIMultiDict[int]]]
-def test_update_replace(any_multidict_class: Type[MultiMapping[str]]) -> None:
+
+def test_update_replace(any_multidict_class: _MD_Classes) -> None:
obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
obj2 = any_multidict_class([("a", 4), ("b", 5), ("a", 6)])
obj1.update(obj2)
@@ -12,7 +14,7 @@ def test_update_replace(any_multidict_class: Type[MultiMapping[str]]) -> None:
assert list(obj1.items()) == expected
-def test_update_append(any_multidict_class: Type[MultiMapping[str]]) -> None:
+def test_update_append(any_multidict_class: _MD_Classes) -> None:
obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
obj2 = any_multidict_class([("a", 4), ("a", 5), ("a", 6)])
obj1.update(obj2)
@@ -20,7 +22,7 @@ def test_update_append(any_multidict_class: Type[MultiMapping[str]]) -> None:
assert list(obj1.items()) == expected
-def test_update_remove(any_multidict_class: Type[MultiMapping[str]]) -> None:
+def test_update_remove(any_multidict_class: _MD_Classes) -> None:
obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
obj2 = any_multidict_class([("a", 4)])
obj1.update(obj2)
@@ -28,7 +30,7 @@ def test_update_remove(any_multidict_class: Type[MultiMapping[str]]) -> None:
assert list(obj1.items()) == expected
-def test_update_replace_seq(any_multidict_class: Type[MultiMapping[str]]) -> None:
+def test_update_replace_seq(any_multidict_class: _MD_Classes) -> None:
obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
obj2 = [("a", 4), ("b", 5), ("a", 6)]
obj1.update(obj2)
@@ -36,14 +38,14 @@ def test_update_replace_seq(any_multidict_class: Type[MultiMapping[str]]) -> Non
assert list(obj1.items()) == expected
-def test_update_replace_seq2(any_multidict_class: Type[MultiMapping[str]]) -> None:
+def test_update_replace_seq2(any_multidict_class: _MD_Classes) -> None:
obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
obj1.update([("a", 4)], b=5, a=6)
expected = [("a", 4), ("b", 5), ("a", 6), ("c", 10)]
assert list(obj1.items()) == expected
-def test_update_append_seq(any_multidict_class: Type[MultiMapping[str]]) -> None:
+def test_update_append_seq(any_multidict_class: _MD_Classes) -> None:
obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
obj2 = [("a", 4), ("a", 5), ("a", 6)]
obj1.update(obj2)
@@ -51,7 +53,7 @@ def test_update_append_seq(any_multidict_class: Type[MultiMapping[str]]) -> None
assert list(obj1.items()) == expected
-def test_update_remove_seq(any_multidict_class: Type[MultiMapping[str]]) -> None:
+def test_update_remove_seq(any_multidict_class: _MD_Classes) -> None:
obj1 = any_multidict_class([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
obj2 = [("a", 4)]
obj1.update(obj2)
@@ -59,9 +61,7 @@ def test_update_remove_seq(any_multidict_class: Type[MultiMapping[str]]) -> None
assert list(obj1.items()) == expected
-def test_update_md(
- case_sensitive_multidict_class: Type[MultiMapping[str]],
-) -> None:
+def test_update_md(case_sensitive_multidict_class: type[CIMultiDict[str]]) -> None:
d = case_sensitive_multidict_class()
d.add("key", "val1")
d.add("key", "val2")
@@ -73,8 +73,8 @@ def test_update_md(
def test_update_istr_ci_md(
- case_insensitive_multidict_class: Type[MultiMapping[str]],
- case_insensitive_str_class: str,
+ case_insensitive_multidict_class: type[CIMultiDict[str]],
+ case_insensitive_str_class: type[str],
) -> None:
d = case_insensitive_multidict_class()
d.add(case_insensitive_str_class("KEY"), "val1")
@@ -86,9 +86,7 @@ def test_update_istr_ci_md(
assert [("key", "val"), ("key2", "val3")] == list(d.items())
-def test_update_ci_md(
- case_insensitive_multidict_class: Type[MultiMapping[str]],
-) -> None:
+def test_update_ci_md(case_insensitive_multidict_class: type[CIMultiDict[str]]) -> None:
d = case_insensitive_multidict_class()
d.add("KEY", "val1")
d.add("key", "val2")
@@ -99,9 +97,7 @@ def test_update_ci_md(
assert [("Key", "val"), ("key2", "val3")] == list(d.items())
-def test_update_list_arg_and_kwds(
- any_multidict_class: Type[MultiMapping[str]],
-) -> None:
+def test_update_list_arg_and_kwds(any_multidict_class: _MD_Classes) -> None:
obj = any_multidict_class()
arg = [("a", 1)]
obj.update(arg, b=2)
@@ -109,9 +105,7 @@ def test_update_list_arg_and_kwds(
assert arg == [("a", 1)]
-def test_update_tuple_arg_and_kwds(
- any_multidict_class: Type[MultiMapping[str]],
-) -> None:
+def test_update_tuple_arg_and_kwds(any_multidict_class: _MD_Classes) -> None:
obj = any_multidict_class()
arg = (("a", 1),)
obj.update(arg, b=2)
@@ -119,9 +113,7 @@ def test_update_tuple_arg_and_kwds(
assert arg == (("a", 1),)
-def test_update_deque_arg_and_kwds(
- any_multidict_class: Type[MultiMapping[str]],
-) -> None:
+def test_update_deque_arg_and_kwds(any_multidict_class: _MD_Classes) -> None:
obj = any_multidict_class()
arg = deque([("a", 1)])
obj.update(arg, b=2)
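
Beyond the typing cleanup, these tests pin down update() semantics for duplicate keys: existing occurrences are overwritten in place and in order, surplus new values are appended, and leftover old duplicates are dropped. A hedged sketch reproducing the replace case:

from multidict import MultiDict

obj: MultiDict[int] = MultiDict([("a", 1), ("b", 2), ("a", 3), ("c", 10)])
obj.update([("a", 4), ("b", 5), ("a", 6)])
# both existing "a" slots are rewritten in place; "c" keeps its position
assert list(obj.items()) == [("a", 4), ("b", 5), ("a", 6), ("c", 10)]
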
diff --git a/contrib/python/multidict/tests/test_version.py b/contrib/python/multidict/tests/test_version.py
index e004afa112..4fe209c678 100644
--- a/contrib/python/multidict/tests/test_version.py
+++ b/contrib/python/multidict/tests/test_version.py
@@ -1,18 +1,25 @@
-from typing import Callable, Type
+from collections.abc import Callable
+from typing import TypeVar, Union
import pytest
-from multidict import MultiMapping
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
+_T = TypeVar("_T")
+_MD_Types = Union[
+ MultiDict[_T], CIMultiDict[_T], MultiDictProxy[_T], CIMultiDictProxy[_T]
+]
+GetVersion = Callable[[_MD_Types[_T]], int]
-def test_getversion_bad_param(multidict_getversion_callable):
+
+def test_getversion_bad_param(multidict_getversion_callable: GetVersion[str]) -> None:
with pytest.raises(TypeError):
- multidict_getversion_callable(1)
+ multidict_getversion_callable(1) # type: ignore[arg-type]
def test_ctor(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m1 = any_multidict_class()
v1 = multidict_getversion_callable(m1)
@@ -22,8 +29,8 @@ def test_ctor(
def test_add(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
v = multidict_getversion_callable(m)
@@ -32,8 +39,8 @@ def test_add(
def test_delitem(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -43,8 +50,8 @@ def test_delitem(
def test_delitem_not_found(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -55,8 +62,8 @@ def test_delitem_not_found(
def test_setitem(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -66,8 +73,8 @@ def test_setitem(
def test_setitem_not_found(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -77,8 +84,8 @@ def test_setitem_not_found(
def test_clear(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -88,8 +95,8 @@ def test_clear(
def test_setdefault(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -99,8 +106,8 @@ def test_setdefault(
def test_popone(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -110,8 +117,8 @@ def test_popone(
def test_popone_default(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -121,8 +128,8 @@ def test_popone_default(
def test_popone_key_error(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -133,8 +140,8 @@ def test_popone_key_error(
def test_pop(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -144,8 +151,8 @@ def test_pop(
def test_pop_default(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -155,8 +162,8 @@ def test_pop_default(
def test_pop_key_error(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -167,8 +174,8 @@ def test_pop_key_error(
def test_popall(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -178,8 +185,8 @@ def test_popall(
def test_popall_default(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -189,8 +196,8 @@ def test_popall_default(
def test_popall_key_error(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -201,8 +208,8 @@ def test_popall_key_error(
def test_popitem(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
m.add("key", "val")
@@ -212,8 +219,8 @@ def test_popitem(
def test_popitem_key_error(
- any_multidict_class: Type[MultiMapping[str]],
- multidict_getversion_callable: Callable,
+ any_multidict_class: type[MultiDict[str]],
+ multidict_getversion_callable: GetVersion[str],
) -> None:
m = any_multidict_class()
v = multidict_getversion_callable(m)
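
The GetVersion alias introduced at the top of this file just names the callable behind the multidict_getversion_callable fixture: a function returning an integer version that every mutating operation bumps. A hedged sketch, assuming the fixture ultimately dispatches to the top-level multidict.getversion helper:

from multidict import MultiDict, getversion

m: MultiDict[str] = MultiDict()
v1 = getversion(m)
m.add("key", "val")
assert getversion(m) > v1  # any mutation increases the version counter
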
diff --git a/contrib/python/multidict/ya.make b/contrib/python/multidict/ya.make
index 8a2950eae9..626036249b 100644
--- a/contrib/python/multidict/ya.make
+++ b/contrib/python/multidict/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(6.1.0)
+VERSION(6.2.0)
LICENSE(Apache-2.0)
@@ -25,7 +25,6 @@ PY_REGISTER(
PY_SRCS(
TOP_LEVEL
multidict/__init__.py
- multidict/__init__.pyi
multidict/_abc.py
multidict/_compat.py
multidict/_multidict_base.py
diff --git a/contrib/python/responses/py3/.dist-info/METADATA b/contrib/python/responses/py3/.dist-info/METADATA
index 69687eb48a..f5e0301e46 100644
--- a/contrib/python/responses/py3/.dist-info/METADATA
+++ b/contrib/python/responses/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: responses
-Version: 0.25.6
+Version: 0.25.7
Summary: A utility library for mocking out the `requests` Python library.
Home-page: https://github.com/getsentry/responses
Author: David Cramer
@@ -20,6 +20,7 @@ Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
Classifier: Topic :: Software Development
Requires-Python: >=3.8
Description-Content-Type: text/x-rst
diff --git a/contrib/python/responses/py3/ya.make b/contrib/python/responses/py3/ya.make
index f706b20e79..0b191e6b13 100644
--- a/contrib/python/responses/py3/ya.make
+++ b/contrib/python/responses/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(0.25.6)
+VERSION(0.25.7)
LICENSE(Apache-2.0)
diff --git a/contrib/restricted/fast_float/.yandex_meta/override.nix b/contrib/restricted/fast_float/.yandex_meta/override.nix
index 7f0aaba843..d8bd096dd3 100644
--- a/contrib/restricted/fast_float/.yandex_meta/override.nix
+++ b/contrib/restricted/fast_float/.yandex_meta/override.nix
@@ -1,11 +1,11 @@
self: super: with self; rec {
name = "fast_float";
- version = "8.0.1";
+ version = "8.0.2";
src = fetchFromGitHub {
owner = "fastfloat";
repo = "fast_float";
rev = "v${version}";
- hash = "sha256-Y13JdBk8pZyg748fEOj+O/6gMAaqNXIE2fLY5tsMGB0=";
+ hash = "sha256-lKEzRYKdpjsqixC9WBoILccqB2ZkUtPUzT4Q4+j0oac=";
};
}
diff --git a/contrib/restricted/fast_float/README.md b/contrib/restricted/fast_float/README.md
index 6e5e9d01c6..06c30c3de8 100644
--- a/contrib/restricted/fast_float/README.md
+++ b/contrib/restricted/fast_float/README.md
@@ -455,7 +455,7 @@ sufficiently recent version of CMake (3.11 or better at least):
FetchContent_Declare(
fast_float
GIT_REPOSITORY https://github.com/fastfloat/fast_float.git
- GIT_TAG tags/v8.0.1
+ GIT_TAG tags/v8.0.2
GIT_SHALLOW TRUE)
FetchContent_MakeAvailable(fast_float)
@@ -471,7 +471,7 @@ You may also use [CPM](https://github.com/cpm-cmake/CPM.cmake), like so:
CPMAddPackage(
NAME fast_float
GITHUB_REPOSITORY "fastfloat/fast_float"
- GIT_TAG v8.0.1)
+ GIT_TAG v8.0.2)
```
## Using as single header
@@ -483,7 +483,7 @@ if desired as described in the command line help.
You may directly download automatically generated single-header files:
-<https://github.com/fastfloat/fast_float/releases/download/v8.0.1/fast_float.h>
+<https://github.com/fastfloat/fast_float/releases/download/v8.0.2/fast_float.h>
## Benchmarking
diff --git a/contrib/restricted/fast_float/include/fast_float/float_common.h b/contrib/restricted/fast_float/include/fast_float/float_common.h
index 17ab14818d..5245279164 100644
--- a/contrib/restricted/fast_float/include/fast_float/float_common.h
+++ b/contrib/restricted/fast_float/include/fast_float/float_common.h
@@ -17,7 +17,7 @@
#define FASTFLOAT_VERSION_MAJOR 8
#define FASTFLOAT_VERSION_MINOR 0
-#define FASTFLOAT_VERSION_PATCH 1
+#define FASTFLOAT_VERSION_PATCH 2
#define FASTFLOAT_STRINGIZE_IMPL(x) #x
#define FASTFLOAT_STRINGIZE(x) FASTFLOAT_STRINGIZE_IMPL(x)
diff --git a/contrib/restricted/fast_float/ya.make b/contrib/restricted/fast_float/ya.make
index 4e50435e1a..004c152801 100644
--- a/contrib/restricted/fast_float/ya.make
+++ b/contrib/restricted/fast_float/ya.make
@@ -11,9 +11,9 @@ LICENSE(
LICENSE_TEXTS(.yandex_meta/licenses.list.txt)
-VERSION(8.0.1)
+VERSION(8.0.2)
-ORIGINAL_SOURCE(https://github.com/fastfloat/fast_float/archive/v8.0.1.tar.gz)
+ORIGINAL_SOURCE(https://github.com/fastfloat/fast_float/archive/v8.0.2.tar.gz)
NO_COMPILER_WARNINGS()
diff --git a/contrib/tools/cython/.yandex_meta/__init__.py b/contrib/tools/cython/.yandex_meta/__init__.py
index 3155ec718b..66b5cd174c 100644
--- a/contrib/tools/cython/.yandex_meta/__init__.py
+++ b/contrib/tools/cython/.yandex_meta/__init__.py
@@ -42,6 +42,7 @@ cython = NixProject(
"Cython/ya.make",
"generated_c_headers.h",
"generated_cpp_headers.h",
+ "a.yaml",
],
post_install=post_install,
)
diff --git a/contrib/tools/cython/a.yaml b/contrib/tools/cython/a.yaml
new file mode 100644
index 0000000000..fcafb68eaa
--- /dev/null
+++ b/contrib/tools/cython/a.yaml
@@ -0,0 +1,26 @@
+service: cc
+title: Third party components
+
+ci:
+ secret: sec-01e0d4agf6pfvwdjwxp61n3fvg
+ runtime:
+ sandbox-owner: DEVTOOLS-LARGE
+
+ actions:
+ my-action:
+ flow: add-cython-fakeid-issue
+ triggers:
+ - on: pr
+
+ flows:
+ add-cython-fakeid-issue:
+ jobs:
+ cython-fakeid-issue:
+ title: Cython FakeID issue
+ task: projects/devtools/write_pr_comment/write_pr_comment
+ input:
+ config:
+ data: Any change to Cython must also update CYTHON_FAKEID so that dependencies on Cython get retested!
+ pr_id: ${context.launch_pull_request_info.pull_request.id}
+ issue: true
+ once: true