author     Alexander Smirnov <alex@ydb.tech>   2024-10-07 15:49:08 +0000
committer  Alexander Smirnov <alex@ydb.tech>   2024-10-07 15:49:08 +0000
commit     6e4a5b7ec90b12f50ed6af6bb3bbd214d4aaaa35 (patch)
tree       7bd4c53a7df4f129e96c095353cc73944f6f5971 /contrib/python
parent     a91cf35875165a1e7b20cb79925892e304c7b911 (diff)
parent     1c145de846055758e1cf1a78a53d9b06ecf4e697 (diff)
download   ydb-6e4a5b7ec90b12f50ed6af6bb3bbd214d4aaaa35.tar.gz
Merge branch 'rightlib' into mergelibs-241007-1548
Diffstat (limited to 'contrib/python')
-rw-r--r--  contrib/python/aioresponses/.dist-info/METADATA  333
-rw-r--r--  contrib/python/aioresponses/.dist-info/top_level.txt  1
-rw-r--r--  contrib/python/aioresponses/AUTHORS  51
-rw-r--r--  contrib/python/aioresponses/AUTHORS.rst  13
-rw-r--r--  contrib/python/aioresponses/LICENSE  21
-rw-r--r--  contrib/python/aioresponses/README.rst  306
-rw-r--r--  contrib/python/aioresponses/aioresponses/__init__.py  9
-rw-r--r--  contrib/python/aioresponses/aioresponses/compat.py  68
-rw-r--r--  contrib/python/aioresponses/aioresponses/core.py  549
-rw-r--r--  contrib/python/aioresponses/aioresponses/py.typed  0
-rw-r--r--  contrib/python/aioresponses/ya.make  33
-rw-r--r--  contrib/python/anyio/.dist-info/METADATA  12
-rw-r--r--  contrib/python/anyio/anyio/__init__.py  12
-rw-r--r--  contrib/python/anyio/anyio/_backends/_asyncio.py  452
-rw-r--r--  contrib/python/anyio/anyio/_backends/_trio.py  196
-rw-r--r--  contrib/python/anyio/anyio/_core/_exceptions.py  16
-rw-r--r--  contrib/python/anyio/anyio/_core/_fileio.py  25
-rw-r--r--  contrib/python/anyio/anyio/_core/_signals.py  6
-rw-r--r--  contrib/python/anyio/anyio/_core/_sockets.py  31
-rw-r--r--  contrib/python/anyio/anyio/_core/_streams.py  4
-rw-r--r--  contrib/python/anyio/anyio/_core/_subprocesses.py  112
-rw-r--r--  contrib/python/anyio/anyio/_core/_synchronization.py  247
-rw-r--r--  contrib/python/anyio/anyio/abc/_eventloop.py  74
-rw-r--r--  contrib/python/anyio/anyio/abc/_sockets.py  8
-rw-r--r--  contrib/python/anyio/anyio/from_thread.py  85
-rw-r--r--  contrib/python/anyio/anyio/pytest_plugin.py  19
-rw-r--r--  contrib/python/anyio/anyio/streams/memory.py  10
-rw-r--r--  contrib/python/anyio/anyio/streams/tls.py  6
-rw-r--r--  contrib/python/anyio/anyio/to_process.py  3
-rw-r--r--  contrib/python/anyio/ya.make  2
-rw-r--r--  contrib/python/google-auth/py3/.dist-info/METADATA  2
-rw-r--r--  contrib/python/google-auth/py3/google/auth/_default.py  2
-rw-r--r--  contrib/python/google-auth/py3/google/auth/_exponential_backoff.py  77
-rw-r--r--  contrib/python/google-auth/py3/google/auth/aio/transport/__init__.py  144
-rw-r--r--  contrib/python/google-auth/py3/google/auth/aio/transport/aiohttp.py  184
-rw-r--r--  contrib/python/google-auth/py3/google/auth/aio/transport/sessions.py  268
-rw-r--r--  contrib/python/google-auth/py3/google/auth/compute_engine/credentials.py  8
-rw-r--r--  contrib/python/google-auth/py3/google/auth/credentials.py  11
-rw-r--r--  contrib/python/google-auth/py3/google/auth/exceptions.py  8
-rw-r--r--  contrib/python/google-auth/py3/google/auth/external_account.py  43
-rw-r--r--  contrib/python/google-auth/py3/google/auth/external_account_authorized_user.py  34
-rw-r--r--  contrib/python/google-auth/py3/google/auth/impersonated_credentials.py  38
-rw-r--r--  contrib/python/google-auth/py3/google/auth/version.py  2
-rw-r--r--  contrib/python/google-auth/py3/google/oauth2/credentials.py  96
-rw-r--r--  contrib/python/google-auth/py3/google/oauth2/service_account.py  14
-rw-r--r--  contrib/python/google-auth/py3/tests/compute_engine/test_credentials.py  7
-rw-r--r--  contrib/python/google-auth/py3/tests/oauth2/test_credentials.py  28
-rw-r--r--  contrib/python/google-auth/py3/tests/oauth2/test_service_account.py  17
-rw-r--r--  contrib/python/google-auth/py3/tests/test__default.py  32
-rw-r--r--  contrib/python/google-auth/py3/tests/test__exponential_backoff.py  41
-rw-r--r--  contrib/python/google-auth/py3/tests/test_credentials.py  5
-rw-r--r--  contrib/python/google-auth/py3/tests/test_external_account.py  65
-rw-r--r--  contrib/python/google-auth/py3/tests/test_external_account_authorized_user.py  16
-rw-r--r--  contrib/python/google-auth/py3/tests/test_impersonated_credentials.py  17
-rw-r--r--  contrib/python/google-auth/py3/tests/transport/aio/test_aiohttp.py  170
-rw-r--r--  contrib/python/google-auth/py3/tests/transport/aio/test_sessions.py  311
-rw-r--r--  contrib/python/google-auth/py3/tests/ya.make  31
-rw-r--r--  contrib/python/google-auth/py3/ya.make  6
-rw-r--r--  contrib/python/idna/py3/.dist-info/METADATA  2
-rw-r--r--  contrib/python/idna/py3/idna/core.py  54
-rw-r--r--  contrib/python/idna/py3/idna/idnadata.py  78
-rw-r--r--  contrib/python/idna/py3/idna/package_data.py  2
-rw-r--r--  contrib/python/idna/py3/idna/uts46data.py  1622
-rw-r--r--  contrib/python/idna/py3/ya.make  2
-rw-r--r--  contrib/python/mypy-protobuf/.dist-info/METADATA  6
-rw-r--r--  contrib/python/mypy-protobuf/README.md  6
-rw-r--r--  contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py  18
-rw-r--r--  contrib/python/mypy-protobuf/mypy_protobuf/main.py  42
-rw-r--r--  contrib/python/mypy-protobuf/ya.make  2
-rw-r--r--  contrib/python/pytest-asyncio/.dist-info/METADATA  91
-rw-r--r--  contrib/python/pytest-asyncio/.dist-info/entry_points.txt  2
-rw-r--r--  contrib/python/pytest-asyncio/.dist-info/top_level.txt  1
-rw-r--r--  contrib/python/pytest-asyncio/LICENSE  201
-rw-r--r--  contrib/python/pytest-asyncio/README.rst  52
-rw-r--r--  contrib/python/pytest-asyncio/pytest_asyncio/__init__.py  5
-rw-r--r--  contrib/python/pytest-asyncio/pytest_asyncio/_version.py  4
-rw-r--r--  contrib/python/pytest-asyncio/pytest_asyncio/plugin.py  624
-rw-r--r--  contrib/python/pytest-asyncio/pytest_asyncio/py.typed  0
-rw-r--r--  contrib/python/pytest-asyncio/ya.make  30
-rw-r--r--  contrib/python/types-protobuf/.dist-info/METADATA  10
-rw-r--r--  contrib/python/types-protobuf/google-stubs/METADATA.toml  6
-rw-r--r--  contrib/python/types-protobuf/ya.make  2
82 files changed, 5643 insertions, 1600 deletions
diff --git a/contrib/python/aioresponses/.dist-info/METADATA b/contrib/python/aioresponses/.dist-info/METADATA
new file mode 100644
index 0000000000..54b686eb71
--- /dev/null
+++ b/contrib/python/aioresponses/.dist-info/METADATA
@@ -0,0 +1,333 @@
+Metadata-Version: 2.1
+Name: aioresponses
+Version: 0.7.6
+Summary: Mock out requests made by ClientSession from aiohttp package
+Home-page: https://github.com/pnuckowski/aioresponses
+Author: Pawel Nuckowski
+Author-email: p.nuckowski@gmail.com
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: Software Development :: Testing :: Mocking
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+License-File: LICENSE
+License-File: AUTHORS
+License-File: AUTHORS.rst
+Requires-Dist: aiohttp (<4.0.0,>=3.3.0)
+
+===============================
+aioresponses
+===============================
+
+.. image:: https://travis-ci.org/pnuckowski/aioresponses.svg?branch=master
+ :target: https://travis-ci.org/pnuckowski/aioresponses
+
+.. image:: https://coveralls.io/repos/github/pnuckowski/aioresponses/badge.svg?branch=master
+ :target: https://coveralls.io/github/pnuckowski/aioresponses?branch=master
+
+.. image:: https://landscape.io/github/pnuckowski/aioresponses/master/landscape.svg?style=flat
+ :target: https://landscape.io/github/pnuckowski/aioresponses/master
+ :alt: Code Health
+
+.. image:: https://pyup.io/repos/github/pnuckowski/aioresponses/shield.svg
+ :target: https://pyup.io/repos/github/pnuckowski/aioresponses/
+ :alt: Updates
+
+.. image:: https://img.shields.io/pypi/v/aioresponses.svg
+ :target: https://pypi.python.org/pypi/aioresponses
+
+.. image:: https://readthedocs.org/projects/aioresponses/badge/?version=latest
+ :target: https://aioresponses.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation Status
+
+
+Aioresponses is a helper for mocking/faking web requests made with the Python aiohttp package.
+
+For the *requests* module there are many packages that help with testing (e.g. *httpretty*, *responses*, *requests-mock*).
+
+Testing asynchronous HTTP requests, however, is a bit harder (at least at the beginning).
+The purpose of this package is to provide an easy way to test asynchronous HTTP requests.
+
+Installing
+----------
+
+.. code:: bash
+
+ $ pip install aioresponses
+
+Supported versions
+------------------
+- Python 3.7+
+- aiohttp>=3.3.0,<4.0.0
+
+Usage
+--------
+
+To mock out HTTP requests, use *aioresponses* as a method decorator or as a context manager.
+
+Response *status* code, *body*, *payload* (for JSON responses) and *headers* can be mocked.
+
+Supported HTTP methods: **GET**, **POST**, **PUT**, **PATCH**, **DELETE** and **OPTIONS**.
+
+.. code:: python
+
+ import aiohttp
+ import asyncio
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_request(mocked):
+ loop = asyncio.get_event_loop()
+ mocked.get('http://example.com', status=200, body='test')
+ session = aiohttp.ClientSession()
+ resp = loop.run_until_complete(session.get('http://example.com'))
+
+ assert resp.status == 200
+ mocked.assert_called_once_with('http://example.com')
+
+
+For convenience, use the *payload* argument to mock out a JSON response, as in the example below.
+
+**as a context manager**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ def test_ctx():
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+ with aioresponses() as m:
+ m.get('http://test.example.com', payload=dict(foo='bar'))
+
+ resp = loop.run_until_complete(session.get('http://test.example.com'))
+ data = loop.run_until_complete(resp.json())
+
+ assert dict(foo='bar') == data
+ m.assert_called_once_with('http://test.example.com')
+
+**aioresponses allows you to mock out any HTTP headers**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_http_headers(m):
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+ m.post(
+ 'http://example.com',
+ payload=dict(),
+ headers=dict(connection='keep-alive'),
+ )
+
+ resp = loop.run_until_complete(session.post('http://example.com'))
+
+ # note that we pass 'connection' but get 'Connection' (capitalized)
+        # underneath, `multidict` is used to work with HTTP headers
+ assert resp.headers['Connection'] == 'keep-alive'
+ m.assert_called_once_with('http://example.com', method='POST')
+
+**allows registering different responses for the same URL**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_multiple_responses(m):
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+ m.get('http://example.com', status=500)
+ m.get('http://example.com', status=200)
+
+ resp1 = loop.run_until_complete(session.get('http://example.com'))
+ resp2 = loop.run_until_complete(session.get('http://example.com'))
+
+ assert resp1.status == 500
+ assert resp2.status == 200
+
+
+**Repeat response for the same url**
+
+E.g. for cases where you want to test retry mechanisms
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_multiple_responses(m):
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+ m.get('http://example.com', status=500, repeat=True)
+ m.get('http://example.com', status=200) # will not take effect
+
+ resp1 = loop.run_until_complete(session.get('http://example.com'))
+ resp2 = loop.run_until_complete(session.get('http://example.com'))
+
+ assert resp1.status == 500
+ assert resp2.status == 500
+
+
+**match URLs with regular expressions**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ import re
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_regexp_example(m):
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+ pattern = re.compile(r'^http://example\.com/api\?foo=.*$')
+ m.get(pattern, status=200)
+
+ resp = loop.run_until_complete(session.get('http://example.com/api?foo=bar'))
+
+ assert resp.status == 200
+
+**allows mocking redirect responses**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_redirect_example(m):
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+
+ # absolute urls are supported
+ m.get(
+ 'http://example.com/',
+ headers={'Location': 'http://another.com/'},
+ status=307
+ )
+
+ resp = loop.run_until_complete(
+ session.get('http://example.com/', allow_redirects=True)
+ )
+ assert resp.url == 'http://another.com/'
+
+ # and also relative
+ m.get(
+ 'http://example.com/',
+ headers={'Location': '/test'},
+ status=307
+ )
+ resp = loop.run_until_complete(
+ session.get('http://example.com/', allow_redirects=True)
+ )
+ assert resp.url == 'http://example.com/test'
+
+**allows passing requests through to a specified list of servers**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ @aioresponses(passthrough=['http://backend'])
+ def test_passthrough(m, test_client):
+ session = aiohttp.ClientSession()
+ # this will actually perform a request
+        resp = asyncio.get_event_loop().run_until_complete(session.get('http://backend/api'))
+
+
+**aioresponses allows raising an exception**
+
+.. code:: python
+
+ import asyncio
+ from aiohttp import ClientSession
+ from aiohttp.http_exceptions import HttpProcessingError
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_how_to_throw_an_exception(m, test_client):
+ loop = asyncio.get_event_loop()
+ session = ClientSession()
+ m.get('http://example.com/api', exception=HttpProcessingError('test'))
+
+ # calling
+ # loop.run_until_complete(session.get('http://example.com/api'))
+ # will throw an exception.
+
+
+**aioresponses allows using callbacks to provide dynamic responses**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import CallbackResult, aioresponses
+
+ def callback(url, **kwargs):
+ return CallbackResult(status=418)
+
+ @aioresponses()
+ def test_callback(m, test_client):
+ loop = asyncio.get_event_loop()
+        session = aiohttp.ClientSession()
+ m.get('http://example.com', callback=callback)
+
+ resp = loop.run_until_complete(session.get('http://example.com'))
+
+ assert resp.status == 418
+
+
+**aioresponses can be used in a pytest fixture**
+
+.. code:: python
+
+ import pytest
+ from aioresponses import aioresponses
+
+ @pytest.fixture
+ def mock_aioresponse():
+ with aioresponses() as m:
+ yield m
+
+
+Features
+--------
+* Easy to mock out HTTP requests made by *aiohttp.ClientSession*
+
+
+License
+-------
+* Free software: MIT license
+
+Credits
+-------
+
+This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
+
+.. _Cookiecutter: https://github.com/audreyr/cookiecutter
+.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
+
diff --git a/contrib/python/aioresponses/.dist-info/top_level.txt b/contrib/python/aioresponses/.dist-info/top_level.txt
new file mode 100644
index 0000000000..46cd566df0
--- /dev/null
+++ b/contrib/python/aioresponses/.dist-info/top_level.txt
@@ -0,0 +1 @@
+aioresponses
diff --git a/contrib/python/aioresponses/AUTHORS b/contrib/python/aioresponses/AUTHORS
new file mode 100644
index 0000000000..3854a29412
--- /dev/null
+++ b/contrib/python/aioresponses/AUTHORS
@@ -0,0 +1,51 @@
+Alan Briolat <alan.briolat@gmail.com>
+Aleksei Maslakov <lesha.maslakov@gmail.com>
+Alexey Nikitenko <wblxyxolb.khv@mail.ru>
+Alexey Sveshnikov <a.sveshnikov@rambler-co.ru>
+Alexey Sveshnikov <alexey.sveshnikov@gmail.com>
+Allisson Azevedo <allisson@gmail.com>
+Andrew Grinevich <andrew.grinevich@pandadoc.com>
+Anthony Lukach <anthonylukach@gmail.com>
+Ben Greiner <code@bnavigator.de>
+Brett Wandel <brett.wandel@interferex.com>
+Bryce Drennan <github@accounts.brycedrennan.com>
+Colin-b <Colin-b@users.noreply.github.com>
+Daniel Hahler <git@thequod.de>
+Daniel Tan <danieltanjiawang@gmail.com>
+David Buxton <david@gasmark6.com>
+Fred Thomsen <fred.thomsen@sciencelogic.com>
+Georg Sauthoff <mail@gms.tf>
+Gordon Rogers <gordonrogers@skyscanner.net>
+Hadrien David <hadrien.david@dialogue.co>
+Hadrien David <hadrien@ectobal.com>
+Ibrahim <8592115+iamibi@users.noreply.github.com>
+Ilaï Deutel <ilai-deutel@users.noreply.github.com>
+Jakub Boukal <www.bagr@gmail.com>
+Joongi Kim <me@daybreaker.info>
+Jordi Soucheiron <jordi@soucheiron.cat>
+Jordi Soucheiron <jsoucheiron@users.noreply.github.com>
+Joshua Coats <joshu@fearchar.net>
+Juan Cruz <juancruzmencia@gmail.com>
+Lee Treveil <leetreveil@gmail.com>
+Louis Sautier <sautier.louis@gmail.com>
+Lukasz Jernas <lukasz.jernas@allegrogroup.com>
+Marat Sharafutdinov <decaz89@gmail.com>
+Marcin Sulikowski <marcin.k.sulikowski@gmail.com>
+Marek Kowalski <kowalski0123@gmail.com>
+Pavel Savchenko <asfaltboy@gmail.com>
+Pawel Nuckowski <p.nuckowski@gmail.com>
+Petr Belskiy <petr.belskiy@gmail.com>
+Rémy HUBSCHER <rhubscher@mozilla.com>
+Sam Bull <aa6bs0@sambull.org>
+TyVik <tyvik8@gmail.com>
+Ulrik Johansson <ulrik.johansson@blocket.se>
+Ville Skyttä <ville.skytta@iki.fi>
+d-ryzhikov <d.ryzhykau@gmail.com>
+iamnotaprogrammer <iamnotaprogrammer@yandex.ru>
+iamnotaprogrammer <issmirnov@domclick.ru>
+konstantin <konstantin.klein@hochfrequenz.de>
+oren0e <countx@gmail.com>
+pnuckowski <p.nuckowski@gmail.com>
+pnuckowski <pnuckowski@users.noreply.github.com>
+pyup-bot <github-bot@pyup.io>
+vangheem <vangheem@gmail.com>
diff --git a/contrib/python/aioresponses/AUTHORS.rst b/contrib/python/aioresponses/AUTHORS.rst
new file mode 100644
index 0000000000..3b1fc8e0ec
--- /dev/null
+++ b/contrib/python/aioresponses/AUTHORS.rst
@@ -0,0 +1,13 @@
+=======
+Credits
+=======
+
+Development Lead
+----------------
+
+* Pawel Nuckowski <p.nuckowski@gmail.com>
+
+Contributors
+------------
+
+None yet. Why not be the first?
diff --git a/contrib/python/aioresponses/LICENSE b/contrib/python/aioresponses/LICENSE
new file mode 100644
index 0000000000..fe5490da64
--- /dev/null
+++ b/contrib/python/aioresponses/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2016 pnuckowski
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/contrib/python/aioresponses/README.rst b/contrib/python/aioresponses/README.rst
new file mode 100644
index 0000000000..ae63650d0a
--- /dev/null
+++ b/contrib/python/aioresponses/README.rst
@@ -0,0 +1,306 @@
+===============================
+aioresponses
+===============================
+
+.. image:: https://travis-ci.org/pnuckowski/aioresponses.svg?branch=master
+ :target: https://travis-ci.org/pnuckowski/aioresponses
+
+.. image:: https://coveralls.io/repos/github/pnuckowski/aioresponses/badge.svg?branch=master
+ :target: https://coveralls.io/github/pnuckowski/aioresponses?branch=master
+
+.. image:: https://landscape.io/github/pnuckowski/aioresponses/master/landscape.svg?style=flat
+ :target: https://landscape.io/github/pnuckowski/aioresponses/master
+ :alt: Code Health
+
+.. image:: https://pyup.io/repos/github/pnuckowski/aioresponses/shield.svg
+ :target: https://pyup.io/repos/github/pnuckowski/aioresponses/
+ :alt: Updates
+
+.. image:: https://img.shields.io/pypi/v/aioresponses.svg
+ :target: https://pypi.python.org/pypi/aioresponses
+
+.. image:: https://readthedocs.org/projects/aioresponses/badge/?version=latest
+ :target: https://aioresponses.readthedocs.io/en/latest/?badge=latest
+ :alt: Documentation Status
+
+
+Aioresponses is a helper for mocking/faking web requests made with the Python aiohttp package.
+
+For the *requests* module there are many packages that help with testing (e.g. *httpretty*, *responses*, *requests-mock*).
+
+Testing asynchronous HTTP requests, however, is a bit harder (at least at the beginning).
+The purpose of this package is to provide an easy way to test asynchronous HTTP requests.
+
+Installing
+----------
+
+.. code:: bash
+
+ $ pip install aioresponses
+
+Supported versions
+------------------
+- Python 3.7+
+- aiohttp>=3.3.0,<4.0.0
+
+Usage
+--------
+
+To mock out HTTP requests, use *aioresponses* as a method decorator or as a context manager.
+
+Response *status* code, *body*, *payload* (for JSON responses) and *headers* can be mocked.
+
+Supported HTTP methods: **GET**, **POST**, **PUT**, **PATCH**, **DELETE** and **OPTIONS**.
+
+.. code:: python
+
+ import aiohttp
+ import asyncio
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_request(mocked):
+ loop = asyncio.get_event_loop()
+ mocked.get('http://example.com', status=200, body='test')
+ session = aiohttp.ClientSession()
+ resp = loop.run_until_complete(session.get('http://example.com'))
+
+ assert resp.status == 200
+ mocked.assert_called_once_with('http://example.com')
+
+
+For convenience, use the *payload* argument to mock out a JSON response, as in the example below.
+
+**as a context manager**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ def test_ctx():
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+ with aioresponses() as m:
+ m.get('http://test.example.com', payload=dict(foo='bar'))
+
+ resp = loop.run_until_complete(session.get('http://test.example.com'))
+ data = loop.run_until_complete(resp.json())
+
+ assert dict(foo='bar') == data
+ m.assert_called_once_with('http://test.example.com')
+
+**aioresponses allows you to mock out any HTTP headers**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_http_headers(m):
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+ m.post(
+ 'http://example.com',
+ payload=dict(),
+ headers=dict(connection='keep-alive'),
+ )
+
+ resp = loop.run_until_complete(session.post('http://example.com'))
+
+ # note that we pass 'connection' but get 'Connection' (capitalized)
+        # underneath, `multidict` is used to work with HTTP headers
+ assert resp.headers['Connection'] == 'keep-alive'
+ m.assert_called_once_with('http://example.com', method='POST')
+
+**allows registering different responses for the same URL**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_multiple_responses(m):
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+ m.get('http://example.com', status=500)
+ m.get('http://example.com', status=200)
+
+ resp1 = loop.run_until_complete(session.get('http://example.com'))
+ resp2 = loop.run_until_complete(session.get('http://example.com'))
+
+ assert resp1.status == 500
+ assert resp2.status == 200
+
+
+**Repeat response for the same url**
+
+E.g. for cases where you want to test retry mechanisms
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_multiple_responses(m):
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+ m.get('http://example.com', status=500, repeat=True)
+ m.get('http://example.com', status=200) # will not take effect
+
+ resp1 = loop.run_until_complete(session.get('http://example.com'))
+ resp2 = loop.run_until_complete(session.get('http://example.com'))
+
+ assert resp1.status == 500
+ assert resp2.status == 500
+
+
+**match URLs with regular expressions**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ import re
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_regexp_example(m):
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+ pattern = re.compile(r'^http://example\.com/api\?foo=.*$')
+ m.get(pattern, status=200)
+
+ resp = loop.run_until_complete(session.get('http://example.com/api?foo=bar'))
+
+ assert resp.status == 200
+
+**allows mocking redirect responses**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_redirect_example(m):
+ loop = asyncio.get_event_loop()
+ session = aiohttp.ClientSession()
+
+ # absolute urls are supported
+ m.get(
+ 'http://example.com/',
+ headers={'Location': 'http://another.com/'},
+ status=307
+ )
+
+ resp = loop.run_until_complete(
+ session.get('http://example.com/', allow_redirects=True)
+ )
+ assert resp.url == 'http://another.com/'
+
+ # and also relative
+ m.get(
+ 'http://example.com/',
+ headers={'Location': '/test'},
+ status=307
+ )
+ resp = loop.run_until_complete(
+ session.get('http://example.com/', allow_redirects=True)
+ )
+ assert resp.url == 'http://example.com/test'
+
+**allows passing requests through to a specified list of servers**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import aioresponses
+
+ @aioresponses(passthrough=['http://backend'])
+ def test_passthrough(m, test_client):
+ session = aiohttp.ClientSession()
+ # this will actually perform a request
+        resp = asyncio.get_event_loop().run_until_complete(session.get('http://backend/api'))
+
+
+**aioresponses allows raising an exception**
+
+.. code:: python
+
+ import asyncio
+ from aiohttp import ClientSession
+ from aiohttp.http_exceptions import HttpProcessingError
+ from aioresponses import aioresponses
+
+ @aioresponses()
+ def test_how_to_throw_an_exception(m, test_client):
+ loop = asyncio.get_event_loop()
+ session = ClientSession()
+ m.get('http://example.com/api', exception=HttpProcessingError('test'))
+
+ # calling
+ # loop.run_until_complete(session.get('http://example.com/api'))
+ # will throw an exception.
+
+
+**aioresponses allows using callbacks to provide dynamic responses**
+
+.. code:: python
+
+ import asyncio
+ import aiohttp
+ from aioresponses import CallbackResult, aioresponses
+
+ def callback(url, **kwargs):
+ return CallbackResult(status=418)
+
+ @aioresponses()
+ def test_callback(m, test_client):
+ loop = asyncio.get_event_loop()
+        session = aiohttp.ClientSession()
+ m.get('http://example.com', callback=callback)
+
+ resp = loop.run_until_complete(session.get('http://example.com'))
+
+ assert resp.status == 418
+
+
+**aioresponses can be used in a pytest fixture**
+
+.. code:: python
+
+ import pytest
+ from aioresponses import aioresponses
+
+ @pytest.fixture
+ def mock_aioresponse():
+ with aioresponses() as m:
+ yield m
+
+
+Features
+--------
+* Easy to mock out HTTP requests made by *aiohttp.ClientSession*
+
+
+License
+-------
+* Free software: MIT license
+
+Credits
+-------
+
+This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
+
+.. _Cookiecutter: https://github.com/audreyr/cookiecutter
+.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
diff --git a/contrib/python/aioresponses/aioresponses/__init__.py b/contrib/python/aioresponses/aioresponses/__init__.py
new file mode 100644
index 0000000000..c61652c9aa
--- /dev/null
+++ b/contrib/python/aioresponses/aioresponses/__init__.py
@@ -0,0 +1,9 @@
+# -*- coding: utf-8 -*-
+from .core import CallbackResult, aioresponses
+
+__version__ = '0.7.3'
+
+__all__ = [
+ 'CallbackResult',
+ 'aioresponses',
+]
diff --git a/contrib/python/aioresponses/aioresponses/compat.py b/contrib/python/aioresponses/aioresponses/compat.py
new file mode 100644
index 0000000000..aa8771d8d6
--- /dev/null
+++ b/contrib/python/aioresponses/aioresponses/compat.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+import asyncio # noqa: F401
+import sys
+from typing import Dict, Optional, Union # noqa
+from urllib.parse import parse_qsl, urlencode
+
+from aiohttp import __version__ as aiohttp_version, StreamReader
+from aiohttp.client_proto import ResponseHandler
+from multidict import MultiDict
+from packaging.version import Version
+from yarl import URL
+
+if sys.version_info < (3, 7):
+ from re import _pattern_type as Pattern
+else:
+ from re import Pattern
+
+AIOHTTP_VERSION = Version(aiohttp_version)
+
+
+def stream_reader_factory( # noqa
+ loop: 'Optional[asyncio.AbstractEventLoop]' = None
+) -> StreamReader:
+ protocol = ResponseHandler(loop=loop)
+ return StreamReader(protocol, limit=2 ** 16, loop=loop)
+
+
+def merge_params(
+ url: 'Union[URL, str]',
+ params: Optional[Dict] = None
+) -> 'URL':
+ url = URL(url)
+ if params:
+ query_params = MultiDict(url.query)
+ query_params.extend(url.with_query(params).query)
+ return url.with_query(query_params)
+ return url
+
+
+def normalize_url(url: 'Union[URL, str]') -> 'URL':
+ """Normalize url to make comparisons."""
+ url = URL(url)
+ return url.with_query(urlencode(sorted(parse_qsl(url.query_string))))
+
+
+try:
+ from aiohttp import RequestInfo
+except ImportError:
+ class RequestInfo(object):
+ __slots__ = ('url', 'method', 'headers', 'real_url')
+
+ def __init__(
+ self, url: URL, method: str, headers: Dict, real_url: str
+ ):
+ self.url = url
+ self.method = method
+ self.headers = headers
+ self.real_url = real_url
+
+__all__ = [
+ 'URL',
+ 'Pattern',
+ 'RequestInfo',
+ 'AIOHTTP_VERSION',
+ 'merge_params',
+ 'stream_reader_factory',
+ 'normalize_url',
+]
diff --git a/contrib/python/aioresponses/aioresponses/core.py b/contrib/python/aioresponses/aioresponses/core.py
new file mode 100644
index 0000000000..2bb6d57365
--- /dev/null
+++ b/contrib/python/aioresponses/aioresponses/core.py
@@ -0,0 +1,549 @@
+# -*- coding: utf-8 -*-
+import asyncio
+import copy
+import inspect
+import json
+from collections import namedtuple
+from functools import wraps
+from typing import (
+ Any,
+ Callable,
+ cast,
+ Dict,
+ List,
+ Optional,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+)
+from unittest.mock import Mock, patch
+from uuid import uuid4
+
+from aiohttp import (
+ ClientConnectionError,
+ ClientResponse,
+ ClientSession,
+ hdrs,
+ http
+)
+from aiohttp.helpers import TimerNoop
+from multidict import CIMultiDict, CIMultiDictProxy
+
+from .compat import (
+ URL,
+ Pattern,
+ stream_reader_factory,
+ merge_params,
+ normalize_url,
+ RequestInfo,
+)
+
+
+_FuncT = TypeVar("_FuncT", bound=Callable[..., Any])
+
+
+class CallbackResult:
+
+ def __init__(self, method: str = hdrs.METH_GET,
+ status: int = 200,
+ body: Union[str, bytes] = '',
+ content_type: str = 'application/json',
+ payload: Optional[Dict] = None,
+ headers: Optional[Dict] = None,
+ response_class: Optional[Type[ClientResponse]] = None,
+ reason: Optional[str] = None):
+ self.method = method
+ self.status = status
+ self.body = body
+ self.content_type = content_type
+ self.payload = payload
+ self.headers = headers
+ self.response_class = response_class
+ self.reason = reason
+
+
+class RequestMatch(object):
+ url_or_pattern = None # type: Union[URL, Pattern]
+
+ def __init__(self, url: Union[URL, str, Pattern],
+ method: str = hdrs.METH_GET,
+ status: int = 200,
+ body: Union[str, bytes] = '',
+ payload: Optional[Dict] = None,
+ exception: Optional[Exception] = None,
+ headers: Optional[Dict] = None,
+ content_type: str = 'application/json',
+ response_class: Optional[Type[ClientResponse]] = None,
+ timeout: bool = False,
+ repeat: bool = False,
+ reason: Optional[str] = None,
+ callback: Optional[Callable] = None):
+ if isinstance(url, Pattern):
+ self.url_or_pattern = url
+ self.match_func = self.match_regexp
+ else:
+ self.url_or_pattern = normalize_url(url)
+ self.match_func = self.match_str
+ self.method = method.lower()
+ self.status = status
+ self.body = body
+ self.payload = payload
+ self.exception = exception
+ if timeout:
+ self.exception = asyncio.TimeoutError('Connection timeout test')
+ self.headers = headers
+ self.content_type = content_type
+ self.response_class = response_class
+ self.repeat = repeat
+ self.reason = reason
+ if self.reason is None:
+ try:
+ self.reason = http.RESPONSES[self.status][0]
+ except (IndexError, KeyError):
+ self.reason = ''
+ self.callback = callback
+
+ def match_str(self, url: URL) -> bool:
+ return self.url_or_pattern == url
+
+ def match_regexp(self, url: URL) -> bool:
+ # This method is used if and only if self.url_or_pattern is a pattern.
+ return bool(
+ self.url_or_pattern.match(str(url)) # type:ignore[union-attr]
+ )
+
+ def match(self, method: str, url: URL) -> bool:
+ if self.method != method.lower():
+ return False
+ return self.match_func(url)
+
+ def _build_raw_headers(self, headers: Dict) -> Tuple:
+ """
+ Convert a dict of headers to a tuple of tuples
+
+ Mimics the format of ClientResponse.
+ """
+ raw_headers = []
+ for k, v in headers.items():
+ raw_headers.append((k.encode('utf8'), v.encode('utf8')))
+ return tuple(raw_headers)
+
+ def _build_response(self, url: 'Union[URL, str]',
+ method: str = hdrs.METH_GET,
+ request_headers: Optional[Dict] = None,
+ status: int = 200,
+ body: Union[str, bytes] = '',
+ content_type: str = 'application/json',
+ payload: Optional[Dict] = None,
+ headers: Optional[Dict] = None,
+ response_class: Optional[Type[ClientResponse]] = None,
+ reason: Optional[str] = None) -> ClientResponse:
+ if response_class is None:
+ response_class = ClientResponse
+ if payload is not None:
+ body = json.dumps(payload)
+ if not isinstance(body, bytes):
+ body = str.encode(body)
+ if request_headers is None:
+ request_headers = {}
+ loop = Mock()
+ loop.get_debug = Mock()
+ loop.get_debug.return_value = True
+ kwargs = {} # type: Dict[str, Any]
+ kwargs['request_info'] = RequestInfo(
+ url=url,
+ method=method,
+ headers=CIMultiDictProxy(CIMultiDict(**request_headers)),
+ )
+ kwargs['writer'] = None
+ kwargs['continue100'] = None
+ kwargs['timer'] = TimerNoop()
+ kwargs['traces'] = []
+ kwargs['loop'] = loop
+ kwargs['session'] = None
+
+ # We need to initialize headers manually
+ _headers = CIMultiDict({hdrs.CONTENT_TYPE: content_type})
+ if headers:
+ _headers.update(headers)
+ raw_headers = self._build_raw_headers(_headers)
+ resp = response_class(method, url, **kwargs)
+
+ for hdr in _headers.getall(hdrs.SET_COOKIE, ()):
+ resp.cookies.load(hdr)
+
+ # Reified attributes
+ resp._headers = _headers
+ resp._raw_headers = raw_headers
+
+ resp.status = status
+ resp.reason = reason
+ resp.content = stream_reader_factory(loop)
+ resp.content.feed_data(body)
+ resp.content.feed_eof()
+ return resp
+
+ async def build_response(
+ self, url: URL, **kwargs: Any
+ ) -> 'Union[ClientResponse, Exception]':
+ if callable(self.callback):
+ if asyncio.iscoroutinefunction(self.callback):
+ result = await self.callback(url, **kwargs)
+ else:
+ result = self.callback(url, **kwargs)
+ else:
+ result = None
+
+ if self.exception is not None:
+ return self.exception
+
+ result = self if result is None else result
+ resp = self._build_response(
+ url=url,
+ method=result.method,
+ request_headers=kwargs.get("headers"),
+ status=result.status,
+ body=result.body,
+ content_type=result.content_type,
+ payload=result.payload,
+ headers=result.headers,
+ response_class=result.response_class,
+ reason=result.reason)
+ return resp
+
+
+RequestCall = namedtuple('RequestCall', ['args', 'kwargs'])
+
+
+class aioresponses(object):
+ """Mock aiohttp requests made by ClientSession."""
+ _matches = None # type: Dict[str, RequestMatch]
+ _responses = None # type: List[ClientResponse]
+ requests = None # type: Dict
+
+ def __init__(self, **kwargs: Any):
+ self._param = kwargs.pop('param', None)
+ self._passthrough = kwargs.pop('passthrough', [])
+ self.patcher = patch('aiohttp.client.ClientSession._request',
+ side_effect=self._request_mock,
+ autospec=True)
+ self.requests = {}
+
+ def __enter__(self) -> 'aioresponses':
+ self.start()
+ return self
+
+ def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+ self.stop()
+
+ def __call__(self, f: _FuncT) -> _FuncT:
+ def _pack_arguments(ctx, *args, **kwargs) -> Tuple[Tuple, Dict]:
+ if self._param:
+ kwargs[self._param] = ctx
+ else:
+ args += (ctx,)
+ return args, kwargs
+
+ if asyncio.iscoroutinefunction(f):
+ @wraps(f)
+ async def wrapped(*args, **kwargs):
+ with self as ctx:
+ args, kwargs = _pack_arguments(ctx, *args, **kwargs)
+ return await f(*args, **kwargs)
+ else:
+ @wraps(f)
+ def wrapped(*args, **kwargs):
+ with self as ctx:
+ args, kwargs = _pack_arguments(ctx, *args, **kwargs)
+ return f(*args, **kwargs)
+ return cast(_FuncT, wrapped)
+
+ def clear(self) -> None:
+ self._responses.clear()
+ self._matches.clear()
+
+ def start(self) -> None:
+ self._responses = []
+ self._matches = {}
+ self.patcher.start()
+ self.patcher.return_value = self._request_mock
+
+ def stop(self) -> None:
+ for response in self._responses:
+ response.close()
+ self.patcher.stop()
+ self.clear()
+
+ def head(self, url: 'Union[URL, str, Pattern]', **kwargs: Any) -> None:
+ self.add(url, method=hdrs.METH_HEAD, **kwargs)
+
+ def get(self, url: 'Union[URL, str, Pattern]', **kwargs: Any) -> None:
+ self.add(url, method=hdrs.METH_GET, **kwargs)
+
+ def post(self, url: 'Union[URL, str, Pattern]', **kwargs: Any) -> None:
+ self.add(url, method=hdrs.METH_POST, **kwargs)
+
+ def put(self, url: 'Union[URL, str, Pattern]', **kwargs: Any) -> None:
+ self.add(url, method=hdrs.METH_PUT, **kwargs)
+
+ def patch(self, url: 'Union[URL, str, Pattern]', **kwargs: Any) -> None:
+ self.add(url, method=hdrs.METH_PATCH, **kwargs)
+
+ def delete(self, url: 'Union[URL, str, Pattern]', **kwargs: Any) -> None:
+ self.add(url, method=hdrs.METH_DELETE, **kwargs)
+
+ def options(self, url: 'Union[URL, str, Pattern]', **kwargs: Any) -> None:
+ self.add(url, method=hdrs.METH_OPTIONS, **kwargs)
+
+ def add(self, url: 'Union[URL, str, Pattern]', method: str = hdrs.METH_GET,
+ status: int = 200,
+ body: Union[str, bytes] = '',
+ exception: Optional[Exception] = None,
+ content_type: str = 'application/json',
+ payload: Optional[Dict] = None,
+ headers: Optional[Dict] = None,
+ response_class: Optional[Type[ClientResponse]] = None,
+ repeat: bool = False,
+ timeout: bool = False,
+ reason: Optional[str] = None,
+ callback: Optional[Callable] = None) -> None:
+
+ self._matches[str(uuid4())] = (RequestMatch(
+ url,
+ method=method,
+ status=status,
+ content_type=content_type,
+ body=body,
+ exception=exception,
+ payload=payload,
+ headers=headers,
+ response_class=response_class,
+ repeat=repeat,
+ timeout=timeout,
+ reason=reason,
+ callback=callback,
+ ))
+
+ def _format_call_signature(self, *args, **kwargs) -> str:
+ message = '%s(%%s)' % self.__class__.__name__ or 'mock'
+ formatted_args = ''
+ args_string = ', '.join([repr(arg) for arg in args])
+ kwargs_string = ', '.join([
+ '%s=%r' % (key, value) for key, value in kwargs.items()
+ ])
+ if args_string:
+ formatted_args = args_string
+ if kwargs_string:
+ if formatted_args:
+ formatted_args += ', '
+ formatted_args += kwargs_string
+
+ return message % formatted_args
+
+ def assert_not_called(self):
+ """assert that the mock was never called.
+ """
+ if len(self.requests) != 0:
+ msg = ("Expected '%s' to not have been called. Called %s times."
+ % (self.__class__.__name__,
+ len(self._responses)))
+ raise AssertionError(msg)
+
+ def assert_called(self):
+ """assert that the mock was called at least once.
+ """
+ if len(self.requests) == 0:
+ msg = ("Expected '%s' to have been called."
+ % (self.__class__.__name__,))
+ raise AssertionError(msg)
+
+ def assert_called_once(self):
+ """assert that the mock was called only once.
+ """
+ call_count = len(self.requests)
+ if call_count == 1:
+ call_count = len(list(self.requests.values())[0])
+ if not call_count == 1:
+ msg = ("Expected '%s' to have been called once. Called %s times."
+ % (self.__class__.__name__,
+ call_count))
+
+ raise AssertionError(msg)
+
+ def assert_called_with(self, url: 'Union[URL, str, Pattern]',
+ method: str = hdrs.METH_GET,
+ *args: Any,
+ **kwargs: Any):
+ """assert that the last call was made with the specified arguments.
+
+ Raises an AssertionError if the args and keyword args passed in are
+ different to the last call to the mock."""
+ url = normalize_url(merge_params(url, kwargs.get('params')))
+ method = method.upper()
+ key = (method, url)
+ try:
+ expected = self.requests[key][-1]
+ except KeyError:
+ expected_string = self._format_call_signature(
+ url, method=method, *args, **kwargs
+ )
+ raise AssertionError(
+ '%s call not found' % expected_string
+ )
+ actual = self._build_request_call(method, *args, **kwargs)
+ if not expected == actual:
+ expected_string = self._format_call_signature(
+ expected,
+ )
+ actual_string = self._format_call_signature(
+ actual
+ )
+ raise AssertionError(
+ '%s != %s' % (expected_string, actual_string)
+ )
+
+ def assert_any_call(self, url: 'Union[URL, str, Pattern]',
+ method: str = hdrs.METH_GET,
+ *args: Any,
+ **kwargs: Any):
+ """assert the mock has been called with the specified arguments.
+ The assert passes if the mock has *ever* been called, unlike
+ `assert_called_with` and `assert_called_once_with` that only pass if
+ the call is the most recent one."""
+ url = normalize_url(merge_params(url, kwargs.get('params')))
+ method = method.upper()
+ key = (method, url)
+
+ try:
+ self.requests[key]
+ except KeyError:
+ expected_string = self._format_call_signature(
+ url, method=method, *args, **kwargs
+ )
+ raise AssertionError(
+ '%s call not found' % expected_string
+ )
+
+ def assert_called_once_with(self, *args: Any, **kwargs: Any):
+ """assert that the mock was called once with the specified arguments.
+ Raises an AssertionError if the args and keyword args passed in are
+ different to the only call to the mock."""
+ self.assert_called_once()
+ self.assert_called_with(*args, **kwargs)
+
+ @staticmethod
+ def is_exception(resp_or_exc: Union[ClientResponse, Exception]) -> bool:
+ if inspect.isclass(resp_or_exc):
+ parent_classes = set(inspect.getmro(resp_or_exc))
+ if {Exception, BaseException} & parent_classes:
+ return True
+ else:
+ if isinstance(resp_or_exc, (Exception, BaseException)):
+ return True
+ return False
+
+ async def match(
+ self, method: str,
+ url: URL,
+ allow_redirects: bool = True,
+ **kwargs: Any
+ ) -> Optional['ClientResponse']:
+ history = []
+ while True:
+ for key, matcher in self._matches.items():
+ if matcher.match(method, url):
+ response_or_exc = await matcher.build_response(
+ url, allow_redirects=allow_redirects, **kwargs
+ )
+ break
+ else:
+ return None
+
+ if matcher.repeat is False:
+ del self._matches[key]
+
+ if self.is_exception(response_or_exc):
+ raise response_or_exc
+ # If response_or_exc was an exception, it would have been raised.
+ # At this point we can be sure it's a ClientResponse
+ response: ClientResponse
+ response = response_or_exc # type:ignore[assignment]
+ is_redirect = response.status in (301, 302, 303, 307, 308)
+ if is_redirect and allow_redirects:
+ if hdrs.LOCATION not in response.headers:
+ break
+ history.append(response)
+ redirect_url = URL(response.headers[hdrs.LOCATION])
+ if redirect_url.is_absolute():
+ url = redirect_url
+ else:
+ url = url.join(redirect_url)
+ method = 'get'
+ continue
+ else:
+ break
+
+ response._history = tuple(history)
+ return response
+
+ async def _request_mock(self, orig_self: ClientSession,
+ method: str, url: 'Union[URL, str]',
+ *args: Tuple,
+ **kwargs: Any) -> 'ClientResponse':
+ """Return mocked response object or raise connection error."""
+ if orig_self.closed:
+ raise RuntimeError('Session is closed')
+
+ url_origin = url
+ url = normalize_url(merge_params(url, kwargs.get('params')))
+ url_str = str(url)
+ for prefix in self._passthrough:
+ if url_str.startswith(prefix):
+ return (await self.patcher.temp_original(
+ orig_self, method, url_origin, *args, **kwargs
+ ))
+
+ key = (method, url)
+ self.requests.setdefault(key, [])
+ request_call = self._build_request_call(method, *args, **kwargs)
+ self.requests[key].append(request_call)
+
+ response = await self.match(method, url, **kwargs)
+
+ if response is None:
+ raise ClientConnectionError(
+ 'Connection refused: {} {}'.format(method, url)
+ )
+ self._responses.append(response)
+
+ # Automatically call response.raise_for_status() on a request if the
+ # request was initialized with raise_for_status=True. Also call
+ # response.raise_for_status() if the client session was initialized
+ # with raise_for_status=True, unless the request was called with
+ # raise_for_status=False.
+ raise_for_status = kwargs.get('raise_for_status')
+ if raise_for_status is None:
+ raise_for_status = getattr(
+ orig_self, '_raise_for_status', False
+ )
+ if raise_for_status:
+ response.raise_for_status()
+
+ return response
+
+ def _build_request_call(self, method: str = hdrs.METH_GET,
+ *args: Any,
+ allow_redirects: bool = True,
+ **kwargs: Any):
+ """Return request call."""
+ kwargs.setdefault('allow_redirects', allow_redirects)
+ if method == 'POST':
+ kwargs.setdefault('data', None)
+
+ try:
+ kwargs_copy = copy.deepcopy(kwargs)
+ except (TypeError, ValueError):
+ # Handle the fact that some values cannot be deep copied
+ kwargs_copy = kwargs
+ return RequestCall(args, kwargs_copy)
diff --git a/contrib/python/aioresponses/aioresponses/py.typed b/contrib/python/aioresponses/aioresponses/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/aioresponses/aioresponses/py.typed
diff --git a/contrib/python/aioresponses/ya.make b/contrib/python/aioresponses/ya.make
new file mode 100644
index 0000000000..574b5f85f1
--- /dev/null
+++ b/contrib/python/aioresponses/ya.make
@@ -0,0 +1,33 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(0.7.6)
+
+LICENSE(MIT)
+
+PEERDIR(
+ contrib/python/aiohttp
+)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ aioresponses/__init__.py
+ aioresponses/compat.py
+ aioresponses/core.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/aioresponses/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ aioresponses/py.typed
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/contrib/python/anyio/.dist-info/METADATA b/contrib/python/anyio/.dist-info/METADATA
index be13c8aa0f..747e994c6b 100644
--- a/contrib/python/anyio/.dist-info/METADATA
+++ b/contrib/python/anyio/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: anyio
-Version: 4.4.0
+Version: 4.6.0
Summary: High level compatibility layer for multiple asynchronous event loop implementations
Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
License: MIT
@@ -15,12 +15,12 @@ Classifier: Framework :: AnyIO
Classifier: Typing :: Typed
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
-Requires-Python: >=3.8
+Classifier: Programming Language :: Python :: 3.13
+Requires-Python: >=3.9
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: idna >=2.8
@@ -29,7 +29,7 @@ Requires-Dist: exceptiongroup >=1.0.2 ; python_version < "3.11"
Requires-Dist: typing-extensions >=4.1 ; python_version < "3.11"
Provides-Extra: doc
Requires-Dist: packaging ; extra == 'doc'
-Requires-Dist: Sphinx >=7 ; extra == 'doc'
+Requires-Dist: Sphinx ~=7.4 ; extra == 'doc'
Requires-Dist: sphinx-rtd-theme ; extra == 'doc'
Requires-Dist: sphinx-autodoc-typehints >=1.2.0 ; extra == 'doc'
Provides-Extra: test
@@ -41,9 +41,9 @@ Requires-Dist: psutil >=5.9 ; extra == 'test'
Requires-Dist: pytest >=7.0 ; extra == 'test'
Requires-Dist: pytest-mock >=3.6.1 ; extra == 'test'
Requires-Dist: trustme ; extra == 'test'
-Requires-Dist: uvloop >=0.17 ; (platform_python_implementation == "CPython" and platform_system != "Windows") and extra == 'test'
+Requires-Dist: uvloop >=0.21.0b1 ; (platform_python_implementation == "CPython" and platform_system != "Windows") and extra == 'test'
Provides-Extra: trio
-Requires-Dist: trio >=0.23 ; extra == 'trio'
+Requires-Dist: trio >=0.26.1 ; extra == 'trio'
.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
:target: https://github.com/agronholm/anyio/actions/workflows/test.yml
diff --git a/contrib/python/anyio/anyio/__init__.py b/contrib/python/anyio/anyio/__init__.py
index 7bfe231645..fd9fe06bcf 100644
--- a/contrib/python/anyio/anyio/__init__.py
+++ b/contrib/python/anyio/anyio/__init__.py
@@ -1,7 +1,5 @@
from __future__ import annotations
-from typing import Any
-
from ._core._eventloop import current_time as current_time
from ._core._eventloop import get_all_backends as get_all_backends
from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class
@@ -69,8 +67,8 @@ from ._core._typedattr import TypedAttributeSet as TypedAttributeSet
from ._core._typedattr import typed_attribute as typed_attribute
# Re-export imports so they look like they live directly in this package
-key: str
-value: Any
-for key, value in list(locals().items()):
- if getattr(value, "__module__", "").startswith("anyio."):
- value.__module__ = __name__
+for __value in list(locals().values()):
+ if getattr(__value, "__module__", "").startswith("anyio."):
+ __value.__module__ = __name__
+
+del __value
diff --git a/contrib/python/anyio/anyio/_backends/_asyncio.py b/contrib/python/anyio/anyio/_backends/_asyncio.py
index 43b7cb0e0c..9342fab818 100644
--- a/contrib/python/anyio/anyio/_backends/_asyncio.py
+++ b/contrib/python/anyio/anyio/_backends/_asyncio.py
@@ -4,6 +4,7 @@ import array
import asyncio
import concurrent.futures
import math
+import os
import socket
import sys
import threading
@@ -19,9 +20,18 @@ from asyncio import (
)
from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined]
from collections import OrderedDict, deque
-from collections.abc import AsyncIterator, Generator, Iterable
+from collections.abc import (
+ AsyncGenerator,
+ AsyncIterator,
+ Awaitable,
+ Callable,
+ Collection,
+ Coroutine,
+ Iterable,
+ Sequence,
+)
from concurrent.futures import Future
-from contextlib import suppress
+from contextlib import AbstractContextManager, suppress
from contextvars import Context, copy_context
from dataclasses import dataclass
from functools import partial, wraps
@@ -41,16 +51,7 @@ from types import TracebackType
from typing import (
IO,
Any,
- AsyncGenerator,
- Awaitable,
- Callable,
- Collection,
- ContextManager,
- Coroutine,
- Mapping,
Optional,
- Sequence,
- Tuple,
TypeVar,
cast,
)
@@ -58,7 +59,13 @@ from weakref import WeakKeyDictionary
import sniffio
-from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc
+from .. import (
+ CapacityLimiterStatistics,
+ EventStatistics,
+ LockStatistics,
+ TaskInfo,
+ abc,
+)
from .._core._eventloop import claim_worker_thread, threadlocals
from .._core._exceptions import (
BrokenResourceError,
@@ -66,12 +73,20 @@ from .._core._exceptions import (
ClosedResourceError,
EndOfStream,
WouldBlock,
+ iterate_exceptions,
)
from .._core._sockets import convert_ipv6_sockaddr
from .._core._streams import create_memory_object_stream
-from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter
+from .._core._synchronization import (
+ CapacityLimiter as BaseCapacityLimiter,
+)
from .._core._synchronization import Event as BaseEvent
-from .._core._synchronization import ResourceGuard
+from .._core._synchronization import Lock as BaseLock
+from .._core._synchronization import (
+ ResourceGuard,
+ SemaphoreStatistics,
+)
+from .._core._synchronization import Semaphore as BaseSemaphore
from .._core._tasks import CancelScope as BaseCancelScope
from ..abc import (
AsyncBackend,
@@ -80,6 +95,7 @@ from ..abc import (
UDPPacketType,
UNIXDatagramPacketType,
)
+from ..abc._eventloop import StrOrBytesPath
from ..lowlevel import RunVar
from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
@@ -343,6 +359,14 @@ def _task_started(task: asyncio.Task) -> bool:
#
+def is_anyio_cancellation(exc: CancelledError) -> bool:
+ return (
+ bool(exc.args)
+ and isinstance(exc.args[0], str)
+ and exc.args[0].startswith("Cancelled by cancel scope ")
+ )
+
+
class CancelScope(BaseCancelScope):
def __new__(
cls, *, deadline: float = math.inf, shield: bool = False
@@ -429,35 +453,77 @@ class CancelScope(BaseCancelScope):
host_task_state.cancel_scope = self._parent_scope
- # Restart the cancellation effort in the closest directly cancelled parent
- # scope if this one was shielded
- self._restart_cancellation_in_parent()
+ # Undo all cancellations done by this scope
+ if self._cancelling is not None:
+ while self._cancel_calls:
+ self._cancel_calls -= 1
+ if self._host_task.uncancel() <= self._cancelling:
+ break
- if self._cancel_called and exc_val is not None:
+ # We only swallow the exception iff it was an AnyIO CancelledError, either
+ # directly as exc_val or inside an exception group and there are no cancelled
+ # parent cancel scopes visible to us here
+ not_swallowed_exceptions = 0
+ swallow_exception = False
+ if exc_val is not None:
for exc in iterate_exceptions(exc_val):
- if isinstance(exc, CancelledError):
- self._cancelled_caught = self._uncancel(exc)
- if self._cancelled_caught:
- break
+ if self._cancel_called and isinstance(exc, CancelledError):
+ if not (swallow_exception := self._uncancel(exc)):
+ not_swallowed_exceptions += 1
+ else:
+ not_swallowed_exceptions += 1
- return self._cancelled_caught
+ # Restart the cancellation effort in the closest visible, cancelled parent
+ # scope if necessary
+ self._restart_cancellation_in_parent()
+ return swallow_exception and not not_swallowed_exceptions
- return None
+ @property
+ def _effectively_cancelled(self) -> bool:
+ cancel_scope: CancelScope | None = self
+ while cancel_scope is not None:
+ if cancel_scope._cancel_called:
+ return True
+
+ if cancel_scope.shield:
+ return False
+
+ cancel_scope = cancel_scope._parent_scope
+
+ return False
+
+ @property
+ def _parent_cancellation_is_visible_to_us(self) -> bool:
+ return (
+ self._parent_scope is not None
+ and not self.shield
+ and self._parent_scope._effectively_cancelled
+ )
def _uncancel(self, cancelled_exc: CancelledError) -> bool:
- if sys.version_info < (3, 9) or self._host_task is None:
+ if self._host_task is None:
self._cancel_calls = 0
return True
- # Undo all cancellations done by this scope
- if self._cancelling is not None:
- while self._cancel_calls:
- self._cancel_calls -= 1
- if self._host_task.uncancel() <= self._cancelling:
- return True
+ while True:
+ if is_anyio_cancellation(cancelled_exc):
+ # Only swallow the cancellation exception if it's an AnyIO cancel
+ # exception and there are no other cancel scopes down the line pending
+ # cancellation
+ self._cancelled_caught = (
+ self._effectively_cancelled
+ and not self._parent_cancellation_is_visible_to_us
+ )
+ return self._cancelled_caught
- self._cancel_calls = 0
- return f"Cancelled by cancel scope {id(self):x}" in cancelled_exc.args
+ # Sometimes third party frameworks catch a CancelledError and raise a new
+ # one, so as a workaround we have to look at the previous ones in
+ # __context__ too for a matching cancel message
+ if isinstance(cancelled_exc.__context__, CancelledError):
+ cancelled_exc = cancelled_exc.__context__
+ continue
+
+ return False
def _timeout(self) -> None:
if self._deadline != math.inf:
@@ -481,19 +547,17 @@ class CancelScope(BaseCancelScope):
should_retry = False
current = current_task()
for task in self._tasks:
+ should_retry = True
if task._must_cancel: # type: ignore[attr-defined]
continue
# The task is eligible for cancellation if it has started
- should_retry = True
if task is not current and (task is self._host_task or _task_started(task)):
waiter = task._fut_waiter # type: ignore[attr-defined]
if not isinstance(waiter, asyncio.Future) or not waiter.done():
- origin._cancel_calls += 1
- if sys.version_info >= (3, 9):
- task.cancel(f"Cancelled by cancel scope {id(origin):x}")
- else:
- task.cancel()
+ task.cancel(f"Cancelled by cancel scope {id(origin):x}")
+ if task is origin._host_task:
+ origin._cancel_calls += 1
# Deliver cancellation to child scopes that aren't shielded or running their own
# cancellation callbacks
@@ -531,17 +595,6 @@ class CancelScope(BaseCancelScope):
scope = scope._parent_scope
- def _parent_cancelled(self) -> bool:
- # Check whether any parent has been cancelled
- cancel_scope = self._parent_scope
- while cancel_scope is not None and not cancel_scope._shield:
- if cancel_scope._cancel_called:
- return True
- else:
- cancel_scope = cancel_scope._parent_scope
-
- return False
-
def cancel(self) -> None:
if not self._cancel_called:
if self._timeout_handle:
@@ -630,16 +683,6 @@ class _AsyncioTaskStatus(abc.TaskStatus):
_task_states[task].parent_id = self._parent_id
-def iterate_exceptions(
- exception: BaseException,
-) -> Generator[BaseException, None, None]:
- if isinstance(exception, BaseExceptionGroup):
- for exc in exception.exceptions:
- yield from iterate_exceptions(exc)
- else:
- yield exception
-
-
class TaskGroup(abc.TaskGroup):
def __init__(self) -> None:
self.cancel_scope: CancelScope = CancelScope()
@@ -658,38 +701,50 @@ class TaskGroup(abc.TaskGroup):
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> bool | None:
- ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb)
if exc_val is not None:
self.cancel_scope.cancel()
if not isinstance(exc_val, CancelledError):
self._exceptions.append(exc_val)
- cancelled_exc_while_waiting_tasks: CancelledError | None = None
- while self._tasks:
- try:
- await asyncio.wait(self._tasks)
- except CancelledError as exc:
- # This task was cancelled natively; reraise the CancelledError later
- # unless this task was already interrupted by another exception
- self.cancel_scope.cancel()
- if cancelled_exc_while_waiting_tasks is None:
- cancelled_exc_while_waiting_tasks = exc
+ try:
+ if self._tasks:
+ with CancelScope() as wait_scope:
+ while self._tasks:
+ try:
+ await asyncio.wait(self._tasks)
+ except CancelledError as exc:
+ # Shield the scope against further cancellation attempts,
+ # as they're not productive (#695)
+ wait_scope.shield = True
+ self.cancel_scope.cancel()
+
+ # Set exc_val from the cancellation exception if it was
+ # previously unset. However, we should not replace a native
+                            # cancellation exception with one raised by a cancel scope.
+ if exc_val is None or (
+ isinstance(exc_val, CancelledError)
+ and not is_anyio_cancellation(exc)
+ ):
+ exc_val = exc
+ else:
+ # If there are no child tasks to wait on, run at least one checkpoint
+ # anyway
+ await AsyncIOBackend.cancel_shielded_checkpoint()
- self._active = False
- if self._exceptions:
- raise BaseExceptionGroup(
- "unhandled errors in a TaskGroup", self._exceptions
- )
+ self._active = False
+ if self._exceptions:
+ raise BaseExceptionGroup(
+ "unhandled errors in a TaskGroup", self._exceptions
+ )
+ elif exc_val:
+ raise exc_val
+ except BaseException as exc:
+ if self.cancel_scope.__exit__(type(exc), exc, exc.__traceback__):
+ return True
- # Raise the CancelledError received while waiting for child tasks to exit,
- # unless the context manager itself was previously exited with another
- # exception, or if any of the child tasks raised an exception other than
- # CancelledError
- if cancelled_exc_while_waiting_tasks:
- if exc_val is None or ignore_exception:
- raise cancelled_exc_while_waiting_tasks
+ raise
- return ignore_exception
+ return self.cancel_scope.__exit__(exc_type, exc_val, exc_tb)
def _spawn(
self,
@@ -725,7 +780,7 @@ class TaskGroup(abc.TaskGroup):
if not isinstance(exc, CancelledError):
self._exceptions.append(exc)
- if not self.cancel_scope._parent_cancelled():
+ if not self.cancel_scope._effectively_cancelled:
self.cancel_scope.cancel()
else:
task_status_future.set_exception(exc)
@@ -801,7 +856,7 @@ class TaskGroup(abc.TaskGroup):
# Threads
#
-_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]]
+_Retval_Queue_Type = tuple[Optional[T_Retval], Optional[BaseException]]
class WorkerThread(Thread):
@@ -925,7 +980,7 @@ class StreamReaderWrapper(abc.ByteReceiveStream):
raise EndOfStream
async def aclose(self) -> None:
- self._stream.feed_eof()
+ self._stream.set_exception(ClosedResourceError())
await AsyncIOBackend.checkpoint()
@@ -950,7 +1005,7 @@ class Process(abc.Process):
_stderr: StreamReaderWrapper | None
async def aclose(self) -> None:
- with CancelScope(shield=True):
+ with CancelScope(shield=True) as scope:
if self._stdin:
await self._stdin.aclose()
if self._stdout:
@@ -958,14 +1013,14 @@ class Process(abc.Process):
if self._stderr:
await self._stderr.aclose()
- try:
- await self.wait()
- except BaseException:
- self.kill()
- with CancelScope(shield=True):
+ scope.shield = False
+ try:
await self.wait()
-
- raise
+ except BaseException:
+ scope.shield = True
+ self.kill()
+ await self.wait()
+ raise
async def wait(self) -> int:
return await self._process.wait()
@@ -1073,7 +1128,8 @@ class StreamProtocol(asyncio.Protocol):
self.write_event.set()
def data_received(self, data: bytes) -> None:
- self.read_queue.append(data)
+        # ProactorEventLoop sometimes sends bytearray instead of bytes
+ self.read_queue.append(bytes(data))
self.read_event.set()
def eof_received(self) -> bool | None:
@@ -1665,6 +1721,154 @@ class Event(BaseEvent):
return EventStatistics(len(self._event._waiters))
+class Lock(BaseLock):
+ def __new__(cls, *, fast_acquire: bool = False) -> Lock:
+ return object.__new__(cls)
+
+ def __init__(self, *, fast_acquire: bool = False) -> None:
+ self._fast_acquire = fast_acquire
+ self._owner_task: asyncio.Task | None = None
+ self._waiters: deque[tuple[asyncio.Task, asyncio.Future]] = deque()
+
+ async def acquire(self) -> None:
+ if self._owner_task is None and not self._waiters:
+ await AsyncIOBackend.checkpoint_if_cancelled()
+ self._owner_task = current_task()
+
+ # Unless on the "fast path", yield control of the event loop so that other
+ # tasks can run too
+ if not self._fast_acquire:
+ try:
+ await AsyncIOBackend.cancel_shielded_checkpoint()
+ except CancelledError:
+ self.release()
+ raise
+
+ return
+
+ task = cast(asyncio.Task, current_task())
+ fut: asyncio.Future[None] = asyncio.Future()
+ item = task, fut
+ self._waiters.append(item)
+ try:
+ await fut
+ except CancelledError:
+ self._waiters.remove(item)
+ if self._owner_task is task:
+ self.release()
+
+ raise
+
+ self._waiters.remove(item)
+
+ def acquire_nowait(self) -> None:
+ if self._owner_task is None and not self._waiters:
+ self._owner_task = current_task()
+ return
+
+ raise WouldBlock
+
+ def locked(self) -> bool:
+ return self._owner_task is not None
+
+ def release(self) -> None:
+ if self._owner_task != current_task():
+ raise RuntimeError("The current task is not holding this lock")
+
+ for task, fut in self._waiters:
+ if not fut.cancelled():
+ self._owner_task = task
+ fut.set_result(None)
+ return
+
+ self._owner_task = None
+
+ def statistics(self) -> LockStatistics:
+ task_info = AsyncIOTaskInfo(self._owner_task) if self._owner_task else None
+ return LockStatistics(self.locked(), task_info, len(self._waiters))
+
+
+class Semaphore(BaseSemaphore):
+ def __new__(
+ cls,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ) -> Semaphore:
+ return object.__new__(cls)
+
+ def __init__(
+ self,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ):
+ super().__init__(initial_value, max_value=max_value)
+ self._value = initial_value
+ self._max_value = max_value
+ self._fast_acquire = fast_acquire
+ self._waiters: deque[asyncio.Future[None]] = deque()
+
+ async def acquire(self) -> None:
+ if self._value > 0 and not self._waiters:
+ await AsyncIOBackend.checkpoint_if_cancelled()
+ self._value -= 1
+
+ # Unless on the "fast path", yield control of the event loop so that other
+ # tasks can run too
+ if not self._fast_acquire:
+ try:
+ await AsyncIOBackend.cancel_shielded_checkpoint()
+ except CancelledError:
+ self.release()
+ raise
+
+ return
+
+ fut: asyncio.Future[None] = asyncio.Future()
+ self._waiters.append(fut)
+ try:
+ await fut
+ except CancelledError:
+ try:
+ self._waiters.remove(fut)
+ except ValueError:
+ self.release()
+
+ raise
+
+ def acquire_nowait(self) -> None:
+ if self._value == 0:
+ raise WouldBlock
+
+ self._value -= 1
+
+ def release(self) -> None:
+ if self._max_value is not None and self._value == self._max_value:
+ raise ValueError("semaphore released too many times")
+
+ for fut in self._waiters:
+ if not fut.cancelled():
+ fut.set_result(None)
+ self._waiters.remove(fut)
+ return
+
+ self._value += 1
+
+ @property
+ def value(self) -> int:
+ return self._value
+
+ @property
+ def max_value(self) -> int | None:
+ return self._max_value
+
+ def statistics(self) -> SemaphoreStatistics:
+ return SemaphoreStatistics(len(self._waiters))
+
+
class CapacityLimiter(BaseCapacityLimiter):
_total_tokens: float = 0
@@ -1861,7 +2065,7 @@ class AsyncIOTaskInfo(TaskInfo):
if task_state := _task_states.get(task):
if cancel_scope := task_state.cancel_scope:
- return cancel_scope.cancel_called or cancel_scope._parent_cancelled()
+ return cancel_scope._effectively_cancelled
return False
@@ -1926,13 +2130,23 @@ class TestRunner(abc.TestRunner):
tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]]
],
) -> None:
+ from _pytest.outcomes import OutcomeException
+
with receive_stream, self._send_stream:
async for coro, future in receive_stream:
try:
retval = await coro
+ except CancelledError as exc:
+ if not future.cancelled():
+ future.cancel(*exc.args)
+
+ raise
except BaseException as exc:
if not future.cancelled():
future.set_exception(exc)
+
+ if not isinstance(exc, (Exception, OutcomeException)):
+ raise
else:
if not future.cancelled():
future.set_result(retval)
@@ -1945,7 +2159,7 @@ class TestRunner(abc.TestRunner):
) -> T_Retval:
if not self._runner_task:
self._send_stream, receive_stream = create_memory_object_stream[
- Tuple[Awaitable[Any], asyncio.Future]
+ tuple[Awaitable[Any], asyncio.Future]
](1)
self._runner_task = self.get_loop().create_task(
self._run_tests_and_fixtures(receive_stream)
@@ -2114,6 +2328,20 @@ class AsyncIOBackend(AsyncBackend):
return Event()
@classmethod
+ def create_lock(cls, *, fast_acquire: bool) -> abc.Lock:
+ return Lock(fast_acquire=fast_acquire)
+
+ @classmethod
+ def create_semaphore(
+ cls,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ) -> abc.Semaphore:
+ return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire)
+
+ @classmethod
def create_capacity_limiter(cls, total_tokens: float) -> abc.CapacityLimiter:
return CapacityLimiter(total_tokens)
@@ -2245,26 +2473,24 @@ class AsyncIOBackend(AsyncBackend):
@classmethod
async def open_process(
cls,
- command: str | bytes | Sequence[str | bytes],
+ command: StrOrBytesPath | Sequence[StrOrBytesPath],
*,
- shell: bool,
stdin: int | IO[Any] | None,
stdout: int | IO[Any] | None,
stderr: int | IO[Any] | None,
- cwd: str | bytes | PathLike | None = None,
- env: Mapping[str, str] | None = None,
- start_new_session: bool = False,
+ **kwargs: Any,
) -> Process:
await cls.checkpoint()
- if shell:
+ if isinstance(command, PathLike):
+ command = os.fspath(command)
+
+ if isinstance(command, (str, bytes)):
process = await asyncio.create_subprocess_shell(
- cast("str | bytes", command),
+ command,
stdin=stdin,
stdout=stdout,
stderr=stderr,
- cwd=cwd,
- env=env,
- start_new_session=start_new_session,
+ **kwargs,
)
else:
process = await asyncio.create_subprocess_exec(
@@ -2272,9 +2498,7 @@ class AsyncIOBackend(AsyncBackend):
stdin=stdin,
stdout=stdout,
stderr=stderr,
- cwd=cwd,
- env=env,
- start_new_session=start_new_session,
+ **kwargs,
)
stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None
@@ -2289,7 +2513,7 @@ class AsyncIOBackend(AsyncBackend):
name="AnyIO process pool shutdown task",
)
find_root_task().add_done_callback(
- partial(_forcibly_shutdown_process_pool_on_exit, workers)
+ partial(_forcibly_shutdown_process_pool_on_exit, workers) # type:ignore[arg-type]
)
@classmethod
@@ -2297,7 +2521,7 @@ class AsyncIOBackend(AsyncBackend):
cls, host: str, port: int, local_address: IPSockAddrType | None = None
) -> abc.SocketStream:
transport, protocol = cast(
- Tuple[asyncio.Transport, StreamProtocol],
+ tuple[asyncio.Transport, StreamProtocol],
await get_running_loop().create_connection(
StreamProtocol, host, port, local_addr=local_address
),
@@ -2476,7 +2700,7 @@ class AsyncIOBackend(AsyncBackend):
@classmethod
def open_signal_receiver(
cls, *signals: Signals
- ) -> ContextManager[AsyncIterator[Signals]]:
+ ) -> AbstractContextManager[AsyncIterator[Signals]]:
return _SignalReceiver(signals)
@classmethod
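The asyncio backend above gains native Lock and Semaphore implementations with an optional fast_acquire flag. A minimal usage sketch through the public anyio API (assuming anyio 4.6, per the version bump later in this diff):

    import anyio

    async def main() -> None:
        # fast_acquire=True skips the post-acquire scheduling checkpoint on the
        # uncontended path, trading strict fairness for lower latency.
        lock = anyio.Lock(fast_acquire=True)
        semaphore = anyio.Semaphore(2, max_value=2, fast_acquire=True)

        async with lock, semaphore:
            ...  # critical section guarded by both primitives

    anyio.run(main)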
diff --git a/contrib/python/anyio/anyio/_backends/_trio.py b/contrib/python/anyio/anyio/_backends/_trio.py
index cf6f3db789..de2189ce78 100644
--- a/contrib/python/anyio/anyio/_backends/_trio.py
+++ b/contrib/python/anyio/anyio/_backends/_trio.py
@@ -2,12 +2,23 @@ from __future__ import annotations
import array
import math
+import os
import socket
import sys
import types
import weakref
-from collections.abc import AsyncIterator, Iterable
+from collections.abc import (
+ AsyncGenerator,
+ AsyncIterator,
+ Awaitable,
+ Callable,
+ Collection,
+ Coroutine,
+ Iterable,
+ Sequence,
+)
from concurrent.futures import Future
+from contextlib import AbstractContextManager
from dataclasses import dataclass
from functools import partial
from io import IOBase
@@ -18,16 +29,8 @@ from types import TracebackType
from typing import (
IO,
Any,
- AsyncGenerator,
- Awaitable,
- Callable,
- Collection,
- ContextManager,
- Coroutine,
Generic,
- Mapping,
NoReturn,
- Sequence,
TypeVar,
cast,
overload,
@@ -45,7 +48,14 @@ from trio.lowlevel import (
from trio.socket import SocketType as TrioSocketType
from trio.to_thread import run_sync
-from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc
+from .. import (
+ CapacityLimiterStatistics,
+ EventStatistics,
+ LockStatistics,
+ TaskInfo,
+ WouldBlock,
+ abc,
+)
from .._core._eventloop import claim_worker_thread
from .._core._exceptions import (
BrokenResourceError,
@@ -55,12 +65,19 @@ from .._core._exceptions import (
)
from .._core._sockets import convert_ipv6_sockaddr
from .._core._streams import create_memory_object_stream
-from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter
+from .._core._synchronization import (
+ CapacityLimiter as BaseCapacityLimiter,
+)
from .._core._synchronization import Event as BaseEvent
-from .._core._synchronization import ResourceGuard
+from .._core._synchronization import Lock as BaseLock
+from .._core._synchronization import (
+ ResourceGuard,
+ SemaphoreStatistics,
+)
+from .._core._synchronization import Semaphore as BaseSemaphore
from .._core._tasks import CancelScope as BaseCancelScope
from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType
-from ..abc._eventloop import AsyncBackend
+from ..abc._eventloop import AsyncBackend, StrOrBytesPath
from ..streams.memory import MemoryObjectSendStream
if sys.version_info >= (3, 10):
@@ -637,6 +654,100 @@ class Event(BaseEvent):
self.__original.set()
+class Lock(BaseLock):
+ def __new__(cls, *, fast_acquire: bool = False) -> Lock:
+ return object.__new__(cls)
+
+ def __init__(self, *, fast_acquire: bool = False) -> None:
+ self._fast_acquire = fast_acquire
+ self.__original = trio.Lock()
+
+ async def acquire(self) -> None:
+ if not self._fast_acquire:
+ await self.__original.acquire()
+ return
+
+ # This is the "fast path" where we don't let other tasks run
+ await trio.lowlevel.checkpoint_if_cancelled()
+ try:
+ self.__original.acquire_nowait()
+ except trio.WouldBlock:
+ await self.__original._lot.park()
+
+ def acquire_nowait(self) -> None:
+ try:
+ self.__original.acquire_nowait()
+ except trio.WouldBlock:
+ raise WouldBlock from None
+
+ def locked(self) -> bool:
+ return self.__original.locked()
+
+ def release(self) -> None:
+ self.__original.release()
+
+ def statistics(self) -> LockStatistics:
+ orig_statistics = self.__original.statistics()
+ owner = TrioTaskInfo(orig_statistics.owner) if orig_statistics.owner else None
+ return LockStatistics(
+ orig_statistics.locked, owner, orig_statistics.tasks_waiting
+ )
+
+
+class Semaphore(BaseSemaphore):
+ def __new__(
+ cls,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ) -> Semaphore:
+ return object.__new__(cls)
+
+ def __init__(
+ self,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ) -> None:
+ super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire)
+ self.__original = trio.Semaphore(initial_value, max_value=max_value)
+
+ async def acquire(self) -> None:
+ if not self._fast_acquire:
+ await self.__original.acquire()
+ return
+
+ # This is the "fast path" where we don't let other tasks run
+ await trio.lowlevel.checkpoint_if_cancelled()
+ try:
+ self.__original.acquire_nowait()
+ except trio.WouldBlock:
+ await self.__original._lot.park()
+
+ def acquire_nowait(self) -> None:
+ try:
+ self.__original.acquire_nowait()
+ except trio.WouldBlock:
+ raise WouldBlock from None
+
+ @property
+ def max_value(self) -> int | None:
+ return self.__original.max_value
+
+ @property
+ def value(self) -> int:
+ return self.__original.value
+
+ def release(self) -> None:
+ self.__original.release()
+
+ def statistics(self) -> SemaphoreStatistics:
+ orig_statistics = self.__original.statistics()
+ return SemaphoreStatistics(orig_statistics.tasks_waiting)
+
+
class CapacityLimiter(BaseCapacityLimiter):
def __new__(
cls,
@@ -916,6 +1027,20 @@ class TrioBackend(AsyncBackend):
return Event()
@classmethod
+ def create_lock(cls, *, fast_acquire: bool) -> Lock:
+ return Lock(fast_acquire=fast_acquire)
+
+ @classmethod
+ def create_semaphore(
+ cls,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ) -> abc.Semaphore:
+ return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire)
+
+ @classmethod
def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter:
return CapacityLimiter(total_tokens)
@@ -967,26 +1092,39 @@ class TrioBackend(AsyncBackend):
@classmethod
async def open_process(
cls,
- command: str | bytes | Sequence[str | bytes],
+ command: StrOrBytesPath | Sequence[StrOrBytesPath],
*,
- shell: bool,
stdin: int | IO[Any] | None,
stdout: int | IO[Any] | None,
stderr: int | IO[Any] | None,
- cwd: str | bytes | PathLike | None = None,
- env: Mapping[str, str] | None = None,
- start_new_session: bool = False,
+ **kwargs: Any,
) -> Process:
- process = await trio.lowlevel.open_process( # type: ignore[misc]
- command, # type: ignore[arg-type]
- stdin=stdin,
- stdout=stdout,
- stderr=stderr,
- shell=shell,
- cwd=cwd,
- env=env,
- start_new_session=start_new_session,
- )
+ def convert_item(item: StrOrBytesPath) -> str:
+ str_or_bytes = os.fspath(item)
+ if isinstance(str_or_bytes, str):
+ return str_or_bytes
+ else:
+ return os.fsdecode(str_or_bytes)
+
+ if isinstance(command, (str, bytes, PathLike)):
+ process = await trio.lowlevel.open_process(
+ convert_item(command),
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr,
+ shell=True,
+ **kwargs,
+ )
+ else:
+ process = await trio.lowlevel.open_process(
+ [convert_item(item) for item in command],
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr,
+ shell=False,
+ **kwargs,
+ )
+
stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None
stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None
stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None
@@ -1138,7 +1276,7 @@ class TrioBackend(AsyncBackend):
@classmethod
def open_signal_receiver(
cls, *signals: Signals
- ) -> ContextManager[AsyncIterator[Signals]]:
+ ) -> AbstractContextManager[AsyncIterator[Signals]]:
return _SignalReceiver(signals)
@classmethod
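Both backends now accept path-like commands and forward the remaining keyword arguments to the underlying process API. A sketch of the caller-side effect (hypothetical POSIX path; a single str/bytes/PathLike command is run through the shell, a sequence is exec'd directly):

    from pathlib import Path

    import anyio

    async def main() -> None:
        # The PathLike command is converted with os.fspath()/os.fsdecode() by
        # the backend before being handed to the shell.
        async with await anyio.open_process(Path("/bin/ls")) as process:
            assert await process.wait() == 0

    anyio.run(main)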
diff --git a/contrib/python/anyio/anyio/_core/_exceptions.py b/contrib/python/anyio/anyio/_core/_exceptions.py
index 571c3b8531..6e3f8ccc67 100644
--- a/contrib/python/anyio/anyio/_core/_exceptions.py
+++ b/contrib/python/anyio/anyio/_core/_exceptions.py
@@ -1,5 +1,11 @@
from __future__ import annotations
+import sys
+from collections.abc import Generator
+
+if sys.version_info < (3, 11):
+ from exceptiongroup import BaseExceptionGroup
+
class BrokenResourceError(Exception):
"""
@@ -71,3 +77,13 @@ class TypedAttributeLookupError(LookupError):
class WouldBlock(Exception):
"""Raised by ``X_nowait`` functions if ``X()`` would block."""
+
+
+def iterate_exceptions(
+ exception: BaseException,
+) -> Generator[BaseException, None, None]:
+ if isinstance(exception, BaseExceptionGroup):
+ for exc in exception.exceptions:
+ yield from iterate_exceptions(exc)
+ else:
+ yield exception
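The relocated iterate_exceptions() helper (internal API) flattens arbitrarily nested exception groups; for example, assuming Python 3.11+ or the exceptiongroup backport:

    from anyio._core._exceptions import iterate_exceptions

    group = BaseExceptionGroup(
        "outer", [ValueError("a"), BaseExceptionGroup("inner", [KeyError("b")])]
    )
    flat = list(iterate_exceptions(group))
    assert [type(exc) for exc in flat] == [ValueError, KeyError]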
diff --git a/contrib/python/anyio/anyio/_core/_fileio.py b/contrib/python/anyio/anyio/_core/_fileio.py
index df2057fe34..23ccb0d66f 100644
--- a/contrib/python/anyio/anyio/_core/_fileio.py
+++ b/contrib/python/anyio/anyio/_core/_fileio.py
@@ -3,7 +3,7 @@ from __future__ import annotations
import os
import pathlib
import sys
-from collections.abc import Callable, Iterable, Iterator, Sequence
+from collections.abc import AsyncIterator, Callable, Iterable, Iterator, Sequence
from dataclasses import dataclass
from functools import partial
from os import PathLike
@@ -12,7 +12,6 @@ from typing import (
TYPE_CHECKING,
Any,
AnyStr,
- AsyncIterator,
Final,
Generic,
overload,
@@ -358,8 +357,26 @@ class Path:
def as_uri(self) -> str:
return self._path.as_uri()
- def match(self, path_pattern: str) -> bool:
- return self._path.match(path_pattern)
+ if sys.version_info >= (3, 13):
+ parser = pathlib.Path.parser
+
+ @classmethod
+ def from_uri(cls, uri: str) -> Path:
+ return Path(pathlib.Path.from_uri(uri))
+
+ def full_match(
+ self, path_pattern: str, *, case_sensitive: bool | None = None
+ ) -> bool:
+ return self._path.full_match(path_pattern, case_sensitive=case_sensitive)
+
+ def match(
+ self, path_pattern: str, *, case_sensitive: bool | None = None
+ ) -> bool:
+ return self._path.match(path_pattern, case_sensitive=case_sensitive)
+ else:
+
+ def match(self, path_pattern: str) -> bool:
+ return self._path.match(path_pattern)
def is_relative_to(self, other: str | PathLike[str]) -> bool:
try:
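On Python 3.13, anyio.Path additionally exposes parser, from_uri(), full_match() and a case_sensitive keyword for match(); the pattern helpers stay synchronous because they do no I/O. A small sketch (full_match() left commented out since it needs 3.13):

    from anyio import Path

    p = Path("/etc/ssl/certs/ca.pem")
    print(p.match("*.pem"))  # True on all supported Python versions
    # Python 3.13+ only:
    # print(p.full_match("/etc/**/*.pem"))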
diff --git a/contrib/python/anyio/anyio/_core/_signals.py b/contrib/python/anyio/anyio/_core/_signals.py
index 115c749bd9..f3451d302f 100644
--- a/contrib/python/anyio/anyio/_core/_signals.py
+++ b/contrib/python/anyio/anyio/_core/_signals.py
@@ -1,13 +1,15 @@
from __future__ import annotations
from collections.abc import AsyncIterator
+from contextlib import AbstractContextManager
from signal import Signals
-from typing import ContextManager
from ._eventloop import get_async_backend
-def open_signal_receiver(*signals: Signals) -> ContextManager[AsyncIterator[Signals]]:
+def open_signal_receiver(
+ *signals: Signals,
+) -> AbstractContextManager[AsyncIterator[Signals]]:
"""
Start receiving operating system signals.
diff --git a/contrib/python/anyio/anyio/_core/_sockets.py b/contrib/python/anyio/anyio/_core/_sockets.py
index 5e09cdbf0f..6070c647fd 100644
--- a/contrib/python/anyio/anyio/_core/_sockets.py
+++ b/contrib/python/anyio/anyio/_core/_sockets.py
@@ -680,19 +680,26 @@ async def setup_unix_local_socket(
:param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM
"""
- path_str: str | bytes | None
+ path_str: str | None
if path is not None:
- path_str = os.fspath(path)
-
- # Copied from pathlib...
- try:
- stat_result = os.stat(path)
- except OSError as e:
- if e.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EBADF, errno.ELOOP):
- raise
- else:
- if stat.S_ISSOCK(stat_result.st_mode):
- os.unlink(path)
+ path_str = os.fsdecode(path)
+
+ # Linux abstract namespace sockets aren't backed by a concrete file so skip stat call
+ if not path_str.startswith("\0"):
+ # Copied from pathlib...
+ try:
+ stat_result = os.stat(path)
+ except OSError as e:
+ if e.errno not in (
+ errno.ENOENT,
+ errno.ENOTDIR,
+ errno.EBADF,
+ errno.ELOOP,
+ ):
+ raise
+ else:
+ if stat.S_ISSOCK(stat_result.st_mode):
+ os.unlink(path)
else:
path_str = None
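The stat()/unlink() cleanup is now skipped for Linux abstract namespace sockets, whose names start with a NUL byte and have no filesystem entry. A hypothetical, Linux-only listener bound to such a name:

    import anyio

    async def main() -> None:
        # No unlink() is needed (or possible) for "\0..." paths, which is the
        # case the diff above special-cases.
        listener = await anyio.create_unix_listener("\0anyio-demo-socket")
        await listener.aclose()

    anyio.run(main)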
diff --git a/contrib/python/anyio/anyio/_core/_streams.py b/contrib/python/anyio/anyio/_core/_streams.py
index aa6b0c222a..6a9814e5a9 100644
--- a/contrib/python/anyio/anyio/_core/_streams.py
+++ b/contrib/python/anyio/anyio/_core/_streams.py
@@ -1,7 +1,7 @@
from __future__ import annotations
import math
-from typing import Tuple, TypeVar
+from typing import TypeVar
from warnings import warn
from ..streams.memory import (
@@ -14,7 +14,7 @@ T_Item = TypeVar("T_Item")
class create_memory_object_stream(
- Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
+ tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
):
"""
Create a memory object stream.
diff --git a/contrib/python/anyio/anyio/_core/_subprocesses.py b/contrib/python/anyio/anyio/_core/_subprocesses.py
index 5d5d7b768a..7ba41a5b03 100644
--- a/contrib/python/anyio/anyio/_core/_subprocesses.py
+++ b/contrib/python/anyio/anyio/_core/_subprocesses.py
@@ -1,26 +1,41 @@
from __future__ import annotations
-from collections.abc import AsyncIterable, Mapping, Sequence
+import sys
+from collections.abc import AsyncIterable, Iterable, Mapping, Sequence
from io import BytesIO
from os import PathLike
from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess
-from typing import IO, Any, cast
+from typing import IO, Any, Union, cast
from ..abc import Process
from ._eventloop import get_async_backend
from ._tasks import create_task_group
+if sys.version_info >= (3, 10):
+ from typing import TypeAlias
+else:
+ from typing_extensions import TypeAlias
+
+StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"]
+
async def run_process(
- command: str | bytes | Sequence[str | bytes],
+ command: StrOrBytesPath | Sequence[StrOrBytesPath],
*,
input: bytes | None = None,
stdout: int | IO[Any] | None = PIPE,
stderr: int | IO[Any] | None = PIPE,
check: bool = True,
- cwd: str | bytes | PathLike[str] | None = None,
+ cwd: StrOrBytesPath | None = None,
env: Mapping[str, str] | None = None,
+ startupinfo: Any = None,
+ creationflags: int = 0,
start_new_session: bool = False,
+ pass_fds: Sequence[int] = (),
+ user: str | int | None = None,
+ group: str | int | None = None,
+ extra_groups: Iterable[str | int] | None = None,
+ umask: int = -1,
) -> CompletedProcess[bytes]:
"""
Run an external command in a subprocess and wait until it completes.
@@ -40,8 +55,20 @@ async def run_process(
command
:param env: if not ``None``, this mapping replaces the inherited environment
variables from the parent process
+ :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
+ to specify process startup parameters (Windows only)
+ :param creationflags: flags that can be used to control the creation of the
+ subprocess (see :class:`subprocess.Popen` for the specifics)
:param start_new_session: if ``true`` the setsid() system call will be made in the
child process prior to the execution of the subprocess. (POSIX only)
+ :param pass_fds: sequence of file descriptors to keep open between the parent and
+ child processes. (POSIX only)
+ :param user: effective user to run the process as (Python >= 3.9, POSIX only)
+ :param group: effective group to run the process as (Python >= 3.9, POSIX only)
+ :param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9,
+ POSIX only)
+ :param umask: if not negative, this umask is applied in the child process before
+ running the given command (Python >= 3.9, POSIX only)
:return: an object representing the completed process
:raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process
exits with a nonzero return code
@@ -62,7 +89,14 @@ async def run_process(
stderr=stderr,
cwd=cwd,
env=env,
+ startupinfo=startupinfo,
+ creationflags=creationflags,
start_new_session=start_new_session,
+ pass_fds=pass_fds,
+ user=user,
+ group=group,
+ extra_groups=extra_groups,
+ umask=umask,
) as process:
stream_contents: list[bytes | None] = [None, None]
async with create_task_group() as tg:
@@ -86,14 +120,21 @@ async def run_process(
async def open_process(
- command: str | bytes | Sequence[str | bytes],
+ command: StrOrBytesPath | Sequence[StrOrBytesPath],
*,
stdin: int | IO[Any] | None = PIPE,
stdout: int | IO[Any] | None = PIPE,
stderr: int | IO[Any] | None = PIPE,
- cwd: str | bytes | PathLike[str] | None = None,
+ cwd: StrOrBytesPath | None = None,
env: Mapping[str, str] | None = None,
+ startupinfo: Any = None,
+ creationflags: int = 0,
start_new_session: bool = False,
+ pass_fds: Sequence[int] = (),
+ user: str | int | None = None,
+ group: str | int | None = None,
+ extra_groups: Iterable[str | int] | None = None,
+ umask: int = -1,
) -> Process:
"""
Start an external command in a subprocess.
@@ -111,30 +152,45 @@ async def open_process(
:param cwd: If not ``None``, the working directory is changed before executing
:param env: If env is not ``None``, it must be a mapping that defines the
environment variables for the new process
+ :param creationflags: flags that can be used to control the creation of the
+ subprocess (see :class:`subprocess.Popen` for the specifics)
+ :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
+ to specify process startup parameters (Windows only)
:param start_new_session: if ``true`` the setsid() system call will be made in the
child process prior to the execution of the subprocess. (POSIX only)
+ :param pass_fds: sequence of file descriptors to keep open between the parent and
+ child processes. (POSIX only)
+ :param user: effective user to run the process as (POSIX only)
+ :param group: effective group to run the process as (POSIX only)
+ :param extra_groups: supplementary groups to set in the subprocess (POSIX only)
+ :param umask: if not negative, this umask is applied in the child process before
+ running the given command (POSIX only)
:return: an asynchronous process object
"""
- if isinstance(command, (str, bytes)):
- return await get_async_backend().open_process(
- command,
- shell=True,
- stdin=stdin,
- stdout=stdout,
- stderr=stderr,
- cwd=cwd,
- env=env,
- start_new_session=start_new_session,
- )
- else:
- return await get_async_backend().open_process(
- command,
- shell=False,
- stdin=stdin,
- stdout=stdout,
- stderr=stderr,
- cwd=cwd,
- env=env,
- start_new_session=start_new_session,
- )
+ kwargs: dict[str, Any] = {}
+ if user is not None:
+ kwargs["user"] = user
+
+ if group is not None:
+ kwargs["group"] = group
+
+ if extra_groups is not None:
+        kwargs["extra_groups"] = extra_groups
+
+ if umask >= 0:
+ kwargs["umask"] = umask
+
+ return await get_async_backend().open_process(
+ command,
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr,
+ cwd=cwd,
+ env=env,
+ startupinfo=startupinfo,
+ creationflags=creationflags,
+ start_new_session=start_new_session,
+ pass_fds=pass_fds,
+ **kwargs,
+ )
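run_process() and open_process() now forward the remaining subprocess.Popen options. A sketch using a couple of the new POSIX-only options (illustrative command and values):

    import anyio
    from anyio import run_process

    async def main() -> None:
        result = await run_process(
            ["id"],
            start_new_session=True,  # setsid() in the child (POSIX only)
            umask=0o022,             # applied in the child before exec (POSIX only)
        )
        print(result.stdout.decode().strip())

    anyio.run(main)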
diff --git a/contrib/python/anyio/anyio/_core/_synchronization.py b/contrib/python/anyio/anyio/_core/_synchronization.py
index b274a31ea2..023ab73370 100644
--- a/contrib/python/anyio/anyio/_core/_synchronization.py
+++ b/contrib/python/anyio/anyio/_core/_synchronization.py
@@ -7,9 +7,9 @@ from types import TracebackType
from sniffio import AsyncLibraryNotFoundError
-from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled
+from ..lowlevel import checkpoint
from ._eventloop import get_async_backend
-from ._exceptions import BusyResourceError, WouldBlock
+from ._exceptions import BusyResourceError
from ._tasks import CancelScope
from ._testing import TaskInfo, get_current_task
@@ -137,10 +137,11 @@ class EventAdapter(Event):
class Lock:
- _owner_task: TaskInfo | None = None
-
- def __init__(self) -> None:
- self._waiters: deque[tuple[TaskInfo, Event]] = deque()
+ def __new__(cls, *, fast_acquire: bool = False) -> Lock:
+ try:
+ return get_async_backend().create_lock(fast_acquire=fast_acquire)
+ except AsyncLibraryNotFoundError:
+ return LockAdapter(fast_acquire=fast_acquire)
async def __aenter__(self) -> None:
await self.acquire()
@@ -155,31 +156,7 @@ class Lock:
async def acquire(self) -> None:
"""Acquire the lock."""
- await checkpoint_if_cancelled()
- try:
- self.acquire_nowait()
- except WouldBlock:
- task = get_current_task()
- event = Event()
- token = task, event
- self._waiters.append(token)
- try:
- await event.wait()
- except BaseException:
- if not event.is_set():
- self._waiters.remove(token)
- elif self._owner_task == task:
- self.release()
-
- raise
-
- assert self._owner_task == task
- else:
- try:
- await cancel_shielded_checkpoint()
- except BaseException:
- self.release()
- raise
+ raise NotImplementedError
def acquire_nowait(self) -> None:
"""
@@ -188,37 +165,87 @@ class Lock:
:raises ~anyio.WouldBlock: if the operation would block
"""
- task = get_current_task()
- if self._owner_task == task:
- raise RuntimeError("Attempted to acquire an already held Lock")
+ raise NotImplementedError
+
+ def release(self) -> None:
+ """Release the lock."""
+ raise NotImplementedError
+
+ def locked(self) -> bool:
+ """Return True if the lock is currently held."""
+ raise NotImplementedError
+
+ def statistics(self) -> LockStatistics:
+ """
+ Return statistics about the current state of this lock.
+
+ .. versionadded:: 3.0
+ """
+ raise NotImplementedError
+
+
+class LockAdapter(Lock):
+ _internal_lock: Lock | None = None
+
+ def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter:
+ return object.__new__(cls)
+
+ def __init__(self, *, fast_acquire: bool = False):
+ self._fast_acquire = fast_acquire
+
+ @property
+ def _lock(self) -> Lock:
+ if self._internal_lock is None:
+ self._internal_lock = get_async_backend().create_lock(
+ fast_acquire=self._fast_acquire
+ )
+
+ return self._internal_lock
+
+ async def __aenter__(self) -> None:
+ await self._lock.acquire()
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_val: BaseException | None,
+ exc_tb: TracebackType | None,
+ ) -> None:
+ if self._internal_lock is not None:
+ self._internal_lock.release()
+
+ async def acquire(self) -> None:
+ """Acquire the lock."""
+ await self._lock.acquire()
+
+ def acquire_nowait(self) -> None:
+ """
+ Acquire the lock, without blocking.
- if self._owner_task is not None:
- raise WouldBlock
+ :raises ~anyio.WouldBlock: if the operation would block
- self._owner_task = task
+ """
+ self._lock.acquire_nowait()
def release(self) -> None:
"""Release the lock."""
- if self._owner_task != get_current_task():
- raise RuntimeError("The current task is not holding this lock")
-
- if self._waiters:
- self._owner_task, event = self._waiters.popleft()
- event.set()
- else:
- del self._owner_task
+ self._lock.release()
def locked(self) -> bool:
"""Return True if the lock is currently held."""
- return self._owner_task is not None
+ return self._lock.locked()
def statistics(self) -> LockStatistics:
"""
Return statistics about the current state of this lock.
.. versionadded:: 3.0
+
"""
- return LockStatistics(self.locked(), self._owner_task, len(self._waiters))
+ if self._internal_lock is None:
+ return LockStatistics(False, None, 0)
+
+ return self._internal_lock.statistics()
class Condition:
@@ -312,7 +339,27 @@ class Condition:
class Semaphore:
- def __init__(self, initial_value: int, *, max_value: int | None = None):
+ def __new__(
+ cls,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ) -> Semaphore:
+ try:
+ return get_async_backend().create_semaphore(
+ initial_value, max_value=max_value, fast_acquire=fast_acquire
+ )
+ except AsyncLibraryNotFoundError:
+ return SemaphoreAdapter(initial_value, max_value=max_value)
+
+ def __init__(
+ self,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ):
if not isinstance(initial_value, int):
raise TypeError("initial_value must be an integer")
if initial_value < 0:
@@ -325,9 +372,7 @@ class Semaphore:
"max_value must be equal to or higher than initial_value"
)
- self._value = initial_value
- self._max_value = max_value
- self._waiters: deque[Event] = deque()
+ self._fast_acquire = fast_acquire
async def __aenter__(self) -> Semaphore:
await self.acquire()
@@ -343,27 +388,7 @@ class Semaphore:
async def acquire(self) -> None:
"""Decrement the semaphore value, blocking if necessary."""
- await checkpoint_if_cancelled()
- try:
- self.acquire_nowait()
- except WouldBlock:
- event = Event()
- self._waiters.append(event)
- try:
- await event.wait()
- except BaseException:
- if not event.is_set():
- self._waiters.remove(event)
- else:
- self.release()
-
- raise
- else:
- try:
- await cancel_shielded_checkpoint()
- except BaseException:
- self.release()
- raise
+ raise NotImplementedError
def acquire_nowait(self) -> None:
"""
@@ -372,30 +397,21 @@ class Semaphore:
:raises ~anyio.WouldBlock: if the operation would block
"""
- if self._value == 0:
- raise WouldBlock
-
- self._value -= 1
+ raise NotImplementedError
def release(self) -> None:
"""Increment the semaphore value."""
- if self._max_value is not None and self._value == self._max_value:
- raise ValueError("semaphore released too many times")
-
- if self._waiters:
- self._waiters.popleft().set()
- else:
- self._value += 1
+ raise NotImplementedError
@property
def value(self) -> int:
"""The current value of the semaphore."""
- return self._value
+ raise NotImplementedError
@property
def max_value(self) -> int | None:
"""The maximum value of the semaphore."""
- return self._max_value
+ raise NotImplementedError
def statistics(self) -> SemaphoreStatistics:
"""
@@ -403,7 +419,66 @@ class Semaphore:
.. versionadded:: 3.0
"""
- return SemaphoreStatistics(len(self._waiters))
+ raise NotImplementedError
+
+
+class SemaphoreAdapter(Semaphore):
+ _internal_semaphore: Semaphore | None = None
+
+ def __new__(
+ cls,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ) -> SemaphoreAdapter:
+ return object.__new__(cls)
+
+ def __init__(
+ self,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ) -> None:
+ super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire)
+ self._initial_value = initial_value
+ self._max_value = max_value
+
+ @property
+ def _semaphore(self) -> Semaphore:
+ if self._internal_semaphore is None:
+ self._internal_semaphore = get_async_backend().create_semaphore(
+ self._initial_value, max_value=self._max_value
+ )
+
+ return self._internal_semaphore
+
+ async def acquire(self) -> None:
+ await self._semaphore.acquire()
+
+ def acquire_nowait(self) -> None:
+ self._semaphore.acquire_nowait()
+
+ def release(self) -> None:
+ self._semaphore.release()
+
+ @property
+ def value(self) -> int:
+ if self._internal_semaphore is None:
+ return self._initial_value
+
+ return self._semaphore.value
+
+ @property
+ def max_value(self) -> int | None:
+ return self._max_value
+
+ def statistics(self) -> SemaphoreStatistics:
+ if self._internal_semaphore is None:
+ return SemaphoreStatistics(tasks_waiting=0)
+
+ return self._semaphore.statistics()
class CapacityLimiter:
diff --git a/contrib/python/anyio/anyio/abc/_eventloop.py b/contrib/python/anyio/anyio/abc/_eventloop.py
index a50afefaa0..93d0e9d25b 100644
--- a/contrib/python/anyio/anyio/abc/_eventloop.py
+++ b/contrib/python/anyio/anyio/abc/_eventloop.py
@@ -3,7 +3,8 @@ from __future__ import annotations
import math
import sys
from abc import ABCMeta, abstractmethod
-from collections.abc import AsyncIterator, Awaitable, Mapping
+from collections.abc import AsyncIterator, Awaitable, Callable, Sequence
+from contextlib import AbstractContextManager
from os import PathLike
from signal import Signals
from socket import AddressFamily, SocketKind, socket
@@ -11,10 +12,8 @@ from typing import (
IO,
TYPE_CHECKING,
Any,
- Callable,
- ContextManager,
- Sequence,
TypeVar,
+ Union,
overload,
)
@@ -23,10 +22,13 @@ if sys.version_info >= (3, 11):
else:
from typing_extensions import TypeVarTuple, Unpack
-if TYPE_CHECKING:
- from typing import Literal
+if sys.version_info >= (3, 10):
+ from typing import TypeAlias
+else:
+ from typing_extensions import TypeAlias
- from .._core._synchronization import CapacityLimiter, Event
+if TYPE_CHECKING:
+ from .._core._synchronization import CapacityLimiter, Event, Lock, Semaphore
from .._core._tasks import CancelScope
from .._core._testing import TaskInfo
from ..from_thread import BlockingPortal
@@ -46,6 +48,7 @@ if TYPE_CHECKING:
T_Retval = TypeVar("T_Retval")
PosArgsT = TypeVarTuple("PosArgsT")
+StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"]
class AsyncBackend(metaclass=ABCMeta):
@@ -169,6 +172,22 @@ class AsyncBackend(metaclass=ABCMeta):
@classmethod
@abstractmethod
+ def create_lock(cls, *, fast_acquire: bool) -> Lock:
+ pass
+
+ @classmethod
+ @abstractmethod
+ def create_semaphore(
+ cls,
+ initial_value: int,
+ *,
+ max_value: int | None = None,
+ fast_acquire: bool = False,
+ ) -> Semaphore:
+ pass
+
+ @classmethod
+ @abstractmethod
def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter:
pass
@@ -214,50 +233,15 @@ class AsyncBackend(metaclass=ABCMeta):
pass
@classmethod
- @overload
- async def open_process(
- cls,
- command: str | bytes,
- *,
- shell: Literal[True],
- stdin: int | IO[Any] | None,
- stdout: int | IO[Any] | None,
- stderr: int | IO[Any] | None,
- cwd: str | bytes | PathLike[str] | None = None,
- env: Mapping[str, str] | None = None,
- start_new_session: bool = False,
- ) -> Process:
- pass
-
- @classmethod
- @overload
- async def open_process(
- cls,
- command: Sequence[str | bytes],
- *,
- shell: Literal[False],
- stdin: int | IO[Any] | None,
- stdout: int | IO[Any] | None,
- stderr: int | IO[Any] | None,
- cwd: str | bytes | PathLike[str] | None = None,
- env: Mapping[str, str] | None = None,
- start_new_session: bool = False,
- ) -> Process:
- pass
-
- @classmethod
@abstractmethod
async def open_process(
cls,
- command: str | bytes | Sequence[str | bytes],
+ command: StrOrBytesPath | Sequence[StrOrBytesPath],
*,
- shell: bool,
stdin: int | IO[Any] | None,
stdout: int | IO[Any] | None,
stderr: int | IO[Any] | None,
- cwd: str | bytes | PathLike[str] | None = None,
- env: Mapping[str, str] | None = None,
- start_new_session: bool = False,
+ **kwargs: Any,
) -> Process:
pass
@@ -366,7 +350,7 @@ class AsyncBackend(metaclass=ABCMeta):
@abstractmethod
def open_signal_receiver(
cls, *signals: Signals
- ) -> ContextManager[AsyncIterator[Signals]]:
+ ) -> AbstractContextManager[AsyncIterator[Signals]]:
pass
@classmethod
diff --git a/contrib/python/anyio/anyio/abc/_sockets.py b/contrib/python/anyio/anyio/abc/_sockets.py
index b321225a7b..1c6a450cdc 100644
--- a/contrib/python/anyio/anyio/abc/_sockets.py
+++ b/contrib/python/anyio/anyio/abc/_sockets.py
@@ -8,7 +8,7 @@ from io import IOBase
from ipaddress import IPv4Address, IPv6Address
from socket import AddressFamily
from types import TracebackType
-from typing import Any, Tuple, TypeVar, Union
+from typing import Any, TypeVar, Union
from .._core._typedattr import (
TypedAttributeProvider,
@@ -19,10 +19,10 @@ from ._streams import ByteStream, Listener, UnreliableObjectStream
from ._tasks import TaskGroup
IPAddressType = Union[str, IPv4Address, IPv6Address]
-IPSockAddrType = Tuple[str, int]
+IPSockAddrType = tuple[str, int]
SockAddrType = Union[IPSockAddrType, str]
-UDPPacketType = Tuple[bytes, IPSockAddrType]
-UNIXDatagramPacketType = Tuple[bytes, str]
+UDPPacketType = tuple[bytes, IPSockAddrType]
+UNIXDatagramPacketType = tuple[bytes, str]
T_Retval = TypeVar("T_Retval")
diff --git a/contrib/python/anyio/anyio/from_thread.py b/contrib/python/anyio/anyio/from_thread.py
index 88a854bb91..93a4cfe8e4 100644
--- a/contrib/python/anyio/anyio/from_thread.py
+++ b/contrib/python/anyio/anyio/from_thread.py
@@ -1,19 +1,20 @@
from __future__ import annotations
import sys
-import threading
from collections.abc import Awaitable, Callable, Generator
-from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait
-from contextlib import AbstractContextManager, contextmanager
+from concurrent.futures import Future
+from contextlib import (
+ AbstractAsyncContextManager,
+ AbstractContextManager,
+ contextmanager,
+)
from dataclasses import dataclass, field
from inspect import isawaitable
+from threading import Lock, Thread, get_ident
from types import TracebackType
from typing import (
Any,
- AsyncContextManager,
- ContextManager,
Generic,
- Iterable,
TypeVar,
cast,
overload,
@@ -88,7 +89,9 @@ class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager):
type[BaseException] | None, BaseException | None, TracebackType | None
] = (None, None, None)
- def __init__(self, async_cm: AsyncContextManager[T_co], portal: BlockingPortal):
+ def __init__(
+ self, async_cm: AbstractAsyncContextManager[T_co], portal: BlockingPortal
+ ):
self._async_cm = async_cm
self._portal = portal
@@ -146,7 +149,7 @@ class BlockingPortal:
return get_async_backend().create_blocking_portal()
def __init__(self) -> None:
- self._event_loop_thread_id: int | None = threading.get_ident()
+ self._event_loop_thread_id: int | None = get_ident()
self._stop_event = Event()
self._task_group = create_task_group()
self._cancelled_exc_class = get_cancelled_exc_class()
@@ -167,7 +170,7 @@ class BlockingPortal:
def _check_running(self) -> None:
if self._event_loop_thread_id is None:
raise RuntimeError("This portal is not running")
- if self._event_loop_thread_id == threading.get_ident():
+ if self._event_loop_thread_id == get_ident():
raise RuntimeError(
"This method cannot be called from the event loop thread"
)
@@ -202,7 +205,7 @@ class BlockingPortal:
def callback(f: Future[T_Retval]) -> None:
if f.cancelled() and self._event_loop_thread_id not in (
None,
- threading.get_ident(),
+ get_ident(),
):
self.call(scope.cancel)
@@ -375,8 +378,8 @@ class BlockingPortal:
return f, task_status_future.result()
def wrap_async_context_manager(
- self, cm: AsyncContextManager[T_co]
- ) -> ContextManager[T_co]:
+ self, cm: AbstractAsyncContextManager[T_co]
+ ) -> AbstractContextManager[T_co]:
"""
Wrap an async context manager as a synchronous context manager via this portal.
@@ -411,7 +414,7 @@ class BlockingPortalProvider:
backend: str = "asyncio"
backend_options: dict[str, Any] | None = None
- _lock: threading.Lock = field(init=False, default_factory=threading.Lock)
+ _lock: Lock = field(init=False, default_factory=Lock)
_leases: int = field(init=False, default=0)
_portal: BlockingPortal = field(init=False)
_portal_cm: AbstractContextManager[BlockingPortal] | None = field(
@@ -469,43 +472,37 @@ def start_blocking_portal(
async def run_portal() -> None:
async with BlockingPortal() as portal_:
- if future.set_running_or_notify_cancel():
- future.set_result(portal_)
- await portal_.sleep_until_stopped()
+ future.set_result(portal_)
+ await portal_.sleep_until_stopped()
+
+ def run_blocking_portal() -> None:
+ if future.set_running_or_notify_cancel():
+ try:
+ _eventloop.run(
+ run_portal, backend=backend, backend_options=backend_options
+ )
+ except BaseException as exc:
+ if not future.done():
+ future.set_exception(exc)
future: Future[BlockingPortal] = Future()
- with ThreadPoolExecutor(1) as executor:
- run_future = executor.submit(
- _eventloop.run, # type: ignore[arg-type]
- run_portal,
- backend=backend,
- backend_options=backend_options,
- )
+ thread = Thread(target=run_blocking_portal, daemon=True)
+ thread.start()
+ try:
+ cancel_remaining_tasks = False
+ portal = future.result()
try:
- wait(
- cast(Iterable[Future], [run_future, future]),
- return_when=FIRST_COMPLETED,
- )
+ yield portal
except BaseException:
- future.cancel()
- run_future.cancel()
+ cancel_remaining_tasks = True
raise
-
- if future.done():
- portal = future.result()
- cancel_remaining_tasks = False
+ finally:
try:
- yield portal
- except BaseException:
- cancel_remaining_tasks = True
- raise
- finally:
- try:
- portal.call(portal.stop, cancel_remaining_tasks)
- except RuntimeError:
- pass
-
- run_future.result()
+ portal.call(portal.stop, cancel_remaining_tasks)
+ except RuntimeError:
+ pass
+ finally:
+ thread.join()
def check_cancelled() -> None:
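start_blocking_portal() now runs the portal's event loop in a dedicated daemon thread instead of a ThreadPoolExecutor; calling code is unchanged. A small sketch from synchronous code:

    import anyio
    from anyio.from_thread import start_blocking_portal

    async def greet() -> str:
        await anyio.sleep(0)
        return "hello"

    with start_blocking_portal(backend="asyncio") as portal:
        print(portal.call(greet))  # runs greet() on the portal's event loop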
diff --git a/contrib/python/anyio/anyio/pytest_plugin.py b/contrib/python/anyio/anyio/pytest_plugin.py
index a8dd6f3e3f..c9fe1bde92 100644
--- a/contrib/python/anyio/anyio/pytest_plugin.py
+++ b/contrib/python/anyio/anyio/pytest_plugin.py
@@ -1,16 +1,22 @@
from __future__ import annotations
+import sys
from collections.abc import Iterator
from contextlib import ExitStack, contextmanager
from inspect import isasyncgenfunction, iscoroutinefunction
-from typing import Any, Dict, Tuple, cast
+from typing import Any, cast
import pytest
import sniffio
+from _pytest.outcomes import Exit
from ._core._eventloop import get_all_backends, get_async_backend
+from ._core._exceptions import iterate_exceptions
from .abc import TestRunner
+if sys.version_info < (3, 11):
+ from exceptiongroup import ExceptionGroup
+
_current_runner: TestRunner | None = None
_runner_stack: ExitStack | None = None
_runner_leases = 0
@@ -21,7 +27,7 @@ def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]:
return backend, {}
elif isinstance(backend, tuple) and len(backend) == 2:
if isinstance(backend[0], str) and isinstance(backend[1], dict):
- return cast(Tuple[str, Dict[str, Any]], backend)
+ return cast(tuple[str, dict[str, Any]], backend)
raise TypeError("anyio_backend must be either a string or tuple of (string, dict)")
@@ -121,7 +127,14 @@ def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None:
funcargs = pyfuncitem.funcargs
testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames}
with get_runner(backend_name, backend_options) as runner:
- runner.run_test(pyfuncitem.obj, testargs)
+ try:
+ runner.run_test(pyfuncitem.obj, testargs)
+ except ExceptionGroup as excgrp:
+ for exc in iterate_exceptions(excgrp):
+ if isinstance(exc, (Exit, KeyboardInterrupt, SystemExit)):
+ raise exc from excgrp
+
+ raise
return True
diff --git a/contrib/python/anyio/anyio/streams/memory.py b/contrib/python/anyio/anyio/streams/memory.py
index 6840e6242f..b547aa6a48 100644
--- a/contrib/python/anyio/anyio/streams/memory.py
+++ b/contrib/python/anyio/anyio/streams/memory.py
@@ -38,6 +38,12 @@ class MemoryObjectItemReceiver(Generic[T_Item]):
task_info: TaskInfo = field(init=False, default_factory=get_current_task)
item: T_Item = field(init=False)
+ def __repr__(self) -> str:
+        # When item is not defined, we get the following error with the default __repr__:
+ # AttributeError: 'MemoryObjectItemReceiver' object has no attribute 'item'
+ item = getattr(self, "item", None)
+ return f"{self.__class__.__name__}(task_info={self.task_info}, item={item!r})"
+
@dataclass(eq=False)
class MemoryObjectStreamState(Generic[T_Item]):
@@ -175,7 +181,7 @@ class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]):
def __del__(self) -> None:
if not self._closed:
warnings.warn(
- f"Unclosed <{self.__class__.__name__}>",
+ f"Unclosed <{self.__class__.__name__} at {id(self):x}>",
ResourceWarning,
source=self,
)
@@ -305,7 +311,7 @@ class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]):
def __del__(self) -> None:
if not self._closed:
warnings.warn(
- f"Unclosed <{self.__class__.__name__}>",
+ f"Unclosed <{self.__class__.__name__} at {id(self):x}>",
ResourceWarning,
source=self,
)
diff --git a/contrib/python/anyio/anyio/streams/tls.py b/contrib/python/anyio/anyio/streams/tls.py
index e913eedbbf..83240b4d35 100644
--- a/contrib/python/anyio/anyio/streams/tls.py
+++ b/contrib/python/anyio/anyio/streams/tls.py
@@ -7,7 +7,7 @@ import sys
from collections.abc import Callable, Mapping
from dataclasses import dataclass
from functools import wraps
-from typing import Any, Tuple, TypeVar
+from typing import Any, TypeVar
from .. import (
BrokenResourceError,
@@ -25,8 +25,8 @@ else:
T_Retval = TypeVar("T_Retval")
PosArgsT = TypeVarTuple("PosArgsT")
-_PCTRTT = Tuple[Tuple[str, str], ...]
-_PCTRTTT = Tuple[_PCTRTT, ...]
+_PCTRTT = tuple[tuple[str, str], ...]
+_PCTRTTT = tuple[_PCTRTT, ...]
class TLSAttribute(TypedAttributeSet):
diff --git a/contrib/python/anyio/anyio/to_process.py b/contrib/python/anyio/anyio/to_process.py
index 1ff06f0b25..5050dee21e 100644
--- a/contrib/python/anyio/anyio/to_process.py
+++ b/contrib/python/anyio/anyio/to_process.py
@@ -223,7 +223,7 @@ def process_worker() -> None:
main_module_path: str | None
sys.path, main_module_path = args
del sys.modules["__main__"]
- if main_module_path:
+ if main_module_path and os.path.isfile(main_module_path):
# Load the parent's main module but as __mp_main__ instead of
# __main__ (like multiprocessing does) to avoid infinite recursion
try:
@@ -234,7 +234,6 @@ def process_worker() -> None:
sys.modules["__main__"] = main
except BaseException as exc:
exception = exc
-
try:
if exception is not None:
status = b"EXCEPTION"
diff --git a/contrib/python/anyio/ya.make b/contrib/python/anyio/ya.make
index 9062121337..cec445229c 100644
--- a/contrib/python/anyio/ya.make
+++ b/contrib/python/anyio/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(4.4.0)
+VERSION(4.6.0)
LICENSE(MIT)
diff --git a/contrib/python/google-auth/py3/.dist-info/METADATA b/contrib/python/google-auth/py3/.dist-info/METADATA
index 26b8a4974a..261e2a0276 100644
--- a/contrib/python/google-auth/py3/.dist-info/METADATA
+++ b/contrib/python/google-auth/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: google-auth
-Version: 2.34.0
+Version: 2.35.0
Summary: Google Authentication Library
Home-page: https://github.com/googleapis/google-auth-library-python
Author: Google Cloud Platform
diff --git a/contrib/python/google-auth/py3/google/auth/_default.py b/contrib/python/google-auth/py3/google/auth/_default.py
index 63009dfb86..7bbcf85914 100644
--- a/contrib/python/google-auth/py3/google/auth/_default.py
+++ b/contrib/python/google-auth/py3/google/auth/_default.py
@@ -237,6 +237,7 @@ def _get_gcloud_sdk_credentials(quota_project_id=None):
credentials, project_id = load_credentials_from_file(
credentials_filename, quota_project_id=quota_project_id
)
+ credentials._cred_file_path = credentials_filename
if not project_id:
project_id = _cloud_sdk.get_project_id()
@@ -270,6 +271,7 @@ def _get_explicit_environ_credentials(quota_project_id=None):
credentials, project_id = load_credentials_from_file(
os.environ[environment_vars.CREDENTIALS], quota_project_id=quota_project_id
)
+ credentials._cred_file_path = f"{explicit_file} file via the GOOGLE_APPLICATION_CREDENTIALS environment variable"
return credentials, project_id
diff --git a/contrib/python/google-auth/py3/google/auth/_exponential_backoff.py b/contrib/python/google-auth/py3/google/auth/_exponential_backoff.py
index 04f9f97641..89853448f9 100644
--- a/contrib/python/google-auth/py3/google/auth/_exponential_backoff.py
+++ b/contrib/python/google-auth/py3/google/auth/_exponential_backoff.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import asyncio
import random
import time
@@ -38,9 +39,8 @@ an HTTP request.
"""
-class ExponentialBackoff:
- """An exponential backoff iterator. This can be used in a for loop to
- perform requests with exponential backoff.
+class _BaseExponentialBackoff:
+ """An exponential backoff iterator base class.
Args:
total_attempts Optional[int]:
@@ -84,9 +84,40 @@ class ExponentialBackoff:
self._multiplier = multiplier
self._backoff_count = 0
- def __iter__(self):
+ @property
+ def total_attempts(self):
+ """The total amount of backoff attempts that will be made."""
+ return self._total_attempts
+
+ @property
+ def backoff_count(self):
+ """The current amount of backoff attempts that have been made."""
+ return self._backoff_count
+
+ def _reset(self):
self._backoff_count = 0
self._current_wait_in_seconds = self._initial_wait_seconds
+
+ def _calculate_jitter(self):
+ jitter_variance = self._current_wait_in_seconds * self._randomization_factor
+ jitter = random.uniform(
+ self._current_wait_in_seconds - jitter_variance,
+ self._current_wait_in_seconds + jitter_variance,
+ )
+
+ return jitter
+
+
+class ExponentialBackoff(_BaseExponentialBackoff):
+ """An exponential backoff iterator. This can be used in a for loop to
+ perform requests with exponential backoff.
+ """
+
+ def __init__(self, *args, **kwargs):
+ super(ExponentialBackoff, self).__init__(*args, **kwargs)
+
+ def __iter__(self):
+ self._reset()
return self
def __next__(self):
@@ -97,23 +128,37 @@ class ExponentialBackoff:
if self._backoff_count <= 1:
return self._backoff_count
- jitter_variance = self._current_wait_in_seconds * self._randomization_factor
- jitter = random.uniform(
- self._current_wait_in_seconds - jitter_variance,
- self._current_wait_in_seconds + jitter_variance,
- )
+ jitter = self._calculate_jitter()
time.sleep(jitter)
self._current_wait_in_seconds *= self._multiplier
return self._backoff_count
- @property
- def total_attempts(self):
- """The total amount of backoff attempts that will be made."""
- return self._total_attempts
- @property
- def backoff_count(self):
- """The current amount of backoff attempts that have been made."""
+class AsyncExponentialBackoff(_BaseExponentialBackoff):
+ """An async exponential backoff iterator. This can be used in a for loop to
+ perform async requests with exponential backoff.
+ """
+
+ def __init__(self, *args, **kwargs):
+ super(AsyncExponentialBackoff, self).__init__(*args, **kwargs)
+
+ def __aiter__(self):
+ self._reset()
+ return self
+
+ async def __anext__(self):
+ if self._backoff_count >= self._total_attempts:
+ raise StopAsyncIteration
+ self._backoff_count += 1
+
+ if self._backoff_count <= 1:
+ return self._backoff_count
+
+ jitter = self._calculate_jitter()
+
+ await asyncio.sleep(jitter)
+
+ self._current_wait_in_seconds *= self._multiplier
return self._backoff_count
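
For context, a minimal sketch (not part of the diff) of how the new AsyncExponentialBackoff iterator could drive retries from caller code; fetch_status is a hypothetical coroutine standing in for a real asynchronous request::

    import asyncio

    from google.auth import _exponential_backoff


    async def fetch_status() -> int:
        # Hypothetical stand-in for an asynchronous HTTP call.
        await asyncio.sleep(0)
        return 503


    async def call_with_retries() -> int:
        # The first iteration returns immediately; later iterations sleep with
        # jittered exponential backoff before handing control back to the loop.
        retries = _exponential_backoff.AsyncExponentialBackoff(total_attempts=3)
        status = 0
        async for _ in retries:
            status = await fetch_status()
            if status < 500:
                break
        return status


    print(asyncio.run(call_with_retries()))
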
diff --git a/contrib/python/google-auth/py3/google/auth/aio/transport/__init__.py b/contrib/python/google-auth/py3/google/auth/aio/transport/__init__.py
new file mode 100644
index 0000000000..166a3be509
--- /dev/null
+++ b/contrib/python/google-auth/py3/google/auth/aio/transport/__init__.py
@@ -0,0 +1,144 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport - Asynchronous HTTP client library support.
+
+:mod:`google.auth.aio` is designed to work with various asynchronous client libraries such
+as aiohttp. In order to work across these libraries with different
+interfaces some abstraction is needed.
+
+This module provides two interfaces that are implemented by transport adapters
+to support HTTP libraries. :class:`Request` defines the interface expected by
+:mod:`google.auth` to make asynchronous requests. :class:`Response` defines the interface
+for the return value of :class:`Request`.
+"""
+
+import abc
+from typing import AsyncGenerator, Mapping, Optional
+
+import google.auth.transport
+
+
+_DEFAULT_TIMEOUT_SECONDS = 180
+
+DEFAULT_RETRYABLE_STATUS_CODES = google.auth.transport.DEFAULT_RETRYABLE_STATUS_CODES
+"""Sequence[int]: HTTP status codes indicating a request can be retried.
+"""
+
+
+DEFAULT_MAX_RETRY_ATTEMPTS = 3
+"""int: How many times to retry a request."""
+
+
+class Response(metaclass=abc.ABCMeta):
+ """Asynchronous HTTP Response Interface."""
+
+ @property
+ @abc.abstractmethod
+ def status_code(self) -> int:
+ """
+ The HTTP response status code.
+
+ Returns:
+ int: The HTTP response status code.
+
+ """
+ raise NotImplementedError("status_code must be implemented.")
+
+ @property
+ @abc.abstractmethod
+ def headers(self) -> Mapping[str, str]:
+ """The HTTP response headers.
+
+ Returns:
+ Mapping[str, str]: The HTTP response headers.
+ """
+ raise NotImplementedError("headers must be implemented.")
+
+ @abc.abstractmethod
+ async def content(self, chunk_size: int) -> AsyncGenerator[bytes, None]:
+ """The raw response content.
+
+ Args:
+ chunk_size (int): The size of each chunk.
+
+ Yields:
+ AsyncGenerator[bytes, None]: An asynchronous generator yielding
+ response chunks as bytes.
+ """
+ raise NotImplementedError("content must be implemented.")
+
+ @abc.abstractmethod
+ async def read(self) -> bytes:
+ """Read the entire response content as bytes.
+
+ Returns:
+ bytes: The entire response content.
+ """
+ raise NotImplementedError("read must be implemented.")
+
+ @abc.abstractmethod
+ async def close(self):
+ """Close the response after it is fully consumed to resource."""
+ raise NotImplementedError("close must be implemented.")
+
+
+class Request(metaclass=abc.ABCMeta):
+ """Interface for a callable that makes HTTP requests.
+
+ Specific transport implementations should provide an implementation of
+ this that adapts their specific request / response API.
+
+ .. automethod:: __call__
+ """
+
+ @abc.abstractmethod
+ async def __call__(
+ self,
+ url: str,
+ method: str,
+ body: Optional[bytes],
+ headers: Optional[Mapping[str, str]],
+ timeout: float,
+ **kwargs
+ ) -> Response:
+ """Make an HTTP request.
+
+ Args:
+ url (str): The URI to be requested.
+ method (str): The HTTP method to use for the request. Defaults
+ to 'GET'.
+ body (Optional[bytes]): The payload / body in HTTP request.
+ headers (Mapping[str, str]): Request headers.
+ timeout (float): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ transport-specific default timeout will be used.
+ kwargs: Additional arguments passed on to the transport's
+ request method.
+
+ Returns:
+ google.auth.aio.transport.Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TransportError: If any exception occurred.
+ """
+ # pylint: disable=redundant-returns-doc, missing-raises-doc
+ # (pylint doesn't play well with abstract docstrings.)
+ raise NotImplementedError("__call__ must be implemented.")
+
+ async def close(self) -> None:
+ """
+ Close the underlying session.
+ """
+ raise NotImplementedError("close must be implemented.")
diff --git a/contrib/python/google-auth/py3/google/auth/aio/transport/aiohttp.py b/contrib/python/google-auth/py3/google/auth/aio/transport/aiohttp.py
new file mode 100644
index 0000000000..074d1491c7
--- /dev/null
+++ b/contrib/python/google-auth/py3/google/auth/aio/transport/aiohttp.py
@@ -0,0 +1,184 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Transport adapter for Asynchronous HTTP Requests based on aiohttp.
+"""
+
+import asyncio
+from typing import AsyncGenerator, Mapping, Optional
+
+try:
+ import aiohttp # type: ignore
+except ImportError as caught_exc: # pragma: NO COVER
+ raise ImportError(
+ "The aiohttp library is not installed from please install the aiohttp package to use the aiohttp transport."
+ ) from caught_exc
+
+from google.auth import _helpers
+from google.auth import exceptions
+from google.auth.aio import transport
+
+
+class Response(transport.Response):
+ """
+ Represents an HTTP response and its data. It is returned by ``google.auth.aio.transport.sessions.AsyncAuthorizedSession``.
+
+ Args:
+ response (aiohttp.ClientResponse): An instance of aiohttp.ClientResponse.
+
+ Attributes:
+ status_code (int): The HTTP status code of the response.
+ headers (Mapping[str, str]): The HTTP headers of the response.
+ """
+
+ def __init__(self, response: aiohttp.ClientResponse):
+ self._response = response
+
+ @property
+ @_helpers.copy_docstring(transport.Response)
+ def status_code(self) -> int:
+ return self._response.status
+
+ @property
+ @_helpers.copy_docstring(transport.Response)
+ def headers(self) -> Mapping[str, str]:
+ return {key: value for key, value in self._response.headers.items()}
+
+ @_helpers.copy_docstring(transport.Response)
+ async def content(self, chunk_size: int = 1024) -> AsyncGenerator[bytes, None]:
+ try:
+ async for chunk in self._response.content.iter_chunked(
+ chunk_size
+ ): # pragma: no branch
+ yield chunk
+ except aiohttp.ClientPayloadError as exc:
+ raise exceptions.ResponseError(
+ "Failed to read from the payload stream."
+ ) from exc
+
+ @_helpers.copy_docstring(transport.Response)
+ async def read(self) -> bytes:
+ try:
+ return await self._response.read()
+ except aiohttp.ClientResponseError as exc:
+ raise exceptions.ResponseError("Failed to read the response body.") from exc
+
+ @_helpers.copy_docstring(transport.Response)
+ async def close(self):
+ self._response.close()
+
+
+class Request(transport.Request):
+ """Asynchronous Requests request adapter.
+
+ This class is used internally for making requests using aiohttp
+ in a consistent way. If you use :class:`google.auth.aio.transport.sessions.AsyncAuthorizedSession`
+ you do not need to construct or use this class directly.
+
+ This class can be useful if you want to configure a Request callable
+ with a custom ``aiohttp.ClientSession`` in :class:`AsyncAuthorizedSession` or if
+ you want to manually refresh a :class:`~google.auth.aio.credentials.Credentials` instance::
+
+ import aiohttp
+ import google.auth.aio.transport.aiohttp
+
+ # Default example:
+ request = google.auth.aio.transport.aiohttp.Request()
+ await credentials.refresh(request)
+
+ # Custom aiohttp Session Example:
+ session = aiohttp.ClientSession(auto_decompress=False)
+ request = google.auth.aio.transport.aiohttp.Request(session=session)
+ auth_session = google.auth.aio.transport.sessions.AsyncAuthorizedSession(auth_request=request)
+
+ Args:
+ session (aiohttp.ClientSession): An instance of :class:`aiohttp.ClientSession` used
+ to make HTTP requests. If not specified, a session will be created.
+
+ .. automethod:: __call__
+ """
+
+ def __init__(self, session: aiohttp.ClientSession = None):
+ self._session = session
+ self._closed = False
+
+ async def __call__(
+ self,
+ url: str,
+ method: str = "GET",
+ body: Optional[bytes] = None,
+ headers: Optional[Mapping[str, str]] = None,
+ timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ **kwargs,
+ ) -> transport.Response:
+ """
+ Make an HTTP request using aiohttp.
+
+ Args:
+ url (str): The URL to be requested.
+ method (Optional[str]):
+ The HTTP method to use for the request. Defaults to 'GET'.
+ body (Optional[bytes]):
+ The payload or body in HTTP request.
+ headers (Optional[Mapping[str, str]]):
+ Request headers.
+ timeout (float): The number of seconds to wait for a
+ response from the server. If not specified or if None, the
+ requests default timeout will be used.
+ kwargs: Additional arguments passed through to the underlying
+ aiohttp :meth:`aiohttp.ClientSession.request` method.
+
+ Returns:
+ google.auth.aio.transport.Response: The HTTP response.
+
+ Raises:
+ - google.auth.exceptions.TransportError: If the request fails or if the session is closed.
+ - google.auth.exceptions.TimeoutError: If the request times out.
+ """
+
+ try:
+ if self._closed:
+ raise exceptions.TransportError("session is closed.")
+
+ if not self._session:
+ self._session = aiohttp.ClientSession()
+
+ client_timeout = aiohttp.ClientTimeout(total=timeout)
+ response = await self._session.request(
+ method,
+ url,
+ data=body,
+ headers=headers,
+ timeout=client_timeout,
+ **kwargs,
+ )
+ return Response(response)
+
+ except aiohttp.ClientError as caught_exc:
+ client_exc = exceptions.TransportError(f"Failed to send request to {url}.")
+ raise client_exc from caught_exc
+
+ except asyncio.TimeoutError as caught_exc:
+ timeout_exc = exceptions.TimeoutError(
+ f"Request timed out after {timeout} seconds."
+ )
+ raise timeout_exc from caught_exc
+
+ async def close(self) -> None:
+ """
+ Close the underlying aiohttp session to release the acquired resources.
+ """
+ if not self._closed and self._session:
+ await self._session.close()
+ self._closed = True
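
A short usage sketch of this adapter, assuming aiohttp is installed; the URL is illustrative and not part of the library::

    import asyncio

    import google.auth.aio.transport.aiohttp as auth_aiohttp


    async def main():
        request = auth_aiohttp.Request()
        try:
            response = await request("https://example.com", method="GET", timeout=10)
            print(response.status_code)
            # Stream the body in fixed-size chunks instead of buffering it all.
            async for chunk in response.content(chunk_size=1024):
                print(len(chunk))
            await response.close()
        finally:
            await request.close()


    asyncio.run(main())
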
diff --git a/contrib/python/google-auth/py3/google/auth/aio/transport/sessions.py b/contrib/python/google-auth/py3/google/auth/aio/transport/sessions.py
new file mode 100644
index 0000000000..fea7cbbb2c
--- /dev/null
+++ b/contrib/python/google-auth/py3/google/auth/aio/transport/sessions.py
@@ -0,0 +1,268 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+from contextlib import asynccontextmanager
+import functools
+import time
+from typing import Mapping, Optional
+
+from google.auth import _exponential_backoff, exceptions
+from google.auth.aio import transport
+from google.auth.aio.credentials import Credentials
+from google.auth.exceptions import TimeoutError
+
+try:
+ from google.auth.aio.transport.aiohttp import Request as AiohttpRequest
+
+ AIOHTTP_INSTALLED = True
+except ImportError: # pragma: NO COVER
+ AIOHTTP_INSTALLED = False
+
+
+@asynccontextmanager
+async def timeout_guard(timeout):
+ """
+ timeout_guard is an asynchronous context manager to apply a timeout to an asynchronous block of code.
+
+ Args:
+ timeout (float): The time in seconds before the context manager times out.
+
+ Raises:
+ google.auth.exceptions.TimeoutError: If the code within the context exceeds the provided timeout.
+
+ Usage:
+ async with timeout_guard(10) as with_timeout:
+ await with_timeout(async_function())
+ """
+ start = time.monotonic()
+ total_timeout = timeout
+
+ def _remaining_time():
+ elapsed = time.monotonic() - start
+ remaining = total_timeout - elapsed
+ if remaining <= 0:
+ raise TimeoutError(
+ f"Context manager exceeded the configured timeout of {total_timeout}s."
+ )
+ return remaining
+
+ async def with_timeout(coro):
+ try:
+ remaining = _remaining_time()
+ response = await asyncio.wait_for(coro, remaining)
+ return response
+ except (asyncio.TimeoutError, TimeoutError) as e:
+ raise TimeoutError(
+ f"The operation {coro} exceeded the configured timeout of {total_timeout}s."
+ ) from e
+
+ try:
+ yield with_timeout
+
+ finally:
+ _remaining_time()
+
+
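
A minimal usage sketch of the guard defined above; slow_call is a hypothetical coroutine introduced only for the example::

    import asyncio

    from google.auth.aio.transport import sessions


    async def slow_call(seconds):
        await asyncio.sleep(seconds)
        return seconds


    async def main():
        # Both awaited calls share the same 1-second budget; once the budget is
        # exhausted, google.auth.exceptions.TimeoutError is raised.
        async with sessions.timeout_guard(1) as with_timeout:
            await with_timeout(slow_call(0.2))
            await with_timeout(slow_call(0.2))


    asyncio.run(main())
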
+class AsyncAuthorizedSession:
+ """This is an asynchronous implementation of :class:`google.auth.requests.AuthorizedSession` class.
+ We utilize an instance of a class that implements :class:`google.auth.aio.transport.Request` configured
+ by the caller or otherwise default to `google.auth.aio.transport.aiohttp.Request` if the external aiohttp
+ package is installed.
+
+ An asynchronous Session class with credentials.
+
+ This class is used to perform asynchronous requests to API endpoints that require
+ authorization::
+
+ import aiohttp
+ from google.auth.aio.transport import sessions
+
+ async with sessions.AsyncAuthorizedSession(credentials) as authed_session:
+ response = await authed_session.request(
+ 'GET', 'https://www.googleapis.com/storage/v1/b')
+
+ The underlying :meth:`request` implementation handles adding the
+ credentials' headers to the request and refreshing credentials as needed.
+
+ Args:
+ credentials (google.auth.aio.credentials.Credentials):
+ The credentials to add to the request.
+ auth_request (Optional[google.auth.aio.transport.Request]):
+ An instance of a class that implements
+ :class:`~google.auth.aio.transport.Request` used to make requests
+ and refresh credentials. If not passed,
+ an instance of :class:`~google.auth.aio.transport.aiohttp.Request`
+ is created.
+
+ Raises:
+ - google.auth.exceptions.TransportError: If `auth_request` is `None`
+ and the external package `aiohttp` is not installed.
+ - google.auth.exceptions.InvalidType: If the provided credentials are
+ not of type `google.auth.aio.credentials.Credentials`.
+ """
+
+ def __init__(
+ self, credentials: Credentials, auth_request: Optional[transport.Request] = None
+ ):
+ if not isinstance(credentials, Credentials):
+ raise exceptions.InvalidType(
+ f"The configured credentials of type {type(credentials)} are invalid and must be of type `google.auth.aio.credentials.Credentials`"
+ )
+ self._credentials = credentials
+ _auth_request = auth_request
+ if not _auth_request and AIOHTTP_INSTALLED:
+ _auth_request = AiohttpRequest()
+ if _auth_request is None:
+ raise exceptions.TransportError(
+ "`auth_request` must either be configured or the external package `aiohttp` must be installed to use the default value."
+ )
+ self._auth_request = _auth_request
+
+ async def request(
+ self,
+ method: str,
+ url: str,
+ data: Optional[bytes] = None,
+ headers: Optional[Mapping[str, str]] = None,
+ max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ **kwargs,
+ ) -> transport.Response:
+ """
+ Args:
+ method (str): The http method used to make the request.
+ url (str): The URI to be requested.
+ data (Optional[bytes]): The payload or body in HTTP request.
+ headers (Optional[Mapping[str, str]]): Request headers.
+ timeout (float):
+ The amount of time in seconds to wait for the server response
+ with each individual request.
+ max_allowed_time (float):
+ If the method runs longer than this, a ``Timeout`` exception is
+ automatically raised. Unlike the ``timeout`` parameter, this
+ value applies to the total method execution time, even if
+ multiple requests are made under the hood.
+
+ Mind that it is not guaranteed that the timeout error is raised
+ at ``max_allowed_time``. It might take longer, for example, if
+ an underlying request takes a lot of time, but the request
+ itself does not timeout, e.g. if a large file is being
+ transmitted. The timeout error will be raised after such
+ request completes.
+
+ Returns:
+ google.auth.aio.transport.Response: The HTTP response.
+
+ Raises:
+ google.auth.exceptions.TimeoutError: If the method does not complete within
+ the configured `max_allowed_time` or the request exceeds the configured
+ `timeout`.
+ """
+
+ retries = _exponential_backoff.AsyncExponentialBackoff(
+ total_attempts=transport.DEFAULT_MAX_RETRY_ATTEMPTS
+ )
+ async with timeout_guard(max_allowed_time) as with_timeout:
+ await with_timeout(
+ # Note: before_request will attempt to refresh credentials if expired.
+ self._credentials.before_request(
+ self._auth_request, method, url, headers
+ )
+ )
+ # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+ # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+ async for _ in retries: # pragma: no branch
+ response = await with_timeout(
+ self._auth_request(url, method, data, headers, timeout, **kwargs)
+ )
+ if response.status_code not in transport.DEFAULT_RETRYABLE_STATUS_CODES:
+ break
+ return response
+
+ @functools.wraps(request)
+ async def get(
+ self,
+ url: str,
+ data: Optional[bytes] = None,
+ headers: Optional[Mapping[str, str]] = None,
+ max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ **kwargs,
+ ) -> transport.Response:
+ return await self.request(
+ "GET", url, data, headers, max_allowed_time, timeout, **kwargs
+ )
+
+ @functools.wraps(request)
+ async def post(
+ self,
+ url: str,
+ data: Optional[bytes] = None,
+ headers: Optional[Mapping[str, str]] = None,
+ max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ **kwargs,
+ ) -> transport.Response:
+ return await self.request(
+ "POST", url, data, headers, max_allowed_time, timeout, **kwargs
+ )
+
+ @functools.wraps(request)
+ async def put(
+ self,
+ url: str,
+ data: Optional[bytes] = None,
+ headers: Optional[Mapping[str, str]] = None,
+ max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ **kwargs,
+ ) -> transport.Response:
+ return await self.request(
+ "PUT", url, data, headers, max_allowed_time, timeout, **kwargs
+ )
+
+ @functools.wraps(request)
+ async def patch(
+ self,
+ url: str,
+ data: Optional[bytes] = None,
+ headers: Optional[Mapping[str, str]] = None,
+ max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ **kwargs,
+ ) -> transport.Response:
+ return await self.request(
+ "PATCH", url, data, headers, max_allowed_time, timeout, **kwargs
+ )
+
+ @functools.wraps(request)
+ async def delete(
+ self,
+ url: str,
+ data: Optional[bytes] = None,
+ headers: Optional[Mapping[str, str]] = None,
+ max_allowed_time: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ timeout: float = transport._DEFAULT_TIMEOUT_SECONDS,
+ **kwargs,
+ ) -> transport.Response:
+ return await self.request(
+ "DELETE", url, data, headers, max_allowed_time, timeout, **kwargs
+ )
+
+ async def close(self) -> None:
+ """
+ Close the underlying auth request session.
+ """
+ await self._auth_request.close()
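
A usage sketch of the session above, assuming aiohttp is installed; the bucket-listing URL is illustrative, and AnonymousCredentials is used only to keep the example self-contained (real code would pass actual service credentials)::

    import asyncio

    from google.auth.aio.credentials import AnonymousCredentials
    from google.auth.aio.transport.sessions import AsyncAuthorizedSession


    async def main():
        session = AsyncAuthorizedSession(AnonymousCredentials())
        try:
            # `timeout` bounds each individual attempt, while `max_allowed_time`
            # bounds the whole call, including credential refresh and retries.
            response = await session.get(
                "https://www.googleapis.com/storage/v1/b",
                max_allowed_time=30,
                timeout=10,
            )
            print(response.status_code)
        finally:
            await session.close()


    asyncio.run(main())
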
diff --git a/contrib/python/google-auth/py3/google/auth/compute_engine/credentials.py b/contrib/python/google-auth/py3/google/auth/compute_engine/credentials.py
index 008b991bb9..f0126c0a80 100644
--- a/contrib/python/google-auth/py3/google/auth/compute_engine/credentials.py
+++ b/contrib/python/google-auth/py3/google/auth/compute_engine/credentials.py
@@ -157,6 +157,14 @@ class Credentials(
self._universe_domain_cached = True
return self._universe_domain
+ @_helpers.copy_docstring(credentials.Credentials)
+ def get_cred_info(self):
+ return {
+ "credential_source": "metadata server",
+ "credential_type": "VM credentials",
+ "principal": self.service_account_email,
+ }
+
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
creds = self.__class__(
diff --git a/contrib/python/google-auth/py3/google/auth/credentials.py b/contrib/python/google-auth/py3/google/auth/credentials.py
index e31930311b..2c67e04432 100644
--- a/contrib/python/google-auth/py3/google/auth/credentials.py
+++ b/contrib/python/google-auth/py3/google/auth/credentials.py
@@ -128,6 +128,17 @@ class Credentials(_BaseCredentials):
"""The universe domain value."""
return self._universe_domain
+ def get_cred_info(self):
+ """The credential information JSON.
+
+ The credential information will be added to auth related error messages
+ by client library.
+
+ Returns:
+ Mapping[str, str]: The credential information JSON.
+ """
+ return None
+
@abc.abstractmethod
def refresh(self, request):
"""Refreshes the access token.
diff --git a/contrib/python/google-auth/py3/google/auth/exceptions.py b/contrib/python/google-auth/py3/google/auth/exceptions.py
index fcbe61b746..feb9f7411e 100644
--- a/contrib/python/google-auth/py3/google/auth/exceptions.py
+++ b/contrib/python/google-auth/py3/google/auth/exceptions.py
@@ -98,3 +98,11 @@ class InvalidType(DefaultCredentialsError, TypeError):
class OSError(DefaultCredentialsError, EnvironmentError):
"""Used to wrap EnvironmentError(OSError after python3.3)."""
+
+
+class TimeoutError(GoogleAuthError):
+ """Used to indicate a timeout error occurred during an HTTP request."""
+
+
+class ResponseError(GoogleAuthError):
+ """Used to indicate an error occurred when reading an HTTP response."""
diff --git a/contrib/python/google-auth/py3/google/auth/external_account.py b/contrib/python/google-auth/py3/google/auth/external_account.py
index df0511f255..161e6c50ce 100644
--- a/contrib/python/google-auth/py3/google/auth/external_account.py
+++ b/contrib/python/google-auth/py3/google/auth/external_account.py
@@ -186,6 +186,7 @@ class Credentials(
self._supplier_context = SupplierContext(
self._subject_token_type, self._audience
)
+ self._cred_file_path = None
if not self.is_workforce_pool and self._workforce_pool_user_project:
# Workload identity pools do not support workforce pool user projects.
@@ -321,11 +322,24 @@ class Credentials(
return self._token_info_url
+ @_helpers.copy_docstring(credentials.Credentials)
+ def get_cred_info(self):
+ if self._cred_file_path:
+ cred_info_json = {
+ "credential_source": self._cred_file_path,
+ "credential_type": "external account credentials",
+ }
+ if self.service_account_email:
+ cred_info_json["principal"] = self.service_account_email
+ return cred_info_json
+ return None
+
@_helpers.copy_docstring(credentials.Scoped)
def with_scopes(self, scopes, default_scopes=None):
kwargs = self._constructor_args()
kwargs.update(scopes=scopes, default_scopes=default_scopes)
scoped = self.__class__(**kwargs)
+ scoped._cred_file_path = self._cred_file_path
scoped._metrics_options = self._metrics_options
return scoped
@@ -442,30 +456,31 @@ class Credentials(
self.expiry = now + lifetime
- @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
- def with_quota_project(self, quota_project_id):
- # Return copy of instance with the provided quota project ID.
+ def _make_copy(self):
kwargs = self._constructor_args()
- kwargs.update(quota_project_id=quota_project_id)
new_cred = self.__class__(**kwargs)
+ new_cred._cred_file_path = self._cred_file_path
new_cred._metrics_options = self._metrics_options
return new_cred
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ # Return copy of instance with the provided quota project ID.
+ cred = self._make_copy()
+ cred._quota_project_id = quota_project_id
+ return cred
+
@_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
def with_token_uri(self, token_uri):
- kwargs = self._constructor_args()
- kwargs.update(token_url=token_uri)
- new_cred = self.__class__(**kwargs)
- new_cred._metrics_options = self._metrics_options
- return new_cred
+ cred = self._make_copy()
+ cred._token_url = token_uri
+ return cred
@_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
def with_universe_domain(self, universe_domain):
- kwargs = self._constructor_args()
- kwargs.update(universe_domain=universe_domain)
- new_cred = self.__class__(**kwargs)
- new_cred._metrics_options = self._metrics_options
- return new_cred
+ cred = self._make_copy()
+ cred._universe_domain = universe_domain
+ return cred
def _should_initialize_impersonated_credentials(self):
return (
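
The refactoring above centralizes cloning in _make_copy() so each with_* method only mutates a single private field on the copy. An illustrative reduction of that pattern (the Settings class and its fields are hypothetical, not google-auth code)::

    class Settings:
        def __init__(self, token_url, quota_project_id=None):
            self._token_url = token_url
            self._quota_project_id = quota_project_id

        def _make_copy(self):
            # The one place that knows how to clone every field, including ones
            # carried over outside the constructor arguments.
            return Settings(self._token_url, self._quota_project_id)

        def with_quota_project(self, quota_project_id):
            copy = self._make_copy()
            copy._quota_project_id = quota_project_id
            return copy
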
diff --git a/contrib/python/google-auth/py3/google/auth/external_account_authorized_user.py b/contrib/python/google-auth/py3/google/auth/external_account_authorized_user.py
index f73387172c..4d0c3c6806 100644
--- a/contrib/python/google-auth/py3/google/auth/external_account_authorized_user.py
+++ b/contrib/python/google-auth/py3/google/auth/external_account_authorized_user.py
@@ -120,6 +120,7 @@ class Credentials(
self._quota_project_id = quota_project_id
self._scopes = scopes
self._universe_domain = universe_domain or credentials.DEFAULT_UNIVERSE_DOMAIN
+ self._cred_file_path = None
if not self.valid and not self.can_refresh:
raise exceptions.InvalidOperation(
@@ -290,23 +291,38 @@ class Credentials(
def _make_sts_request(self, request):
return self._sts_client.refresh_token(request, self._refresh_token)
+ @_helpers.copy_docstring(credentials.Credentials)
+ def get_cred_info(self):
+ if self._cred_file_path:
+ return {
+ "credential_source": self._cred_file_path,
+ "credential_type": "external account authorized user credentials",
+ }
+ return None
+
+ def _make_copy(self):
+ kwargs = self.constructor_args()
+ cred = self.__class__(**kwargs)
+ cred._cred_file_path = self._cred_file_path
+ return cred
+
@_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
def with_quota_project(self, quota_project_id):
- kwargs = self.constructor_args()
- kwargs.update(quota_project_id=quota_project_id)
- return self.__class__(**kwargs)
+ cred = self._make_copy()
+ cred._quota_project_id = quota_project_id
+ return cred
@_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
def with_token_uri(self, token_uri):
- kwargs = self.constructor_args()
- kwargs.update(token_url=token_uri)
- return self.__class__(**kwargs)
+ cred = self._make_copy()
+ cred._token_url = token_uri
+ return cred
@_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
def with_universe_domain(self, universe_domain):
- kwargs = self.constructor_args()
- kwargs.update(universe_domain=universe_domain)
- return self.__class__(**kwargs)
+ cred = self._make_copy()
+ cred._universe_domain = universe_domain
+ return cred
@classmethod
def from_info(cls, info, **kwargs):
diff --git a/contrib/python/google-auth/py3/google/auth/impersonated_credentials.py b/contrib/python/google-auth/py3/google/auth/impersonated_credentials.py
index 3c6f8712a9..c42a936433 100644
--- a/contrib/python/google-auth/py3/google/auth/impersonated_credentials.py
+++ b/contrib/python/google-auth/py3/google/auth/impersonated_credentials.py
@@ -226,6 +226,7 @@ class Credentials(
self.expiry = _helpers.utcnow()
self._quota_project_id = quota_project_id
self._iam_endpoint_override = iam_endpoint_override
+ self._cred_file_path = None
def _metric_header_for_usage(self):
return metrics.CRED_TYPE_SA_IMPERSONATE
@@ -316,29 +317,40 @@ class Credentials(
def requires_scopes(self):
return not self._target_scopes
- @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
- def with_quota_project(self, quota_project_id):
- return self.__class__(
+ @_helpers.copy_docstring(credentials.Credentials)
+ def get_cred_info(self):
+ if self._cred_file_path:
+ return {
+ "credential_source": self._cred_file_path,
+ "credential_type": "impersonated credentials",
+ "principal": self._target_principal,
+ }
+ return None
+
+ def _make_copy(self):
+ cred = self.__class__(
self._source_credentials,
target_principal=self._target_principal,
target_scopes=self._target_scopes,
delegates=self._delegates,
lifetime=self._lifetime,
- quota_project_id=quota_project_id,
+ quota_project_id=self._quota_project_id,
iam_endpoint_override=self._iam_endpoint_override,
)
+ cred._cred_file_path = self._cred_file_path
+ return cred
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ cred = self._make_copy()
+ cred._quota_project_id = quota_project_id
+ return cred
@_helpers.copy_docstring(credentials.Scoped)
def with_scopes(self, scopes, default_scopes=None):
- return self.__class__(
- self._source_credentials,
- target_principal=self._target_principal,
- target_scopes=scopes or default_scopes,
- delegates=self._delegates,
- lifetime=self._lifetime,
- quota_project_id=self._quota_project_id,
- iam_endpoint_override=self._iam_endpoint_override,
- )
+ cred = self._make_copy()
+ cred._target_scopes = scopes or default_scopes
+ return cred
class IDTokenCredentials(credentials.CredentialsWithQuotaProject):
diff --git a/contrib/python/google-auth/py3/google/auth/version.py b/contrib/python/google-auth/py3/google/auth/version.py
index 297e18a45f..6610120c69 100644
--- a/contrib/python/google-auth/py3/google/auth/version.py
+++ b/contrib/python/google-auth/py3/google/auth/version.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "2.34.0"
+__version__ = "2.35.0"
diff --git a/contrib/python/google-auth/py3/google/oauth2/credentials.py b/contrib/python/google-auth/py3/google/oauth2/credentials.py
index 5ca00d4c5a..6e158089f3 100644
--- a/contrib/python/google-auth/py3/google/oauth2/credentials.py
+++ b/contrib/python/google-auth/py3/google/oauth2/credentials.py
@@ -50,6 +50,9 @@ _LOGGER = logging.getLogger(__name__)
# The Google OAuth 2.0 token endpoint. Used for authorized user credentials.
_GOOGLE_OAUTH2_TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token"
+# The Google OAuth 2.0 token info endpoint. Used for getting token info JSON from access tokens.
+_GOOGLE_OAUTH2_TOKEN_INFO_ENDPOINT = "https://oauth2.googleapis.com/tokeninfo"
+
class Credentials(credentials.ReadOnlyScoped, credentials.CredentialsWithQuotaProject):
"""Credentials using OAuth 2.0 access and refresh tokens.
@@ -151,6 +154,7 @@ class Credentials(credentials.ReadOnlyScoped, credentials.CredentialsWithQuotaPr
self._trust_boundary = trust_boundary
self._universe_domain = universe_domain or credentials.DEFAULT_UNIVERSE_DOMAIN
self._account = account or ""
+ self._cred_file_path = None
def __getstate__(self):
"""A __getstate__ method must exist for the __setstate__ to be called
@@ -189,6 +193,7 @@ class Credentials(credentials.ReadOnlyScoped, credentials.CredentialsWithQuotaPr
self._universe_domain = (
d.get("_universe_domain") or credentials.DEFAULT_UNIVERSE_DOMAIN
)
+ self._cred_file_path = d.get("_cred_file_path")
# The refresh_handler setter should be used to repopulate this.
self._refresh_handler = None
self._refresh_worker = None
@@ -278,10 +283,8 @@ class Credentials(credentials.ReadOnlyScoped, credentials.CredentialsWithQuotaPr
"""str: The user account associated with the credential. If the account is unknown an empty string is returned."""
return self._account
- @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
- def with_quota_project(self, quota_project_id):
-
- return self.__class__(
+ def _make_copy(self):
+ cred = self.__class__(
self.token,
refresh_token=self.refresh_token,
id_token=self.id_token,
@@ -291,34 +294,39 @@ class Credentials(credentials.ReadOnlyScoped, credentials.CredentialsWithQuotaPr
scopes=self.scopes,
default_scopes=self.default_scopes,
granted_scopes=self.granted_scopes,
- quota_project_id=quota_project_id,
+ quota_project_id=self.quota_project_id,
rapt_token=self.rapt_token,
enable_reauth_refresh=self._enable_reauth_refresh,
trust_boundary=self._trust_boundary,
universe_domain=self._universe_domain,
account=self._account,
)
+ cred._cred_file_path = self._cred_file_path
+ return cred
+
+ @_helpers.copy_docstring(credentials.Credentials)
+ def get_cred_info(self):
+ if self._cred_file_path:
+ cred_info = {
+ "credential_source": self._cred_file_path,
+ "credential_type": "user credentials",
+ }
+ if self.account:
+ cred_info["principal"] = self.account
+ return cred_info
+ return None
+
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
+ def with_quota_project(self, quota_project_id):
+ cred = self._make_copy()
+ cred._quota_project_id = quota_project_id
+ return cred
@_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
def with_token_uri(self, token_uri):
-
- return self.__class__(
- self.token,
- refresh_token=self.refresh_token,
- id_token=self.id_token,
- token_uri=token_uri,
- client_id=self.client_id,
- client_secret=self.client_secret,
- scopes=self.scopes,
- default_scopes=self.default_scopes,
- granted_scopes=self.granted_scopes,
- quota_project_id=self.quota_project_id,
- rapt_token=self.rapt_token,
- enable_reauth_refresh=self._enable_reauth_refresh,
- trust_boundary=self._trust_boundary,
- universe_domain=self._universe_domain,
- account=self._account,
- )
+ cred = self._make_copy()
+ cred._token_uri = token_uri
+ return cred
def with_account(self, account):
"""Returns a copy of these credentials with a modified account.
@@ -329,45 +337,15 @@ class Credentials(credentials.ReadOnlyScoped, credentials.CredentialsWithQuotaPr
Returns:
google.oauth2.credentials.Credentials: A new credentials instance.
"""
-
- return self.__class__(
- self.token,
- refresh_token=self.refresh_token,
- id_token=self.id_token,
- token_uri=self._token_uri,
- client_id=self.client_id,
- client_secret=self.client_secret,
- scopes=self.scopes,
- default_scopes=self.default_scopes,
- granted_scopes=self.granted_scopes,
- quota_project_id=self.quota_project_id,
- rapt_token=self.rapt_token,
- enable_reauth_refresh=self._enable_reauth_refresh,
- trust_boundary=self._trust_boundary,
- universe_domain=self._universe_domain,
- account=account,
- )
+ cred = self._make_copy()
+ cred._account = account
+ return cred
@_helpers.copy_docstring(credentials.CredentialsWithUniverseDomain)
def with_universe_domain(self, universe_domain):
-
- return self.__class__(
- self.token,
- refresh_token=self.refresh_token,
- id_token=self.id_token,
- token_uri=self._token_uri,
- client_id=self.client_id,
- client_secret=self.client_secret,
- scopes=self.scopes,
- default_scopes=self.default_scopes,
- granted_scopes=self.granted_scopes,
- quota_project_id=self.quota_project_id,
- rapt_token=self.rapt_token,
- enable_reauth_refresh=self._enable_reauth_refresh,
- trust_boundary=self._trust_boundary,
- universe_domain=universe_domain,
- account=self._account,
- )
+ cred = self._make_copy()
+ cred._universe_domain = universe_domain
+ return cred
def _metric_header_for_usage(self):
return metrics.CRED_TYPE_USER
diff --git a/contrib/python/google-auth/py3/google/oauth2/service_account.py b/contrib/python/google-auth/py3/google/oauth2/service_account.py
index 0e12868f14..98dafa3e38 100644
--- a/contrib/python/google-auth/py3/google/oauth2/service_account.py
+++ b/contrib/python/google-auth/py3/google/oauth2/service_account.py
@@ -173,6 +173,7 @@ class Credentials(
"""
super(Credentials, self).__init__()
+ self._cred_file_path = None
self._scopes = scopes
self._default_scopes = default_scopes
self._signer = signer
@@ -220,7 +221,7 @@ class Credentials(
"universe_domain", credentials.DEFAULT_UNIVERSE_DOMAIN
),
trust_boundary=info.get("trust_boundary"),
- **kwargs
+ **kwargs,
)
@classmethod
@@ -294,6 +295,7 @@ class Credentials(
always_use_jwt_access=self._always_use_jwt_access,
universe_domain=self._universe_domain,
)
+ cred._cred_file_path = self._cred_file_path
return cred
@_helpers.copy_docstring(credentials.Scoped)
@@ -503,6 +505,16 @@ class Credentials(
def signer_email(self):
return self._service_account_email
+ @_helpers.copy_docstring(credentials.Credentials)
+ def get_cred_info(self):
+ if self._cred_file_path:
+ return {
+ "credential_source": self._cred_file_path,
+ "credential_type": "service account credentials",
+ "principal": self.service_account_email,
+ }
+ return None
+
class IDTokenCredentials(
credentials.Signing,
diff --git a/contrib/python/google-auth/py3/tests/compute_engine/test_credentials.py b/contrib/python/google-auth/py3/tests/compute_engine/test_credentials.py
index bb29f8c6e2..662210fa41 100644
--- a/contrib/python/google-auth/py3/tests/compute_engine/test_credentials.py
+++ b/contrib/python/google-auth/py3/tests/compute_engine/test_credentials.py
@@ -72,6 +72,13 @@ class TestCredentials(object):
universe_domain=FAKE_UNIVERSE_DOMAIN,
)
+ def test_get_cred_info(self):
+ assert self.credentials.get_cred_info() == {
+ "credential_source": "metadata server",
+ "credential_type": "VM credentials",
+ "principal": "default",
+ }
+
def test_default_state(self):
assert not self.credentials.valid
# Expiration hasn't been set yet
diff --git a/contrib/python/google-auth/py3/tests/oauth2/test_credentials.py b/contrib/python/google-auth/py3/tests/oauth2/test_credentials.py
index 67b6b9c1ad..a4cac7a463 100644
--- a/contrib/python/google-auth/py3/tests/oauth2/test_credentials.py
+++ b/contrib/python/google-auth/py3/tests/oauth2/test_credentials.py
@@ -72,6 +72,34 @@ class TestCredentials(object):
assert credentials.rapt_token == self.RAPT_TOKEN
assert credentials.refresh_handler is None
+ def test_get_cred_info(self):
+ credentials = self.make_credentials()
+ credentials._account = "fake-account"
+ assert not credentials.get_cred_info()
+
+ credentials._cred_file_path = "/path/to/file"
+ assert credentials.get_cred_info() == {
+ "credential_source": "/path/to/file",
+ "credential_type": "user credentials",
+ "principal": "fake-account",
+ }
+
+ def test_get_cred_info_no_account(self):
+ credentials = self.make_credentials()
+ assert not credentials.get_cred_info()
+
+ credentials._cred_file_path = "/path/to/file"
+ assert credentials.get_cred_info() == {
+ "credential_source": "/path/to/file",
+ "credential_type": "user credentials",
+ }
+
+ def test__make_copy_get_cred_info(self):
+ credentials = self.make_credentials()
+ credentials._cred_file_path = "/path/to/file"
+ cred_copy = credentials._make_copy()
+ assert cred_copy._cred_file_path == "/path/to/file"
+
def test_token_usage_metrics(self):
credentials = self.make_credentials()
credentials.token = "token"
diff --git a/contrib/python/google-auth/py3/tests/oauth2/test_service_account.py b/contrib/python/google-auth/py3/tests/oauth2/test_service_account.py
index 0dbe316a0f..fe02e828e7 100644
--- a/contrib/python/google-auth/py3/tests/oauth2/test_service_account.py
+++ b/contrib/python/google-auth/py3/tests/oauth2/test_service_account.py
@@ -69,6 +69,23 @@ class TestCredentials(object):
universe_domain=universe_domain,
)
+ def test_get_cred_info(self):
+ credentials = self.make_credentials()
+ assert not credentials.get_cred_info()
+
+ credentials._cred_file_path = "/path/to/file"
+ assert credentials.get_cred_info() == {
+ "credential_source": "/path/to/file",
+ "credential_type": "service account credentials",
+ "principal": "service-account@example.com",
+ }
+
+ def test__make_copy_get_cred_info(self):
+ credentials = self.make_credentials()
+ credentials._cred_file_path = "/path/to/file"
+ cred_copy = credentials._make_copy()
+ assert cred_copy._cred_file_path == "/path/to/file"
+
def test_constructor_no_universe_domain(self):
credentials = service_account.Credentials(
SIGNER, self.SERVICE_ACCOUNT_EMAIL, self.TOKEN_URI, universe_domain=None
diff --git a/contrib/python/google-auth/py3/tests/test__default.py b/contrib/python/google-auth/py3/tests/test__default.py
index aaf892f6d0..3147d505da 100644
--- a/contrib/python/google-auth/py3/tests/test__default.py
+++ b/contrib/python/google-auth/py3/tests/test__default.py
@@ -884,6 +884,38 @@ def test_default_early_out(unused_get):
@mock.patch(
+ "google.auth._default.load_credentials_from_file",
+ return_value=(MOCK_CREDENTIALS, mock.sentinel.project_id),
+ autospec=True,
+)
+def test_default_cred_file_path_env_var(unused_load_cred, monkeypatch):
+ monkeypatch.setenv(environment_vars.CREDENTIALS, "/path/to/file")
+ cred, _ = _default.default()
+ assert (
+ cred._cred_file_path
+ == "/path/to/file file via the GOOGLE_APPLICATION_CREDENTIALS environment variable"
+ )
+
+
+@mock.patch("os.path.isfile", return_value=True, autospec=True)
+@mock.patch(
+ "google.auth._cloud_sdk.get_application_default_credentials_path",
+ return_value="/path/to/adc/file",
+ autospec=True,
+)
+@mock.patch(
+ "google.auth._default.load_credentials_from_file",
+ return_value=(MOCK_CREDENTIALS, mock.sentinel.project_id),
+ autospec=True,
+)
+def test_default_cred_file_path_gcloud(
+ unused_load_cred, unused_get_adc_file, unused_isfile
+):
+ cred, _ = _default.default()
+ assert cred._cred_file_path == "/path/to/adc/file"
+
+
+@mock.patch(
"google.auth._default._get_explicit_environ_credentials",
return_value=(MOCK_CREDENTIALS, mock.sentinel.project_id),
autospec=True,
diff --git a/contrib/python/google-auth/py3/tests/test__exponential_backoff.py b/contrib/python/google-auth/py3/tests/test__exponential_backoff.py
index 95422502b0..b7b6877b2c 100644
--- a/contrib/python/google-auth/py3/tests/test__exponential_backoff.py
+++ b/contrib/python/google-auth/py3/tests/test__exponential_backoff.py
@@ -54,3 +54,44 @@ def test_minimum_total_attempts():
with pytest.raises(exceptions.InvalidValue):
_exponential_backoff.ExponentialBackoff(total_attempts=-1)
_exponential_backoff.ExponentialBackoff(total_attempts=1)
+
+
+@pytest.mark.asyncio
+@mock.patch("asyncio.sleep", return_value=None)
+async def test_exponential_backoff_async(mock_time_async):
+ eb = _exponential_backoff.AsyncExponentialBackoff()
+ curr_wait = eb._current_wait_in_seconds
+ iteration_count = 0
+
+ # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
+ # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
+ async for attempt in eb: # pragma: no branch
+ if attempt == 1:
+ assert mock_time_async.call_count == 0
+ else:
+ backoff_interval = mock_time_async.call_args[0][0]
+ jitter = curr_wait * eb._randomization_factor
+
+ assert (curr_wait - jitter) <= backoff_interval <= (curr_wait + jitter)
+ assert attempt == iteration_count + 1
+ assert eb.backoff_count == iteration_count + 1
+ assert eb._current_wait_in_seconds == eb._multiplier ** iteration_count
+
+ curr_wait = eb._current_wait_in_seconds
+ iteration_count += 1
+
+ assert eb.total_attempts == _exponential_backoff._DEFAULT_RETRY_TOTAL_ATTEMPTS
+ assert eb.backoff_count == _exponential_backoff._DEFAULT_RETRY_TOTAL_ATTEMPTS
+ assert iteration_count == _exponential_backoff._DEFAULT_RETRY_TOTAL_ATTEMPTS
+ assert (
+ mock_time_async.call_count
+ == _exponential_backoff._DEFAULT_RETRY_TOTAL_ATTEMPTS - 1
+ )
+
+
+def test_minimum_total_attempts_async():
+ with pytest.raises(exceptions.InvalidValue):
+ _exponential_backoff.AsyncExponentialBackoff(total_attempts=0)
+ with pytest.raises(exceptions.InvalidValue):
+ _exponential_backoff.AsyncExponentialBackoff(total_attempts=-1)
+ _exponential_backoff.AsyncExponentialBackoff(total_attempts=1)
diff --git a/contrib/python/google-auth/py3/tests/test_credentials.py b/contrib/python/google-auth/py3/tests/test_credentials.py
index 8e6bbc9633..e11bcb4e55 100644
--- a/contrib/python/google-auth/py3/tests/test_credentials.py
+++ b/contrib/python/google-auth/py3/tests/test_credentials.py
@@ -52,6 +52,11 @@ def test_credentials_constructor():
assert not credentials._use_non_blocking_refresh
+def test_credentials_get_cred_info():
+ credentials = CredentialsImpl()
+ assert not credentials.get_cred_info()
+
+
def test_with_non_blocking_refresh():
c = CredentialsImpl()
c.with_non_blocking_refresh()
diff --git a/contrib/python/google-auth/py3/tests/test_external_account.py b/contrib/python/google-auth/py3/tests/test_external_account.py
index 3c372e6291..bddcb4afa1 100644
--- a/contrib/python/google-auth/py3/tests/test_external_account.py
+++ b/contrib/python/google-auth/py3/tests/test_external_account.py
@@ -275,6 +275,31 @@ class TestCredentials(object):
assert request_kwargs["headers"] == headers
assert "body" not in request_kwargs
+ def test_get_cred_info(self):
+ credentials = self.make_credentials()
+ assert not credentials.get_cred_info()
+
+ credentials._cred_file_path = "/path/to/file"
+ assert credentials.get_cred_info() == {
+ "credential_source": "/path/to/file",
+ "credential_type": "external account credentials",
+ }
+
+ credentials._service_account_impersonation_url = (
+ self.SERVICE_ACCOUNT_IMPERSONATION_URL
+ )
+ assert credentials.get_cred_info() == {
+ "credential_source": "/path/to/file",
+ "credential_type": "external account credentials",
+ "principal": SERVICE_ACCOUNT_EMAIL,
+ }
+
+ def test__make_copy_get_cred_info(self):
+ credentials = self.make_credentials()
+ credentials._cred_file_path = "/path/to/file"
+ cred_copy = credentials._make_copy()
+ assert cred_copy._cred_file_path == "/path/to/file"
+
def test_default_state(self):
credentials = self.make_credentials(
service_account_impersonation_url=self.SERVICE_ACCOUNT_IMPERSONATION_URL
@@ -469,25 +494,29 @@ class TestCredentials(object):
with mock.patch.object(
external_account.Credentials, "__init__", return_value=None
) as mock_init:
- credentials.with_quota_project("project-foo")
+ new_cred = credentials.with_quota_project("project-foo")
- # Confirm with_quota_project initialized the credential with the
- # expected parameters and quota project ID.
- mock_init.assert_called_once_with(
- audience=self.AUDIENCE,
- subject_token_type=self.SUBJECT_TOKEN_TYPE,
- token_url=self.TOKEN_URL,
- token_info_url=self.TOKEN_INFO_URL,
- credential_source=self.CREDENTIAL_SOURCE,
- service_account_impersonation_url=self.SERVICE_ACCOUNT_IMPERSONATION_URL,
- service_account_impersonation_options={"token_lifetime_seconds": 2800},
- client_id=CLIENT_ID,
- client_secret=CLIENT_SECRET,
- quota_project_id="project-foo",
- scopes=self.SCOPES,
- default_scopes=["default1"],
- universe_domain=DEFAULT_UNIVERSE_DOMAIN,
- )
+ # Confirm with_quota_project initialized the credential with the
+ # expected parameters.
+ mock_init.assert_called_once_with(
+ audience=self.AUDIENCE,
+ subject_token_type=self.SUBJECT_TOKEN_TYPE,
+ token_url=self.TOKEN_URL,
+ token_info_url=self.TOKEN_INFO_URL,
+ credential_source=self.CREDENTIAL_SOURCE,
+ service_account_impersonation_url=self.SERVICE_ACCOUNT_IMPERSONATION_URL,
+ service_account_impersonation_options={"token_lifetime_seconds": 2800},
+ client_id=CLIENT_ID,
+ client_secret=CLIENT_SECRET,
+ quota_project_id=self.QUOTA_PROJECT_ID,
+ scopes=self.SCOPES,
+ default_scopes=["default1"],
+ universe_domain=DEFAULT_UNIVERSE_DOMAIN,
+ )
+
+ # Confirm with_quota_project sets the correct quota project after
+ # initialization.
+ assert new_cred.quota_project_id == "project-foo"
def test_info(self):
credentials = self.make_credentials(universe_domain="dummy_universe.com")
diff --git a/contrib/python/google-auth/py3/tests/test_external_account_authorized_user.py b/contrib/python/google-auth/py3/tests/test_external_account_authorized_user.py
index 743ee9c848..93926a1314 100644
--- a/contrib/python/google-auth/py3/tests/test_external_account_authorized_user.py
+++ b/contrib/python/google-auth/py3/tests/test_external_account_authorized_user.py
@@ -83,6 +83,22 @@ class TestCredentials(object):
return request
+ def test_get_cred_info(self):
+ credentials = self.make_credentials()
+ assert not credentials.get_cred_info()
+
+ credentials._cred_file_path = "/path/to/file"
+ assert credentials.get_cred_info() == {
+ "credential_source": "/path/to/file",
+ "credential_type": "external account authorized user credentials",
+ }
+
+ def test__make_copy_get_cred_info(self):
+ credentials = self.make_credentials()
+ credentials._cred_file_path = "/path/to/file"
+ cred_copy = credentials._make_copy()
+ assert cred_copy._cred_file_path == "/path/to/file"
+
def test_default_state(self):
creds = self.make_credentials()
diff --git a/contrib/python/google-auth/py3/tests/test_impersonated_credentials.py b/contrib/python/google-auth/py3/tests/test_impersonated_credentials.py
index 7295bba429..4fb68103a8 100644
--- a/contrib/python/google-auth/py3/tests/test_impersonated_credentials.py
+++ b/contrib/python/google-auth/py3/tests/test_impersonated_credentials.py
@@ -136,6 +136,23 @@ class TestImpersonatedCredentials(object):
iam_endpoint_override=iam_endpoint_override,
)
+ def test_get_cred_info(self):
+ credentials = self.make_credentials()
+ assert not credentials.get_cred_info()
+
+ credentials._cred_file_path = "/path/to/file"
+ assert credentials.get_cred_info() == {
+ "credential_source": "/path/to/file",
+ "credential_type": "impersonated credentials",
+ "principal": "impersonated@project.iam.gserviceaccount.com",
+ }
+
+ def test__make_copy_get_cred_info(self):
+ credentials = self.make_credentials()
+ credentials._cred_file_path = "/path/to/file"
+ cred_copy = credentials._make_copy()
+ assert cred_copy._cred_file_path == "/path/to/file"
+
def test_make_from_user_credentials(self):
credentials = self.make_credentials(
source_credentials=self.USER_SOURCE_CREDENTIALS
diff --git a/contrib/python/google-auth/py3/tests/transport/aio/test_aiohttp.py b/contrib/python/google-auth/py3/tests/transport/aio/test_aiohttp.py
new file mode 100644
index 0000000000..632abff25a
--- /dev/null
+++ b/contrib/python/google-auth/py3/tests/transport/aio/test_aiohttp.py
@@ -0,0 +1,170 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+
+from aioresponses import aioresponses # type: ignore
+from mock import AsyncMock, Mock, patch
+import pytest # type: ignore
+import pytest_asyncio # type: ignore
+
+from google.auth import exceptions
+import google.auth.aio.transport.aiohttp as auth_aiohttp
+
+
+try:
+ import aiohttp # type: ignore
+except ImportError as caught_exc: # pragma: NO COVER
+ raise ImportError(
+ "The aiohttp library is not installed from please install the aiohttp package to use the aiohttp transport."
+ ) from caught_exc
+
+
+@pytest.fixture
+def mock_response():
+ response = Mock()
+ response.status = 200
+ response.headers = {"Content-Type": "application/json", "Content-Length": "100"}
+ mock_iterator = AsyncMock()
+ mock_iterator.__aiter__.return_value = iter(
+ [b"Cavefish ", b"have ", b"no ", b"sight."]
+ )
+ response.content.iter_chunked = lambda chunk_size: mock_iterator
+ response.read = AsyncMock(return_value=b"Cavefish have no sight.")
+ response.close = AsyncMock()
+
+ return auth_aiohttp.Response(response)
+
+
+class TestResponse(object):
+ @pytest.mark.asyncio
+ async def test_response_status_code(self, mock_response):
+ assert mock_response.status_code == 200
+
+ @pytest.mark.asyncio
+ async def test_response_headers(self, mock_response):
+ assert mock_response.headers["Content-Type"] == "application/json"
+ assert mock_response.headers["Content-Length"] == "100"
+
+ @pytest.mark.asyncio
+ async def test_response_content(self, mock_response):
+ content = b"".join([chunk async for chunk in mock_response.content()])
+ assert content == b"Cavefish have no sight."
+
+ @pytest.mark.asyncio
+ async def test_response_content_raises_error(self, mock_response):
+ with patch.object(
+ mock_response._response.content,
+ "iter_chunked",
+ side_effect=aiohttp.ClientPayloadError,
+ ):
+ with pytest.raises(exceptions.ResponseError) as exc:
+ [chunk async for chunk in mock_response.content()]
+ exc.match("Failed to read from the payload stream")
+
+ @pytest.mark.asyncio
+ async def test_response_read(self, mock_response):
+ content = await mock_response.read()
+ assert content == b"Cavefish have no sight."
+
+ @pytest.mark.asyncio
+ async def test_response_read_raises_error(self, mock_response):
+ with patch.object(
+ mock_response._response,
+ "read",
+ side_effect=aiohttp.ClientResponseError(None, None),
+ ):
+ with pytest.raises(exceptions.ResponseError) as exc:
+ await mock_response.read()
+ exc.match("Failed to read the response body.")
+
+ @pytest.mark.asyncio
+ async def test_response_close(self, mock_response):
+ await mock_response.close()
+ mock_response._response.close.assert_called_once()
+
+ @pytest.mark.asyncio
+ async def test_response_content_stream(self, mock_response):
+ itr = mock_response.content().__aiter__()
+ content = []
+ try:
+ while True:
+ chunk = await itr.__anext__()
+ content.append(chunk)
+ except StopAsyncIteration:
+ pass
+ assert b"".join(content) == b"Cavefish have no sight."
+
+
+@pytest.mark.asyncio
+class TestRequest:
+ @pytest_asyncio.fixture
+ async def aiohttp_request(self):
+ request = auth_aiohttp.Request()
+ yield request
+ await request.close()
+
+ async def test_request_call_success(self, aiohttp_request):
+ with aioresponses() as m:
+ mocked_chunks = [b"Cavefish ", b"have ", b"no ", b"sight."]
+ mocked_response = b"".join(mocked_chunks)
+ m.get("http://example.com", status=200, body=mocked_response)
+ response = await aiohttp_request("http://example.com")
+ assert response.status_code == 200
+ assert response.headers == {"Content-Type": "application/json"}
+ content = b"".join([chunk async for chunk in response.content()])
+ assert content == b"Cavefish have no sight."
+
+ async def test_request_call_success_with_provided_session(self):
+ mock_session = aiohttp.ClientSession()
+ request = auth_aiohttp.Request(mock_session)
+ with aioresponses() as m:
+ mocked_chunks = [b"Cavefish ", b"have ", b"no ", b"sight."]
+ mocked_response = b"".join(mocked_chunks)
+ m.get("http://example.com", status=200, body=mocked_response)
+ response = await request("http://example.com")
+ assert response.status_code == 200
+ assert response.headers == {"Content-Type": "application/json"}
+ content = b"".join([chunk async for chunk in response.content()])
+ assert content == b"Cavefish have no sight."
+
+ async def test_request_call_raises_client_error(self, aiohttp_request):
+ with aioresponses() as m:
+ m.get("http://example.com", exception=aiohttp.ClientError)
+
+ with pytest.raises(exceptions.TransportError) as exc:
+ await aiohttp_request("http://example.com/api")
+
+ exc.match("Failed to send request to http://example.com/api.")
+
+ async def test_request_call_raises_timeout_error(self, aiohttp_request):
+ with aioresponses() as m:
+ m.get("http://example.com", exception=asyncio.TimeoutError)
+
+ with pytest.raises(exceptions.TimeoutError) as exc:
+ await aiohttp_request("http://example.com")
+
+ exc.match("Request timed out after 180 seconds.")
+
+ async def test_request_call_raises_transport_error_for_closed_session(
+ self, aiohttp_request
+ ):
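+ # Marking the session as closed should short-circuit the call with a TransportError.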
+ with aioresponses() as m:
+ m.get("http://example.com", exception=asyncio.TimeoutError)
+ aiohttp_request._closed = True
+ with pytest.raises(exceptions.TransportError) as exc:
+ await aiohttp_request("http://example.com")
+
+ exc.match("session is closed.")
+ aiohttp_request._closed = False
diff --git a/contrib/python/google-auth/py3/tests/transport/aio/test_sessions.py b/contrib/python/google-auth/py3/tests/transport/aio/test_sessions.py
new file mode 100644
index 0000000000..c91a7c40ae
--- /dev/null
+++ b/contrib/python/google-auth/py3/tests/transport/aio/test_sessions.py
@@ -0,0 +1,311 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+from typing import AsyncGenerator
+
+from aioresponses import aioresponses # type: ignore
+from mock import Mock, patch
+import pytest # type: ignore
+
+from google.auth.aio.credentials import AnonymousCredentials
+from google.auth.aio.transport import (
+ _DEFAULT_TIMEOUT_SECONDS,
+ DEFAULT_MAX_RETRY_ATTEMPTS,
+ DEFAULT_RETRYABLE_STATUS_CODES,
+ Request,
+ Response,
+ sessions,
+)
+from google.auth.exceptions import InvalidType, TimeoutError, TransportError
+
+
+@pytest.fixture
+async def simple_async_task():
+ return True
+
+
+class MockRequest(Request):
+ def __init__(self, response=None, side_effect=None):
+ self._closed = False
+ self._response = response
+ self._side_effect = side_effect
+ self.call_count = 0
+
+ async def __call__(
+ self,
+ url,
+ method="GET",
+ body=None,
+ headers=None,
+ timeout=_DEFAULT_TIMEOUT_SECONDS,
+ **kwargs,
+ ):
+ self.call_count += 1
+ if self._side_effect:
+ raise self._side_effect
+ return self._response
+
+ async def close(self):
+ self._closed = True
+ return None
+
+
+class MockResponse(Response):
+ def __init__(self, status_code, headers=None, content=None):
+ self._status_code = status_code
+ self._headers = headers
+ self._content = content
+ self._close = False
+
+ @property
+ def status_code(self):
+ return self._status_code
+
+ @property
+ def headers(self):
+ return self._headers
+
+ async def read(self) -> bytes:
+ content = await self.content(1024)
+ return b"".join([chunk async for chunk in content])
+
+ async def content(self, chunk_size=None) -> AsyncGenerator:
+ return self._content
+
+ async def close(self) -> None:
+ self._close = True
+
+
+class TestTimeoutGuard(object):
+ default_timeout = 1
+
+ def make_timeout_guard(self, timeout):
+ return sessions.timeout_guard(timeout)
+
+ @pytest.mark.asyncio
+ async def test_timeout_with_simple_async_task_within_bounds(
+ self, simple_async_task
+ ):
+ task = False
+ with patch("time.monotonic", side_effect=[0, 0.25, 0.75]):
+ with patch("asyncio.wait_for", lambda coro, _: coro):
+ async with self.make_timeout_guard(
+ timeout=self.default_timeout
+ ) as with_timeout:
+ task = await with_timeout(simple_async_task)
+
+ # Task succeeds.
+ assert task is True
+
+ @pytest.mark.asyncio
+ async def test_timeout_with_simple_async_task_out_of_bounds(
+ self, simple_async_task
+ ):
+ task = False
+ with patch("time.monotonic", side_effect=[0, 1, 1]):
+ with pytest.raises(TimeoutError) as exc:
+ async with self.make_timeout_guard(
+ timeout=self.default_timeout
+ ) as with_timeout:
+ task = await with_timeout(simple_async_task)
+
+ # Task does not succeed and the context manager times out, i.e. there is no remaining time left.
+ assert task is False
+ assert exc.match(
+ f"Context manager exceeded the configured timeout of {self.default_timeout}s."
+ )
+
+ @pytest.mark.asyncio
+ async def test_timeout_with_async_task_timing_out_before_context(
+ self, simple_async_task
+ ):
+ task = False
+ with pytest.raises(TimeoutError) as exc:
+ async with self.make_timeout_guard(
+ timeout=self.default_timeout
+ ) as with_timeout:
+ with patch("asyncio.wait_for", side_effect=asyncio.TimeoutError):
+ task = await with_timeout(simple_async_task)
+
+ # Task does not complete, i.e. the operation times out.
+ assert task is False
+ assert exc.match(
+ f"The operation {simple_async_task} exceeded the configured timeout of {self.default_timeout}s."
+ )
+
+
+class TestAsyncAuthorizedSession(object):
+ TEST_URL = "http://example.com/"
+ credentials = AnonymousCredentials()
+
+ @pytest.fixture
+ async def mocked_content(self):
+ content = [b"Cavefish ", b"have ", b"no ", b"sight."]
+ for chunk in content:
+ yield chunk
+
+ @pytest.mark.asyncio
+ async def test_constructor_with_default_auth_request(self):
+ with patch("google.auth.aio.transport.sessions.AIOHTTP_INSTALLED", True):
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials)
+ assert authed_session._credentials == self.credentials
+ await authed_session.close()
+
+ @pytest.mark.asyncio
+ async def test_constructor_with_provided_auth_request(self):
+ auth_request = MockRequest()
+ authed_session = sessions.AsyncAuthorizedSession(
+ self.credentials, auth_request=auth_request
+ )
+
+ assert authed_session._auth_request is auth_request
+ await authed_session.close()
+
+ @pytest.mark.asyncio
+ async def test_constructor_raises_no_auth_request_error(self):
+ with patch("google.auth.aio.transport.sessions.AIOHTTP_INSTALLED", False):
+ with pytest.raises(TransportError) as exc:
+ sessions.AsyncAuthorizedSession(self.credentials)
+
+ exc.match(
+ "`auth_request` must either be configured or the external package `aiohttp` must be installed to use the default value."
+ )
+
+ @pytest.mark.asyncio
+ async def test_constructor_raises_incorrect_credentials_error(self):
+ credentials = Mock()
+ with pytest.raises(InvalidType) as exc:
+ sessions.AsyncAuthorizedSession(credentials)
+
+ exc.match(
+ f"The configured credentials of type {type(credentials)} are invalid and must be of type `google.auth.aio.credentials.Credentials`"
+ )
+
+ @pytest.mark.asyncio
+ async def test_request_default_auth_request_success(self):
+ with aioresponses() as m:
+ mocked_chunks = [b"Cavefish ", b"have ", b"no ", b"sight."]
+ mocked_response = b"".join(mocked_chunks)
+ m.get(self.TEST_URL, status=200, body=mocked_response)
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials)
+ response = await authed_session.request("GET", self.TEST_URL)
+ assert response.status_code == 200
+ assert response.headers == {"Content-Type": "application/json"}
+ assert await response.read() == b"Cavefish have no sight."
+ await response.close()
+
+ await authed_session.close()
+
+ @pytest.mark.asyncio
+ async def test_request_provided_auth_request_success(self, mocked_content):
+ mocked_response = MockResponse(
+ status_code=200,
+ headers={"Content-Type": "application/json"},
+ content=mocked_content,
+ )
+ auth_request = MockRequest(mocked_response)
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials, auth_request)
+ response = await authed_session.request("GET", self.TEST_URL)
+ assert response.status_code == 200
+ assert response.headers == {"Content-Type": "application/json"}
+ assert await response.read() == b"Cavefish have no sight."
+ await response.close()
+ assert response._close
+
+ await authed_session.close()
+
+ @pytest.mark.asyncio
+ async def test_request_raises_timeout_error(self):
+ auth_request = MockRequest(side_effect=asyncio.TimeoutError)
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials, auth_request)
+ with pytest.raises(TimeoutError):
+ await authed_session.request("GET", self.TEST_URL)
+
+ @pytest.mark.asyncio
+ async def test_request_raises_transport_error(self):
+ auth_request = MockRequest(side_effect=TransportError)
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials, auth_request)
+ with pytest.raises(TransportError):
+ await authed_session.request("GET", self.TEST_URL)
+
+ @pytest.mark.asyncio
+ async def test_request_max_allowed_time_exceeded_error(self):
+ auth_request = MockRequest(side_effect=TransportError)
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials, auth_request)
+ with patch("time.monotonic", side_effect=[0, 1, 1]):
+ with pytest.raises(TimeoutError):
+ await authed_session.request("GET", self.TEST_URL, max_allowed_time=1)
+
+ @pytest.mark.parametrize("retry_status", DEFAULT_RETRYABLE_STATUS_CODES)
+ @pytest.mark.asyncio
+ async def test_request_max_retries(self, retry_status):
+ mocked_response = MockResponse(status_code=retry_status)
+ auth_request = MockRequest(mocked_response)
+ with patch("asyncio.sleep", return_value=None):
+ authed_session = sessions.AsyncAuthorizedSession(
+ self.credentials, auth_request
+ )
+ await authed_session.request("GET", self.TEST_URL)
+ assert auth_request.call_count == DEFAULT_MAX_RETRY_ATTEMPTS
+
+ @pytest.mark.asyncio
+ async def test_http_get_method_success(self):
+ expected_payload = b"content is retrieved."
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials)
+ with aioresponses() as m:
+ m.get(self.TEST_URL, status=200, body=expected_payload)
+ response = await authed_session.get(self.TEST_URL)
+ assert await response.read() == expected_payload
+ await authed_session.close()
+
+ @pytest.mark.asyncio
+ async def test_http_post_method_success(self):
+ expected_payload = b"content is posted."
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials)
+ with aioresponses() as m:
+ m.post(self.TEST_URL, status=200, body=expected_payload)
+ response = await authed_session.post(self.TEST_URL)
+ assert await response.read() == expected_payload
+ await authed_session.close()
+
+ @pytest.mark.asyncio
+ async def test_http_put_method_success(self):
+ expected_payload = b"content is retrieved."
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials)
+ with aioresponses() as m:
+ m.put(self.TEST_URL, status=200, body=expected_payload)
+ response = await authed_session.put(self.TEST_URL)
+ assert await response.read() == expected_payload
+ await authed_session.close()
+
+ @pytest.mark.asyncio
+ async def test_http_patch_method_success(self):
+ expected_payload = b"content is retrieved."
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials)
+ with aioresponses() as m:
+ m.patch(self.TEST_URL, status=200, body=expected_payload)
+ response = await authed_session.patch(self.TEST_URL)
+ assert await response.read() == expected_payload
+ await authed_session.close()
+
+ @pytest.mark.asyncio
+ async def test_http_delete_method_success(self):
+ expected_payload = b"content is deleted."
+ authed_session = sessions.AsyncAuthorizedSession(self.credentials)
+ with aioresponses() as m:
+ m.delete(self.TEST_URL, status=200, body=expected_payload)
+ response = await authed_session.delete(self.TEST_URL)
+ assert await response.read() == expected_payload
+ await authed_session.close()
diff --git a/contrib/python/google-auth/py3/tests/ya.make b/contrib/python/google-auth/py3/tests/ya.make
index 6c6db898c4..23e821bb9a 100644
--- a/contrib/python/google-auth/py3/tests/ya.make
+++ b/contrib/python/google-auth/py3/tests/ya.make
@@ -9,6 +9,8 @@ PEERDIR(
contrib/python/pytest-localserver
contrib/python/oauth2client
contrib/python/freezegun
+ contrib/python/aioresponses
+ contrib/python/pytest-asyncio
)
DATA(
@@ -22,16 +24,16 @@ PY_SRCS(
)
TEST_SRCS(
- __init__.py
compute_engine/__init__.py
- compute_engine/test__metadata.py
compute_engine/test_credentials.py
+ compute_engine/test__metadata.py
conftest.py
crypt/__init__.py
crypt/test__cryptography_rsa.py
- crypt/test__python_rsa.py
crypt/test_crypt.py
crypt/test_es256.py
+ crypt/test__python_rsa.py
+ __init__.py
oauth2/__init__.py
oauth2/test__client.py
# oauth2/test_challenges.py - need pyu2f
@@ -42,35 +44,38 @@ TEST_SRCS(
oauth2/test_service_account.py
oauth2/test_sts.py
oauth2/test_utils.py
- oauth2/test_webauthn_handler.py
oauth2/test_webauthn_handler_factory.py
+ oauth2/test_webauthn_handler.py
oauth2/test_webauthn_types.py
- test__cloud_sdk.py
- test__default.py
- test__exponential_backoff.py
- test__helpers.py
- test__oauth2client.py
- test__refresh_worker.py
- test__service_account_info.py
test_api_key.py
test_app_engine.py
test_aws.py
+ test__cloud_sdk.py
+ test_credentials_async.py
test_credentials.py
+ test__default.py
test_downscoped.py
test_exceptions.py
- test_external_account.py
+ test__exponential_backoff.py
test_external_account_authorized_user.py
+ test_external_account.py
+ test__helpers.py
test_iam.py
test_identity_pool.py
test_impersonated_credentials.py
test_jwt.py
test_metrics.py
+ test__oauth2client.py
test_packaging.py
test_pluggable.py
+ test__refresh_worker.py
+ test__service_account_info.py
+ transport/aio/test_aiohttp.py
+ # transport/aio/test_sessions.py
# transport/test__custom_tls_signer.py
+ transport/test_grpc.py
transport/test__http_client.py
transport/test__mtls_helper.py
- transport/test_grpc.py
transport/test_mtls.py
# transport/test_requests.py
# transport/test_urllib3.py
diff --git a/contrib/python/google-auth/py3/ya.make b/contrib/python/google-auth/py3/ya.make
index 4f5c4e4ad8..60146f91a4 100644
--- a/contrib/python/google-auth/py3/ya.make
+++ b/contrib/python/google-auth/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(2.34.0)
+VERSION(2.35.0)
LICENSE(Apache-2.0)
@@ -20,6 +20,7 @@ NO_LINT()
NO_CHECK_IMPORTS(
google.auth._oauth2client
+ google.auth.aio.transport.aiohttp
google.auth.transport._aiohttp_requests
)
@@ -39,6 +40,9 @@ PY_SRCS(
google/auth/_service_account_info.py
google/auth/aio/__init__.py
google/auth/aio/credentials.py
+ google/auth/aio/transport/__init__.py
+ google/auth/aio/transport/aiohttp.py
+ google/auth/aio/transport/sessions.py
google/auth/api_key.py
google/auth/app_engine.py
google/auth/aws.py
diff --git a/contrib/python/idna/py3/.dist-info/METADATA b/contrib/python/idna/py3/.dist-info/METADATA
index f7a5e62e40..c42623e942 100644
--- a/contrib/python/idna/py3/.dist-info/METADATA
+++ b/contrib/python/idna/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: idna
-Version: 3.9
+Version: 3.10
Summary: Internationalized Domain Names in Applications (IDNA)
Author-email: Kim Davies <kim+pypi@gumleaf.org>
Requires-Python: >=3.6
diff --git a/contrib/python/idna/py3/idna/core.py b/contrib/python/idna/py3/idna/core.py
index d303b3835b..9115f123f0 100644
--- a/contrib/python/idna/py3/idna/core.py
+++ b/contrib/python/idna/py3/idna/core.py
@@ -9,45 +9,6 @@ from .intranges import intranges_contain
_virama_combining_class = 9
_alabel_prefix = b"xn--"
_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]")
-_ldh = (
- 48,
- 49,
- 50,
- 51,
- 52,
- 53,
- 54,
- 55,
- 56,
- 57,
- 95,
- 97,
- 98,
- 99,
- 100,
- 101,
- 102,
- 103,
- 104,
- 105,
- 106,
- 107,
- 108,
- 109,
- 110,
- 111,
- 112,
- 113,
- 114,
- 115,
- 116,
- 117,
- 118,
- 119,
- 120,
- 121,
- 122,
-)
class IDNAError(UnicodeError):
@@ -380,16 +341,17 @@ def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False
uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
status = uts46row[1]
replacement: Optional[str] = None
- if std3_rules and code_point <= 0x7F:
- if code_point not in _ldh:
- raise InvalidCodepoint(
- "Codepoint {} at position {} does not follow STD3 rules".format(_unot(code_point), pos + 1)
- )
if len(uts46row) == 3:
replacement = uts46row[2]
- if status == "V" or (status == "D" and not transitional):
+ if (
+ status == "V"
+ or (status == "D" and not transitional)
+ or (status == "3" and not std3_rules and replacement is None)
+ ):
output += char
- elif replacement is not None and (status == "M" or (status == "D" and transitional)):
+ elif replacement is not None and (
+ status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional)
+ ):
output += replacement
elif status != "I":
raise IndexError()
diff --git a/contrib/python/idna/py3/idna/idnadata.py b/contrib/python/idna/py3/idna/idnadata.py
index ded47cae0b..4be6004622 100644
--- a/contrib/python/idna/py3/idna/idnadata.py
+++ b/contrib/python/idna/py3/idna/idnadata.py
@@ -1,7 +1,6 @@
# This file is automatically generated by tools/idna-data
-__version__ = "16.0.0"
-
+__version__ = "15.1.0"
scripts = {
"Greek": (
0x37000000374,
@@ -728,7 +727,6 @@ joining_types = {
0x88C: 68,
0x88D: 68,
0x88E: 82,
- 0x897: 84,
0x898: 84,
0x899: 84,
0x89A: 84,
@@ -1875,17 +1873,8 @@ joining_types = {
0x10D25: 84,
0x10D26: 84,
0x10D27: 84,
- 0x10D69: 84,
- 0x10D6A: 84,
- 0x10D6B: 84,
- 0x10D6C: 84,
- 0x10D6D: 84,
0x10EAB: 84,
0x10EAC: 84,
- 0x10EC2: 82,
- 0x10EC3: 68,
- 0x10EC4: 68,
- 0x10EFC: 84,
0x10EFD: 84,
0x10EFE: 84,
0x10EFF: 84,
@@ -2064,17 +2053,6 @@ joining_types = {
0x11372: 84,
0x11373: 84,
0x11374: 84,
- 0x113BB: 84,
- 0x113BC: 84,
- 0x113BD: 84,
- 0x113BE: 84,
- 0x113BF: 84,
- 0x113C0: 84,
- 0x113CE: 84,
- 0x113D0: 84,
- 0x113D2: 84,
- 0x113E1: 84,
- 0x113E2: 84,
0x11438: 84,
0x11439: 84,
0x1143A: 84,
@@ -2130,6 +2108,7 @@ joining_types = {
0x116B5: 84,
0x116B7: 84,
0x1171D: 84,
+ 0x1171E: 84,
0x1171F: 84,
0x11722: 84,
0x11723: 84,
@@ -2286,7 +2265,6 @@ joining_types = {
0x11F3A: 84,
0x11F40: 84,
0x11F42: 84,
- 0x11F5A: 84,
0x13430: 84,
0x13431: 84,
0x13432: 84,
@@ -2319,21 +2297,6 @@ joining_types = {
0x13453: 84,
0x13454: 84,
0x13455: 84,
- 0x1611E: 84,
- 0x1611F: 84,
- 0x16120: 84,
- 0x16121: 84,
- 0x16122: 84,
- 0x16123: 84,
- 0x16124: 84,
- 0x16125: 84,
- 0x16126: 84,
- 0x16127: 84,
- 0x16128: 84,
- 0x16129: 84,
- 0x1612D: 84,
- 0x1612E: 84,
- 0x1612F: 84,
0x16AF0: 84,
0x16AF1: 84,
0x16AF2: 84,
@@ -2642,8 +2605,6 @@ joining_types = {
0x1E4ED: 84,
0x1E4EE: 84,
0x1E4EF: 84,
- 0x1E5EE: 84,
- 0x1E5EF: 84,
0x1E8D0: 84,
0x1E8D1: 84,
0x1E8D2: 84,
@@ -3367,7 +3328,7 @@ codepoint_classes = {
0x8600000086B,
0x87000000888,
0x8890000088F,
- 0x897000008E2,
+ 0x898000008E2,
0x8E300000958,
0x96000000964,
0x96600000970,
@@ -3602,7 +3563,6 @@ codepoint_classes = {
0x1C0000001C38,
0x1C4000001C4A,
0x1C4D00001C7E,
- 0x1C8A00001C8B,
0x1CD000001CD3,
0x1CD400001CFB,
0x1D0000001D2C,
@@ -3966,13 +3926,11 @@ codepoint_classes = {
0xA7C30000A7C4,
0xA7C80000A7C9,
0xA7CA0000A7CB,
- 0xA7CD0000A7CE,
0xA7D10000A7D2,
0xA7D30000A7D4,
0xA7D50000A7D6,
0xA7D70000A7D8,
0xA7D90000A7DA,
- 0xA7DB0000A7DC,
0xA7F60000A7F8,
0xA7FA0000A828,
0xA82C0000A82D,
@@ -4042,7 +4000,6 @@ codepoint_classes = {
0x105A3000105B2,
0x105B3000105BA,
0x105BB000105BD,
- 0x105C0000105F4,
0x1060000010737,
0x1074000010756,
0x1076000010768,
@@ -4080,14 +4037,10 @@ codepoint_classes = {
0x10CC000010CF3,
0x10D0000010D28,
0x10D3000010D3A,
- 0x10D4000010D50,
- 0x10D6900010D6E,
- 0x10D6F00010D86,
0x10E8000010EAA,
0x10EAB00010EAD,
0x10EB000010EB2,
- 0x10EC200010EC5,
- 0x10EFC00010F1D,
+ 0x10EFD00010F1D,
0x10F2700010F28,
0x10F3000010F51,
0x10F7000010F86,
@@ -4133,16 +4086,6 @@ codepoint_classes = {
0x1135D00011364,
0x113660001136D,
0x1137000011375,
- 0x113800001138A,
- 0x1138B0001138C,
- 0x1138E0001138F,
- 0x11390000113B6,
- 0x113B7000113C1,
- 0x113C2000113C3,
- 0x113C5000113C6,
- 0x113C7000113CB,
- 0x113CC000113D4,
- 0x113E1000113E3,
0x114000001144B,
0x114500001145A,
0x1145E00011462,
@@ -4157,7 +4100,6 @@ codepoint_classes = {
0x116500001165A,
0x11680000116B9,
0x116C0000116CA,
- 0x116D0000116E4,
0x117000001171B,
0x1171D0001172C,
0x117300001173A,
@@ -4181,8 +4123,6 @@ codepoint_classes = {
0x11A5000011A9A,
0x11A9D00011A9E,
0x11AB000011AF9,
- 0x11BC000011BE1,
- 0x11BF000011BFA,
0x11C0000011C09,
0x11C0A00011C37,
0x11C3800011C41,
@@ -4207,16 +4147,14 @@ codepoint_classes = {
0x11F0000011F11,
0x11F1200011F3B,
0x11F3E00011F43,
- 0x11F5000011F5B,
+ 0x11F5000011F5A,
0x11FB000011FB1,
0x120000001239A,
0x1248000012544,
0x12F9000012FF1,
0x1300000013430,
0x1344000013456,
- 0x13460000143FB,
0x1440000014647,
- 0x161000001613A,
0x1680000016A39,
0x16A4000016A5F,
0x16A6000016A6A,
@@ -4229,8 +4167,6 @@ codepoint_classes = {
0x16B5000016B5A,
0x16B6300016B78,
0x16B7D00016B90,
- 0x16D4000016D6D,
- 0x16D7000016D7A,
0x16E6000016E80,
0x16F0000016F4B,
0x16F4F00016F88,
@@ -4240,7 +4176,7 @@ codepoint_classes = {
0x16FF000016FF2,
0x17000000187F8,
0x1880000018CD6,
- 0x18CFF00018D09,
+ 0x18D0000018D09,
0x1AFF00001AFF4,
0x1AFF50001AFFC,
0x1AFFD0001AFFF,
@@ -4255,7 +4191,6 @@ codepoint_classes = {
0x1BC800001BC89,
0x1BC900001BC9A,
0x1BC9D0001BC9F,
- 0x1CCF00001CCFA,
0x1CF000001CF2E,
0x1CF300001CF47,
0x1DA000001DA37,
@@ -4279,7 +4214,6 @@ codepoint_classes = {
0x1E2900001E2AF,
0x1E2C00001E2FA,
0x1E4D00001E4FA,
- 0x1E5D00001E5FB,
0x1E7E00001E7E7,
0x1E7E80001E7EC,
0x1E7ED0001E7EF,
diff --git a/contrib/python/idna/py3/idna/package_data.py b/contrib/python/idna/py3/idna/package_data.py
index ddd1e6912e..514ff7e2e6 100644
--- a/contrib/python/idna/py3/idna/package_data.py
+++ b/contrib/python/idna/py3/idna/package_data.py
@@ -1 +1 @@
-__version__ = "3.9"
+__version__ = "3.10"
diff --git a/contrib/python/idna/py3/idna/uts46data.py b/contrib/python/idna/py3/idna/uts46data.py
index 4610b71dad..eb89432741 100644
--- a/contrib/python/idna/py3/idna/uts46data.py
+++ b/contrib/python/idna/py3/idna/uts46data.py
@@ -6,59 +6,59 @@ from typing import List, Tuple, Union
"""IDNA Mapping Table from UTS46."""
-__version__ = "16.0.0"
+__version__ = "15.1.0"
def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
return [
- (0x0, "V"),
- (0x1, "V"),
- (0x2, "V"),
- (0x3, "V"),
- (0x4, "V"),
- (0x5, "V"),
- (0x6, "V"),
- (0x7, "V"),
- (0x8, "V"),
- (0x9, "V"),
- (0xA, "V"),
- (0xB, "V"),
- (0xC, "V"),
- (0xD, "V"),
- (0xE, "V"),
- (0xF, "V"),
- (0x10, "V"),
- (0x11, "V"),
- (0x12, "V"),
- (0x13, "V"),
- (0x14, "V"),
- (0x15, "V"),
- (0x16, "V"),
- (0x17, "V"),
- (0x18, "V"),
- (0x19, "V"),
- (0x1A, "V"),
- (0x1B, "V"),
- (0x1C, "V"),
- (0x1D, "V"),
- (0x1E, "V"),
- (0x1F, "V"),
- (0x20, "V"),
- (0x21, "V"),
- (0x22, "V"),
- (0x23, "V"),
- (0x24, "V"),
- (0x25, "V"),
- (0x26, "V"),
- (0x27, "V"),
- (0x28, "V"),
- (0x29, "V"),
- (0x2A, "V"),
- (0x2B, "V"),
- (0x2C, "V"),
+ (0x0, "3"),
+ (0x1, "3"),
+ (0x2, "3"),
+ (0x3, "3"),
+ (0x4, "3"),
+ (0x5, "3"),
+ (0x6, "3"),
+ (0x7, "3"),
+ (0x8, "3"),
+ (0x9, "3"),
+ (0xA, "3"),
+ (0xB, "3"),
+ (0xC, "3"),
+ (0xD, "3"),
+ (0xE, "3"),
+ (0xF, "3"),
+ (0x10, "3"),
+ (0x11, "3"),
+ (0x12, "3"),
+ (0x13, "3"),
+ (0x14, "3"),
+ (0x15, "3"),
+ (0x16, "3"),
+ (0x17, "3"),
+ (0x18, "3"),
+ (0x19, "3"),
+ (0x1A, "3"),
+ (0x1B, "3"),
+ (0x1C, "3"),
+ (0x1D, "3"),
+ (0x1E, "3"),
+ (0x1F, "3"),
+ (0x20, "3"),
+ (0x21, "3"),
+ (0x22, "3"),
+ (0x23, "3"),
+ (0x24, "3"),
+ (0x25, "3"),
+ (0x26, "3"),
+ (0x27, "3"),
+ (0x28, "3"),
+ (0x29, "3"),
+ (0x2A, "3"),
+ (0x2B, "3"),
+ (0x2C, "3"),
(0x2D, "V"),
(0x2E, "V"),
- (0x2F, "V"),
+ (0x2F, "3"),
(0x30, "V"),
(0x31, "V"),
(0x32, "V"),
@@ -69,13 +69,13 @@ def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x37, "V"),
(0x38, "V"),
(0x39, "V"),
- (0x3A, "V"),
- (0x3B, "V"),
- (0x3C, "V"),
- (0x3D, "V"),
- (0x3E, "V"),
- (0x3F, "V"),
- (0x40, "V"),
+ (0x3A, "3"),
+ (0x3B, "3"),
+ (0x3C, "3"),
+ (0x3D, "3"),
+ (0x3E, "3"),
+ (0x3F, "3"),
+ (0x40, "3"),
(0x41, "M", "a"),
(0x42, "M", "b"),
(0x43, "M", "c"),
@@ -102,12 +102,12 @@ def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x58, "M", "x"),
(0x59, "M", "y"),
(0x5A, "M", "z"),
- (0x5B, "V"),
- (0x5C, "V"),
- (0x5D, "V"),
- (0x5E, "V"),
- (0x5F, "V"),
- (0x60, "V"),
+ (0x5B, "3"),
+ (0x5C, "3"),
+ (0x5D, "3"),
+ (0x5E, "3"),
+ (0x5F, "3"),
+ (0x60, "3"),
(0x61, "V"),
(0x62, "V"),
(0x63, "V"),
@@ -139,11 +139,11 @@ def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x78, "V"),
(0x79, "V"),
(0x7A, "V"),
- (0x7B, "V"),
- (0x7C, "V"),
- (0x7D, "V"),
- (0x7E, "V"),
- (0x7F, "V"),
+ (0x7B, "3"),
+ (0x7C, "3"),
+ (0x7D, "3"),
+ (0x7E, "3"),
+ (0x7F, "3"),
(0x80, "X"),
(0x81, "X"),
(0x82, "X"),
@@ -176,7 +176,7 @@ def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x9D, "X"),
(0x9E, "X"),
(0x9F, "X"),
- (0xA0, "M", " "),
+ (0xA0, "3", " "),
(0xA1, "V"),
(0xA2, "V"),
(0xA3, "V"),
@@ -184,23 +184,23 @@ def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xA5, "V"),
(0xA6, "V"),
(0xA7, "V"),
- (0xA8, "M", " ̈"),
+ (0xA8, "3", " ̈"),
(0xA9, "V"),
(0xAA, "M", "a"),
(0xAB, "V"),
(0xAC, "V"),
(0xAD, "I"),
(0xAE, "V"),
- (0xAF, "M", " ̄"),
+ (0xAF, "3", " ̄"),
(0xB0, "V"),
(0xB1, "V"),
(0xB2, "M", "2"),
(0xB3, "M", "3"),
- (0xB4, "M", " ́"),
+ (0xB4, "3", " ́"),
(0xB5, "M", "μ"),
(0xB6, "V"),
(0xB7, "V"),
- (0xB8, "M", " ̧"),
+ (0xB8, "3", " ̧"),
(0xB9, "M", "1"),
(0xBA, "M", "o"),
(0xBB, "V"),
@@ -606,12 +606,12 @@ def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2B7, "M", "w"),
(0x2B8, "M", "y"),
(0x2B9, "V"),
- (0x2D8, "M", " ̆"),
- (0x2D9, "M", " ̇"),
- (0x2DA, "M", " ̊"),
- (0x2DB, "M", " ̨"),
- (0x2DC, "M", " ̃"),
- (0x2DD, "M", " ̋"),
+ (0x2D8, "3", " ̆"),
+ (0x2D9, "3", " ̇"),
+ (0x2DA, "3", " ̊"),
+ (0x2DB, "3", " ̨"),
+ (0x2DC, "3", " ̃"),
+ (0x2DD, "3", " ̋"),
(0x2DE, "V"),
(0x2E0, "M", "ɣ"),
(0x2E1, "M", "l"),
@@ -642,13 +642,13 @@ def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
return [
(0x378, "X"),
- (0x37A, "M", " ι"),
+ (0x37A, "3", " ι"),
(0x37B, "V"),
- (0x37E, "M", ";"),
+ (0x37E, "3", ";"),
(0x37F, "M", "ϳ"),
(0x380, "X"),
- (0x384, "M", " ́"),
- (0x385, "M", " ̈́"),
+ (0x384, "3", " ́"),
+ (0x385, "3", " ̈́"),
(0x386, "M", "ά"),
(0x387, "M", "·"),
(0x388, "M", "έ"),
@@ -885,7 +885,7 @@ def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x4BD, "V"),
(0x4BE, "M", "ҿ"),
(0x4BF, "V"),
- (0x4C0, "M", "ӏ"),
+ (0x4C0, "X"),
(0x4C1, "M", "ӂ"),
(0x4C2, "V"),
(0x4C3, "M", "ӄ"),
@@ -1087,7 +1087,7 @@ def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x86B, "X"),
(0x870, "V"),
(0x88F, "X"),
- (0x897, "V"),
+ (0x898, "V"),
(0x8E2, "X"),
(0x8E3, "V"),
(0x958, "M", "क़"),
@@ -1438,50 +1438,7 @@ def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFCE, "V"),
(0xFDB, "X"),
(0x1000, "V"),
- (0x10A0, "M", "ⴀ"),
- (0x10A1, "M", "ⴁ"),
- (0x10A2, "M", "ⴂ"),
- (0x10A3, "M", "ⴃ"),
- (0x10A4, "M", "ⴄ"),
- (0x10A5, "M", "ⴅ"),
- (0x10A6, "M", "ⴆ"),
- (0x10A7, "M", "ⴇ"),
- (0x10A8, "M", "ⴈ"),
- (0x10A9, "M", "ⴉ"),
- (0x10AA, "M", "ⴊ"),
- (0x10AB, "M", "ⴋ"),
- (0x10AC, "M", "ⴌ"),
- (0x10AD, "M", "ⴍ"),
- (0x10AE, "M", "ⴎ"),
- (0x10AF, "M", "ⴏ"),
- (0x10B0, "M", "ⴐ"),
- (0x10B1, "M", "ⴑ"),
- (0x10B2, "M", "ⴒ"),
- (0x10B3, "M", "ⴓ"),
- (0x10B4, "M", "ⴔ"),
- (0x10B5, "M", "ⴕ"),
- (0x10B6, "M", "ⴖ"),
- (0x10B7, "M", "ⴗ"),
- (0x10B8, "M", "ⴘ"),
- (0x10B9, "M", "ⴙ"),
- (0x10BA, "M", "ⴚ"),
- (0x10BB, "M", "ⴛ"),
- (0x10BC, "M", "ⴜ"),
- (0x10BD, "M", "ⴝ"),
- (0x10BE, "M", "ⴞ"),
- (0x10BF, "M", "ⴟ"),
- (0x10C0, "M", "ⴠ"),
- (0x10C1, "M", "ⴡ"),
- (0x10C2, "M", "ⴢ"),
- (0x10C3, "M", "ⴣ"),
- (0x10C4, "M", "ⴤ"),
- (0x10C5, "M", "ⴥ"),
- ]
-
-
-def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
- (0x10C6, "X"),
+ (0x10A0, "X"),
(0x10C7, "M", "ⴧ"),
(0x10C8, "X"),
(0x10CD, "M", "ⴭ"),
@@ -1489,7 +1446,7 @@ def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x10D0, "V"),
(0x10FC, "M", "ნ"),
(0x10FD, "V"),
- (0x115F, "I"),
+ (0x115F, "X"),
(0x1161, "V"),
(0x1249, "X"),
(0x124A, "V"),
@@ -1519,6 +1476,11 @@ def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x12D8, "V"),
(0x1311, "X"),
(0x1312, "V"),
+ ]
+
+
+def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1316, "X"),
(0x1318, "V"),
(0x135B, "X"),
@@ -1554,7 +1516,7 @@ def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1772, "V"),
(0x1774, "X"),
(0x1780, "V"),
- (0x17B4, "I"),
+ (0x17B4, "X"),
(0x17B6, "V"),
(0x17DE, "X"),
(0x17E0, "V"),
@@ -1562,7 +1524,11 @@ def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x17F0, "V"),
(0x17FA, "X"),
(0x1800, "V"),
+ (0x1806, "X"),
+ (0x1807, "V"),
(0x180B, "I"),
+ (0x180E, "X"),
+ (0x180F, "I"),
(0x1810, "V"),
(0x181A, "X"),
(0x1820, "V"),
@@ -1581,11 +1547,6 @@ def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1941, "X"),
(0x1944, "V"),
(0x196E, "X"),
- ]
-
-
-def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1970, "V"),
(0x1975, "X"),
(0x1980, "V"),
@@ -1610,7 +1571,9 @@ def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1ACF, "X"),
(0x1B00, "V"),
(0x1B4D, "X"),
- (0x1B4E, "V"),
+ (0x1B50, "V"),
+ (0x1B7F, "X"),
+ (0x1B80, "V"),
(0x1BF4, "X"),
(0x1BFC, "V"),
(0x1C38, "X"),
@@ -1618,6 +1581,11 @@ def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1C4A, "X"),
(0x1C4D, "V"),
(0x1C80, "M", "в"),
+ ]
+
+
+def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1C81, "M", "д"),
(0x1C82, "M", "о"),
(0x1C83, "M", "с"),
@@ -1625,9 +1593,7 @@ def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1C86, "M", "ъ"),
(0x1C87, "M", "ѣ"),
(0x1C88, "M", "ꙋ"),
- (0x1C89, "M", "ᲊ"),
- (0x1C8A, "V"),
- (0x1C8B, "X"),
+ (0x1C89, "X"),
(0x1C90, "M", "ა"),
(0x1C91, "M", "ბ"),
(0x1C92, "M", "გ"),
@@ -1686,11 +1652,6 @@ def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D2F, "V"),
(0x1D30, "M", "d"),
(0x1D31, "M", "e"),
- ]
-
-
-def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D32, "M", "ǝ"),
(0x1D33, "M", "g"),
(0x1D34, "M", "h"),
@@ -1725,6 +1686,11 @@ def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D51, "M", "ŋ"),
(0x1D52, "M", "o"),
(0x1D53, "M", "ɔ"),
+ ]
+
+
+def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D54, "M", "ᴖ"),
(0x1D55, "M", "ᴗ"),
(0x1D56, "M", "p"),
@@ -1791,11 +1757,6 @@ def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1DC0, "V"),
(0x1E00, "M", "ḁ"),
(0x1E01, "V"),
- ]
-
-
-def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1E02, "M", "ḃ"),
(0x1E03, "V"),
(0x1E04, "M", "ḅ"),
@@ -1830,6 +1791,11 @@ def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1E21, "V"),
(0x1E22, "M", "ḣ"),
(0x1E23, "V"),
+ ]
+
+
+def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1E24, "M", "ḥ"),
(0x1E25, "V"),
(0x1E26, "M", "ḧ"),
@@ -1896,11 +1862,6 @@ def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1E63, "V"),
(0x1E64, "M", "ṥ"),
(0x1E65, "V"),
- ]
-
-
-def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1E66, "M", "ṧ"),
(0x1E67, "V"),
(0x1E68, "M", "ṩ"),
@@ -1935,6 +1896,11 @@ def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1E85, "V"),
(0x1E86, "M", "ẇ"),
(0x1E87, "V"),
+ ]
+
+
+def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1E88, "M", "ẉ"),
(0x1E89, "V"),
(0x1E8A, "M", "ẋ"),
@@ -2001,11 +1967,6 @@ def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1ECC, "M", "ọ"),
(0x1ECD, "V"),
(0x1ECE, "M", "ỏ"),
- ]
-
-
-def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1ECF, "V"),
(0x1ED0, "M", "ố"),
(0x1ED1, "V"),
@@ -2040,6 +2001,11 @@ def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1EEE, "M", "ữ"),
(0x1EEF, "V"),
(0x1EF0, "M", "ự"),
+ ]
+
+
+def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1EF1, "V"),
(0x1EF2, "M", "ỳ"),
(0x1EF3, "V"),
@@ -2106,11 +2072,6 @@ def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1F5B, "M", "ὓ"),
(0x1F5C, "X"),
(0x1F5D, "M", "ὕ"),
- ]
-
-
-def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1F5E, "X"),
(0x1F5F, "M", "ὗ"),
(0x1F60, "V"),
@@ -2145,6 +2106,11 @@ def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1F85, "M", "ἅι"),
(0x1F86, "M", "ἆι"),
(0x1F87, "M", "ἇι"),
+ ]
+
+
+def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1F88, "M", "ἀι"),
(0x1F89, "M", "ἁι"),
(0x1F8A, "M", "ἂι"),
@@ -2197,11 +2163,11 @@ def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1FBA, "M", "ὰ"),
(0x1FBB, "M", "ά"),
(0x1FBC, "M", "αι"),
- (0x1FBD, "M", " ̓"),
+ (0x1FBD, "3", " ̓"),
(0x1FBE, "M", "ι"),
- (0x1FBF, "M", " ̓"),
- (0x1FC0, "M", " ͂"),
- (0x1FC1, "M", " ̈͂"),
+ (0x1FBF, "3", " ̓"),
+ (0x1FC0, "3", " ͂"),
+ (0x1FC1, "3", " ̈͂"),
(0x1FC2, "M", "ὴι"),
(0x1FC3, "M", "ηι"),
(0x1FC4, "M", "ήι"),
@@ -2211,16 +2177,11 @@ def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1FC8, "M", "ὲ"),
(0x1FC9, "M", "έ"),
(0x1FCA, "M", "ὴ"),
- ]
-
-
-def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1FCB, "M", "ή"),
(0x1FCC, "M", "ηι"),
- (0x1FCD, "M", " ̓̀"),
- (0x1FCE, "M", " ̓́"),
- (0x1FCF, "M", " ̓͂"),
+ (0x1FCD, "3", " ̓̀"),
+ (0x1FCE, "3", " ̓́"),
+ (0x1FCF, "3", " ̓͂"),
(0x1FD0, "V"),
(0x1FD3, "M", "ΐ"),
(0x1FD4, "X"),
@@ -2230,9 +2191,9 @@ def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1FDA, "M", "ὶ"),
(0x1FDB, "M", "ί"),
(0x1FDC, "X"),
- (0x1FDD, "M", " ̔̀"),
- (0x1FDE, "M", " ̔́"),
- (0x1FDF, "M", " ̔͂"),
+ (0x1FDD, "3", " ̔̀"),
+ (0x1FDE, "3", " ̔́"),
+ (0x1FDF, "3", " ̔͂"),
(0x1FE0, "V"),
(0x1FE3, "M", "ΰ"),
(0x1FE4, "V"),
@@ -2241,37 +2202,42 @@ def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1FEA, "M", "ὺ"),
(0x1FEB, "M", "ύ"),
(0x1FEC, "M", "ῥ"),
- (0x1FED, "M", " ̈̀"),
- (0x1FEE, "M", " ̈́"),
- (0x1FEF, "M", "`"),
+ (0x1FED, "3", " ̈̀"),
+ (0x1FEE, "3", " ̈́"),
+ (0x1FEF, "3", "`"),
(0x1FF0, "X"),
(0x1FF2, "M", "ὼι"),
(0x1FF3, "M", "ωι"),
(0x1FF4, "M", "ώι"),
(0x1FF5, "X"),
(0x1FF6, "V"),
+ ]
+
+
+def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1FF7, "M", "ῶι"),
(0x1FF8, "M", "ὸ"),
(0x1FF9, "M", "ό"),
(0x1FFA, "M", "ὼ"),
(0x1FFB, "M", "ώ"),
(0x1FFC, "M", "ωι"),
- (0x1FFD, "M", " ́"),
- (0x1FFE, "M", " ̔"),
+ (0x1FFD, "3", " ́"),
+ (0x1FFE, "3", " ̔"),
(0x1FFF, "X"),
- (0x2000, "M", " "),
+ (0x2000, "3", " "),
(0x200B, "I"),
(0x200C, "D", ""),
(0x200E, "X"),
(0x2010, "V"),
(0x2011, "M", "‐"),
(0x2012, "V"),
- (0x2017, "M", " ̳"),
+ (0x2017, "3", " ̳"),
(0x2018, "V"),
(0x2024, "X"),
(0x2027, "V"),
(0x2028, "X"),
- (0x202F, "M", " "),
+ (0x202F, "3", " "),
(0x2030, "V"),
(0x2033, "M", "′′"),
(0x2034, "M", "′′′"),
@@ -2279,20 +2245,21 @@ def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2036, "M", "‵‵"),
(0x2037, "M", "‵‵‵"),
(0x2038, "V"),
- (0x203C, "M", "!!"),
+ (0x203C, "3", "!!"),
(0x203D, "V"),
- (0x203E, "M", " ̅"),
+ (0x203E, "3", " ̅"),
(0x203F, "V"),
- (0x2047, "M", "??"),
- (0x2048, "M", "?!"),
- (0x2049, "M", "!?"),
+ (0x2047, "3", "??"),
+ (0x2048, "3", "?!"),
+ (0x2049, "3", "!?"),
(0x204A, "V"),
(0x2057, "M", "′′′′"),
(0x2058, "V"),
- (0x205F, "M", " "),
+ (0x205F, "3", " "),
(0x2060, "I"),
+ (0x2061, "X"),
+ (0x2064, "I"),
(0x2065, "X"),
- (0x206A, "I"),
(0x2070, "M", "0"),
(0x2071, "M", "i"),
(0x2072, "X"),
@@ -2302,11 +2269,11 @@ def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2077, "M", "7"),
(0x2078, "M", "8"),
(0x2079, "M", "9"),
- (0x207A, "M", "+"),
+ (0x207A, "3", "+"),
(0x207B, "M", "−"),
- (0x207C, "M", "="),
- (0x207D, "M", "("),
- (0x207E, "M", ")"),
+ (0x207C, "3", "="),
+ (0x207D, "3", "("),
+ (0x207E, "3", ")"),
(0x207F, "M", "n"),
(0x2080, "M", "0"),
(0x2081, "M", "1"),
@@ -2316,18 +2283,13 @@ def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2085, "M", "5"),
(0x2086, "M", "6"),
(0x2087, "M", "7"),
- ]
-
-
-def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2088, "M", "8"),
(0x2089, "M", "9"),
- (0x208A, "M", "+"),
+ (0x208A, "3", "+"),
(0x208B, "M", "−"),
- (0x208C, "M", "="),
- (0x208D, "M", "("),
- (0x208E, "M", ")"),
+ (0x208C, "3", "="),
+ (0x208D, "3", "("),
+ (0x208E, "3", ")"),
(0x208F, "X"),
(0x2090, "M", "a"),
(0x2091, "M", "e"),
@@ -2349,13 +2311,18 @@ def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x20C1, "X"),
(0x20D0, "V"),
(0x20F1, "X"),
- (0x2100, "M", "a/c"),
- (0x2101, "M", "a/s"),
+ (0x2100, "3", "a/c"),
+ (0x2101, "3", "a/s"),
(0x2102, "M", "c"),
(0x2103, "M", "°c"),
(0x2104, "V"),
- (0x2105, "M", "c/o"),
- (0x2106, "M", "c/u"),
+ ]
+
+
+def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x2105, "3", "c/o"),
+ (0x2106, "3", "c/u"),
(0x2107, "M", "ɛ"),
(0x2108, "V"),
(0x2109, "M", "°f"),
@@ -2389,7 +2356,7 @@ def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x212E, "V"),
(0x212F, "M", "e"),
(0x2131, "M", "f"),
- (0x2132, "M", "ⅎ"),
+ (0x2132, "X"),
(0x2133, "M", "m"),
(0x2134, "M", "o"),
(0x2135, "M", "א"),
@@ -2421,11 +2388,6 @@ def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2159, "M", "1⁄6"),
(0x215A, "M", "5⁄6"),
(0x215B, "M", "1⁄8"),
- ]
-
-
-def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x215C, "M", "3⁄8"),
(0x215D, "M", "5⁄8"),
(0x215E, "M", "7⁄8"),
@@ -2459,11 +2421,16 @@ def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x217A, "M", "xi"),
(0x217B, "M", "xii"),
(0x217C, "M", "l"),
+ ]
+
+
+def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x217D, "M", "c"),
(0x217E, "M", "d"),
(0x217F, "M", "m"),
(0x2180, "V"),
- (0x2183, "M", "ↄ"),
+ (0x2183, "X"),
(0x2184, "V"),
(0x2189, "M", "0⁄3"),
(0x218A, "V"),
@@ -2478,7 +2445,7 @@ def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2329, "M", "〈"),
(0x232A, "M", "〉"),
(0x232B, "V"),
- (0x242A, "X"),
+ (0x2427, "X"),
(0x2440, "V"),
(0x244B, "X"),
(0x2460, "M", "1"),
@@ -2501,58 +2468,53 @@ def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2471, "M", "18"),
(0x2472, "M", "19"),
(0x2473, "M", "20"),
- (0x2474, "M", "(1)"),
- (0x2475, "M", "(2)"),
- (0x2476, "M", "(3)"),
- (0x2477, "M", "(4)"),
- (0x2478, "M", "(5)"),
- (0x2479, "M", "(6)"),
- (0x247A, "M", "(7)"),
- (0x247B, "M", "(8)"),
- (0x247C, "M", "(9)"),
- (0x247D, "M", "(10)"),
- (0x247E, "M", "(11)"),
- (0x247F, "M", "(12)"),
- (0x2480, "M", "(13)"),
- (0x2481, "M", "(14)"),
- (0x2482, "M", "(15)"),
- (0x2483, "M", "(16)"),
- (0x2484, "M", "(17)"),
- (0x2485, "M", "(18)"),
- (0x2486, "M", "(19)"),
- (0x2487, "M", "(20)"),
+ (0x2474, "3", "(1)"),
+ (0x2475, "3", "(2)"),
+ (0x2476, "3", "(3)"),
+ (0x2477, "3", "(4)"),
+ (0x2478, "3", "(5)"),
+ (0x2479, "3", "(6)"),
+ (0x247A, "3", "(7)"),
+ (0x247B, "3", "(8)"),
+ (0x247C, "3", "(9)"),
+ (0x247D, "3", "(10)"),
+ (0x247E, "3", "(11)"),
+ (0x247F, "3", "(12)"),
+ (0x2480, "3", "(13)"),
+ (0x2481, "3", "(14)"),
+ (0x2482, "3", "(15)"),
+ (0x2483, "3", "(16)"),
+ (0x2484, "3", "(17)"),
+ (0x2485, "3", "(18)"),
+ (0x2486, "3", "(19)"),
+ (0x2487, "3", "(20)"),
(0x2488, "X"),
- (0x249C, "M", "(a)"),
- (0x249D, "M", "(b)"),
- (0x249E, "M", "(c)"),
- (0x249F, "M", "(d)"),
- ]
-
-
-def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
- (0x24A0, "M", "(e)"),
- (0x24A1, "M", "(f)"),
- (0x24A2, "M", "(g)"),
- (0x24A3, "M", "(h)"),
- (0x24A4, "M", "(i)"),
- (0x24A5, "M", "(j)"),
- (0x24A6, "M", "(k)"),
- (0x24A7, "M", "(l)"),
- (0x24A8, "M", "(m)"),
- (0x24A9, "M", "(n)"),
- (0x24AA, "M", "(o)"),
- (0x24AB, "M", "(p)"),
- (0x24AC, "M", "(q)"),
- (0x24AD, "M", "(r)"),
- (0x24AE, "M", "(s)"),
- (0x24AF, "M", "(t)"),
- (0x24B0, "M", "(u)"),
- (0x24B1, "M", "(v)"),
- (0x24B2, "M", "(w)"),
- (0x24B3, "M", "(x)"),
- (0x24B4, "M", "(y)"),
- (0x24B5, "M", "(z)"),
+ (0x249C, "3", "(a)"),
+ (0x249D, "3", "(b)"),
+ (0x249E, "3", "(c)"),
+ (0x249F, "3", "(d)"),
+ (0x24A0, "3", "(e)"),
+ (0x24A1, "3", "(f)"),
+ (0x24A2, "3", "(g)"),
+ (0x24A3, "3", "(h)"),
+ (0x24A4, "3", "(i)"),
+ (0x24A5, "3", "(j)"),
+ (0x24A6, "3", "(k)"),
+ (0x24A7, "3", "(l)"),
+ (0x24A8, "3", "(m)"),
+ (0x24A9, "3", "(n)"),
+ (0x24AA, "3", "(o)"),
+ (0x24AB, "3", "(p)"),
+ (0x24AC, "3", "(q)"),
+ (0x24AD, "3", "(r)"),
+ (0x24AE, "3", "(s)"),
+ (0x24AF, "3", "(t)"),
+ (0x24B0, "3", "(u)"),
+ (0x24B1, "3", "(v)"),
+ (0x24B2, "3", "(w)"),
+ (0x24B3, "3", "(x)"),
+ (0x24B4, "3", "(y)"),
+ (0x24B5, "3", "(z)"),
(0x24B6, "M", "a"),
(0x24B7, "M", "b"),
(0x24B8, "M", "c"),
@@ -2564,6 +2526,11 @@ def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x24BE, "M", "i"),
(0x24BF, "M", "j"),
(0x24C0, "M", "k"),
+ ]
+
+
+def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x24C1, "M", "l"),
(0x24C2, "M", "m"),
(0x24C3, "M", "n"),
@@ -2609,9 +2576,9 @@ def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x24EB, "V"),
(0x2A0C, "M", "∫∫∫∫"),
(0x2A0D, "V"),
- (0x2A74, "M", "::="),
- (0x2A75, "M", "=="),
- (0x2A76, "M", "==="),
+ (0x2A74, "3", "::="),
+ (0x2A75, "3", "=="),
+ (0x2A76, "3", "==="),
(0x2A77, "V"),
(0x2ADC, "M", "⫝̸"),
(0x2ADD, "V"),
@@ -2631,11 +2598,6 @@ def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2C09, "M", "ⰹ"),
(0x2C0A, "M", "ⰺ"),
(0x2C0B, "M", "ⰻ"),
- ]
-
-
-def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2C0C, "M", "ⰼ"),
(0x2C0D, "M", "ⰽ"),
(0x2C0E, "M", "ⰾ"),
@@ -2669,6 +2631,11 @@ def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2C2A, "M", "ⱚ"),
(0x2C2B, "M", "ⱛ"),
(0x2C2C, "M", "ⱜ"),
+ ]
+
+
+def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x2C2D, "M", "ⱝ"),
(0x2C2E, "M", "ⱞ"),
(0x2C2F, "M", "ⱟ"),
@@ -2736,11 +2703,6 @@ def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2CA3, "V"),
(0x2CA4, "M", "ⲥ"),
(0x2CA5, "V"),
- ]
-
-
-def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2CA6, "M", "ⲧ"),
(0x2CA7, "V"),
(0x2CA8, "M", "ⲩ"),
@@ -2774,6 +2736,11 @@ def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2CC4, "M", "ⳅ"),
(0x2CC5, "V"),
(0x2CC6, "M", "ⳇ"),
+ ]
+
+
+def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x2CC7, "V"),
(0x2CC8, "M", "ⳉ"),
(0x2CC9, "V"),
@@ -2841,11 +2808,6 @@ def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2DDF, "X"),
(0x2DE0, "V"),
(0x2E5E, "X"),
- ]
-
-
-def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2E80, "V"),
(0x2E9A, "X"),
(0x2E9B, "V"),
@@ -2879,6 +2841,11 @@ def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F17, "M", "十"),
(0x2F18, "M", "卜"),
(0x2F19, "M", "卩"),
+ ]
+
+
+def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x2F1A, "M", "厂"),
(0x2F1B, "M", "厶"),
(0x2F1C, "M", "又"),
@@ -2946,11 +2913,6 @@ def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F5A, "M", "片"),
(0x2F5B, "M", "牙"),
(0x2F5C, "M", "牛"),
- ]
-
-
-def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2F5D, "M", "犬"),
(0x2F5E, "M", "玄"),
(0x2F5F, "M", "玉"),
@@ -2984,6 +2946,11 @@ def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F7B, "M", "羽"),
(0x2F7C, "M", "老"),
(0x2F7D, "M", "而"),
+ ]
+
+
+def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x2F7E, "M", "耒"),
(0x2F7F, "M", "耳"),
(0x2F80, "M", "聿"),
@@ -3051,11 +3018,6 @@ def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2FBE, "M", "鬥"),
(0x2FBF, "M", "鬯"),
(0x2FC0, "M", "鬲"),
- ]
-
-
-def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2FC1, "M", "鬼"),
(0x2FC2, "M", "魚"),
(0x2FC3, "M", "鳥"),
@@ -3078,7 +3040,7 @@ def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2FD4, "M", "龜"),
(0x2FD5, "M", "龠"),
(0x2FD6, "X"),
- (0x3000, "M", " "),
+ (0x3000, "3", " "),
(0x3001, "V"),
(0x3002, "M", "."),
(0x3003, "V"),
@@ -3089,11 +3051,16 @@ def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x303A, "M", "卅"),
(0x303B, "V"),
(0x3040, "X"),
+ ]
+
+
+def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x3041, "V"),
(0x3097, "X"),
(0x3099, "V"),
- (0x309B, "M", " ゙"),
- (0x309C, "M", " ゚"),
+ (0x309B, "3", " ゙"),
+ (0x309C, "3", " ゚"),
(0x309D, "V"),
(0x309F, "M", "より"),
(0x30A0, "V"),
@@ -3152,15 +3119,10 @@ def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x3161, "M", "ᅳ"),
(0x3162, "M", "ᅴ"),
(0x3163, "M", "ᅵ"),
- (0x3164, "I"),
+ (0x3164, "X"),
(0x3165, "M", "ᄔ"),
(0x3166, "M", "ᄕ"),
(0x3167, "M", "ᇇ"),
- ]
-
-
-def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x3168, "M", "ᇈ"),
(0x3169, "M", "ᇌ"),
(0x316A, "M", "ᇎ"),
@@ -3194,6 +3156,11 @@ def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x3186, "M", "ᅙ"),
(0x3187, "M", "ᆄ"),
(0x3188, "M", "ᆅ"),
+ ]
+
+
+def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x3189, "M", "ᆈ"),
(0x318A, "M", "ᆑ"),
(0x318B, "M", "ᆒ"),
@@ -3217,81 +3184,76 @@ def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x319E, "M", "地"),
(0x319F, "M", "人"),
(0x31A0, "V"),
- (0x31E6, "X"),
+ (0x31E4, "X"),
(0x31F0, "V"),
- (0x3200, "M", "(ᄀ)"),
- (0x3201, "M", "(ᄂ)"),
- (0x3202, "M", "(ᄃ)"),
- (0x3203, "M", "(ᄅ)"),
- (0x3204, "M", "(ᄆ)"),
- (0x3205, "M", "(ᄇ)"),
- (0x3206, "M", "(ᄉ)"),
- (0x3207, "M", "(ᄋ)"),
- (0x3208, "M", "(ᄌ)"),
- (0x3209, "M", "(ᄎ)"),
- (0x320A, "M", "(ᄏ)"),
- (0x320B, "M", "(ᄐ)"),
- (0x320C, "M", "(ᄑ)"),
- (0x320D, "M", "(ᄒ)"),
- (0x320E, "M", "(가)"),
- (0x320F, "M", "(나)"),
- (0x3210, "M", "(다)"),
- (0x3211, "M", "(라)"),
- (0x3212, "M", "(마)"),
- (0x3213, "M", "(바)"),
- (0x3214, "M", "(사)"),
- (0x3215, "M", "(아)"),
- (0x3216, "M", "(자)"),
- (0x3217, "M", "(차)"),
- (0x3218, "M", "(카)"),
- (0x3219, "M", "(타)"),
- (0x321A, "M", "(파)"),
- (0x321B, "M", "(하)"),
- (0x321C, "M", "(주)"),
- (0x321D, "M", "(오전)"),
- (0x321E, "M", "(오후)"),
+ (0x3200, "3", "(ᄀ)"),
+ (0x3201, "3", "(ᄂ)"),
+ (0x3202, "3", "(ᄃ)"),
+ (0x3203, "3", "(ᄅ)"),
+ (0x3204, "3", "(ᄆ)"),
+ (0x3205, "3", "(ᄇ)"),
+ (0x3206, "3", "(ᄉ)"),
+ (0x3207, "3", "(ᄋ)"),
+ (0x3208, "3", "(ᄌ)"),
+ (0x3209, "3", "(ᄎ)"),
+ (0x320A, "3", "(ᄏ)"),
+ (0x320B, "3", "(ᄐ)"),
+ (0x320C, "3", "(ᄑ)"),
+ (0x320D, "3", "(ᄒ)"),
+ (0x320E, "3", "(가)"),
+ (0x320F, "3", "(나)"),
+ (0x3210, "3", "(다)"),
+ (0x3211, "3", "(라)"),
+ (0x3212, "3", "(마)"),
+ (0x3213, "3", "(바)"),
+ (0x3214, "3", "(사)"),
+ (0x3215, "3", "(아)"),
+ (0x3216, "3", "(자)"),
+ (0x3217, "3", "(차)"),
+ (0x3218, "3", "(카)"),
+ (0x3219, "3", "(타)"),
+ (0x321A, "3", "(파)"),
+ (0x321B, "3", "(하)"),
+ (0x321C, "3", "(주)"),
+ (0x321D, "3", "(오전)"),
+ (0x321E, "3", "(오후)"),
(0x321F, "X"),
- (0x3220, "M", "(一)"),
- (0x3221, "M", "(二)"),
- (0x3222, "M", "(三)"),
- (0x3223, "M", "(四)"),
- (0x3224, "M", "(五)"),
- (0x3225, "M", "(六)"),
- (0x3226, "M", "(七)"),
- (0x3227, "M", "(八)"),
- (0x3228, "M", "(九)"),
- (0x3229, "M", "(十)"),
- ]
-
-
-def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
- (0x322A, "M", "(月)"),
- (0x322B, "M", "(火)"),
- (0x322C, "M", "(水)"),
- (0x322D, "M", "(木)"),
- (0x322E, "M", "(金)"),
- (0x322F, "M", "(土)"),
- (0x3230, "M", "(日)"),
- (0x3231, "M", "(株)"),
- (0x3232, "M", "(有)"),
- (0x3233, "M", "(社)"),
- (0x3234, "M", "(名)"),
- (0x3235, "M", "(特)"),
- (0x3236, "M", "(財)"),
- (0x3237, "M", "(祝)"),
- (0x3238, "M", "(労)"),
- (0x3239, "M", "(代)"),
- (0x323A, "M", "(呼)"),
- (0x323B, "M", "(学)"),
- (0x323C, "M", "(監)"),
- (0x323D, "M", "(企)"),
- (0x323E, "M", "(資)"),
- (0x323F, "M", "(協)"),
- (0x3240, "M", "(祭)"),
- (0x3241, "M", "(休)"),
- (0x3242, "M", "(自)"),
- (0x3243, "M", "(至)"),
+ (0x3220, "3", "(一)"),
+ (0x3221, "3", "(二)"),
+ (0x3222, "3", "(三)"),
+ (0x3223, "3", "(四)"),
+ (0x3224, "3", "(五)"),
+ (0x3225, "3", "(六)"),
+ (0x3226, "3", "(七)"),
+ (0x3227, "3", "(八)"),
+ (0x3228, "3", "(九)"),
+ (0x3229, "3", "(十)"),
+ (0x322A, "3", "(月)"),
+ (0x322B, "3", "(火)"),
+ (0x322C, "3", "(水)"),
+ (0x322D, "3", "(木)"),
+ (0x322E, "3", "(金)"),
+ (0x322F, "3", "(土)"),
+ (0x3230, "3", "(日)"),
+ (0x3231, "3", "(株)"),
+ (0x3232, "3", "(有)"),
+ (0x3233, "3", "(社)"),
+ (0x3234, "3", "(名)"),
+ (0x3235, "3", "(特)"),
+ (0x3236, "3", "(財)"),
+ (0x3237, "3", "(祝)"),
+ (0x3238, "3", "(労)"),
+ (0x3239, "3", "(代)"),
+ (0x323A, "3", "(呼)"),
+ (0x323B, "3", "(学)"),
+ (0x323C, "3", "(監)"),
+ (0x323D, "3", "(企)"),
+ (0x323E, "3", "(資)"),
+ (0x323F, "3", "(協)"),
+ (0x3240, "3", "(祭)"),
+ (0x3241, "3", "(休)"),
+ (0x3242, "3", "(自)"),
+ (0x3243, "3", "(至)"),
(0x3244, "M", "問"),
(0x3245, "M", "幼"),
(0x3246, "M", "文"),
@@ -3299,6 +3261,11 @@ def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x3248, "V"),
(0x3250, "M", "pte"),
(0x3251, "M", "21"),
+ ]
+
+
+def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x3252, "M", "22"),
(0x3253, "M", "23"),
(0x3254, "M", "24"),
@@ -3366,11 +3333,6 @@ def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x3292, "M", "有"),
(0x3293, "M", "社"),
(0x3294, "M", "名"),
- ]
-
-
-def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x3295, "M", "特"),
(0x3296, "M", "財"),
(0x3297, "M", "祝"),
@@ -3404,6 +3366,11 @@ def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x32B3, "M", "38"),
(0x32B4, "M", "39"),
(0x32B5, "M", "40"),
+ ]
+
+
+def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x32B6, "M", "41"),
(0x32B7, "M", "42"),
(0x32B8, "M", "43"),
@@ -3471,11 +3438,6 @@ def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x32F6, "M", "ラ"),
(0x32F7, "M", "リ"),
(0x32F8, "M", "ル"),
- ]
-
-
-def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x32F9, "M", "レ"),
(0x32FA, "M", "ロ"),
(0x32FB, "M", "ワ"),
@@ -3509,6 +3471,11 @@ def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x3317, "M", "キロワット"),
(0x3318, "M", "グラム"),
(0x3319, "M", "グラムトン"),
+ ]
+
+
+def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x331A, "M", "クルゼイロ"),
(0x331B, "M", "クローネ"),
(0x331C, "M", "ケース"),
@@ -3576,11 +3543,6 @@ def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x335A, "M", "2点"),
(0x335B, "M", "3点"),
(0x335C, "M", "4点"),
- ]
-
-
-def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x335D, "M", "5点"),
(0x335E, "M", "6点"),
(0x335F, "M", "7点"),
@@ -3614,6 +3576,11 @@ def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x337B, "M", "平成"),
(0x337C, "M", "昭和"),
(0x337D, "M", "大正"),
+ ]
+
+
+def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x337E, "M", "明治"),
(0x337F, "M", "株式会社"),
(0x3380, "M", "pa"),
@@ -3681,11 +3648,6 @@ def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x33BE, "M", "kw"),
(0x33BF, "M", "mw"),
(0x33C0, "M", "kω"),
- ]
-
-
-def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x33C1, "M", "mω"),
(0x33C2, "X"),
(0x33C3, "M", "bq"),
@@ -3719,6 +3681,11 @@ def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x33DF, "M", "a∕m"),
(0x33E0, "M", "1日"),
(0x33E1, "M", "2日"),
+ ]
+
+
+def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x33E2, "M", "3日"),
(0x33E3, "M", "4日"),
(0x33E4, "M", "5日"),
@@ -3786,11 +3753,6 @@ def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xA65C, "M", "ꙝ"),
(0xA65D, "V"),
(0xA65E, "M", "ꙟ"),
- ]
-
-
-def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xA65F, "V"),
(0xA660, "M", "ꙡ"),
(0xA661, "V"),
@@ -3824,6 +3786,11 @@ def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xA68F, "V"),
(0xA690, "M", "ꚑ"),
(0xA691, "V"),
+ ]
+
+
+def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xA692, "M", "ꚓ"),
(0xA693, "V"),
(0xA694, "M", "ꚕ"),
@@ -3891,11 +3858,6 @@ def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xA755, "V"),
(0xA756, "M", "ꝗ"),
(0xA757, "V"),
- ]
-
-
-def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xA758, "M", "ꝙ"),
(0xA759, "V"),
(0xA75A, "M", "ꝛ"),
@@ -3929,6 +3891,11 @@ def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xA77D, "M", "ᵹ"),
(0xA77E, "M", "ꝿ"),
(0xA77F, "V"),
+ ]
+
+
+def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xA780, "M", "ꞁ"),
(0xA781, "V"),
(0xA782, "M", "ꞃ"),
@@ -3996,17 +3963,9 @@ def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xA7C6, "M", "ᶎ"),
(0xA7C7, "M", "ꟈ"),
(0xA7C8, "V"),
- ]
-
-
-def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xA7C9, "M", "ꟊ"),
(0xA7CA, "V"),
- (0xA7CB, "M", "ɤ"),
- (0xA7CC, "M", "ꟍ"),
- (0xA7CD, "V"),
- (0xA7CE, "X"),
+ (0xA7CB, "X"),
(0xA7D0, "M", "ꟑ"),
(0xA7D1, "V"),
(0xA7D2, "X"),
@@ -4017,10 +3976,7 @@ def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xA7D7, "V"),
(0xA7D8, "M", "ꟙ"),
(0xA7D9, "V"),
- (0xA7DA, "M", "ꟛ"),
- (0xA7DB, "V"),
- (0xA7DC, "M", "ƛ"),
- (0xA7DD, "X"),
+ (0xA7DA, "X"),
(0xA7F2, "M", "c"),
(0xA7F3, "M", "f"),
(0xA7F4, "M", "q"),
@@ -4040,6 +3996,11 @@ def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xA8DA, "X"),
(0xA8E0, "V"),
(0xA954, "X"),
+ ]
+
+
+def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xA95F, "V"),
(0xA97D, "X"),
(0xA980, "V"),
@@ -4101,11 +4062,6 @@ def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xAB85, "M", "Ꮅ"),
(0xAB86, "M", "Ꮆ"),
(0xAB87, "M", "Ꮇ"),
- ]
-
-
-def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xAB88, "M", "Ꮈ"),
(0xAB89, "M", "Ꮉ"),
(0xAB8A, "M", "Ꮊ"),
@@ -4145,6 +4101,11 @@ def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xABAC, "M", "Ꮬ"),
(0xABAD, "M", "Ꮭ"),
(0xABAE, "M", "Ꮮ"),
+ ]
+
+
+def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xABAF, "M", "Ꮯ"),
(0xABB0, "M", "Ꮰ"),
(0xABB1, "M", "Ꮱ"),
@@ -4206,11 +4167,6 @@ def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xF920, "M", "鸞"),
(0xF921, "M", "嵐"),
(0xF922, "M", "濫"),
- ]
-
-
-def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xF923, "M", "藍"),
(0xF924, "M", "襤"),
(0xF925, "M", "拉"),
@@ -4250,6 +4206,11 @@ def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xF947, "M", "磊"),
(0xF948, "M", "賂"),
(0xF949, "M", "雷"),
+ ]
+
+
+def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xF94A, "M", "壘"),
(0xF94B, "M", "屢"),
(0xF94C, "M", "樓"),
@@ -4311,11 +4272,6 @@ def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xF984, "M", "濾"),
(0xF985, "M", "礪"),
(0xF986, "M", "閭"),
- ]
-
-
-def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xF987, "M", "驪"),
(0xF988, "M", "麗"),
(0xF989, "M", "黎"),
@@ -4355,6 +4311,11 @@ def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xF9AB, "M", "嶺"),
(0xF9AC, "M", "怜"),
(0xF9AD, "M", "玲"),
+ ]
+
+
+def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xF9AE, "M", "瑩"),
(0xF9AF, "M", "羚"),
(0xF9B0, "M", "聆"),
@@ -4416,11 +4377,6 @@ def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xF9E8, "M", "裡"),
(0xF9E9, "M", "里"),
(0xF9EA, "M", "離"),
- ]
-
-
-def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xF9EB, "M", "匿"),
(0xF9EC, "M", "溺"),
(0xF9ED, "M", "吝"),
@@ -4460,6 +4416,11 @@ def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFA10, "M", "塚"),
(0xFA11, "V"),
(0xFA12, "M", "晴"),
+ ]
+
+
+def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xFA13, "V"),
(0xFA15, "M", "凞"),
(0xFA16, "M", "猪"),
@@ -4521,11 +4482,6 @@ def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFA51, "M", "祝"),
(0xFA52, "M", "禍"),
(0xFA53, "M", "禎"),
- ]
-
-
-def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xFA54, "M", "穀"),
(0xFA55, "M", "突"),
(0xFA56, "M", "節"),
@@ -4565,6 +4521,11 @@ def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFA7A, "M", "喙"),
(0xFA7B, "M", "嗢"),
(0xFA7C, "M", "塚"),
+ ]
+
+
+def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xFA7D, "M", "墳"),
(0xFA7E, "M", "奄"),
(0xFA7F, "M", "奔"),
@@ -4626,11 +4587,6 @@ def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFAB7, "M", "覆"),
(0xFAB8, "M", "視"),
(0xFAB9, "M", "調"),
- ]
-
-
-def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xFABA, "M", "諸"),
(0xFABB, "M", "請"),
(0xFABC, "M", "謁"),
@@ -4670,6 +4626,11 @@ def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFB03, "M", "ffi"),
(0xFB04, "M", "ffl"),
(0xFB05, "M", "st"),
+ ]
+
+
+def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xFB07, "X"),
(0xFB13, "M", "մն"),
(0xFB14, "M", "մե"),
@@ -4689,7 +4650,7 @@ def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFB26, "M", "ם"),
(0xFB27, "M", "ר"),
(0xFB28, "M", "ת"),
- (0xFB29, "M", "+"),
+ (0xFB29, "3", "+"),
(0xFB2A, "M", "שׁ"),
(0xFB2B, "M", "שׂ"),
(0xFB2C, "M", "שּׁ"),
@@ -4731,11 +4692,6 @@ def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFB50, "M", "ٱ"),
(0xFB52, "M", "ٻ"),
(0xFB56, "M", "پ"),
- ]
-
-
-def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xFB5A, "M", "ڀ"),
(0xFB5E, "M", "ٺ"),
(0xFB62, "M", "ٿ"),
@@ -4775,6 +4731,11 @@ def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFBE2, "M", "ۉ"),
(0xFBE4, "M", "ې"),
(0xFBE8, "M", "ى"),
+ ]
+
+
+def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xFBEA, "M", "ئا"),
(0xFBEC, "M", "ئە"),
(0xFBEE, "M", "ئو"),
@@ -4836,11 +4797,6 @@ def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFC31, "M", "فى"),
(0xFC32, "M", "في"),
(0xFC33, "M", "قح"),
- ]
-
-
-def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xFC34, "M", "قم"),
(0xFC35, "M", "قى"),
(0xFC36, "M", "قي"),
@@ -4880,15 +4836,20 @@ def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFC58, "M", "يم"),
(0xFC59, "M", "يى"),
(0xFC5A, "M", "يي"),
+ ]
+
+
+def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xFC5B, "M", "ذٰ"),
(0xFC5C, "M", "رٰ"),
(0xFC5D, "M", "ىٰ"),
- (0xFC5E, "M", " ٌّ"),
- (0xFC5F, "M", " ٍّ"),
- (0xFC60, "M", " َّ"),
- (0xFC61, "M", " ُّ"),
- (0xFC62, "M", " ِّ"),
- (0xFC63, "M", " ّٰ"),
+ (0xFC5E, "3", " ٌّ"),
+ (0xFC5F, "3", " ٍّ"),
+ (0xFC60, "3", " َّ"),
+ (0xFC61, "3", " ُّ"),
+ (0xFC62, "3", " ِّ"),
+ (0xFC63, "3", " ّٰ"),
(0xFC64, "M", "ئر"),
(0xFC65, "M", "ئز"),
(0xFC66, "M", "ئم"),
@@ -4941,11 +4902,6 @@ def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFC95, "M", "يى"),
(0xFC96, "M", "يي"),
(0xFC97, "M", "ئج"),
- ]
-
-
-def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xFC98, "M", "ئح"),
(0xFC99, "M", "ئخ"),
(0xFC9A, "M", "ئم"),
@@ -4985,6 +4941,11 @@ def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFCBC, "M", "غج"),
(0xFCBD, "M", "غم"),
(0xFCBE, "M", "فج"),
+ ]
+
+
+def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xFCBF, "M", "فح"),
(0xFCC0, "M", "فخ"),
(0xFCC1, "M", "فم"),
@@ -5046,11 +5007,6 @@ def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFCF9, "M", "غى"),
(0xFCFA, "M", "غي"),
(0xFCFB, "M", "سى"),
- ]
-
-
-def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xFCFC, "M", "سي"),
(0xFCFD, "M", "شى"),
(0xFCFE, "M", "شي"),
@@ -5090,6 +5046,11 @@ def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFD20, "M", "خي"),
(0xFD21, "M", "صى"),
(0xFD22, "M", "صي"),
+ ]
+
+
+def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xFD23, "M", "ضى"),
(0xFD24, "M", "ضي"),
(0xFD25, "M", "شج"),
@@ -5151,11 +5112,6 @@ def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFD7A, "M", "غمي"),
(0xFD7B, "M", "غمى"),
(0xFD7C, "M", "فخم"),
- ]
-
-
-def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xFD7E, "M", "قمح"),
(0xFD7F, "M", "قمم"),
(0xFD80, "M", "لحم"),
@@ -5195,6 +5151,11 @@ def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFDA8, "M", "سخى"),
(0xFDA9, "M", "صحي"),
(0xFDAA, "M", "شحي"),
+ ]
+
+
+def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xFDAB, "M", "ضحي"),
(0xFDAC, "M", "لجي"),
(0xFDAD, "M", "لمي"),
@@ -5237,18 +5198,18 @@ def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFDF7, "M", "عليه"),
(0xFDF8, "M", "وسلم"),
(0xFDF9, "M", "صلى"),
- (0xFDFA, "M", "صلى الله عليه وسلم"),
- (0xFDFB, "M", "جل جلاله"),
+ (0xFDFA, "3", "صلى الله عليه وسلم"),
+ (0xFDFB, "3", "جل جلاله"),
(0xFDFC, "M", "ریال"),
(0xFDFD, "V"),
(0xFE00, "I"),
- (0xFE10, "M", ","),
+ (0xFE10, "3", ","),
(0xFE11, "M", "、"),
(0xFE12, "X"),
- (0xFE13, "M", ":"),
- (0xFE14, "M", ";"),
- (0xFE15, "M", "!"),
- (0xFE16, "M", "?"),
+ (0xFE13, "3", ":"),
+ (0xFE14, "3", ";"),
+ (0xFE15, "3", "!"),
+ (0xFE16, "3", "?"),
(0xFE17, "M", "〖"),
(0xFE18, "M", "〗"),
(0xFE19, "X"),
@@ -5256,16 +5217,11 @@ def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFE30, "X"),
(0xFE31, "M", "—"),
(0xFE32, "M", "–"),
- ]
-
-
-def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
- (0xFE33, "M", "_"),
- (0xFE35, "M", "("),
- (0xFE36, "M", ")"),
- (0xFE37, "M", "{"),
- (0xFE38, "M", "}"),
+ (0xFE33, "3", "_"),
+ (0xFE35, "3", "("),
+ (0xFE36, "3", ")"),
+ (0xFE37, "3", "{"),
+ (0xFE38, "3", "}"),
(0xFE39, "M", "〔"),
(0xFE3A, "M", "〕"),
(0xFE3B, "M", "【"),
@@ -5279,53 +5235,58 @@ def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFE43, "M", "『"),
(0xFE44, "M", "』"),
(0xFE45, "V"),
- (0xFE47, "M", "["),
- (0xFE48, "M", "]"),
- (0xFE49, "M", " ̅"),
- (0xFE4D, "M", "_"),
- (0xFE50, "M", ","),
+ (0xFE47, "3", "["),
+ (0xFE48, "3", "]"),
+ (0xFE49, "3", " ̅"),
+ (0xFE4D, "3", "_"),
+ (0xFE50, "3", ","),
(0xFE51, "M", "、"),
(0xFE52, "X"),
- (0xFE54, "M", ";"),
- (0xFE55, "M", ":"),
- (0xFE56, "M", "?"),
- (0xFE57, "M", "!"),
+ (0xFE54, "3", ";"),
+ (0xFE55, "3", ":"),
+ (0xFE56, "3", "?"),
+ (0xFE57, "3", "!"),
(0xFE58, "M", "—"),
- (0xFE59, "M", "("),
- (0xFE5A, "M", ")"),
- (0xFE5B, "M", "{"),
- (0xFE5C, "M", "}"),
+ (0xFE59, "3", "("),
+ (0xFE5A, "3", ")"),
+ (0xFE5B, "3", "{"),
+ (0xFE5C, "3", "}"),
(0xFE5D, "M", "〔"),
(0xFE5E, "M", "〕"),
- (0xFE5F, "M", "#"),
- (0xFE60, "M", "&"),
- (0xFE61, "M", "*"),
- (0xFE62, "M", "+"),
+ (0xFE5F, "3", "#"),
+ (0xFE60, "3", "&"),
+ (0xFE61, "3", "*"),
+ ]
+
+
+def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFE62, "3", "+"),
(0xFE63, "M", "-"),
- (0xFE64, "M", "<"),
- (0xFE65, "M", ">"),
- (0xFE66, "M", "="),
+ (0xFE64, "3", "<"),
+ (0xFE65, "3", ">"),
+ (0xFE66, "3", "="),
(0xFE67, "X"),
- (0xFE68, "M", "\\"),
- (0xFE69, "M", "$"),
- (0xFE6A, "M", "%"),
- (0xFE6B, "M", "@"),
+ (0xFE68, "3", "\\"),
+ (0xFE69, "3", "$"),
+ (0xFE6A, "3", "%"),
+ (0xFE6B, "3", "@"),
(0xFE6C, "X"),
- (0xFE70, "M", " ً"),
+ (0xFE70, "3", " ً"),
(0xFE71, "M", "ـً"),
- (0xFE72, "M", " ٌ"),
+ (0xFE72, "3", " ٌ"),
(0xFE73, "V"),
- (0xFE74, "M", " ٍ"),
+ (0xFE74, "3", " ٍ"),
(0xFE75, "X"),
- (0xFE76, "M", " َ"),
+ (0xFE76, "3", " َ"),
(0xFE77, "M", "ـَ"),
- (0xFE78, "M", " ُ"),
+ (0xFE78, "3", " ُ"),
(0xFE79, "M", "ـُ"),
- (0xFE7A, "M", " ِ"),
+ (0xFE7A, "3", " ِ"),
(0xFE7B, "M", "ـِ"),
- (0xFE7C, "M", " ّ"),
+ (0xFE7C, "3", " ّ"),
(0xFE7D, "M", "ـّ"),
- (0xFE7E, "M", " ْ"),
+ (0xFE7E, "3", " ْ"),
(0xFE7F, "M", "ـْ"),
(0xFE80, "M", "ء"),
(0xFE81, "M", "آ"),
@@ -5361,11 +5322,6 @@ def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFEE5, "M", "ن"),
(0xFEE9, "M", "ه"),
(0xFEED, "M", "و"),
- ]
-
-
-def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xFEEF, "M", "ى"),
(0xFEF1, "M", "ي"),
(0xFEF5, "M", "لآ"),
@@ -5375,21 +5331,21 @@ def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFEFD, "X"),
(0xFEFF, "I"),
(0xFF00, "X"),
- (0xFF01, "M", "!"),
- (0xFF02, "M", '"'),
- (0xFF03, "M", "#"),
- (0xFF04, "M", "$"),
- (0xFF05, "M", "%"),
- (0xFF06, "M", "&"),
- (0xFF07, "M", "'"),
- (0xFF08, "M", "("),
- (0xFF09, "M", ")"),
- (0xFF0A, "M", "*"),
- (0xFF0B, "M", "+"),
- (0xFF0C, "M", ","),
+ (0xFF01, "3", "!"),
+ (0xFF02, "3", '"'),
+ (0xFF03, "3", "#"),
+ (0xFF04, "3", "$"),
+ (0xFF05, "3", "%"),
+ (0xFF06, "3", "&"),
+ (0xFF07, "3", "'"),
+ (0xFF08, "3", "("),
+ (0xFF09, "3", ")"),
+ (0xFF0A, "3", "*"),
+ (0xFF0B, "3", "+"),
+ (0xFF0C, "3", ","),
(0xFF0D, "M", "-"),
(0xFF0E, "M", "."),
- (0xFF0F, "M", "/"),
+ (0xFF0F, "3", "/"),
(0xFF10, "M", "0"),
(0xFF11, "M", "1"),
(0xFF12, "M", "2"),
@@ -5400,13 +5356,18 @@ def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFF17, "M", "7"),
(0xFF18, "M", "8"),
(0xFF19, "M", "9"),
- (0xFF1A, "M", ":"),
- (0xFF1B, "M", ";"),
- (0xFF1C, "M", "<"),
- (0xFF1D, "M", "="),
- (0xFF1E, "M", ">"),
- (0xFF1F, "M", "?"),
- (0xFF20, "M", "@"),
+ (0xFF1A, "3", ":"),
+ (0xFF1B, "3", ";"),
+ (0xFF1C, "3", "<"),
+ (0xFF1D, "3", "="),
+ (0xFF1E, "3", ">"),
+ ]
+
+
+def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0xFF1F, "3", "?"),
+ (0xFF20, "3", "@"),
(0xFF21, "M", "a"),
(0xFF22, "M", "b"),
(0xFF23, "M", "c"),
@@ -5433,12 +5394,12 @@ def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFF38, "M", "x"),
(0xFF39, "M", "y"),
(0xFF3A, "M", "z"),
- (0xFF3B, "M", "["),
- (0xFF3C, "M", "\\"),
- (0xFF3D, "M", "]"),
- (0xFF3E, "M", "^"),
- (0xFF3F, "M", "_"),
- (0xFF40, "M", "`"),
+ (0xFF3B, "3", "["),
+ (0xFF3C, "3", "\\"),
+ (0xFF3D, "3", "]"),
+ (0xFF3E, "3", "^"),
+ (0xFF3F, "3", "_"),
+ (0xFF40, "3", "`"),
(0xFF41, "M", "a"),
(0xFF42, "M", "b"),
(0xFF43, "M", "c"),
@@ -5465,15 +5426,10 @@ def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFF58, "M", "x"),
(0xFF59, "M", "y"),
(0xFF5A, "M", "z"),
- (0xFF5B, "M", "{"),
- ]
-
-
-def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
- (0xFF5C, "M", "|"),
- (0xFF5D, "M", "}"),
- (0xFF5E, "M", "~"),
+ (0xFF5B, "3", "{"),
+ (0xFF5C, "3", "|"),
+ (0xFF5D, "3", "}"),
+ (0xFF5E, "3", "~"),
(0xFF5F, "M", "⦅"),
(0xFF60, "M", "⦆"),
(0xFF61, "M", "."),
@@ -5510,6 +5466,11 @@ def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFF80, "M", "タ"),
(0xFF81, "M", "チ"),
(0xFF82, "M", "ツ"),
+ ]
+
+
+def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xFF83, "M", "テ"),
(0xFF84, "M", "ト"),
(0xFF85, "M", "ナ"),
@@ -5539,7 +5500,7 @@ def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFF9D, "M", "ン"),
(0xFF9E, "M", "゙"),
(0xFF9F, "M", "゚"),
- (0xFFA0, "I"),
+ (0xFFA0, "X"),
(0xFFA1, "M", "ᄀ"),
(0xFFA2, "M", "ᄁ"),
(0xFFA3, "M", "ᆪ"),
@@ -5571,11 +5532,6 @@ def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFFBD, "M", "ᄑ"),
(0xFFBE, "M", "ᄒ"),
(0xFFBF, "X"),
- ]
-
-
-def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0xFFC2, "M", "ᅡ"),
(0xFFC3, "M", "ᅢ"),
(0xFFC4, "M", "ᅣ"),
@@ -5604,7 +5560,7 @@ def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFFE0, "M", "¢"),
(0xFFE1, "M", "£"),
(0xFFE2, "M", "¬"),
- (0xFFE3, "M", " ̄"),
+ (0xFFE3, "3", " ̄"),
(0xFFE4, "M", "¦"),
(0xFFE5, "M", "¥"),
(0xFFE6, "M", "₩"),
@@ -5615,6 +5571,11 @@ def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0xFFEB, "M", "→"),
(0xFFEC, "M", "↓"),
(0xFFED, "M", "■"),
+ ]
+
+
+def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0xFFEE, "M", "○"),
(0xFFEF, "X"),
(0x10000, "V"),
@@ -5676,11 +5637,6 @@ def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1040C, "M", "𐐴"),
(0x1040D, "M", "𐐵"),
(0x1040E, "M", "𐐶"),
- ]
-
-
-def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1040F, "M", "𐐷"),
(0x10410, "M", "𐐸"),
(0x10411, "M", "𐐹"),
@@ -5720,6 +5676,11 @@ def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x104B7, "M", "𐓟"),
(0x104B8, "M", "𐓠"),
(0x104B9, "M", "𐓡"),
+ ]
+
+
+def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x104BA, "M", "𐓢"),
(0x104BB, "M", "𐓣"),
(0x104BC, "M", "𐓤"),
@@ -5781,11 +5742,6 @@ def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x10588, "M", "𐖯"),
(0x10589, "M", "𐖰"),
(0x1058A, "M", "𐖱"),
- ]
-
-
-def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1058B, "X"),
(0x1058C, "M", "𐖳"),
(0x1058D, "M", "𐖴"),
@@ -5806,8 +5762,6 @@ def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x105BA, "X"),
(0x105BB, "V"),
(0x105BD, "X"),
- (0x105C0, "V"),
- (0x105F4, "X"),
(0x10600, "V"),
(0x10737, "X"),
(0x10740, "V"),
@@ -5827,6 +5781,11 @@ def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1078A, "M", "ʤ"),
(0x1078B, "M", "ɖ"),
(0x1078C, "M", "ɗ"),
+ ]
+
+
+def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1078D, "M", "ᶑ"),
(0x1078E, "M", "ɘ"),
(0x1078F, "M", "ɞ"),
@@ -5886,11 +5845,6 @@ def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1083D, "X"),
(0x1083F, "V"),
(0x10856, "X"),
- ]
-
-
-def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x10857, "V"),
(0x1089F, "X"),
(0x108A7, "V"),
@@ -5932,6 +5886,11 @@ def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x10AEB, "V"),
(0x10AF7, "X"),
(0x10B00, "V"),
+ ]
+
+
+def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x10B36, "X"),
(0x10B39, "V"),
(0x10B56, "X"),
@@ -5991,11 +5950,6 @@ def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x10CAB, "M", "𐳫"),
(0x10CAC, "M", "𐳬"),
(0x10CAD, "M", "𐳭"),
- ]
-
-
-def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x10CAE, "M", "𐳮"),
(0x10CAF, "M", "𐳯"),
(0x10CB0, "M", "𐳰"),
@@ -6008,34 +5962,6 @@ def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x10D28, "X"),
(0x10D30, "V"),
(0x10D3A, "X"),
- (0x10D40, "V"),
- (0x10D50, "M", "𐵰"),
- (0x10D51, "M", "𐵱"),
- (0x10D52, "M", "𐵲"),
- (0x10D53, "M", "𐵳"),
- (0x10D54, "M", "𐵴"),
- (0x10D55, "M", "𐵵"),
- (0x10D56, "M", "𐵶"),
- (0x10D57, "M", "𐵷"),
- (0x10D58, "M", "𐵸"),
- (0x10D59, "M", "𐵹"),
- (0x10D5A, "M", "𐵺"),
- (0x10D5B, "M", "𐵻"),
- (0x10D5C, "M", "𐵼"),
- (0x10D5D, "M", "𐵽"),
- (0x10D5E, "M", "𐵾"),
- (0x10D5F, "M", "𐵿"),
- (0x10D60, "M", "𐶀"),
- (0x10D61, "M", "𐶁"),
- (0x10D62, "M", "𐶂"),
- (0x10D63, "M", "𐶃"),
- (0x10D64, "M", "𐶄"),
- (0x10D65, "M", "𐶅"),
- (0x10D66, "X"),
- (0x10D69, "V"),
- (0x10D86, "X"),
- (0x10D8E, "V"),
- (0x10D90, "X"),
(0x10E60, "V"),
(0x10E7F, "X"),
(0x10E80, "V"),
@@ -6044,9 +5970,7 @@ def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x10EAE, "X"),
(0x10EB0, "V"),
(0x10EB2, "X"),
- (0x10EC2, "V"),
- (0x10EC5, "X"),
- (0x10EFC, "V"),
+ (0x10EFD, "V"),
(0x10F28, "X"),
(0x10F30, "V"),
(0x10F5A, "X"),
@@ -6067,6 +5991,11 @@ def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x110D0, "V"),
(0x110E9, "X"),
(0x110F0, "V"),
+ ]
+
+
+def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x110FA, "X"),
(0x11100, "V"),
(0x11135, "X"),
@@ -6096,11 +6025,6 @@ def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x112EB, "X"),
(0x112F0, "V"),
(0x112FA, "X"),
- ]
-
-
-def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x11300, "V"),
(0x11304, "X"),
(0x11305, "V"),
@@ -6131,28 +6055,6 @@ def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1136D, "X"),
(0x11370, "V"),
(0x11375, "X"),
- (0x11380, "V"),
- (0x1138A, "X"),
- (0x1138B, "V"),
- (0x1138C, "X"),
- (0x1138E, "V"),
- (0x1138F, "X"),
- (0x11390, "V"),
- (0x113B6, "X"),
- (0x113B7, "V"),
- (0x113C1, "X"),
- (0x113C2, "V"),
- (0x113C3, "X"),
- (0x113C5, "V"),
- (0x113C6, "X"),
- (0x113C7, "V"),
- (0x113CB, "X"),
- (0x113CC, "V"),
- (0x113D6, "X"),
- (0x113D7, "V"),
- (0x113D9, "X"),
- (0x113E1, "V"),
- (0x113E3, "X"),
(0x11400, "V"),
(0x1145C, "X"),
(0x1145D, "V"),
@@ -6175,8 +6077,6 @@ def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x116BA, "X"),
(0x116C0, "V"),
(0x116CA, "X"),
- (0x116D0, "V"),
- (0x116E4, "X"),
(0x11700, "V"),
(0x1171B, "X"),
(0x1171D, "V"),
@@ -6196,16 +6096,16 @@ def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x118A8, "M", "𑣈"),
(0x118A9, "M", "𑣉"),
(0x118AA, "M", "𑣊"),
+ ]
+
+
+def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x118AB, "M", "𑣋"),
(0x118AC, "M", "𑣌"),
(0x118AD, "M", "𑣍"),
(0x118AE, "M", "𑣎"),
(0x118AF, "M", "𑣏"),
- ]
-
-
-def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x118B0, "M", "𑣐"),
(0x118B1, "M", "𑣑"),
(0x118B2, "M", "𑣒"),
@@ -6254,10 +6154,6 @@ def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x11AF9, "X"),
(0x11B00, "V"),
(0x11B0A, "X"),
- (0x11BC0, "V"),
- (0x11BE2, "X"),
- (0x11BF0, "V"),
- (0x11BFA, "X"),
(0x11C00, "V"),
(0x11C09, "X"),
(0x11C0A, "V"),
@@ -6305,12 +6201,12 @@ def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x11F12, "V"),
(0x11F3B, "X"),
(0x11F3E, "V"),
- (0x11F5B, "X"),
    ]


-def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
return [
+ (0x11F5A, "X"),
(0x11FB0, "V"),
(0x11FB1, "X"),
(0x11FC0, "V"),
@@ -6329,12 +6225,8 @@ def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x13430, "X"),
(0x13440, "V"),
(0x13456, "X"),
- (0x13460, "V"),
- (0x143FB, "X"),
(0x14400, "V"),
(0x14647, "X"),
- (0x16100, "V"),
- (0x1613A, "X"),
(0x16800, "V"),
(0x16A39, "X"),
(0x16A40, "V"),
@@ -6359,8 +6251,6 @@ def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x16B78, "X"),
(0x16B7D, "V"),
(0x16B90, "X"),
- (0x16D40, "V"),
- (0x16D7A, "X"),
(0x16E40, "M", "𖹠"),
(0x16E41, "M", "𖹡"),
(0x16E42, "M", "𖹢"),
@@ -6409,18 +6299,18 @@ def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x187F8, "X"),
(0x18800, "V"),
(0x18CD6, "X"),
- (0x18CFF, "V"),
+ (0x18D00, "V"),
(0x18D09, "X"),
- ]
-
-
-def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1AFF0, "V"),
(0x1AFF4, "X"),
(0x1AFF5, "V"),
(0x1AFFC, "X"),
(0x1AFFD, "V"),
+ ]
+
+
+def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1AFFF, "X"),
(0x1B000, "V"),
(0x1B123, "X"),
@@ -6445,46 +6335,6 @@ def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1BC9C, "V"),
(0x1BCA0, "I"),
(0x1BCA4, "X"),
- (0x1CC00, "V"),
- (0x1CCD6, "M", "a"),
- (0x1CCD7, "M", "b"),
- (0x1CCD8, "M", "c"),
- (0x1CCD9, "M", "d"),
- (0x1CCDA, "M", "e"),
- (0x1CCDB, "M", "f"),
- (0x1CCDC, "M", "g"),
- (0x1CCDD, "M", "h"),
- (0x1CCDE, "M", "i"),
- (0x1CCDF, "M", "j"),
- (0x1CCE0, "M", "k"),
- (0x1CCE1, "M", "l"),
- (0x1CCE2, "M", "m"),
- (0x1CCE3, "M", "n"),
- (0x1CCE4, "M", "o"),
- (0x1CCE5, "M", "p"),
- (0x1CCE6, "M", "q"),
- (0x1CCE7, "M", "r"),
- (0x1CCE8, "M", "s"),
- (0x1CCE9, "M", "t"),
- (0x1CCEA, "M", "u"),
- (0x1CCEB, "M", "v"),
- (0x1CCEC, "M", "w"),
- (0x1CCED, "M", "x"),
- (0x1CCEE, "M", "y"),
- (0x1CCEF, "M", "z"),
- (0x1CCF0, "M", "0"),
- (0x1CCF1, "M", "1"),
- (0x1CCF2, "M", "2"),
- (0x1CCF3, "M", "3"),
- (0x1CCF4, "M", "4"),
- (0x1CCF5, "M", "5"),
- (0x1CCF6, "M", "6"),
- (0x1CCF7, "M", "7"),
- (0x1CCF8, "M", "8"),
- (0x1CCF9, "M", "9"),
- (0x1CCFA, "X"),
- (0x1CD00, "V"),
- (0x1CEB4, "X"),
(0x1CF00, "V"),
(0x1CF2E, "X"),
(0x1CF30, "V"),
@@ -6504,7 +6354,7 @@ def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D163, "M", "𝅘𝅥𝅱"),
(0x1D164, "M", "𝅘𝅥𝅲"),
(0x1D165, "V"),
- (0x1D173, "I"),
+ (0x1D173, "X"),
(0x1D17B, "V"),
(0x1D1BB, "M", "𝆹𝅥"),
(0x1D1BC, "M", "𝆺𝅥"),
@@ -6516,11 +6366,6 @@ def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D1EB, "X"),
(0x1D200, "V"),
(0x1D246, "X"),
- ]
-
-
-def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D2C0, "V"),
(0x1D2D4, "X"),
(0x1D2E0, "V"),
@@ -6566,6 +6411,11 @@ def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D422, "M", "i"),
(0x1D423, "M", "j"),
(0x1D424, "M", "k"),
+ ]
+
+
+def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D425, "M", "l"),
(0x1D426, "M", "m"),
(0x1D427, "M", "n"),
@@ -6621,11 +6471,6 @@ def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D459, "M", "l"),
(0x1D45A, "M", "m"),
(0x1D45B, "M", "n"),
- ]
-
-
-def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D45C, "M", "o"),
(0x1D45D, "M", "p"),
(0x1D45E, "M", "q"),
@@ -6671,6 +6516,11 @@ def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D486, "M", "e"),
(0x1D487, "M", "f"),
(0x1D488, "M", "g"),
+ ]
+
+
+def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D489, "M", "h"),
(0x1D48A, "M", "i"),
(0x1D48B, "M", "j"),
@@ -6726,11 +6576,6 @@ def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D4C0, "M", "k"),
(0x1D4C1, "M", "l"),
(0x1D4C2, "M", "m"),
- ]
-
-
-def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D4C3, "M", "n"),
(0x1D4C4, "X"),
(0x1D4C5, "M", "p"),
@@ -6776,6 +6621,11 @@ def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D4ED, "M", "d"),
(0x1D4EE, "M", "e"),
(0x1D4EF, "M", "f"),
+ ]
+
+
+def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D4F0, "M", "g"),
(0x1D4F1, "M", "h"),
(0x1D4F2, "M", "i"),
@@ -6831,11 +6681,6 @@ def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D525, "M", "h"),
(0x1D526, "M", "i"),
(0x1D527, "M", "j"),
- ]
-
-
-def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D528, "M", "k"),
(0x1D529, "M", "l"),
(0x1D52A, "M", "m"),
@@ -6881,6 +6726,11 @@ def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D554, "M", "c"),
(0x1D555, "M", "d"),
(0x1D556, "M", "e"),
+ ]
+
+
+def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D557, "M", "f"),
(0x1D558, "M", "g"),
(0x1D559, "M", "h"),
@@ -6936,11 +6786,6 @@ def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D58B, "M", "f"),
(0x1D58C, "M", "g"),
(0x1D58D, "M", "h"),
- ]
-
-
-def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D58E, "M", "i"),
(0x1D58F, "M", "j"),
(0x1D590, "M", "k"),
@@ -6986,6 +6831,11 @@ def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D5B8, "M", "y"),
(0x1D5B9, "M", "z"),
(0x1D5BA, "M", "a"),
+ ]
+
+
+def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D5BB, "M", "b"),
(0x1D5BC, "M", "c"),
(0x1D5BD, "M", "d"),
@@ -7041,11 +6891,6 @@ def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D5EF, "M", "b"),
(0x1D5F0, "M", "c"),
(0x1D5F1, "M", "d"),
- ]
-
-
-def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D5F2, "M", "e"),
(0x1D5F3, "M", "f"),
(0x1D5F4, "M", "g"),
@@ -7091,6 +6936,11 @@ def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D61C, "M", "u"),
(0x1D61D, "M", "v"),
(0x1D61E, "M", "w"),
+ ]
+
+
+def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D61F, "M", "x"),
(0x1D620, "M", "y"),
(0x1D621, "M", "z"),
@@ -7146,11 +6996,6 @@ def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D653, "M", "x"),
(0x1D654, "M", "y"),
(0x1D655, "M", "z"),
- ]
-
-
-def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D656, "M", "a"),
(0x1D657, "M", "b"),
(0x1D658, "M", "c"),
@@ -7196,6 +7041,11 @@ def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D680, "M", "q"),
(0x1D681, "M", "r"),
(0x1D682, "M", "s"),
+ ]
+
+
+def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D683, "M", "t"),
(0x1D684, "M", "u"),
(0x1D685, "M", "v"),
@@ -7251,11 +7101,6 @@ def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D6B8, "M", "ρ"),
(0x1D6B9, "M", "θ"),
(0x1D6BA, "M", "σ"),
- ]
-
-
-def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D6BB, "M", "τ"),
(0x1D6BC, "M", "υ"),
(0x1D6BD, "M", "φ"),
@@ -7301,6 +7146,11 @@ def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D6E6, "M", "ε"),
(0x1D6E7, "M", "ζ"),
(0x1D6E8, "M", "η"),
+ ]
+
+
+def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D6E9, "M", "θ"),
(0x1D6EA, "M", "ι"),
(0x1D6EB, "M", "κ"),
@@ -7356,11 +7206,6 @@ def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D71E, "M", "γ"),
(0x1D71F, "M", "δ"),
(0x1D720, "M", "ε"),
- ]
-
-
-def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D721, "M", "ζ"),
(0x1D722, "M", "η"),
(0x1D723, "M", "θ"),
@@ -7406,6 +7251,11 @@ def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D74C, "M", "χ"),
(0x1D74D, "M", "ψ"),
(0x1D74E, "M", "ω"),
+ ]
+
+
+def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D74F, "M", "∂"),
(0x1D750, "M", "ε"),
(0x1D751, "M", "θ"),
@@ -7461,11 +7311,6 @@ def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D784, "M", "υ"),
(0x1D785, "M", "φ"),
(0x1D786, "M", "χ"),
- ]
-
-
-def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D787, "M", "ψ"),
(0x1D788, "M", "ω"),
(0x1D789, "M", "∂"),
@@ -7511,6 +7356,11 @@ def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D7B1, "M", "θ"),
(0x1D7B2, "M", "ι"),
(0x1D7B3, "M", "κ"),
+ ]
+
+
+def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1D7B4, "M", "λ"),
(0x1D7B5, "M", "μ"),
(0x1D7B6, "M", "ν"),
@@ -7566,11 +7416,6 @@ def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1D7EB, "M", "9"),
(0x1D7EC, "M", "0"),
(0x1D7ED, "M", "1"),
- ]
-
-
-def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1D7EE, "M", "2"),
(0x1D7EF, "M", "3"),
(0x1D7F0, "M", "4"),
@@ -7616,6 +7461,11 @@ def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1E034, "M", "д"),
(0x1E035, "M", "е"),
(0x1E036, "M", "ж"),
+ ]
+
+
+def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1E037, "M", "з"),
(0x1E038, "M", "и"),
(0x1E039, "M", "к"),
@@ -7671,11 +7521,6 @@ def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1E06B, "M", "ҫ"),
(0x1E06C, "M", "ꙑ"),
(0x1E06D, "M", "ұ"),
- ]
-
-
-def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1E06E, "X"),
(0x1E08F, "V"),
(0x1E090, "X"),
@@ -7695,10 +7540,6 @@ def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1E300, "X"),
(0x1E4D0, "V"),
(0x1E4FA, "X"),
- (0x1E5D0, "V"),
- (0x1E5FB, "X"),
- (0x1E5FF, "V"),
- (0x1E600, "X"),
(0x1E7E0, "V"),
(0x1E7E7, "X"),
(0x1E7E8, "V"),
@@ -7725,6 +7566,11 @@ def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1E90B, "M", "𞤭"),
(0x1E90C, "M", "𞤮"),
(0x1E90D, "M", "𞤯"),
+ ]
+
+
+def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1E90E, "M", "𞤰"),
(0x1E90F, "M", "𞤱"),
(0x1E910, "M", "𞤲"),
@@ -7776,11 +7622,6 @@ def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1EE12, "M", "ق"),
(0x1EE13, "M", "ر"),
(0x1EE14, "M", "ش"),
- ]
-
-
-def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1EE15, "M", "ت"),
(0x1EE16, "M", "ث"),
(0x1EE17, "M", "خ"),
@@ -7830,6 +7671,11 @@ def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1EE4C, "X"),
(0x1EE4D, "M", "ن"),
(0x1EE4E, "M", "س"),
+ ]
+
+
+def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1EE4F, "M", "ع"),
(0x1EE50, "X"),
(0x1EE51, "M", "ص"),
@@ -7881,11 +7727,6 @@ def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1EE81, "M", "ب"),
(0x1EE82, "M", "ج"),
(0x1EE83, "M", "د"),
- ]
-
-
-def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1EE84, "M", "ه"),
(0x1EE85, "M", "و"),
(0x1EE86, "M", "ز"),
@@ -7935,6 +7776,11 @@ def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1EEB6, "M", "ث"),
(0x1EEB7, "M", "خ"),
(0x1EEB8, "M", "ذ"),
+ ]
+
+
+def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1EEB9, "M", "ض"),
(0x1EEBA, "M", "ظ"),
(0x1EEBB, "M", "غ"),
@@ -7953,48 +7799,43 @@ def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1F0D0, "X"),
(0x1F0D1, "V"),
(0x1F0F6, "X"),
- (0x1F101, "M", "0,"),
- (0x1F102, "M", "1,"),
- (0x1F103, "M", "2,"),
- (0x1F104, "M", "3,"),
- (0x1F105, "M", "4,"),
- (0x1F106, "M", "5,"),
- (0x1F107, "M", "6,"),
- (0x1F108, "M", "7,"),
- (0x1F109, "M", "8,"),
- (0x1F10A, "M", "9,"),
+ (0x1F101, "3", "0,"),
+ (0x1F102, "3", "1,"),
+ (0x1F103, "3", "2,"),
+ (0x1F104, "3", "3,"),
+ (0x1F105, "3", "4,"),
+ (0x1F106, "3", "5,"),
+ (0x1F107, "3", "6,"),
+ (0x1F108, "3", "7,"),
+ (0x1F109, "3", "8,"),
+ (0x1F10A, "3", "9,"),
(0x1F10B, "V"),
- (0x1F110, "M", "(a)"),
- (0x1F111, "M", "(b)"),
- (0x1F112, "M", "(c)"),
- (0x1F113, "M", "(d)"),
- (0x1F114, "M", "(e)"),
- (0x1F115, "M", "(f)"),
- (0x1F116, "M", "(g)"),
- (0x1F117, "M", "(h)"),
- (0x1F118, "M", "(i)"),
- (0x1F119, "M", "(j)"),
- (0x1F11A, "M", "(k)"),
- (0x1F11B, "M", "(l)"),
- (0x1F11C, "M", "(m)"),
- (0x1F11D, "M", "(n)"),
- (0x1F11E, "M", "(o)"),
- (0x1F11F, "M", "(p)"),
- (0x1F120, "M", "(q)"),
- (0x1F121, "M", "(r)"),
- (0x1F122, "M", "(s)"),
- (0x1F123, "M", "(t)"),
- (0x1F124, "M", "(u)"),
- (0x1F125, "M", "(v)"),
- ]
-
-
-def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
- (0x1F126, "M", "(w)"),
- (0x1F127, "M", "(x)"),
- (0x1F128, "M", "(y)"),
- (0x1F129, "M", "(z)"),
+ (0x1F110, "3", "(a)"),
+ (0x1F111, "3", "(b)"),
+ (0x1F112, "3", "(c)"),
+ (0x1F113, "3", "(d)"),
+ (0x1F114, "3", "(e)"),
+ (0x1F115, "3", "(f)"),
+ (0x1F116, "3", "(g)"),
+ (0x1F117, "3", "(h)"),
+ (0x1F118, "3", "(i)"),
+ (0x1F119, "3", "(j)"),
+ (0x1F11A, "3", "(k)"),
+ (0x1F11B, "3", "(l)"),
+ (0x1F11C, "3", "(m)"),
+ (0x1F11D, "3", "(n)"),
+ (0x1F11E, "3", "(o)"),
+ (0x1F11F, "3", "(p)"),
+ (0x1F120, "3", "(q)"),
+ (0x1F121, "3", "(r)"),
+ (0x1F122, "3", "(s)"),
+ (0x1F123, "3", "(t)"),
+ (0x1F124, "3", "(u)"),
+ (0x1F125, "3", "(v)"),
+ (0x1F126, "3", "(w)"),
+ (0x1F127, "3", "(x)"),
+ (0x1F128, "3", "(y)"),
+ (0x1F129, "3", "(z)"),
(0x1F12A, "M", "〔s〕"),
(0x1F12B, "M", "c"),
(0x1F12C, "M", "r"),
@@ -8040,6 +7881,11 @@ def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1F16D, "V"),
(0x1F190, "M", "dj"),
(0x1F191, "V"),
+ ]
+
+
+def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x1F1AE, "X"),
(0x1F1E6, "V"),
(0x1F200, "M", "ほか"),
@@ -8091,11 +7937,6 @@ def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1F23A, "M", "営"),
(0x1F23B, "M", "配"),
(0x1F23C, "X"),
- ]
-
-
-def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x1F240, "M", "〔本〕"),
(0x1F241, "M", "〔三〕"),
(0x1F242, "M", "〔二〕"),
@@ -8136,9 +7977,7 @@ def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1F890, "V"),
(0x1F8AE, "X"),
(0x1F8B0, "V"),
- (0x1F8BC, "X"),
- (0x1F8C0, "V"),
- (0x1F8C2, "X"),
+ (0x1F8B2, "X"),
(0x1F900, "V"),
(0x1FA54, "X"),
(0x1FA60, "V"),
@@ -8146,18 +7985,26 @@ def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x1FA70, "V"),
(0x1FA7D, "X"),
(0x1FA80, "V"),
- (0x1FA8A, "X"),
- (0x1FA8F, "V"),
- (0x1FAC7, "X"),
+ (0x1FA89, "X"),
+ ]
+
+
+def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
+ (0x1FA90, "V"),
+ (0x1FABE, "X"),
+ (0x1FABF, "V"),
+ (0x1FAC6, "X"),
(0x1FACE, "V"),
- (0x1FADD, "X"),
- (0x1FADF, "V"),
- (0x1FAEA, "X"),
+ (0x1FADC, "X"),
+ (0x1FAE0, "V"),
+ (0x1FAE9, "X"),
(0x1FAF0, "V"),
(0x1FAF9, "X"),
(0x1FB00, "V"),
(0x1FB93, "X"),
(0x1FB94, "V"),
+ (0x1FBCB, "X"),
(0x1FBF0, "M", "0"),
(0x1FBF1, "M", "1"),
(0x1FBF2, "M", "2"),
@@ -8196,11 +8043,6 @@ def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F80C, "M", "㒞"),
(0x2F80D, "M", "𠘺"),
(0x2F80E, "M", "免"),
- ]
-
-
-def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2F80F, "M", "兔"),
(0x2F810, "M", "兤"),
(0x2F811, "M", "具"),
@@ -8249,6 +8091,11 @@ def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F83E, "M", "呈"),
(0x2F83F, "M", "周"),
(0x2F840, "M", "咢"),
+ ]
+
+
+def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x2F841, "M", "哶"),
(0x2F842, "M", "唐"),
(0x2F843, "M", "啓"),
@@ -8287,7 +8134,7 @@ def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F865, "M", "姘"),
(0x2F866, "M", "婦"),
(0x2F867, "M", "㛮"),
- (0x2F868, "M", "㛼"),
+ (0x2F868, "X"),
(0x2F869, "M", "嬈"),
(0x2F86A, "M", "嬾"),
(0x2F86C, "M", "𡧈"),
@@ -8298,14 +8145,9 @@ def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F871, "M", "𡬘"),
(0x2F872, "M", "寿"),
(0x2F873, "M", "将"),
- (0x2F874, "M", "当"),
+ (0x2F874, "X"),
(0x2F875, "M", "尢"),
(0x2F876, "M", "㞁"),
- ]
-
-
-def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2F877, "M", "屠"),
(0x2F878, "M", "屮"),
(0x2F879, "M", "峀"),
@@ -8354,6 +8196,11 @@ def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F8A6, "M", "慈"),
(0x2F8A7, "M", "慌"),
(0x2F8A8, "M", "慎"),
+ ]
+
+
+def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x2F8A9, "M", "慌"),
(0x2F8AA, "M", "慺"),
(0x2F8AB, "M", "憎"),
@@ -8406,11 +8253,6 @@ def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F8DA, "M", "朡"),
(0x2F8DB, "M", "杞"),
(0x2F8DC, "M", "杓"),
- ]
-
-
-def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2F8DD, "M", "𣏃"),
(0x2F8DE, "M", "㭉"),
(0x2F8DF, "M", "柺"),
@@ -8459,6 +8301,11 @@ def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F90A, "M", "㴳"),
(0x2F90B, "M", "滋"),
(0x2F90C, "M", "滇"),
+ ]
+
+
+def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x2F90D, "M", "𣻑"),
(0x2F90E, "M", "淹"),
(0x2F90F, "M", "潮"),
@@ -8477,7 +8324,7 @@ def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F91C, "M", "煅"),
(0x2F91D, "M", "𤉣"),
(0x2F91E, "M", "熜"),
- (0x2F91F, "M", "𤎫"),
+ (0x2F91F, "X"),
(0x2F920, "M", "爨"),
(0x2F921, "M", "爵"),
(0x2F922, "M", "牐"),
@@ -8511,11 +8358,6 @@ def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F93F, "M", "䀈"),
(0x2F940, "M", "直"),
(0x2F941, "M", "𥃳"),
- ]
-
-
-def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2F942, "M", "𥃲"),
(0x2F943, "M", "𥄙"),
(0x2F944, "M", "𥄳"),
@@ -8543,7 +8385,7 @@ def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F95B, "M", "穏"),
(0x2F95C, "M", "𥥼"),
(0x2F95D, "M", "𥪧"),
- (0x2F95F, "M", "竮"),
+ (0x2F95F, "X"),
(0x2F960, "M", "䈂"),
(0x2F961, "M", "𥮫"),
(0x2F962, "M", "篆"),
@@ -8564,6 +8406,11 @@ def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F971, "M", "䌴"),
(0x2F972, "M", "𦈨"),
(0x2F973, "M", "𦉇"),
+ ]
+
+
+def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x2F974, "M", "䍙"),
(0x2F975, "M", "𦋙"),
(0x2F976, "M", "罺"),
@@ -8616,11 +8463,6 @@ def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F9A5, "M", "𦵫"),
(0x2F9A6, "M", "𦳕"),
(0x2F9A7, "M", "䔫"),
- ]
-
-
-def _seg_82() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2F9A8, "M", "蓱"),
(0x2F9A9, "M", "蓳"),
(0x2F9AA, "M", "蔖"),
@@ -8644,7 +8486,7 @@ def _seg_82() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F9BC, "M", "蜨"),
(0x2F9BD, "M", "蝫"),
(0x2F9BE, "M", "螆"),
- (0x2F9BF, "M", "䗗"),
+ (0x2F9BF, "X"),
(0x2F9C0, "M", "蟡"),
(0x2F9C1, "M", "蠁"),
(0x2F9C2, "M", "䗹"),
@@ -8669,6 +8511,11 @@ def _seg_82() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2F9D5, "M", "賁"),
(0x2F9D6, "M", "贛"),
(0x2F9D7, "M", "起"),
+ ]
+
+
+def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+ return [
(0x2F9D8, "M", "𧼯"),
(0x2F9D9, "M", "𠠄"),
(0x2F9DA, "M", "跋"),
@@ -8721,11 +8568,6 @@ def _seg_82() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
(0x2FA0A, "M", "鬒"),
(0x2FA0B, "M", "鱀"),
(0x2FA0C, "M", "鳽"),
- ]
-
-
-def _seg_83() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
- return [
(0x2FA0D, "M", "䳎"),
(0x2FA0E, "M", "䳭"),
(0x2FA0F, "M", "鵧"),
@@ -8836,6 +8678,4 @@ uts46data = tuple(
+ _seg_79()
+ _seg_80()
+ _seg_81()
- + _seg_82()
- + _seg_83()
) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...]
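
The ``uts46data`` table assembled above is a tuple of sorted ``(codepoint, status[, mapping])`` rows; statuses such as "V" (valid), "M" (mapped), "I" (ignored), "X" (disallowed) and "3" (STD3-restricted) drive UTS-46 preprocessing of domain labels. As a minimal, illustrative sketch only — not part of this patch, and simplified relative to the real ``idna.core`` remapping logic — a table of this shape can be consulted with a bisect lookup:

.. code-block:: python

    # Illustrative sketch only -- not part of this patch, and simplified
    # relative to idna.core's real UTS-46 remapping.
    import bisect
    from typing import Optional, Tuple, Union

    Row = Union[Tuple[int, str], Tuple[int, str, str]]

    def lookup(table: Tuple[Row, ...], cp: int) -> Row:
        """Find the row governing code point ``cp``.

        Rows are sorted by starting code point and each row applies up to
        the next row's start, so a right-bisect locates the match.
        """
        # "Z" sorts after every status letter used in the table, so the
        # probe lands just past any row that starts exactly at ``cp``.
        idx = bisect.bisect_right(table, (cp, "Z")) - 1
        return table[idx]

    def remap_char(table: Tuple[Row, ...], ch: str) -> Optional[str]:
        """Apply a row's status: V keeps, M maps, I drops, others reject here."""
        row = lookup(table, ord(ch))
        status = row[1]
        if status == "V":
            return ch
        if status == "M":
            return row[2]  # mapped replacement string
        if status == "I":
            return ""      # ignored: dropped from the output
        return None        # "X", "3", "D", ...: treated as disallowed in this sketch

    # e.g. a row such as (0xFF21, "M", "a") maps fullwidth 'A' down to 'a'.

The real codec additionally distinguishes STD3 and transitional processing, which is what the "3" and "D" statuses encode; the sketch above collapses them into plain rejection.
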
diff --git a/contrib/python/idna/py3/ya.make b/contrib/python/idna/py3/ya.make
index 98acf283f7..24aa33c733 100644
--- a/contrib/python/idna/py3/ya.make
+++ b/contrib/python/idna/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(3.9)
+VERSION(3.10)
LICENSE(BSD-3-Clause)
diff --git a/contrib/python/mypy-protobuf/.dist-info/METADATA b/contrib/python/mypy-protobuf/.dist-info/METADATA
index 934bb64a10..eaa67dc8dc 100644
--- a/contrib/python/mypy-protobuf/.dist-info/METADATA
+++ b/contrib/python/mypy-protobuf/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: mypy-protobuf
-Version: 3.5.0
+Version: 3.6.0
Summary: Generate mypy stub files from protobuf specs
Home-page: https://github.com/nipunn1313/mypy-protobuf
Download-URL: https://github.com/nipunn1313/mypy-protobuf/releases
@@ -10,6 +10,6 @@ License: Apache License 2.0
Keywords: mypy proto dropbox
Requires-Python: >=3.8
License-File: LICENSE
-Requires-Dist: protobuf (>=4.23.4)
-Requires-Dist: types-protobuf (>=4.23.0.2)
+Requires-Dist: protobuf >=4.25.3
+Requires-Dist: types-protobuf >=4.24
diff --git a/contrib/python/mypy-protobuf/README.md b/contrib/python/mypy-protobuf/README.md
index a1bfe29279..bccb090c9c 100644
--- a/contrib/python/mypy-protobuf/README.md
+++ b/contrib/python/mypy-protobuf/README.md
@@ -16,7 +16,7 @@ See [Changelog](CHANGELOG.md) for recent changes.
Earlier releases might work, but aren't tested
- [protoc >= 23.4](https://github.com/protocolbuffers/protobuf/releases)
-- [python-protobuf >= 4.23.4](https://pypi.org/project/protobuf/) - matching protoc release
+- [python-protobuf >= 4.25.3](https://pypi.org/project/protobuf/) - matching protoc release
- [python >= 3.8](https://www.python.org/downloads/source/) - for running mypy-protobuf plugin.
## Requirements to run typecheckers on stubs generated by mypy-protobuf
@@ -24,8 +24,8 @@ Earlier releases might work, but aren't tested
Earlier releases might work, but aren't tested
- [mypy >= v1.4.1](https://pypi.org/project/mypy) or [pyright >= 1.1.206](https://github.com/microsoft/pyright)
-- [python-protobuf >= 4.23.4](https://pypi.org/project/protobuf/) - matching protoc release
-- [types-protobuf >= 4.23.0.2](https://pypi.org/project/types-protobuf/) - for stubs from the google.protobuf library
+- [python-protobuf >= 4.25.3](https://pypi.org/project/protobuf/) - matching protoc release
+- [types-protobuf >= 4.24](https://pypi.org/project/types-protobuf/) - for stubs from the google.protobuf library
### To run typecheckers on code generated with grpc plugin - you'll additionally need
diff --git a/contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py b/contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py
index a638958a1b..5d48735c20 100644
--- a/contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py
+++ b/contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: mypy_protobuf/extensions.proto
+# Protobuf Python Version: 4.25.3
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
@@ -20,18 +21,13 @@ _globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'mypy_protobuf.extensions_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS == False:
- google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(options)
- google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(casttype)
- google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(keytype)
- google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(valuetype)
-
DESCRIPTOR._options = None
- casttype._options = None
- casttype._serialized_options = b'\030\001'
- keytype._options = None
- keytype._serialized_options = b'\030\001'
- valuetype._options = None
- valuetype._serialized_options = b'\030\001'
+ _globals['casttype']._options = None
+ _globals['casttype']._serialized_options = b'\030\001'
+ _globals['keytype']._options = None
+ _globals['keytype']._serialized_options = b'\030\001'
+ _globals['valuetype']._options = None
+ _globals['valuetype']._serialized_options = b'\030\001'
_globals['_FIELDOPTIONS']._serialized_start=83
_globals['_FIELDOPTIONS']._serialized_end=151
# @@protoc_insertion_point(module_scope)
diff --git a/contrib/python/mypy-protobuf/mypy_protobuf/main.py b/contrib/python/mypy-protobuf/mypy_protobuf/main.py
index f0b3dbc7e8..343b80b3af 100644
--- a/contrib/python/mypy-protobuf/mypy_protobuf/main.py
+++ b/contrib/python/mypy-protobuf/mypy_protobuf/main.py
@@ -24,7 +24,7 @@ from google.protobuf.internal.containers import RepeatedCompositeFieldContainer
from google.protobuf.internal.well_known_types import WKTBASES
from . import extensions_pb2
-__version__ = "3.5.0"
+__version__ = "3.6.0"
# SourceCodeLocation is defined by `message Location` here
# https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto
@@ -169,9 +169,7 @@ class PkgWriter(object):
"""
if path == "typing_extensions":
stabilization = {
- "Literal": (3, 8),
"TypeAlias": (3, 10),
- "final": (3, 8),
}
assert name in stabilization
if not self.typing_extensions_min or self.typing_extensions_min < stabilization[name]:
@@ -407,7 +405,7 @@ class PkgWriter(object):
class_name = desc.name if desc.name not in PYTHON_RESERVED else "_r_" + desc.name
message_class = self._import("google.protobuf.message", "Message")
- wl("@{}", self._import("typing_extensions", "final"))
+ wl("@{}", self._import("typing", "final"))
wl(f"class {class_name}({message_class}{addl_base}):")
with self._indent():
scl = scl_prefix + [i]
@@ -438,12 +436,16 @@ class PkgWriter(object):
if field.name in PYTHON_RESERVED:
continue
field_type = self.python_type(field)
-
if is_scalar(field) and field.label != d.FieldDescriptorProto.LABEL_REPEATED:
# Scalar non repeated fields are r/w
wl(f"{field.name}: {field_type}")
self._write_comments(scl + [d.DescriptorProto.FIELD_FIELD_NUMBER, idx])
- else:
+
+ for idx, field in enumerate(desc.field):
+ if field.name in PYTHON_RESERVED:
+ continue
+ field_type = self.python_type(field)
+ if not (is_scalar(field) and field.label != d.FieldDescriptorProto.LABEL_REPEATED):
# r/o Getters for non-scalar fields and scalar-repeated fields
scl_field = scl + [d.DescriptorProto.FIELD_FIELD_NUMBER, idx]
wl("@property")
@@ -452,6 +454,7 @@ class PkgWriter(object):
if self._has_comments(scl_field):
with self._indent():
self._write_comments(scl_field)
+ wl("")
self.write_extensions(desc.extension, scl + [d.DescriptorProto.EXTENSION_FIELD_NUMBER])
@@ -506,14 +509,14 @@ class PkgWriter(object):
if hf_fields:
wl(
"def HasField(self, field_name: {}[{}]) -> {}: ...",
- self._import("typing_extensions", "Literal"),
+ self._import("typing", "Literal"),
hf_fields_text,
self._builtin("bool"),
)
if cf_fields:
wl(
"def ClearField(self, field_name: {}[{}]) -> None: ...",
- self._import("typing_extensions", "Literal"),
+ self._import("typing", "Literal"),
cf_fields_text,
)
@@ -522,10 +525,10 @@ class PkgWriter(object):
wl("@{}", self._import("typing", "overload"))
wl(
"def WhichOneof(self, oneof_group: {}[{}]) -> {}[{}] | None: ...",
- self._import("typing_extensions", "Literal"),
+ self._import("typing", "Literal"),
# Accepts both str and bytes
f'"{wo_field}", b"{wo_field}"',
- self._import("typing_extensions", "Literal"),
+ self._import("typing", "Literal"),
# Returns `str`
", ".join(f'"{m}"' for m in members),
)
@@ -599,6 +602,7 @@ class PkgWriter(object):
with self._indent():
if not self._write_comments(scl_method):
wl("...")
+ wl("")
def write_services(
self,
@@ -620,7 +624,6 @@ class PkgWriter(object):
if self._write_comments(scl):
wl("")
self.write_methods(service, class_name, is_abstract=True, scl_prefix=scl)
- wl("")
# The stub client
stub_class_name = service.name + "_Stub"
@@ -633,7 +636,6 @@ class PkgWriter(object):
self._import("google.protobuf.service", "RpcChannel"),
)
self.write_methods(service, stub_class_name, is_abstract=False, scl_prefix=scl)
- wl("")
def _import_casttype(self, casttype: str) -> str:
split = casttype.split(".")
@@ -709,22 +711,20 @@ class PkgWriter(object):
wl = self._write_line
# _MaybeAsyncIterator[Req] is supertyped by Iterator[Req] and AsyncIterator[Req].
# So both can be used in the contravariant function parameter position.
- wl("_T = {}('_T')", self._import("typing", "TypeVar"))
+ wl('_T = {}("_T")', self._import("typing", "TypeVar"))
wl("")
wl(
- "class _MaybeAsyncIterator({}[_T], {}[_T], metaclass={}):",
+ "class _MaybeAsyncIterator({}[_T], {}[_T], metaclass={}): ...",
self._import("collections.abc", "AsyncIterator"),
self._import("collections.abc", "Iterator"),
self._import("abc", "ABCMeta"),
)
- with self._indent():
- wl("...")
wl("")
# _ServicerContext is supertyped by grpc.ServicerContext and grpc.aio.ServicerContext
# So both can be used in the contravariant function parameter position.
wl(
- "class _ServicerContext({}, {}): # type: ignore",
+ "class _ServicerContext({}, {}): # type: ignore[misc, type-arg]",
self._import("grpc", "ServicerContext"),
self._import("grpc.aio", "ServicerContext"),
)
@@ -758,6 +758,7 @@ class PkgWriter(object):
with self._indent():
if not self._write_comments(scl):
wl("...")
+ wl("")
def write_grpc_stub_methods(self, service: d.ServiceDescriptorProto, scl_prefix: SourceCodeLocation, is_async: bool = False) -> None:
wl = self._write_line
@@ -774,6 +775,7 @@ class PkgWriter(object):
wl("{},", self._output_type(method))
wl("]")
self._write_comments(scl)
+ wl("")
def write_grpc_services(
self,
@@ -799,7 +801,6 @@ class PkgWriter(object):
channel = f"{self._import('typing', 'Union')}[{self._import('grpc', 'Channel')}, {self._import('grpc.aio', 'Channel')}]"
wl("def __init__(self, channel: {}) -> None: ...", channel)
self.write_grpc_stub_methods(service, scl)
- wl("")
# The (fake) async stub client
wl(
@@ -811,7 +812,6 @@ class PkgWriter(object):
wl("")
# No __init__ since this isn't a real class (yet), and requires manual casting to work.
self.write_grpc_stub_methods(service, scl, is_async=True)
- wl("")
# The service definition interface
wl(
@@ -823,7 +823,6 @@ class PkgWriter(object):
if self._write_comments(scl):
wl("")
self.write_grpc_methods(service, scl)
- wl("")
server = self._import("grpc", "Server")
aserver = self._import("grpc.aio", "Server")
wl(
@@ -925,8 +924,9 @@ class PkgWriter(object):
if self.lines:
assert self.lines[0].startswith('"""')
self.lines[0] = f'"""{HEADER}{self.lines[0][3:]}'
+ self._write_line("")
else:
- self._write_line(f'"""{HEADER}"""')
+ self._write_line(f'"""{HEADER}"""\n')
for reexport_idx in self.fd.public_dependency:
reexport_file = self.fd.dependency[reexport_idx]
diff --git a/contrib/python/mypy-protobuf/ya.make b/contrib/python/mypy-protobuf/ya.make
index 6ac24b9c93..efdb299b36 100644
--- a/contrib/python/mypy-protobuf/ya.make
+++ b/contrib/python/mypy-protobuf/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(3.5.0)
+VERSION(3.6.0)
LICENSE(Apache-2.0)
diff --git a/contrib/python/pytest-asyncio/.dist-info/METADATA b/contrib/python/pytest-asyncio/.dist-info/METADATA
new file mode 100644
index 0000000000..c73b027ba5
--- /dev/null
+++ b/contrib/python/pytest-asyncio/.dist-info/METADATA
@@ -0,0 +1,91 @@
+Metadata-Version: 2.1
+Name: pytest-asyncio
+Version: 0.21.1
+Summary: Pytest support for asyncio
+Home-page: https://github.com/pytest-dev/pytest-asyncio
+Author: Tin Tvrtković <tinchester@gmail.com>
+Author-email: tinchester@gmail.com
+License: Apache 2.0
+Project-URL: Documentation, https://pytest-asyncio.readthedocs.io
+Project-URL: Changelog, https://pytest-asyncio.readthedocs.io/en/latest/reference/changelog.html
+Project-URL: Source Code, https://github.com/pytest-dev/pytest-asyncio
+Project-URL: Bug Tracker, https://github.com/pytest-dev/pytest-asyncio/issues
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Topic :: Software Development :: Testing
+Classifier: Framework :: AsyncIO
+Classifier: Framework :: Pytest
+Classifier: Typing :: Typed
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: pytest (>=7.0.0)
+Requires-Dist: typing-extensions (>=3.7.2) ; python_version < "3.8"
+Provides-Extra: docs
+Requires-Dist: sphinx (>=5.3) ; extra == 'docs'
+Requires-Dist: sphinx-rtd-theme (>=1.0) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: coverage (>=6.2) ; extra == 'testing'
+Requires-Dist: hypothesis (>=5.7.1) ; extra == 'testing'
+Requires-Dist: flaky (>=3.5.0) ; extra == 'testing'
+Requires-Dist: mypy (>=0.931) ; extra == 'testing'
+Requires-Dist: pytest-trio (>=0.7.0) ; extra == 'testing'
+
+pytest-asyncio
+==============
+
+.. image:: https://img.shields.io/pypi/v/pytest-asyncio.svg
+ :target: https://pypi.python.org/pypi/pytest-asyncio
+.. image:: https://github.com/pytest-dev/pytest-asyncio/workflows/CI/badge.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio/actions?workflow=CI
+.. image:: https://codecov.io/gh/pytest-dev/pytest-asyncio/branch/main/graph/badge.svg
+ :target: https://codecov.io/gh/pytest-dev/pytest-asyncio
+.. image:: https://img.shields.io/pypi/pyversions/pytest-asyncio.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio
+ :alt: Supported Python versions
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+`pytest-asyncio <https://pytest-asyncio.readthedocs.io/en/latest/>`_ is a `pytest <https://docs.pytest.org/en/latest/contents.html>`_ plugin. It facilitates testing of code that uses the `asyncio <https://docs.python.org/3/library/asyncio.html>`_ library.
+
+Specifically, pytest-asyncio provides support for coroutines as test functions. This allows users to *await* code inside their tests. For example, the following code is executed as a test item by pytest:
+
+.. code-block:: python
+
+ @pytest.mark.asyncio
+ async def test_some_asyncio_code():
+ res = await library.do_something()
+ assert b"expected result" == res
+
+More details can be found in the `documentation <https://pytest-asyncio.readthedocs.io/en/latest/>`_.
+
+Note that test classes subclassing the standard `unittest <https://docs.python.org/3/library/unittest.html>`__ library are not supported. Users
+are advised to use `unittest.IsolatedAsyncioTestCase <https://docs.python.org/3/library/unittest.html#unittest.IsolatedAsyncioTestCase>`__
+or an async framework such as `asynctest <https://asynctest.readthedocs.io/en/latest>`__.
+
+
+pytest-asyncio is available under the `Apache License 2.0 <https://github.com/pytest-dev/pytest-asyncio/blob/main/LICENSE>`_.
+
+
+Installation
+------------
+
+To install pytest-asyncio, simply:
+
+.. code-block:: bash
+
+ $ pip install pytest-asyncio
+
+This is enough for pytest to pick up pytest-asyncio.
+
+
+Contributing
+------------
+Contributions are very welcome. Tests can be run with ``tox``; please ensure
+the coverage at least stays the same before you submit a pull request.
diff --git a/contrib/python/pytest-asyncio/.dist-info/entry_points.txt b/contrib/python/pytest-asyncio/.dist-info/entry_points.txt
new file mode 100644
index 0000000000..88db714dad
--- /dev/null
+++ b/contrib/python/pytest-asyncio/.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[pytest11]
+asyncio = pytest_asyncio.plugin
diff --git a/contrib/python/pytest-asyncio/.dist-info/top_level.txt b/contrib/python/pytest-asyncio/.dist-info/top_level.txt
new file mode 100644
index 0000000000..08d05d1ecf
--- /dev/null
+++ b/contrib/python/pytest-asyncio/.dist-info/top_level.txt
@@ -0,0 +1 @@
+pytest_asyncio
diff --git a/contrib/python/pytest-asyncio/LICENSE b/contrib/python/pytest-asyncio/LICENSE
new file mode 100644
index 0000000000..5c304d1a4a
--- /dev/null
+++ b/contrib/python/pytest-asyncio/LICENSE
@@ -0,0 +1,201 @@
+Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/pytest-asyncio/README.rst b/contrib/python/pytest-asyncio/README.rst
new file mode 100644
index 0000000000..0682b74430
--- /dev/null
+++ b/contrib/python/pytest-asyncio/README.rst
@@ -0,0 +1,52 @@
+pytest-asyncio
+==============
+
+.. image:: https://img.shields.io/pypi/v/pytest-asyncio.svg
+ :target: https://pypi.python.org/pypi/pytest-asyncio
+.. image:: https://github.com/pytest-dev/pytest-asyncio/workflows/CI/badge.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio/actions?workflow=CI
+.. image:: https://codecov.io/gh/pytest-dev/pytest-asyncio/branch/main/graph/badge.svg
+ :target: https://codecov.io/gh/pytest-dev/pytest-asyncio
+.. image:: https://img.shields.io/pypi/pyversions/pytest-asyncio.svg
+ :target: https://github.com/pytest-dev/pytest-asyncio
+ :alt: Supported Python versions
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+
+`pytest-asyncio <https://pytest-asyncio.readthedocs.io/en/latest/>`_ is a `pytest <https://docs.pytest.org/en/latest/contents.html>`_ plugin. It facilitates testing of code that uses the `asyncio <https://docs.python.org/3/library/asyncio.html>`_ library.
+
+Specifically, pytest-asyncio provides support for coroutines as test functions. This allows users to *await* code inside their tests. For example, the following code is executed as a test item by pytest:
+
+.. code-block:: python
+
+ @pytest.mark.asyncio
+ async def test_some_asyncio_code():
+ res = await library.do_something()
+ assert b"expected result" == res
+
+More details can be found in the `documentation <https://pytest-asyncio.readthedocs.io/en/latest/>`_.
+
+Note that test classes subclassing the standard `unittest <https://docs.python.org/3/library/unittest.html>`__ ``TestCase`` are not supported. Users
+are advised to use `unittest.IsolatedAsyncioTestCase <https://docs.python.org/3/library/unittest.html#unittest.IsolatedAsyncioTestCase>`__
+or an async framework such as `asynctest <https://asynctest.readthedocs.io/en/latest>`__.
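+
+As a rough sketch (not taken from the upstream documentation), an async test in a
+``unittest``-style class can instead be written with ``unittest.IsolatedAsyncioTestCase``;
+``fake_do_something`` below merely stands in for the ``library.do_something()`` call
+from the example above:
+
+.. code-block:: python
+
+    import unittest
+
+
+    async def fake_do_something() -> bytes:
+        return b"expected result"
+
+
+    class TestSomething(unittest.IsolatedAsyncioTestCase):
+        async def test_some_asyncio_code(self):
+            res = await fake_do_something()
+            self.assertEqual(b"expected result", res)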
+
+
+pytest-asyncio is available under the `Apache License 2.0 <https://github.com/pytest-dev/pytest-asyncio/blob/main/LICENSE>`_.
+
+
+Installation
+------------
+
+To install pytest-asyncio, simply:
+
+.. code-block:: bash
+
+ $ pip install pytest-asyncio
+
+This is enough for pytest to pick up pytest-asyncio.
+
+
+Contributing
+------------
+Contributions are very welcome. Tests can be run with ``tox``; please ensure
+that coverage at least stays the same before you submit a pull request.
diff --git a/contrib/python/pytest-asyncio/pytest_asyncio/__init__.py b/contrib/python/pytest-asyncio/pytest_asyncio/__init__.py
new file mode 100644
index 0000000000..1bc2811d93
--- /dev/null
+++ b/contrib/python/pytest-asyncio/pytest_asyncio/__init__.py
@@ -0,0 +1,5 @@
+"""The main point for importing pytest-asyncio items."""
+from ._version import version as __version__ # noqa
+from .plugin import fixture
+
+__all__ = ("fixture",)
diff --git a/contrib/python/pytest-asyncio/pytest_asyncio/_version.py b/contrib/python/pytest-asyncio/pytest_asyncio/_version.py
new file mode 100644
index 0000000000..11f23015fb
--- /dev/null
+++ b/contrib/python/pytest-asyncio/pytest_asyncio/_version.py
@@ -0,0 +1,4 @@
+# file generated by setuptools_scm
+# don't change, don't track in version control
+__version__ = version = '0.21.1'
+__version_tuple__ = version_tuple = (0, 21, 1)
diff --git a/contrib/python/pytest-asyncio/pytest_asyncio/plugin.py b/contrib/python/pytest-asyncio/pytest_asyncio/plugin.py
new file mode 100644
index 0000000000..db93b851de
--- /dev/null
+++ b/contrib/python/pytest-asyncio/pytest_asyncio/plugin.py
@@ -0,0 +1,624 @@
+"""pytest-asyncio implementation."""
+import asyncio
+import contextlib
+import enum
+import functools
+import inspect
+import socket
+import sys
+import warnings
+from textwrap import dedent
+from typing import (
+ Any,
+ AsyncIterator,
+ Awaitable,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ TypeVar,
+ Union,
+ cast,
+ overload,
+)
+
+import pytest
+from pytest import (
+ Config,
+ FixtureRequest,
+ Function,
+ Item,
+ Parser,
+ PytestPluginManager,
+ Session,
+)
+
+if sys.version_info >= (3, 8):
+ from typing import Literal
+else:
+ from typing_extensions import Literal
+
+_R = TypeVar("_R")
+
+_ScopeName = Literal["session", "package", "module", "class", "function"]
+_T = TypeVar("_T")
+
+SimpleFixtureFunction = TypeVar(
+ "SimpleFixtureFunction", bound=Callable[..., Awaitable[_R]]
+)
+FactoryFixtureFunction = TypeVar(
+ "FactoryFixtureFunction", bound=Callable[..., AsyncIterator[_R]]
+)
+FixtureFunction = Union[SimpleFixtureFunction, FactoryFixtureFunction]
+FixtureFunctionMarker = Callable[[FixtureFunction], FixtureFunction]
+
+# https://github.com/pytest-dev/pytest/pull/9510
+FixtureDef = Any
+SubRequest = Any
+
+
+class Mode(str, enum.Enum):
+ AUTO = "auto"
+ STRICT = "strict"
+
+
+ASYNCIO_MODE_HELP = """\
+'auto' - handle all async test functions and fixtures automatically
+'strict' - disable automatic processing (useful if different async frameworks \
+should be tested together, e.g. \
+both pytest-asyncio and pytest-trio are used in the same project)
+"""
+
+
+def pytest_addoption(parser: Parser, pluginmanager: PytestPluginManager) -> None:
+ group = parser.getgroup("asyncio")
+ group.addoption(
+ "--asyncio-mode",
+ dest="asyncio_mode",
+ default=None,
+ metavar="MODE",
+ help=ASYNCIO_MODE_HELP,
+ )
+ parser.addini(
+ "asyncio_mode",
+ help="default value for --asyncio-mode",
+ default="auto",
+ )
+
+
+@overload
+def fixture(
+ fixture_function: FixtureFunction,
+ *,
+ scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ...,
+ params: Optional[Iterable[object]] = ...,
+ autouse: bool = ...,
+ ids: Union[
+ Iterable[Union[str, float, int, bool, None]],
+ Callable[[Any], Optional[object]],
+ None,
+ ] = ...,
+ name: Optional[str] = ...,
+) -> FixtureFunction:
+ ...
+
+
+@overload
+def fixture(
+ fixture_function: None = ...,
+ *,
+ scope: "Union[_ScopeName, Callable[[str, Config], _ScopeName]]" = ...,
+ params: Optional[Iterable[object]] = ...,
+ autouse: bool = ...,
+ ids: Union[
+ Iterable[Union[str, float, int, bool, None]],
+ Callable[[Any], Optional[object]],
+ None,
+ ] = ...,
+ name: Optional[str] = None,
+) -> FixtureFunctionMarker:
+ ...
+
+
+def fixture(
+ fixture_function: Optional[FixtureFunction] = None, **kwargs: Any
+) -> Union[FixtureFunction, FixtureFunctionMarker]:
+ if fixture_function is not None:
+ _make_asyncio_fixture_function(fixture_function)
+ return pytest.fixture(fixture_function, **kwargs)
+
+ else:
+
+ @functools.wraps(fixture)
+ def inner(fixture_function: FixtureFunction) -> FixtureFunction:
+ return fixture(fixture_function, **kwargs)
+
+ return inner
+
+
+def _is_asyncio_fixture_function(obj: Any) -> bool:
+ obj = getattr(obj, "__func__", obj) # instance method maybe?
+ return getattr(obj, "_force_asyncio_fixture", False)
+
+
+def _make_asyncio_fixture_function(obj: Any) -> None:
+ if hasattr(obj, "__func__"):
+ # instance method, check the function object
+ obj = obj.__func__
+ obj._force_asyncio_fixture = True
+
+
+def _is_coroutine(obj: Any) -> bool:
+ """Check to see if an object is really an asyncio coroutine."""
+ return asyncio.iscoroutinefunction(obj)
+
+
+def _is_coroutine_or_asyncgen(obj: Any) -> bool:
+ return _is_coroutine(obj) or inspect.isasyncgenfunction(obj)
+
+
+def _get_asyncio_mode(config: Config) -> Mode:
+ val = config.getoption("asyncio_mode")
+ if val is None:
+ val = config.getini("asyncio_mode")
+ try:
+ return Mode(val)
+ except ValueError:
+ modes = ", ".join(m.value for m in Mode)
+ raise pytest.UsageError(
+ f"{val!r} is not a valid asyncio_mode. Valid modes: {modes}."
+ )
+
+
+def pytest_configure(config: Config) -> None:
+ """Inject documentation."""
+ config.addinivalue_line(
+ "markers",
+ "asyncio: "
+        "mark the test as a coroutine; it will be "
+        "run using an asyncio event loop",
+ )
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_report_header(config: Config) -> List[str]:
+ """Add asyncio config to pytest header."""
+ mode = _get_asyncio_mode(config)
+ return [f"asyncio: mode={mode}"]
+
+
+def _preprocess_async_fixtures(
+ config: Config,
+ processed_fixturedefs: Set[FixtureDef],
+) -> None:
+ asyncio_mode = _get_asyncio_mode(config)
+ fixturemanager = config.pluginmanager.get_plugin("funcmanage")
+ for fixtures in fixturemanager._arg2fixturedefs.values():
+ for fixturedef in fixtures:
+ func = fixturedef.func
+ if fixturedef in processed_fixturedefs or not _is_coroutine_or_asyncgen(
+ func
+ ):
+ continue
+ if not _is_asyncio_fixture_function(func) and asyncio_mode == Mode.STRICT:
+ # Ignore async fixtures without explicit asyncio mark in strict mode
+ # This applies to pytest_trio fixtures, for example
+ continue
+ _make_asyncio_fixture_function(func)
+ _inject_fixture_argnames(fixturedef)
+ _synchronize_async_fixture(fixturedef)
+ assert _is_asyncio_fixture_function(fixturedef.func)
+ processed_fixturedefs.add(fixturedef)
+
+
+def _inject_fixture_argnames(fixturedef: FixtureDef) -> None:
+ """
+ Ensures that `request` and `event_loop` are arguments of the specified fixture.
+ """
+ to_add = []
+ for name in ("request", "event_loop"):
+ if name not in fixturedef.argnames:
+ to_add.append(name)
+ if to_add:
+ fixturedef.argnames += tuple(to_add)
+
+
+def _synchronize_async_fixture(fixturedef: FixtureDef) -> None:
+ """
+ Wraps the fixture function of an async fixture in a synchronous function.
+ """
+ if inspect.isasyncgenfunction(fixturedef.func):
+ _wrap_asyncgen_fixture(fixturedef)
+ elif inspect.iscoroutinefunction(fixturedef.func):
+ _wrap_async_fixture(fixturedef)
+
+
+def _add_kwargs(
+ func: Callable[..., Any],
+ kwargs: Dict[str, Any],
+ event_loop: asyncio.AbstractEventLoop,
+ request: SubRequest,
+) -> Dict[str, Any]:
+ sig = inspect.signature(func)
+ ret = kwargs.copy()
+ if "request" in sig.parameters:
+ ret["request"] = request
+ if "event_loop" in sig.parameters:
+ ret["event_loop"] = event_loop
+ return ret
+
+
+def _perhaps_rebind_fixture_func(
+ func: _T, instance: Optional[Any], unittest: bool
+) -> _T:
+ if instance is not None:
+ # The fixture needs to be bound to the actual request.instance
+ # so it is bound to the same object as the test method.
+ unbound, cls = func, None
+ try:
+ unbound, cls = func.__func__, type(func.__self__) # type: ignore
+ except AttributeError:
+ pass
+        # If unittest is true, the fixture is bound unconditionally.
+        # Otherwise, it is bound only if the fixture was previously bound
+        # to an instance of the same type.
+ if unittest or (cls is not None and isinstance(instance, cls)):
+ func = unbound.__get__(instance) # type: ignore
+ return func
+
+
+def _wrap_asyncgen_fixture(fixturedef: FixtureDef) -> None:
+ fixture = fixturedef.func
+
+ @functools.wraps(fixture)
+ def _asyncgen_fixture_wrapper(
+ event_loop: asyncio.AbstractEventLoop, request: SubRequest, **kwargs: Any
+ ):
+ func = _perhaps_rebind_fixture_func(
+ fixture, request.instance, fixturedef.unittest
+ )
+ gen_obj = func(**_add_kwargs(func, kwargs, event_loop, request))
+
+ async def setup():
+ res = await gen_obj.__anext__()
+ return res
+
+ def finalizer() -> None:
+ """Yield again, to finalize."""
+
+ async def async_finalizer() -> None:
+ try:
+ await gen_obj.__anext__()
+ except StopAsyncIteration:
+ pass
+ else:
+                    msg = "Async generator fixture didn't stop."
+                    msg += " Yield only once."
+ raise ValueError(msg)
+
+ event_loop.run_until_complete(async_finalizer())
+
+ result = event_loop.run_until_complete(setup())
+ request.addfinalizer(finalizer)
+ return result
+
+ fixturedef.func = _asyncgen_fixture_wrapper
+
+
+def _wrap_async_fixture(fixturedef: FixtureDef) -> None:
+ fixture = fixturedef.func
+
+ @functools.wraps(fixture)
+ def _async_fixture_wrapper(
+ event_loop: asyncio.AbstractEventLoop, request: SubRequest, **kwargs: Any
+ ):
+ func = _perhaps_rebind_fixture_func(
+ fixture, request.instance, fixturedef.unittest
+ )
+
+ async def setup():
+ res = await func(**_add_kwargs(func, kwargs, event_loop, request))
+ return res
+
+ return event_loop.run_until_complete(setup())
+
+ fixturedef.func = _async_fixture_wrapper
+
+
+_HOLDER: Set[FixtureDef] = set()
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_pycollect_makeitem(
+ collector: Union[pytest.Module, pytest.Class], name: str, obj: object
+) -> Union[
+ pytest.Item, pytest.Collector, List[Union[pytest.Item, pytest.Collector]], None
+]:
+ """A pytest hook to collect asyncio coroutines."""
+ if not collector.funcnamefilter(name):
+ return None
+ _preprocess_async_fixtures(collector.config, _HOLDER)
+ return None
+
+
+def pytest_collection_modifyitems(
+ session: Session, config: Config, items: List[Item]
+) -> None:
+ """
+ Marks collected async test items as `asyncio` tests.
+
+ The mark is only applied in `AUTO` mode. It is applied to:
+
+ - coroutines
+ - staticmethods wrapping coroutines
+ - Hypothesis tests wrapping coroutines
+
+ """
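+    # Illustrative sketch (not part of the upstream source): in auto mode the
+    # marker ends up on items such as
+    #
+    #     async def test_plain():              # plain coroutine test
+    #         ...
+    #
+    #     @given(st.integers())                # Hypothesis test whose inner
+    #     async def test_fuzzed(value):        # test is a coroutine
+    #         ...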
+ if _get_asyncio_mode(config) != Mode.AUTO:
+ return
+ function_items = (item for item in items if isinstance(item, Function))
+ for function_item in function_items:
+ function = function_item.obj
+ if isinstance(function, staticmethod):
+ # staticmethods need to be unwrapped.
+ function = function.__func__
+ if (
+ _is_coroutine(function)
+ or _is_hypothesis_test(function)
+ and _hypothesis_test_wraps_coroutine(function)
+ ):
+ function_item.add_marker("asyncio")
+
+
+def _hypothesis_test_wraps_coroutine(function: Any) -> bool:
+ return _is_coroutine(function.hypothesis.inner_test)
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_fixture_setup(
+ fixturedef: FixtureDef, request: SubRequest
+) -> Optional[object]:
+ """Adjust the event loop policy when an event loop is produced."""
+ if fixturedef.argname == "event_loop":
+ # The use of a fixture finalizer is preferred over the
+ # pytest_fixture_post_finalizer hook. The fixture finalizer is invoked once
+ # for each fixture, whereas the hook may be invoked multiple times for
+ # any specific fixture.
+ # see https://github.com/pytest-dev/pytest/issues/5848
+ _add_finalizers(
+ fixturedef,
+ _close_event_loop,
+ _provide_clean_event_loop,
+ )
+ outcome = yield
+ loop = outcome.get_result()
+ policy = asyncio.get_event_loop_policy()
+ try:
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ old_loop = policy.get_event_loop()
+ if old_loop is not loop:
+ old_loop.close()
+ except RuntimeError:
+ # Either the current event loop has been set to None
+ # or the loop policy doesn't specify to create new loops
+ # or we're not in the main thread
+ pass
+ policy.set_event_loop(loop)
+ return
+
+ yield
+
+
+def _add_finalizers(fixturedef: FixtureDef, *finalizers: Callable[[], object]) -> None:
+ """
+    Registers the specified fixture finalizers on the fixture.
+
+    Finalizers need to be specified in the exact order in which they should be invoked.
+
+    :param fixturedef: Fixture definition to which the finalizers should be added
+ :param finalizers: Finalizers to be added
+ """
+ for finalizer in reversed(finalizers):
+ fixturedef.addfinalizer(finalizer)
+
+
+_UNCLOSED_EVENT_LOOP_WARNING = dedent(
+ """\
+ pytest-asyncio detected an unclosed event loop when tearing down the event_loop
+ fixture: %r
+ pytest-asyncio will close the event loop for you, but future versions of the
+ library will no longer do so. In order to ensure compatibility with future
+ versions, please make sure that:
+ 1. Any custom "event_loop" fixture properly closes the loop after yielding it
+ 2. The scopes of your custom "event_loop" fixtures do not overlap
+ 3. Your code does not modify the event loop in async fixtures or tests
+ """
+)
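+# Illustrative sketch (not part of the upstream source): a custom "event_loop"
+# fixture that satisfies the requirements above closes the loop after yielding
+# it, e.g.
+#
+#     @pytest.fixture(scope="module")
+#     def event_loop():
+#         loop = asyncio.new_event_loop()
+#         yield loop
+#         loop.close()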
+
+
+def _close_event_loop() -> None:
+ policy = asyncio.get_event_loop_policy()
+ try:
+ loop = policy.get_event_loop()
+ except RuntimeError:
+ loop = None
+ if loop is not None:
+ if not loop.is_closed():
+ warnings.warn(
+ _UNCLOSED_EVENT_LOOP_WARNING % loop,
+ DeprecationWarning,
+ )
+ loop.close()
+
+
+def _provide_clean_event_loop() -> None:
+ # At this point, the event loop for the current thread is closed.
+ # When a user calls asyncio.get_event_loop(), they will get a closed loop.
+ # In order to avoid this side effect from pytest-asyncio, we need to replace
+ # the current loop with a fresh one.
+ # Note that we cannot set the loop to None, because get_event_loop only creates
+    # a new loop when set_event_loop has not been called.
+ policy = asyncio.get_event_loop_policy()
+ new_loop = policy.new_event_loop()
+ policy.set_event_loop(new_loop)
+
+
+@pytest.hookimpl(tryfirst=True, hookwrapper=True)
+def pytest_pyfunc_call(pyfuncitem: pytest.Function) -> Optional[object]:
+ """
+ Pytest hook called before a test case is run.
+
+ Wraps marked tests in a synchronous function
+ where the wrapped test coroutine is executed in an event loop.
+ """
+ marker = pyfuncitem.get_closest_marker("asyncio")
+ if marker is not None:
+ funcargs: Dict[str, object] = pyfuncitem.funcargs # type: ignore[name-defined]
+ loop = cast(asyncio.AbstractEventLoop, funcargs["event_loop"])
+ if _is_hypothesis_test(pyfuncitem.obj):
+ pyfuncitem.obj.hypothesis.inner_test = wrap_in_sync(
+ pyfuncitem,
+ pyfuncitem.obj.hypothesis.inner_test,
+ _loop=loop,
+ )
+ else:
+ pyfuncitem.obj = wrap_in_sync(
+ pyfuncitem,
+ pyfuncitem.obj,
+ _loop=loop,
+ )
+ yield
+
+
+def _is_hypothesis_test(function: Any) -> bool:
+ return getattr(function, "is_hypothesis_test", False)
+
+
+def wrap_in_sync(
+ pyfuncitem: pytest.Function,
+ func: Callable[..., Awaitable[Any]],
+ _loop: asyncio.AbstractEventLoop,
+):
+ """Return a sync wrapper around an async function executing it in the
+ current event loop."""
+
+ # if the function is already wrapped, we rewrap using the original one
+ # not using __wrapped__ because the original function may already be
+ # a wrapped one
+ raw_func = getattr(func, "_raw_test_func", None)
+ if raw_func is not None:
+ func = raw_func
+
+ @functools.wraps(func)
+ def inner(*args, **kwargs):
+ coro = func(*args, **kwargs)
+ if not inspect.isawaitable(coro):
+ pyfuncitem.warn(
+ pytest.PytestWarning(
+ f"The test {pyfuncitem} is marked with '@pytest.mark.asyncio' "
+ "but it is not an async function. "
+ "Please remove asyncio marker. "
+ "If the test is not marked explicitly, "
+ "check for global markers applied via 'pytestmark'."
+ )
+ )
+ return
+ task = asyncio.ensure_future(coro, loop=_loop)
+ try:
+ return _loop.run_until_complete(task)
+ except BaseException:
+ # run_until_complete doesn't get the result from exceptions
+ # that are not subclasses of `Exception`. Consume all
+ # exceptions to prevent asyncio's warning from logging.
+ if task.done() and not task.cancelled():
+ task.exception()
+ raise
+
+ inner._raw_test_func = func # type: ignore[attr-defined]
+ return inner
+
+
+def pytest_runtest_setup(item: pytest.Item) -> None:
+ marker = item.get_closest_marker("asyncio")
+ if marker is None:
+ return
+ fixturenames = item.fixturenames # type: ignore[attr-defined]
+ # inject an event loop fixture for all async tests
+ if "event_loop" in fixturenames:
+ fixturenames.remove("event_loop")
+ fixturenames.insert(0, "event_loop")
+ obj = getattr(item, "obj", None)
+ if not getattr(obj, "hypothesis", False) and getattr(
+ obj, "is_hypothesis_test", False
+ ):
+ pytest.fail(
+ "test function `%r` is using Hypothesis, but pytest-asyncio "
+ "only works with Hypothesis 3.64.0 or later." % item
+ )
+
+
+@pytest.fixture
+def event_loop(request: FixtureRequest) -> Iterator[asyncio.AbstractEventLoop]:
+ """Create an instance of the default event loop for each test case."""
+ loop = asyncio.get_event_loop_policy().new_event_loop()
+ yield loop
+ loop.close()
+
+
+def _unused_port(socket_type: int) -> int:
+ """Find an unused localhost port from 1024-65535 and return it."""
+ with contextlib.closing(socket.socket(type=socket_type)) as sock:
+ sock.bind(("127.0.0.1", 0))
+ return sock.getsockname()[1]
+
+
+@pytest.fixture
+def unused_tcp_port() -> int:
+ return _unused_port(socket.SOCK_STREAM)
+
+
+@pytest.fixture
+def unused_udp_port() -> int:
+ return _unused_port(socket.SOCK_DGRAM)
+
+
+@pytest.fixture(scope="session")
+def unused_tcp_port_factory() -> Callable[[], int]:
+ """A factory function, producing different unused TCP ports."""
+ produced = set()
+
+ def factory():
+ """Return an unused port."""
+ port = _unused_port(socket.SOCK_STREAM)
+
+ while port in produced:
+ port = _unused_port(socket.SOCK_STREAM)
+
+ produced.add(port)
+
+ return port
+
+ return factory
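+
+# Illustrative sketch (not part of the upstream source): a test that needs
+# several distinct ports can call the factory repeatedly, e.g.
+#
+#     def test_start_two_servers(unused_tcp_port_factory):
+#         port_one = unused_tcp_port_factory()
+#         port_two = unused_tcp_port_factory()
+#         assert port_one != port_two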
+
+
+@pytest.fixture(scope="session")
+def unused_udp_port_factory() -> Callable[[], int]:
+ """A factory function, producing different unused UDP ports."""
+ produced = set()
+
+ def factory():
+ """Return an unused port."""
+ port = _unused_port(socket.SOCK_DGRAM)
+
+ while port in produced:
+ port = _unused_port(socket.SOCK_DGRAM)
+
+ produced.add(port)
+
+ return port
+
+ return factory
diff --git a/contrib/python/pytest-asyncio/pytest_asyncio/py.typed b/contrib/python/pytest-asyncio/pytest_asyncio/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/pytest-asyncio/pytest_asyncio/py.typed
diff --git a/contrib/python/pytest-asyncio/ya.make b/contrib/python/pytest-asyncio/ya.make
new file mode 100644
index 0000000000..e3918ea4b0
--- /dev/null
+++ b/contrib/python/pytest-asyncio/ya.make
@@ -0,0 +1,30 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(0.21.1)
+
+LICENSE(Apache-2.0)
+
+PEERDIR(
+ contrib/python/pytest
+)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ pytest_asyncio/__init__.py
+ pytest_asyncio/_version.py
+ pytest_asyncio/plugin.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/pytest-asyncio/
+ .dist-info/METADATA
+ .dist-info/entry_points.txt
+ .dist-info/top_level.txt
+ pytest_asyncio/py.typed
+)
+
+END()
diff --git a/contrib/python/types-protobuf/.dist-info/METADATA b/contrib/python/types-protobuf/.dist-info/METADATA
index ce1d526999..d4c7134a6a 100644
--- a/contrib/python/types-protobuf/.dist-info/METADATA
+++ b/contrib/python/types-protobuf/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: types-protobuf
-Version: 5.27.0.20240907
+Version: 5.27.0.20240920
Summary: Typing stubs for protobuf
Home-page: https://github.com/python/typeshed
License: Apache-2.0
@@ -31,7 +31,7 @@ The source for this package can be found at
https://github.com/python/typeshed/tree/main/stubs/protobuf. All fixes for
types and metadata should be contributed there.
-Partially generated using [mypy-protobuf==3.6.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.6.0) and libprotoc 26.1 on [protobuf v27.1](https://github.com/protocolbuffers/protobuf/releases/tag/v27.1) (python protobuf==5.27.1).
+Partially generated using [mypy-protobuf==3.6.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.6.0) and libprotoc 26.1 on [protobuf v27.1](https://github.com/protocolbuffers/protobuf/releases/tag/v27.1) (python `protobuf==5.27.1`).
This stub package is marked as [partial](https://peps.python.org/pep-0561/#partial-stub-packages).
If you find that annotations are missing, feel free to contribute and help complete them.
@@ -39,6 +39,6 @@ If you find that annotations are missing, feel free to contribute and help compl
See https://github.com/python/typeshed/blob/main/README.md for more details.
This package was generated from typeshed commit
-[`e8e9291c76f50c3bcde79e7bb61060f5c24c054e`](https://github.com/python/typeshed/commit/e8e9291c76f50c3bcde79e7bb61060f5c24c054e) and was tested
-with mypy 1.11.1, pyright 1.1.379, and
-pytype 2024.4.11.
+[`c025e37bbb67f99f9d83c2eecb79e7405f77359b`](https://github.com/python/typeshed/commit/c025e37bbb67f99f9d83c2eecb79e7405f77359b) and was tested
+with mypy 1.11.1, pyright 1.1.381, and
+pytype 2024.9.13.
diff --git a/contrib/python/types-protobuf/google-stubs/METADATA.toml b/contrib/python/types-protobuf/google-stubs/METADATA.toml
index 9e402da864..f08d51e7fc 100644
--- a/contrib/python/types-protobuf/google-stubs/METADATA.toml
+++ b/contrib/python/types-protobuf/google-stubs/METADATA.toml
@@ -1,8 +1,8 @@
-# Whenever you update version here, PROTOBUF_VERSION should be updated
-# in scripts/generate_proto_stubs.sh and vice-versa.
+# Whenever you update version here, PACKAGE_VERSION should be updated
+# in scripts/sync_proto/google_protobuf.py and vice-versa.
version = "5.27.*"
upstream_repository = "https://github.com/protocolbuffers/protobuf"
-extra_description = "Partially generated using [mypy-protobuf==3.6.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.6.0) and libprotoc 26.1 on [protobuf v27.1](https://github.com/protocolbuffers/protobuf/releases/tag/v27.1) (python protobuf==5.27.1)."
+extra_description = "Partially generated using [mypy-protobuf==3.6.0](https://github.com/nipunn1313/mypy-protobuf/tree/v3.6.0) and libprotoc 26.1 on [protobuf v27.1](https://github.com/protocolbuffers/protobuf/releases/tag/v27.1) (python `protobuf==5.27.1`)."
partial_stub = true
[tool.stubtest]
diff --git a/contrib/python/types-protobuf/ya.make b/contrib/python/types-protobuf/ya.make
index f2e94d656a..d817e8af94 100644
--- a/contrib/python/types-protobuf/ya.make
+++ b/contrib/python/types-protobuf/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(5.27.0.20240907)
+VERSION(5.27.0.20240920)
LICENSE(Apache-2.0)