author     robot-piglet <robot-piglet@yandex-team.com>  2024-08-14 21:58:27 +0300
committer  robot-piglet <robot-piglet@yandex-team.com>  2024-08-14 22:05:52 +0300
commit     0634a119529d4799e23b20e067daf2020d8fbc3c (patch)
tree       be0c02545920fe983ed9c93b2263b9597f57d615
parent     0cde7699c40c90a05b1545e8f58abeb41cc4718e (diff)
download   ydb-0634a119529d4799e23b20e067daf2020d8fbc3c.tar.gz
Intermediate changes
-rw-r--r--  contrib/python/Automat/ya.make  2
-rw-r--r--  contrib/python/Flask-Cors/ya.make  2
-rw-r--r--  contrib/python/Flask/ya.make  2
-rw-r--r--  contrib/python/Jinja2/ya.make  2
-rw-r--r--  contrib/python/MarkupSafe/ya.make  2
-rw-r--r--  contrib/python/Pillow/ya.make  2
-rw-r--r--  contrib/python/PyHamcrest/ya.make  2
-rw-r--r--  contrib/python/PyJWT/ya.make  2
-rw-r--r--  contrib/python/PySocks/ya.make  2
-rw-r--r--  contrib/python/Pygments/ya.make  2
-rw-r--r--  contrib/python/Twisted/ya.make  2
-rw-r--r--  contrib/python/Werkzeug/ya.make  2
-rw-r--r--  contrib/python/aiohttp/.dist-info/METADATA  64
-rw-r--r--  contrib/python/aiohttp/LICENSE.txt  2
-rw-r--r--  contrib/python/aiohttp/README.rst  28
-rw-r--r--  contrib/python/aiohttp/aiohttp/__init__.py  38
-rw-r--r--  contrib/python/aiohttp/aiohttp/_cparser.pxd  40
-rw-r--r--  contrib/python/aiohttp/aiohttp/_http_parser.pyx  128
-rw-r--r--  contrib/python/aiohttp/aiohttp/abc.py  10
-rw-r--r--  contrib/python/aiohttp/aiohttp/base_protocol.py  22
-rw-r--r--  contrib/python/aiohttp/aiohttp/client.py  202
-rw-r--r--  contrib/python/aiohttp/aiohttp/client_exceptions.py  18
-rw-r--r--  contrib/python/aiohttp/aiohttp/client_proto.py  89
-rw-r--r--  contrib/python/aiohttp/aiohttp/client_reqrep.py  312
-rw-r--r--  contrib/python/aiohttp/aiohttp/client_ws.py  33
-rw-r--r--  contrib/python/aiohttp/aiohttp/compression_utils.py  157
-rw-r--r--  contrib/python/aiohttp/aiohttp/connector.py  252
-rw-r--r--  contrib/python/aiohttp/aiohttp/cookiejar.py  128
-rw-r--r--  contrib/python/aiohttp/aiohttp/formdata.py  16
-rw-r--r--  contrib/python/aiohttp/aiohttp/hdrs.py  8
-rw-r--r--  contrib/python/aiohttp/aiohttp/helpers.py  351
-rw-r--r--  contrib/python/aiohttp/aiohttp/http.py  12
-rw-r--r--  contrib/python/aiohttp/aiohttp/http_exceptions.py  17
-rw-r--r--  contrib/python/aiohttp/aiohttp/http_parser.py  429
-rw-r--r--  contrib/python/aiohttp/aiohttp/http_websocket.py  111
-rw-r--r--  contrib/python/aiohttp/aiohttp/http_writer.py  28
-rw-r--r--  contrib/python/aiohttp/aiohttp/locks.py  4
-rw-r--r--  contrib/python/aiohttp/aiohttp/multipart.py  208
-rw-r--r--  contrib/python/aiohttp/aiohttp/payload.py  42
-rw-r--r--  contrib/python/aiohttp/aiohttp/payload_streamer.py  4
-rw-r--r--  contrib/python/aiohttp/aiohttp/pytest_plugin.py  40
-rw-r--r--  contrib/python/aiohttp/aiohttp/resolver.py  2
-rw-r--r--  contrib/python/aiohttp/aiohttp/streams.py  108
-rw-r--r--  contrib/python/aiohttp/aiohttp/tcp_helpers.py  1
-rw-r--r--  contrib/python/aiohttp/aiohttp/test_utils.py  110
-rw-r--r--  contrib/python/aiohttp/aiohttp/tracing.py  99
-rw-r--r--  contrib/python/aiohttp/aiohttp/typedefs.py  14
-rw-r--r--  contrib/python/aiohttp/aiohttp/web.py  84
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_app.py  107
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_exceptions.py  35
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_fileresponse.py  57
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_log.py  11
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_middlewares.py  17
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_protocol.py  51
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_request.py  105
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_response.py  168
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_routedef.py  7
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_runner.py  78
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_server.py  21
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_urldispatcher.py  84
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_ws.py  178
-rw-r--r--  contrib/python/aiohttp/aiohttp/worker.py  38
-rw-r--r--  contrib/python/aiohttp/ya.make  5
-rw-r--r--  contrib/python/appnope/ya.make  2
-rw-r--r--  contrib/python/argcomplete/ya.make  2
-rw-r--r--  contrib/python/asn1crypto/ya.make  2
-rw-r--r--  contrib/python/async-timeout/.dist-info/METADATA  131
-rw-r--r--  contrib/python/async-timeout/.dist-info/top_level.txt  1
-rw-r--r--  contrib/python/async-timeout/LICENSE  13
-rw-r--r--  contrib/python/async-timeout/README.rst  100
-rw-r--r--  contrib/python/async-timeout/async_timeout/__init__.py  239
-rw-r--r--  contrib/python/async-timeout/async_timeout/py.typed  1
-rw-r--r--  contrib/python/async-timeout/ya.make  23
-rw-r--r--  contrib/python/atomicwrites/ya.make  2
-rw-r--r--  contrib/python/attrs/ya.make  2
-rw-r--r--  contrib/python/blinker/ya.make  2
-rw-r--r--  contrib/python/boto/ya.make  2
-rw-r--r--  contrib/python/boto3/ya.make  2
-rw-r--r--  contrib/python/botocore/ya.make  2
-rw-r--r--  contrib/python/cachetools/ya.make  2
-rw-r--r--  contrib/python/certifi/ya.make  2
-rw-r--r--  contrib/python/cffi/ya.make  2
-rw-r--r--  contrib/python/chardet/ya.make  2
-rw-r--r--  contrib/python/click/ya.make  2
-rw-r--r--  contrib/python/colorama/ya.make  2
-rw-r--r--  contrib/python/constantly/ya.make  2
-rw-r--r--  contrib/python/contextlib2/ya.make  2
-rw-r--r--  contrib/python/cookies/ya.make  2
-rw-r--r--  contrib/python/cryptography/ya.make  2
-rw-r--r--  contrib/python/cycler/ya.make  2
-rw-r--r--  contrib/python/decorator/ya.make  2
-rw-r--r--  contrib/python/freezegun/ya.make  2
-rw-r--r--  contrib/python/funcsigs/ya.make  2
-rw-r--r--  contrib/python/future/ya.make  2
-rw-r--r--  contrib/python/google-auth/ya.make  2
-rw-r--r--  contrib/python/grpcio/ya.make  2
-rw-r--r--  contrib/python/httplib2/ya.make  2
-rw-r--r--  contrib/python/hyperlink/ya.make  2
-rw-r--r--  contrib/python/hypothesis/ya.make  2
-rw-r--r--  contrib/python/idna/ya.make  2
-rw-r--r--  contrib/python/importlib-metadata/ya.make  2
-rw-r--r--  contrib/python/incremental/ya.make  2
-rw-r--r--  contrib/python/ipdb/ya.make  2
-rw-r--r--  contrib/python/ipython-genutils/ya.make  2
-rw-r--r--  contrib/python/ipython/ya.make  2
-rw-r--r--  contrib/python/itsdangerous/ya.make  2
-rw-r--r--  contrib/python/jedi/ya.make  2
-rw-r--r--  contrib/python/jmespath/ya.make  2
-rw-r--r--  contrib/python/jsonschema/ya.make  2
-rw-r--r--  contrib/python/kiwisolver/ya.make  2
-rw-r--r--  contrib/python/lz4/ya.make  2
-rw-r--r--  contrib/python/matplotlib/ya.make  2
-rw-r--r--  contrib/python/mock/ya.make  2
-rw-r--r--  contrib/python/monotonic/ya.make  2
-rw-r--r--  contrib/python/more-itertools/ya.make  2
-rw-r--r--  contrib/python/moto/ya.make  2
-rw-r--r--  contrib/python/numpy/ya.make  2
-rw-r--r--  contrib/python/oauth2client/ya.make  2
-rw-r--r--  contrib/python/olefile/ya.make  2
-rw-r--r--  contrib/python/packaging/ya.make  2
-rw-r--r--  contrib/python/pandas/ya.make  2
-rw-r--r--  contrib/python/parameterized/ya.make  2
-rw-r--r--  contrib/python/parso/ya.make  2
-rw-r--r--  contrib/python/pathlib2/ya.make  2
-rw-r--r--  contrib/python/pexpect/ya.make  2
-rw-r--r--  contrib/python/pickleshare/ya.make  2
-rw-r--r--  contrib/python/pluggy/ya.make  2
-rw-r--r--  contrib/python/ply/ya.make  2
-rw-r--r--  contrib/python/prettytable/ya.make  2
-rw-r--r--  contrib/python/prompt-toolkit/ya.make  2
-rw-r--r--  contrib/python/protobuf/ya.make  2
-rw-r--r--  contrib/python/psutil/ya.make  2
-rw-r--r--  contrib/python/ptyprocess/ya.make  2
-rw-r--r--  contrib/python/py/ya.make  2
-rw-r--r--  contrib/python/pyOpenSSL/ya.make  2
-rw-r--r--  contrib/python/pyasn1-modules/ya.make  2
-rw-r--r--  contrib/python/pyasn1/ya.make  2
-rw-r--r--  contrib/python/pycparser/ya.make  2
-rw-r--r--  contrib/python/pyparsing/ya.make  2
-rw-r--r--  contrib/python/pyrsistent/ya.make  2
-rw-r--r--  contrib/python/pytest-localserver/ya.make  2
-rw-r--r--  contrib/python/pytest-mock/ya.make  2
-rw-r--r--  contrib/python/pytest/ya.make  2
-rw-r--r--  contrib/python/python-dateutil/ya.make  2
-rw-r--r--  contrib/python/pytz/ya.make  2
-rw-r--r--  contrib/python/requests-mock/ya.make  2
-rw-r--r--  contrib/python/requests/ya.make  2
-rw-r--r--  contrib/python/responses/ya.make  2
-rw-r--r--  contrib/python/retry/ya.make  2
-rw-r--r--  contrib/python/rsa/ya.make  2
-rw-r--r--  contrib/python/ruamel.yaml.clib/ya.make  2
-rw-r--r--  contrib/python/ruamel.yaml/ya.make  2
-rw-r--r--  contrib/python/s3transfer/ya.make  2
-rw-r--r--  contrib/python/scipy/ya.make  2
-rw-r--r--  contrib/python/setuptools/ya.make  2
-rw-r--r--  contrib/python/simplegeneric/ya.make  2
-rw-r--r--  contrib/python/simplejson/ya.make  2
-rw-r--r--  contrib/python/six/ya.make  2
-rw-r--r--  contrib/python/sortedcontainers/ya.make  2
-rw-r--r--  contrib/python/tenacity/ya.make  2
-rw-r--r--  contrib/python/toml/ya.make  2
-rw-r--r--  contrib/python/traitlets/ya.make  2
-rw-r--r--  contrib/python/typing-extensions/ya.make  2
-rw-r--r--  contrib/python/urllib3/ya.make  2
-rw-r--r--  contrib/python/wcwidth/ya.make  2
-rw-r--r--  contrib/python/websocket-client/ya.make  2
-rw-r--r--  contrib/python/xmltodict/ya.make  2
-rw-r--r--  contrib/python/zope.interface/ya.make  2
-rw-r--r--  contrib/python/zstandard/ya.make  2
169 files changed, 2794 insertions, 2109 deletions
diff --git a/contrib/python/Automat/ya.make b/contrib/python/Automat/ya.make
index 8df839fcc1..07e6e7bb24 100644
--- a/contrib/python/Automat/ya.make
+++ b/contrib/python/Automat/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/Automat/py2)
ELSE()
diff --git a/contrib/python/Flask-Cors/ya.make b/contrib/python/Flask-Cors/ya.make
index df95cd8633..bf5e610906 100644
--- a/contrib/python/Flask-Cors/ya.make
+++ b/contrib/python/Flask-Cors/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/Flask-Cors/py2)
ELSE()
diff --git a/contrib/python/Flask/ya.make b/contrib/python/Flask/ya.make
index 648677803e..8a3bbcd8c6 100644
--- a/contrib/python/Flask/ya.make
+++ b/contrib/python/Flask/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/Flask/py2)
ELSE()
diff --git a/contrib/python/Jinja2/ya.make b/contrib/python/Jinja2/ya.make
index 4dc402f403..9fe33fdeff 100644
--- a/contrib/python/Jinja2/ya.make
+++ b/contrib/python/Jinja2/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/Jinja2/py2)
ELSE()
diff --git a/contrib/python/MarkupSafe/ya.make b/contrib/python/MarkupSafe/ya.make
index 59992866ad..f84021e5d7 100644
--- a/contrib/python/MarkupSafe/ya.make
+++ b/contrib/python/MarkupSafe/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/MarkupSafe/py2)
ELSE()
diff --git a/contrib/python/Pillow/ya.make b/contrib/python/Pillow/ya.make
index f5634defbf..02a54d42d1 100644
--- a/contrib/python/Pillow/ya.make
+++ b/contrib/python/Pillow/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/Pillow/py2)
ELSE()
diff --git a/contrib/python/PyHamcrest/ya.make b/contrib/python/PyHamcrest/ya.make
index e5d7651c5e..ec69de4b3f 100644
--- a/contrib/python/PyHamcrest/ya.make
+++ b/contrib/python/PyHamcrest/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/PyHamcrest/py2)
ELSE()
diff --git a/contrib/python/PyJWT/ya.make b/contrib/python/PyJWT/ya.make
index 3ccb0a62dc..28ae45483e 100644
--- a/contrib/python/PyJWT/ya.make
+++ b/contrib/python/PyJWT/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/PyJWT/py2)
ELSE()
diff --git a/contrib/python/PySocks/ya.make b/contrib/python/PySocks/ya.make
index 1d7ecb60a1..b4a166d4b0 100644
--- a/contrib/python/PySocks/ya.make
+++ b/contrib/python/PySocks/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/PySocks/py2)
ELSE()
diff --git a/contrib/python/Pygments/ya.make b/contrib/python/Pygments/ya.make
index e0c5b22b7e..f1bd685dd7 100644
--- a/contrib/python/Pygments/ya.make
+++ b/contrib/python/Pygments/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/Pygments/py2)
ELSE()
diff --git a/contrib/python/Twisted/ya.make b/contrib/python/Twisted/ya.make
index a8fec1f62b..89f68f9753 100644
--- a/contrib/python/Twisted/ya.make
+++ b/contrib/python/Twisted/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/Twisted/py2)
ELSE()
diff --git a/contrib/python/Werkzeug/ya.make b/contrib/python/Werkzeug/ya.make
index 809655c21c..84b9b93e2b 100644
--- a/contrib/python/Werkzeug/ya.make
+++ b/contrib/python/Werkzeug/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/Werkzeug/py2)
ELSE()
diff --git a/contrib/python/aiohttp/.dist-info/METADATA b/contrib/python/aiohttp/.dist-info/METADATA
index c2f6befe9e..cd31264913 100644
--- a/contrib/python/aiohttp/.dist-info/METADATA
+++ b/contrib/python/aiohttp/.dist-info/METADATA
@@ -1,19 +1,19 @@
Metadata-Version: 2.1
Name: aiohttp
-Version: 3.8.1
+Version: 3.9.5
Summary: Async http client/server framework (asyncio)
Home-page: https://github.com/aio-libs/aiohttp
Maintainer: aiohttp team <team@aiohttp.org>
Maintainer-email: team@aiohttp.org
License: Apache 2
-Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
+Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
+Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html
Project-URL: Docs: RTD, https://docs.aiohttp.org
Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
-Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Framework :: AsyncIO
Classifier: Intended Audience :: Developers
@@ -23,29 +23,25 @@ Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: Microsoft :: Windows
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Internet :: WWW/HTTP
-Requires-Python: >=3.6
+Requires-Python: >=3.8
Description-Content-Type: text/x-rst
License-File: LICENSE.txt
-Requires-Dist: attrs (>=17.3.0)
-Requires-Dist: charset-normalizer (<3.0,>=2.0)
-Requires-Dist: multidict (<7.0,>=4.5)
-Requires-Dist: async-timeout (<5.0,>=4.0.0a3)
-Requires-Dist: yarl (<2.0,>=1.0)
-Requires-Dist: frozenlist (>=1.1.1)
-Requires-Dist: aiosignal (>=1.1.2)
-Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
-Requires-Dist: asynctest (==0.13.0) ; python_version < "3.8"
-Requires-Dist: typing-extensions (>=3.7.4) ; python_version < "3.8"
+Requires-Dist: aiosignal >=1.1.2
+Requires-Dist: attrs >=17.3.0
+Requires-Dist: frozenlist >=1.1.1
+Requires-Dist: multidict <7.0,>=4.5
+Requires-Dist: yarl <2.0,>=1.0
+Requires-Dist: async-timeout <5.0,>=4.0 ; python_version < "3.11"
Provides-Extra: speedups
-Requires-Dist: aiodns ; extra == 'speedups'
-Requires-Dist: Brotli ; extra == 'speedups'
-Requires-Dist: cchardet ; extra == 'speedups'
+Requires-Dist: brotlicffi ; (platform_python_implementation != "CPython") and extra == 'speedups'
+Requires-Dist: Brotli ; (platform_python_implementation == "CPython") and extra == 'speedups'
+Requires-Dist: aiodns ; (sys_platform == "linux" or sys_platform == "darwin") and extra == 'speedups'
==================================
Async http client/server framework
@@ -74,13 +70,13 @@ Async http client/server framework
:target: https://docs.aiohttp.org/
:alt: Latest Read The Docs
-.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
- :target: https://aio-libs.discourse.group
- :alt: Discourse status
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
-.. image:: https://badges.gitter.im/Join%20Chat.svg
- :target: https://gitter.im/aio-libs/Lobby
- :alt: Chat on Gitter
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
Key Features
@@ -89,7 +85,7 @@ Key Features
- Supports both client and server side of HTTP protocol.
- Supports both client and server Web-Sockets out-of-the-box and avoids
Callback Hell.
-- Provides Web-server with middlewares and plugable routing.
+- Provides Web-server with middleware and pluggable routing.
Getting started
@@ -116,8 +112,7 @@ To get something from the web:
html = await response.text()
print("Body:", html[:15], "...")
- loop = asyncio.get_event_loop()
- loop.run_until_complete(main())
+ asyncio.run(main())
This prints:
@@ -196,7 +191,7 @@ Feel free to make a Pull Request for adding your link to these pages!
Communication channels
======================
-*aio-libs discourse group*: https://aio-libs.discourse.group
+*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions
*gitter chat* https://gitter.im/aio-libs/Lobby
@@ -207,23 +202,20 @@ Please add *aiohttp* tag to your question there.
Requirements
============
-- Python >= 3.6
- async-timeout_
- attrs_
-- charset-normalizer_
- multidict_
- yarl_
+- frozenlist_
-Optionally you may install the cChardet_ and aiodns_ libraries (highly
-recommended for sake of speed).
+Optionally you may install the aiodns_ library (highly recommended for sake of speed).
-.. _charset-normalizer: https://pypi.org/project/charset-normalizer
.. _aiodns: https://pypi.python.org/pypi/aiodns
.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
+.. _frozenlist: https://pypi.org/project/frozenlist/
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
-.. _cChardet: https://pypi.python.org/pypi/cchardet
License
=======
@@ -251,5 +243,3 @@ Benchmarks
If you are interested in efficiency, the AsyncIO community maintains a
list of benchmarks on the official wiki:
https://github.com/python/asyncio/wiki/Benchmarks
-
-
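The METADATA hunk above (and the matching README hunk below) modernizes the getting-started snippet by replacing the deprecated get_event_loop()/run_until_complete() pair with asyncio.run(). A runnable sketch of the full snippet; the import and session-setup lines fall outside the hunk, so they are reconstructed here from the published aiohttp quickstart:

import asyncio
import aiohttp

async def main() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.get("http://python.org") as response:
            print("Status:", response.status)
            html = await response.text()
            print("Body:", html[:15], "...")

# asyncio.run() creates, runs, and closes the event loop itself,
# replacing the get_event_loop()/run_until_complete() pair removed above.
asyncio.run(main())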
diff --git a/contrib/python/aiohttp/LICENSE.txt b/contrib/python/aiohttp/LICENSE.txt
index 054102f2db..e497a322f2 100644
--- a/contrib/python/aiohttp/LICENSE.txt
+++ b/contrib/python/aiohttp/LICENSE.txt
@@ -1,4 +1,4 @@
- Copyright 2013-2020 aio-libs collaboration.
+ Copyright aio-libs contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
diff --git a/contrib/python/aiohttp/README.rst b/contrib/python/aiohttp/README.rst
index d057acbe2f..90b7f71357 100644
--- a/contrib/python/aiohttp/README.rst
+++ b/contrib/python/aiohttp/README.rst
@@ -25,13 +25,13 @@ Async http client/server framework
:target: https://docs.aiohttp.org/
:alt: Latest Read The Docs
-.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
- :target: https://aio-libs.discourse.group
- :alt: Discourse status
+.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs:matrix.org
+ :alt: Matrix Room — #aio-libs:matrix.org
-.. image:: https://badges.gitter.im/Join%20Chat.svg
- :target: https://gitter.im/aio-libs/Lobby
- :alt: Chat on Gitter
+.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
+ :target: https://matrix.to/#/%23aio-libs-space:matrix.org
+ :alt: Matrix Space — #aio-libs-space:matrix.org
Key Features
@@ -40,7 +40,7 @@ Key Features
- Supports both client and server side of HTTP protocol.
- Supports both client and server Web-Sockets out-of-the-box and avoids
Callback Hell.
-- Provides Web-server with middlewares and plugable routing.
+- Provides Web-server with middleware and pluggable routing.
Getting started
@@ -67,8 +67,7 @@ To get something from the web:
html = await response.text()
print("Body:", html[:15], "...")
- loop = asyncio.get_event_loop()
- loop.run_until_complete(main())
+ asyncio.run(main())
This prints:
@@ -147,7 +146,7 @@ Feel free to make a Pull Request for adding your link to these pages!
Communication channels
======================
-*aio-libs discourse group*: https://aio-libs.discourse.group
+*aio-libs Discussions*: https://github.com/aio-libs/aiohttp/discussions
*gitter chat* https://gitter.im/aio-libs/Lobby
@@ -158,23 +157,20 @@ Please add *aiohttp* tag to your question there.
Requirements
============
-- Python >= 3.6
- async-timeout_
- attrs_
-- charset-normalizer_
- multidict_
- yarl_
+- frozenlist_
-Optionally you may install the cChardet_ and aiodns_ libraries (highly
-recommended for sake of speed).
+Optionally you may install the aiodns_ library (highly recommended for sake of speed).
-.. _charset-normalizer: https://pypi.org/project/charset-normalizer
.. _aiodns: https://pypi.python.org/pypi/aiodns
.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
+.. _frozenlist: https://pypi.org/project/frozenlist/
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
-.. _cChardet: https://pypi.python.org/pypi/cchardet
License
=======
diff --git a/contrib/python/aiohttp/aiohttp/__init__.py b/contrib/python/aiohttp/aiohttp/__init__.py
index 4bbcef2935..e82e790b46 100644
--- a/contrib/python/aiohttp/aiohttp/__init__.py
+++ b/contrib/python/aiohttp/aiohttp/__init__.py
@@ -1,6 +1,6 @@
-__version__ = "3.8.1"
+__version__ = "3.9.5"
-from typing import Tuple
+from typing import TYPE_CHECKING, Tuple
from . import hdrs as hdrs
from .client import (
@@ -104,6 +104,13 @@ from .tracing import (
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
)
+if TYPE_CHECKING:
+ # At runtime these are lazy-loaded at the bottom of the file.
+ from .worker import (
+ GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
+ GunicornWebWorker as GunicornWebWorker,
+ )
+
__all__: Tuple[str, ...] = (
"hdrs",
# client
@@ -206,11 +213,28 @@ __all__: Tuple[str, ...] = (
"TraceRequestRedirectParams",
"TraceRequestStartParams",
"TraceResponseChunkReceivedParams",
+ # workers (imported lazily with __getattr__)
+ "GunicornUVLoopWebWorker",
+ "GunicornWebWorker",
)
-try:
- from .worker import GunicornUVLoopWebWorker, GunicornWebWorker
- __all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
-except ImportError: # pragma: no cover
- pass
+def __dir__() -> Tuple[str, ...]:
+ return __all__ + ("__author__", "__doc__")
+
+
+def __getattr__(name: str) -> object:
+ global GunicornUVLoopWebWorker, GunicornWebWorker
+
+ # Importing gunicorn takes a long time (>100ms), so only import if actually needed.
+ if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
+ try:
+ from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
+ except ImportError:
+ return None
+
+ GunicornUVLoopWebWorker = guv # type: ignore[misc]
+ GunicornWebWorker = gw # type: ignore[misc]
+ return guv if name == "GunicornUVLoopWebWorker" else gw
+
+ raise AttributeError(f"module {__name__} has no attribute {name}")
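The rewrite above swaps an eager try/except import of the gunicorn workers for module-level __getattr__ and __dir__ hooks (PEP 562). A minimal standalone sketch of the same mechanism, assuming hypothetical names lazy_mod and heavy:

# lazy_mod.py: minimal sketch of PEP 562 lazy attribute loading.
# "heavy" is a hypothetical attribute standing in for the gunicorn workers.
from typing import Tuple

__all__: Tuple[str, ...] = ("heavy",)

def __dir__() -> Tuple[str, ...]:
    return __all__

def __getattr__(name: str) -> object:
    # Invoked only when normal module attribute lookup fails, so the
    # expensive import is paid on first access rather than at import time.
    if name == "heavy":
        import json as heavy_impl  # stand-in for a slow import such as gunicorn
        globals()[name] = heavy_impl  # cache so __getattr__ is not called again
        return heavy_impl
    raise AttributeError(f"module {__name__} has no attribute {name}")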
diff --git a/contrib/python/aiohttp/aiohttp/_cparser.pxd b/contrib/python/aiohttp/aiohttp/_cparser.pxd
index 49055d6a56..1b3be6d4ef 100644
--- a/contrib/python/aiohttp/aiohttp/_cparser.pxd
+++ b/contrib/python/aiohttp/aiohttp/_cparser.pxd
@@ -1,13 +1,4 @@
-from libc.stdint cimport (
- int8_t,
- int16_t,
- int32_t,
- int64_t,
- uint8_t,
- uint16_t,
- uint32_t,
- uint64_t,
-)
+from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
cdef extern from "llhttp.h":
@@ -88,30 +79,14 @@ cdef extern from "llhttp.h":
ctypedef llhttp_errno llhttp_errno_t
enum llhttp_flags:
- F_CONNECTION_KEEP_ALIVE,
- F_CONNECTION_CLOSE,
- F_CONNECTION_UPGRADE,
F_CHUNKED,
- F_UPGRADE,
- F_CONTENT_LENGTH,
- F_SKIPBODY,
- F_TRAILING,
- F_TRANSFER_ENCODING
-
- enum llhttp_lenient_flags:
- LENIENT_HEADERS,
- LENIENT_CHUNKED_LENGTH
+ F_CONTENT_LENGTH
enum llhttp_type:
HTTP_REQUEST,
HTTP_RESPONSE,
HTTP_BOTH
- enum llhttp_finish_t:
- HTTP_FINISH_SAFE,
- HTTP_FINISH_SAFE_WITH_CB,
- HTTP_FINISH_UNSAFE
-
enum llhttp_method:
HTTP_DELETE,
HTTP_GET,
@@ -167,24 +142,17 @@ cdef extern from "llhttp.h":
const llhttp_settings_t* settings)
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
- llhttp_errno_t llhttp_finish(llhttp_t* parser)
-
- int llhttp_message_needs_eof(const llhttp_t* parser)
int llhttp_should_keep_alive(const llhttp_t* parser)
- void llhttp_pause(llhttp_t* parser)
- void llhttp_resume(llhttp_t* parser)
-
void llhttp_resume_after_upgrade(llhttp_t* parser)
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
const char* llhttp_get_error_reason(const llhttp_t* parser)
- void llhttp_set_error_reason(llhttp_t* parser, const char* reason)
const char* llhttp_get_error_pos(const llhttp_t* parser)
- const char* llhttp_errno_name(llhttp_errno_t err)
const char* llhttp_method_name(llhttp_method_t method)
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
- void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled)
+ void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
+ void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
diff --git a/contrib/python/aiohttp/aiohttp/_http_parser.pyx b/contrib/python/aiohttp/aiohttp/_http_parser.pyx
index 77bf0aa598..ec6edb2dfe 100644
--- a/contrib/python/aiohttp/aiohttp/_http_parser.pyx
+++ b/contrib/python/aiohttp/aiohttp/_http_parser.pyx
@@ -2,7 +2,6 @@
#
# Based on https://github.com/MagicStack/httptools
#
-from __future__ import absolute_import, print_function
from cpython cimport (
Py_buffer,
@@ -20,6 +19,7 @@ from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiD
from yarl import URL as _URL
from aiohttp import hdrs
+from aiohttp.helpers import DEBUG, set_exception
from .http_exceptions import (
BadHttpMessage,
@@ -425,7 +425,7 @@ cdef class HttpParser:
raw_headers = tuple(self._raw_headers)
headers = CIMultiDictProxy(self._headers)
- if upgrade or self._cparser.method == 5: # cparser.CONNECT:
+ if upgrade or self._cparser.method == cparser.HTTP_CONNECT:
self._upgraded = True
# do not support old websocket spec
@@ -453,7 +453,7 @@ cdef class HttpParser:
if (
ULLONG_MAX > self._cparser.content_length > 0 or chunked or
- self._cparser.method == 5 or # CONNECT: 5
+ self._cparser.method == cparser.HTTP_CONNECT or
(self._cparser.status_code >= 199 and
self._cparser.content_length == 0 and
self._read_until_eof)
@@ -546,7 +546,13 @@ cdef class HttpParser:
ex = self._last_error
self._last_error = None
else:
- ex = parser_error_from_errno(self._cparser)
+ after = cparser.llhttp_get_error_pos(self._cparser)
+ before = data[:after - <char*>self.py_buf.buf]
+ after_b = after.split(b"\r\n", 1)[0]
+ before = before.rsplit(b"\r\n", 1)[-1]
+ data = before + after_b
+ pointer = " " * (len(repr(before))-1) + "^"
+ ex = parser_error_from_errno(self._cparser, data, pointer)
self._payload = None
raise ex
@@ -586,34 +592,45 @@ cdef class HttpRequestParser(HttpParser):
self._path = self._buf.decode('utf-8', 'surrogateescape')
try:
idx3 = len(self._path)
- idx1 = self._path.find("?")
- if idx1 == -1:
- query = ""
- idx2 = self._path.find("#")
- if idx2 == -1:
- path = self._path
- fragment = ""
- else:
- path = self._path[0: idx2]
- fragment = self._path[idx2+1:]
+ if self._cparser.method == cparser.HTTP_CONNECT:
+ # authority-form,
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
+ self._url = URL.build(authority=self._path, encoded=True)
+ elif idx3 > 1 and self._path[0] == '/':
+ # origin-form,
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
+ idx1 = self._path.find("?")
+ if idx1 == -1:
+ query = ""
+ idx2 = self._path.find("#")
+ if idx2 == -1:
+ path = self._path
+ fragment = ""
+ else:
+ path = self._path[0: idx2]
+ fragment = self._path[idx2+1:]
- else:
- path = self._path[0:idx1]
- idx1 += 1
- idx2 = self._path.find("#", idx1+1)
- if idx2 == -1:
- query = self._path[idx1:]
- fragment = ""
else:
- query = self._path[idx1: idx2]
- fragment = self._path[idx2+1:]
-
- self._url = URL.build(
- path=path,
- query_string=query,
- fragment=fragment,
- encoded=True,
- )
+ path = self._path[0:idx1]
+ idx1 += 1
+ idx2 = self._path.find("#", idx1+1)
+ if idx2 == -1:
+ query = self._path[idx1:]
+ fragment = ""
+ else:
+ query = self._path[idx1: idx2]
+ fragment = self._path[idx2+1:]
+
+ self._url = URL.build(
+ path=path,
+ query_string=query,
+ fragment=fragment,
+ encoded=True,
+ )
+ else:
+ # absolute-form for proxy maybe,
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
+ self._url = URL(self._path, encoded=True)
finally:
PyByteArray_Resize(self._buf, 0)
@@ -631,6 +648,11 @@ cdef class HttpResponseParser(HttpParser):
max_line_size, max_headers, max_field_size,
payload_exception, response_with_body, read_until_eof,
auto_decompress)
+ # Use strict parsing on dev mode, so users are warned about broken servers.
+ if not DEBUG:
+ cparser.llhttp_set_lenient_headers(self._cparser, 1)
+ cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
+ cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)
cdef object _on_status_complete(self):
if self._buf:
@@ -726,7 +748,10 @@ cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
pyparser._last_error = exc
return -1
else:
- if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT
+ if (
+ pyparser._cparser.upgrade or
+ pyparser._cparser.method == cparser.HTTP_CONNECT
+ ):
return 2
else:
return 0
@@ -738,11 +763,13 @@ cdef int cb_on_body(cparser.llhttp_t* parser,
cdef bytes body = at[:length]
try:
pyparser._payload.feed_data(body, length)
- except BaseException as exc:
+ except BaseException as underlying_exc:
+ reraised_exc = underlying_exc
if pyparser._payload_exception is not None:
- pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
- else:
- pyparser._payload.set_exception(exc)
+ reraised_exc = pyparser._payload_exception(str(underlying_exc))
+
+ set_exception(pyparser._payload, reraised_exc, underlying_exc)
+
pyparser._payload_error = 1
return -1
else:
@@ -783,11 +810,13 @@ cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
return 0
-cdef parser_error_from_errno(cparser.llhttp_t* parser):
+cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
cdef bytes desc = cparser.llhttp_get_error_reason(parser)
- if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
+ err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)
+
+ if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
cparser.HPE_CB_HEADERS_COMPLETE,
cparser.HPE_CB_MESSAGE_COMPLETE,
cparser.HPE_CB_CHUNK_HEADER,
@@ -797,22 +826,13 @@ cdef parser_error_from_errno(cparser.llhttp_t* parser):
cparser.HPE_INVALID_CONTENT_LENGTH,
cparser.HPE_INVALID_CHUNK_SIZE,
cparser.HPE_INVALID_EOF_STATE,
- cparser.HPE_INVALID_TRANSFER_ENCODING):
- cls = BadHttpMessage
-
- elif errno == cparser.HPE_INVALID_STATUS:
- cls = BadStatusLine
-
- elif errno == cparser.HPE_INVALID_METHOD:
- cls = BadStatusLine
-
- elif errno == cparser.HPE_INVALID_VERSION:
- cls = BadStatusLine
-
+ cparser.HPE_INVALID_TRANSFER_ENCODING}:
+ return BadHttpMessage(err_msg)
+ elif errno in {cparser.HPE_INVALID_STATUS,
+ cparser.HPE_INVALID_METHOD,
+ cparser.HPE_INVALID_VERSION}:
+ return BadStatusLine(error=err_msg)
elif errno == cparser.HPE_INVALID_URL:
- cls = InvalidURLError
-
- else:
- cls = BadHttpMessage
+ return InvalidURLError(err_msg)
- return cls(desc.decode('latin-1'))
+ return BadHttpMessage(err_msg)
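parser_error_from_errno now receives the offending bytes plus a caret pointer that the caller builds from llhttp_get_error_pos. A pure-Python sketch of that pointer arithmetic; the function name and sample inputs are illustrative, not from the source:

def format_parse_error(desc: str, before: bytes, after: bytes) -> str:
    # Trim both sides to the line containing the failure, as the caller does.
    after = after.split(b"\r\n", 1)[0]
    before = before.rsplit(b"\r\n", 1)[-1]
    data = before + after
    # For ASCII input, repr(before) is the payload plus the b'...' punctuation;
    # len(repr(before)) - 1 lands the caret under the first byte of "after"
    # once both lines share the same one-space indent.
    pointer = " " * (len(repr(before)) - 1) + "^"
    return "{}:\n\n {!r}\n {}".format(desc, data, pointer)

print(format_parse_error("Invalid header token",
                         b"GET / HTTP/1.1\r\nX: y", b"\x01bad\r\nmore"))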
diff --git a/contrib/python/aiohttp/aiohttp/abc.py b/contrib/python/aiohttp/aiohttp/abc.py
index 06fc831638..ee83899899 100644
--- a/contrib/python/aiohttp/aiohttp/abc.py
+++ b/contrib/python/aiohttp/aiohttp/abc.py
@@ -22,7 +22,7 @@ from yarl import URL
from .helpers import get_running_loop
from .typedefs import LooseCookies
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .web_app import Application
from .web_exceptions import HTTPException
from .web_request import BaseRequest, Request
@@ -65,7 +65,9 @@ class AbstractMatchInfo(ABC):
@property
@abstractmethod
- def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
+ def expect_handler(
+ self,
+ ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
"""Expect handler for 100-continue processing"""
@property # pragma: no branch
@@ -129,7 +131,7 @@ class AbstractResolver(ABC):
"""Release resolver"""
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
IterableBase = Iterable[Morsel[str]]
else:
IterableBase = Iterable
@@ -166,7 +168,7 @@ class AbstractStreamWriter(ABC):
buffer_size = 0
output_size = 0
- length = 0 # type: Optional[int]
+ length: Optional[int] = 0
@abstractmethod
async def write(self, chunk: bytes) -> None:
diff --git a/contrib/python/aiohttp/aiohttp/base_protocol.py b/contrib/python/aiohttp/aiohttp/base_protocol.py
index fff4610a1e..dc1f24f99c 100644
--- a/contrib/python/aiohttp/aiohttp/base_protocol.py
+++ b/contrib/python/aiohttp/aiohttp/base_protocol.py
@@ -1,6 +1,7 @@
import asyncio
from typing import Optional, cast
+from .helpers import set_exception
from .tcp_helpers import tcp_nodelay
@@ -15,13 +16,17 @@ class BaseProtocol(asyncio.Protocol):
)
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
- self._loop = loop # type: asyncio.AbstractEventLoop
+ self._loop: asyncio.AbstractEventLoop = loop
self._paused = False
- self._drain_waiter = None # type: Optional[asyncio.Future[None]]
- self._connection_lost = False
+ self._drain_waiter: Optional[asyncio.Future[None]] = None
self._reading_paused = False
- self.transport = None # type: Optional[asyncio.Transport]
+ self.transport: Optional[asyncio.Transport] = None
+
+ @property
+ def connected(self) -> bool:
+ """Return True if the connection is open."""
+ return self.transport is not None
def pause_writing(self) -> None:
assert not self._paused
@@ -59,7 +64,6 @@ class BaseProtocol(asyncio.Protocol):
self.transport = tr
def connection_lost(self, exc: Optional[BaseException]) -> None:
- self._connection_lost = True
# Wake up the writer if currently paused.
self.transport = None
if not self._paused:
@@ -73,10 +77,14 @@ class BaseProtocol(asyncio.Protocol):
if exc is None:
waiter.set_result(None)
else:
- waiter.set_exception(exc)
+ set_exception(
+ waiter,
+ ConnectionError("Connection lost"),
+ exc,
+ )
async def _drain_helper(self) -> None:
- if self._connection_lost:
+ if not self.connected:
raise ConnectionResetError("Connection lost")
if not self._paused:
return
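BaseProtocol now derives "connection open" from whether a transport is attached instead of tracking a separate _connection_lost flag. A self-contained sketch of the drain-waiter flow-control pattern the class implements; the class name is illustrative:

import asyncio
from typing import Optional

class DrainableProtocol(asyncio.Protocol):
    def __init__(self) -> None:
        self._paused = False
        self._drain_waiter: Optional["asyncio.Future[None]"] = None
        self.transport: Optional[asyncio.BaseTransport] = None

    @property
    def connected(self) -> bool:
        # Same idea as the new property above: open iff a transport is attached.
        return self.transport is not None

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        self.transport = transport

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self.transport = None

    def pause_writing(self) -> None:
        # Called by the event loop when the write buffer passes its high mark.
        self._paused = True

    def resume_writing(self) -> None:
        self._paused = False
        waiter, self._drain_waiter = self._drain_waiter, None
        if waiter is not None and not waiter.done():
            waiter.set_result(None)

    async def drain(self) -> None:
        if not self.connected:
            raise ConnectionResetError("Connection lost")
        if not self._paused:
            return
        self._drain_waiter = asyncio.get_running_loop().create_future()
        await self._drain_waiter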
diff --git a/contrib/python/aiohttp/aiohttp/client.py b/contrib/python/aiohttp/aiohttp/client.py
index 6ae9549db9..32d2c3b711 100644
--- a/contrib/python/aiohttp/aiohttp/client.py
+++ b/contrib/python/aiohttp/aiohttp/client.py
@@ -11,10 +11,12 @@ import warnings
from contextlib import suppress
from types import SimpleNamespace, TracebackType
from typing import (
+ TYPE_CHECKING,
Any,
Awaitable,
Callable,
Coroutine,
+ Final,
FrozenSet,
Generator,
Generic,
@@ -72,13 +74,14 @@ from .connector import (
)
from .cookiejar import CookieJar
from .helpers import (
+ _SENTINEL,
DEBUG,
- PY_36,
BasicAuth,
TimeoutHandle,
ceil_timeout,
get_env_proxy_for_url,
get_running_loop,
+ method_must_be_empty_body,
sentinel,
strip_auth_from_url,
)
@@ -86,7 +89,7 @@ from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse
from .streams import FlowControlDataQueue
from .tracing import Trace, TraceConfig
-from .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
+from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
__all__ = (
# client_exceptions
@@ -128,10 +131,10 @@ __all__ = (
)
-try:
+if TYPE_CHECKING:
from ssl import SSLContext
-except ImportError: # pragma: no cover
- SSLContext = object # type: ignore[misc,assignment]
+else:
+ SSLContext = None
@attr.s(auto_attribs=True, frozen=True, slots=True)
@@ -140,6 +143,7 @@ class ClientTimeout:
connect: Optional[float] = None
sock_read: Optional[float] = None
sock_connect: Optional[float] = None
+ ceil_threshold: float = 5
# pool_queue_timeout: Optional[float] = None
# dns_resolution_timeout: Optional[float] = None
@@ -159,6 +163,7 @@ class ClientTimeout:
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)
_RetType = TypeVar("_RetType")
+_CharsetResolver = Callable[[ClientResponse, bytes], str]
class ClientSession:
@@ -188,10 +193,14 @@ class ClientSession:
"_ws_response_class",
"_trace_configs",
"_read_bufsize",
+ "_max_line_size",
+ "_max_field_size",
+ "_resolve_charset",
]
)
- _source_traceback = None
+ _source_traceback: Optional[traceback.StackSummary] = None
+ _connector: Optional[BaseConnector] = None
def __init__(
self,
@@ -210,16 +219,59 @@ class ClientSession:
version: HttpVersion = http.HttpVersion11,
cookie_jar: Optional[AbstractCookieJar] = None,
connector_owner: bool = True,
- raise_for_status: bool = False,
- read_timeout: Union[float, object] = sentinel,
+ raise_for_status: Union[
+ bool, Callable[[ClientResponse], Awaitable[None]]
+ ] = False,
+ read_timeout: Union[float, _SENTINEL] = sentinel,
conn_timeout: Optional[float] = None,
timeout: Union[object, ClientTimeout] = sentinel,
auto_decompress: bool = True,
trust_env: bool = False,
requote_redirect_url: bool = True,
trace_configs: Optional[List[TraceConfig]] = None,
- read_bufsize: int = 2 ** 16,
+ read_bufsize: int = 2**16,
+ max_line_size: int = 8190,
+ max_field_size: int = 8190,
+ fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
) -> None:
+ # We initialise _connector to None immediately, as it's referenced in __del__()
+ # and could cause issues if an exception occurs during initialisation.
+ self._connector: Optional[BaseConnector] = None
+ if timeout is sentinel or timeout is None:
+ self._timeout = DEFAULT_TIMEOUT
+ if read_timeout is not sentinel:
+ warnings.warn(
+ "read_timeout is deprecated, " "use timeout argument instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self._timeout = attr.evolve(self._timeout, total=read_timeout)
+ if conn_timeout is not None:
+ self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
+ warnings.warn(
+ "conn_timeout is deprecated, " "use timeout argument instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ else:
+ if not isinstance(timeout, ClientTimeout):
+ raise ValueError(
+ f"timeout parameter cannot be of {type(timeout)} type, "
+ "please use 'timeout=ClientTimeout(...)'",
+ )
+ self._timeout = timeout
+ if read_timeout is not sentinel:
+ raise ValueError(
+ "read_timeout and timeout parameters "
+ "conflict, please setup "
+ "timeout.read"
+ )
+ if conn_timeout is not None:
+ raise ValueError(
+ "conn_timeout and timeout parameters "
+ "conflict, please setup "
+ "timeout.connect"
+ )
if loop is None:
if connector is not None:
loop = connector._loop
@@ -252,53 +304,25 @@ class ClientSession:
if cookies is not None:
self._cookie_jar.update_cookies(cookies)
- self._connector = connector # type: Optional[BaseConnector]
+ self._connector = connector
self._connector_owner = connector_owner
self._default_auth = auth
self._version = version
self._json_serialize = json_serialize
- if timeout is sentinel:
- self._timeout = DEFAULT_TIMEOUT
- if read_timeout is not sentinel:
- warnings.warn(
- "read_timeout is deprecated, " "use timeout argument instead",
- DeprecationWarning,
- stacklevel=2,
- )
- self._timeout = attr.evolve(self._timeout, total=read_timeout)
- if conn_timeout is not None:
- self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
- warnings.warn(
- "conn_timeout is deprecated, " "use timeout argument instead",
- DeprecationWarning,
- stacklevel=2,
- )
- else:
- self._timeout = timeout # type: ignore[assignment]
- if read_timeout is not sentinel:
- raise ValueError(
- "read_timeout and timeout parameters "
- "conflict, please setup "
- "timeout.read"
- )
- if conn_timeout is not None:
- raise ValueError(
- "conn_timeout and timeout parameters "
- "conflict, please setup "
- "timeout.connect"
- )
self._raise_for_status = raise_for_status
self._auto_decompress = auto_decompress
self._trust_env = trust_env
self._requote_redirect_url = requote_redirect_url
self._read_bufsize = read_bufsize
+ self._max_line_size = max_line_size
+ self._max_field_size = max_field_size
# Convert to list of tuples
if headers:
- real_headers = CIMultiDict(headers) # type: CIMultiDict[str]
+ real_headers: CIMultiDict[str] = CIMultiDict(headers)
else:
real_headers = CIMultiDict()
- self._default_headers = real_headers # type: CIMultiDict[str]
+ self._default_headers: CIMultiDict[str] = real_headers
if skip_auto_headers is not None:
self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
else:
@@ -312,6 +336,8 @@ class ClientSession:
for trace_config in self._trace_configs:
trace_config.freeze()
+ self._resolve_charset = fallback_charset_resolver
+
def __init_subclass__(cls: Type["ClientSession"]) -> None:
warnings.warn(
"Inheritance class {} from ClientSession "
@@ -334,10 +360,7 @@ class ClientSession:
def __del__(self, _warnings: Any = warnings) -> None:
if not self.closed:
- if PY_36:
- kwargs = {"source": self}
- else:
- kwargs = {}
+ kwargs = {"source": self}
_warnings.warn(
f"Unclosed client session {self!r}", ResourceWarning, **kwargs
)
@@ -377,18 +400,24 @@ class ClientSession:
compress: Optional[str] = None,
chunked: Optional[bool] = None,
expect100: bool = False,
- raise_for_status: Optional[bool] = None,
+ raise_for_status: Union[
+ None, bool, Callable[[ClientResponse], Awaitable[None]]
+ ] = None,
read_until_eof: bool = True,
proxy: Optional[StrOrURL] = None,
proxy_auth: Optional[BasicAuth] = None,
- timeout: Union[ClientTimeout, object] = sentinel,
+ timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
verify_ssl: Optional[bool] = None,
fingerprint: Optional[bytes] = None,
ssl_context: Optional[SSLContext] = None,
- ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None,
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
+ server_hostname: Optional[str] = None,
proxy_headers: Optional[LooseHeaders] = None,
trace_request_ctx: Optional[SimpleNamespace] = None,
read_bufsize: Optional[int] = None,
+ auto_decompress: Optional[bool] = None,
+ max_line_size: Optional[int] = None,
+ max_field_size: Optional[int] = None,
) -> ClientResponse:
# NOTE: timeout clamps existing connect and read timeouts. We cannot
@@ -413,6 +442,7 @@ class ClientSession:
redirects = 0
history = []
version = self._version
+ params = params or {}
# Merge with default headers and transform to CIMultiDict
headers = self._prepare_headers(headers)
@@ -435,20 +465,31 @@ class ClientSession:
raise InvalidURL(proxy) from e
if timeout is sentinel:
- real_timeout = self._timeout # type: ClientTimeout
+ real_timeout: ClientTimeout = self._timeout
else:
if not isinstance(timeout, ClientTimeout):
- real_timeout = ClientTimeout(total=timeout) # type: ignore[arg-type]
+ real_timeout = ClientTimeout(total=timeout)
else:
real_timeout = timeout
# timeout is cumulative for all request operations
# (request, redirects, responses, data consuming)
- tm = TimeoutHandle(self._loop, real_timeout.total)
+ tm = TimeoutHandle(
+ self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
+ )
handle = tm.start()
if read_bufsize is None:
read_bufsize = self._read_bufsize
+ if auto_decompress is None:
+ auto_decompress = self._auto_decompress
+
+ if max_line_size is None:
+ max_line_size = self._max_line_size
+
+ if max_field_size is None:
+ max_field_size = self._max_field_size
+
traces = [
Trace(
self,
@@ -523,14 +564,19 @@ class ClientSession:
proxy_auth=proxy_auth,
timer=timer,
session=self,
- ssl=ssl,
+ ssl=ssl if ssl is not None else True,
+ server_hostname=server_hostname,
proxy_headers=proxy_headers,
traces=traces,
+ trust_env=self.trust_env,
)
# connection timeout
try:
- async with ceil_timeout(real_timeout.connect):
+ async with ceil_timeout(
+ real_timeout.connect,
+ ceil_threshold=real_timeout.ceil_threshold,
+ ):
assert self._connector is not None
conn = await self._connector.connect(
req, traces=traces, timeout=real_timeout
@@ -545,11 +591,14 @@ class ClientSession:
assert conn.protocol is not None
conn.protocol.set_response_params(
timer=timer,
- skip_payload=method.upper() == "HEAD",
+ skip_payload=method_must_be_empty_body(method),
read_until_eof=read_until_eof,
- auto_decompress=self._auto_decompress,
+ auto_decompress=auto_decompress,
read_timeout=real_timeout.sock_read,
read_bufsize=read_bufsize,
+ timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
+ max_line_size=max_line_size,
+ max_field_size=max_field_size,
)
try:
@@ -629,7 +678,7 @@ class ClientSession:
headers.pop(hdrs.AUTHORIZATION, None)
url = parsed_url
- params = None
+ params = {}
resp.release()
continue
@@ -638,7 +687,12 @@ class ClientSession:
# check response status
if raise_for_status is None:
raise_for_status = self._raise_for_status
- if raise_for_status:
+
+ if raise_for_status is None:
+ pass
+ elif callable(raise_for_status):
+ await raise_for_status(resp)
+ elif raise_for_status:
resp.raise_for_status()
# register connection
@@ -686,7 +740,7 @@ class ClientSession:
headers: Optional[LooseHeaders] = None,
proxy: Optional[StrOrURL] = None,
proxy_auth: Optional[BasicAuth] = None,
- ssl: Union[SSLContext, bool, None, Fingerprint] = None,
+ ssl: Union[SSLContext, bool, None, Fingerprint] = True,
verify_ssl: Optional[bool] = None,
fingerprint: Optional[bytes] = None,
ssl_context: Optional[SSLContext] = None,
@@ -738,7 +792,7 @@ class ClientSession:
headers: Optional[LooseHeaders] = None,
proxy: Optional[StrOrURL] = None,
proxy_auth: Optional[BasicAuth] = None,
- ssl: Union[SSLContext, bool, None, Fingerprint] = None,
+ ssl: Optional[Union[SSLContext, bool, Fingerprint]] = True,
verify_ssl: Optional[bool] = None,
fingerprint: Optional[bytes] = None,
ssl_context: Optional[SSLContext] = None,
@@ -748,13 +802,13 @@ class ClientSession:
) -> ClientWebSocketResponse:
if headers is None:
- real_headers = CIMultiDict() # type: CIMultiDict[str]
+ real_headers: CIMultiDict[str] = CIMultiDict()
else:
real_headers = CIMultiDict(headers)
default_headers = {
hdrs.UPGRADE: "websocket",
- hdrs.CONNECTION: "upgrade",
+ hdrs.CONNECTION: "Upgrade",
hdrs.SEC_WEBSOCKET_VERSION: "13",
}
@@ -772,6 +826,9 @@ class ClientSession:
extstr = ws_ext_gen(compress=compress)
real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
+ # For the sake of backward compatibility, if user passes in None, convert it to True
+ if ssl is None:
+ ssl = True
ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
# send request
@@ -867,9 +924,9 @@ class ClientSession:
assert conn_proto is not None
transport = conn.transport
assert transport is not None
- reader = FlowControlDataQueue(
- conn_proto, 2 ** 16, loop=self._loop
- ) # type: FlowControlDataQueue[WSMessage]
+ reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue(
+ conn_proto, 2**16, loop=self._loop
+ )
conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
writer = WebSocketWriter(
conn_proto,
@@ -904,7 +961,7 @@ class ClientSession:
if headers:
if not isinstance(headers, (MultiDictProxy, MultiDict)):
headers = CIMultiDict(headers)
- added_names = set() # type: Set[str]
+ added_names: Set[str] = set()
for key, value in headers.items():
if key in added_names:
result.add(key, value)
@@ -1026,7 +1083,7 @@ class ClientSession:
return self._loop
@property
- def timeout(self) -> Union[object, ClientTimeout]:
+ def timeout(self) -> ClientTimeout:
"""Timeout for the session."""
return self._timeout
@@ -1123,8 +1180,8 @@ class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType
def send(self, arg: None) -> "asyncio.Future[Any]":
return self._coro.send(arg)
- def throw(self, arg: BaseException) -> None: # type: ignore[arg-type,override]
- self._coro.throw(arg)
+ def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]":
+ return self._coro.throw(*args, **kwargs)
def close(self) -> None:
return self._coro.close()
@@ -1156,6 +1213,7 @@ class _RequestContextManager(_BaseRequestContextManager[ClientResponse]):
# explicitly. Otherwise connection error handling should kick in
# and close/recycle the connection as required.
self._resp.release()
+ await self._resp.wait_for_close()
class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):
@@ -1180,7 +1238,7 @@ class _SessionRequestContextManager:
session: ClientSession,
) -> None:
self._coro = coro
- self._resp = None # type: Optional[ClientResponse]
+ self._resp: Optional[ClientResponse] = None
self._session = session
async def __aenter__(self) -> ClientResponse:
@@ -1228,6 +1286,8 @@ def request(
connector: Optional[BaseConnector] = None,
read_bufsize: Optional[int] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
+ max_line_size: int = 8190,
+ max_field_size: int = 8190,
) -> _SessionRequestContextManager:
"""Constructs and sends a request.
@@ -1299,6 +1359,8 @@ def request(
proxy=proxy,
proxy_auth=proxy_auth,
read_bufsize=read_bufsize,
+ max_line_size=max_line_size,
+ max_field_size=max_field_size,
),
session,
)
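Two behavior changes in the client.py hunks above are easy to trip over: the timeout argument must now be a ClientTimeout instance (a bare float raises ValueError at session construction), and raise_for_status may be an async callable that is awaited for every response. A usage sketch; the URL and the check logic are illustrative:

import asyncio
import aiohttp

async def check(resp: aiohttp.ClientResponse) -> None:
    # Custom raise_for_status hook: runs against every response.
    if resp.status >= 400:
        raise RuntimeError(f"bad status {resp.status}")

async def fetch() -> str:
    # ClientTimeout is required here; ClientSession(timeout=60) now raises.
    timeout = aiohttp.ClientTimeout(total=60, connect=5, sock_read=10)
    async with aiohttp.ClientSession(timeout=timeout,
                                     raise_for_status=check) as session:
        async with session.get("https://example.com") as resp:
            return await resp.text()

print(asyncio.run(fetch())[:80])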
diff --git a/contrib/python/aiohttp/aiohttp/client_exceptions.py b/contrib/python/aiohttp/aiohttp/client_exceptions.py
index dd55321054..9b6e44203c 100644
--- a/contrib/python/aiohttp/aiohttp/client_exceptions.py
+++ b/contrib/python/aiohttp/aiohttp/client_exceptions.py
@@ -15,7 +15,7 @@ except ImportError: # pragma: no cover
ssl = SSLContext = None # type: ignore[assignment]
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
else:
RequestInfo = ClientResponse = ConnectionKey = None
@@ -47,9 +47,13 @@ class ClientError(Exception):
class ClientResponseError(ClientError):
- """Connection error during reading response.
+ """Base class for exceptions that occur after getting a response.
- request_info: instance of RequestInfo
+ request_info: An instance of RequestInfo.
+ history: A sequence of responses, if redirects occurred.
+ status: HTTP status code.
+ message: Error message.
+ headers: Response headers.
"""
def __init__(
@@ -154,7 +158,7 @@ class ClientConnectorError(ClientOSError):
"""Client connector error.
Raised in :class:`aiohttp.connector.TCPConnector` if
- connection to proxy can not be established.
+ a connection can not be established.
"""
def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
@@ -176,12 +180,12 @@ class ClientConnectorError(ClientOSError):
return self._conn_key.port
@property
- def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
+ def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]:
return self._conn_key.ssl
def __str__(self) -> str:
return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
- self, self.ssl if self.ssl is not None else "default", self.strerror
+ self, "default" if self.ssl is True else self.ssl, self.strerror
)
# OSError.__reduce__ does too much black magick
@@ -215,7 +219,7 @@ class UnixClientConnectorError(ClientConnectorError):
def __str__(self) -> str:
return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
- self, self.ssl if self.ssl is not None else "default", self.strerror
+ self, "default" if self.ssl is True else self.ssl, self.strerror
)
diff --git a/contrib/python/aiohttp/aiohttp/client_proto.py b/contrib/python/aiohttp/aiohttp/client_proto.py
index f36863b836..723f5aae5f 100644
--- a/contrib/python/aiohttp/aiohttp/client_proto.py
+++ b/contrib/python/aiohttp/aiohttp/client_proto.py
@@ -9,8 +9,14 @@ from .client_exceptions import (
ServerDisconnectedError,
ServerTimeoutError,
)
-from .helpers import BaseTimerContext
+from .helpers import (
+ _EXC_SENTINEL,
+ BaseTimerContext,
+ set_exception,
+ status_code_must_be_empty_body,
+)
from .http import HttpResponseParser, RawResponseMessage
+from .http_exceptions import HttpProcessingError
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
@@ -31,10 +37,12 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
self._tail = b""
self._upgraded = False
- self._parser = None # type: Optional[HttpResponseParser]
+ self._parser: Optional[HttpResponseParser] = None
+
+ self._read_timeout: Optional[float] = None
+ self._read_timeout_handle: Optional[asyncio.TimerHandle] = None
- self._read_timeout = None # type: Optional[float]
- self._read_timeout_handle = None # type: Optional[asyncio.TimerHandle]
+ self._timeout_ceil_threshold: Optional[float] = 5
@property
def upgraded(self) -> bool:
@@ -71,28 +79,50 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
def connection_lost(self, exc: Optional[BaseException]) -> None:
self._drop_timeout()
+ original_connection_error = exc
+ reraised_exc = original_connection_error
+
+ connection_closed_cleanly = original_connection_error is None
+
if self._payload_parser is not None:
- with suppress(Exception):
+ with suppress(Exception): # FIXME: log this somehow?
self._payload_parser.feed_eof()
uncompleted = None
if self._parser is not None:
try:
uncompleted = self._parser.feed_eof()
- except Exception:
+ except Exception as underlying_exc:
if self._payload is not None:
- self._payload.set_exception(
- ClientPayloadError("Response payload is not completed")
+ client_payload_exc_msg = (
+ f"Response payload is not completed: {underlying_exc !r}"
+ )
+ if not connection_closed_cleanly:
+ client_payload_exc_msg = (
+ f"{client_payload_exc_msg !s}. "
+ f"{original_connection_error !r}"
+ )
+ set_exception(
+ self._payload,
+ ClientPayloadError(client_payload_exc_msg),
+ underlying_exc,
)
if not self.is_eof():
- if isinstance(exc, OSError):
- exc = ClientOSError(*exc.args)
- if exc is None:
- exc = ServerDisconnectedError(uncompleted)
+ if isinstance(original_connection_error, OSError):
+ reraised_exc = ClientOSError(*original_connection_error.args)
+ if connection_closed_cleanly:
+ reraised_exc = ServerDisconnectedError(uncompleted)
# assigns self._should_close to True as side effect,
# we do it anyway below
- self.set_exception(exc)
+ underlying_non_eof_exc = (
+ _EXC_SENTINEL
+ if connection_closed_cleanly
+ else original_connection_error
+ )
+ assert underlying_non_eof_exc is not None
+ assert reraised_exc is not None
+ self.set_exception(reraised_exc, underlying_non_eof_exc)
self._should_close = True
self._parser = None
@@ -100,7 +130,7 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
self._payload_parser = None
self._reading_paused = False
- super().connection_lost(exc)
+ super().connection_lost(reraised_exc)
def eof_received(self) -> None:
# should call parser.feed_eof() most likely
@@ -114,10 +144,14 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
super().resume_reading()
self._reschedule_timeout()
- def set_exception(self, exc: BaseException) -> None:
+ def set_exception(
+ self,
+ exc: BaseException,
+ exc_cause: BaseException = _EXC_SENTINEL,
+ ) -> None:
self._should_close = True
self._drop_timeout()
- super().set_exception(exc)
+ super().set_exception(exc, exc_cause)
def set_parser(self, parser: Any, payload: Any) -> None:
# TODO: actual types are:
@@ -142,12 +176,16 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
read_until_eof: bool = False,
auto_decompress: bool = True,
read_timeout: Optional[float] = None,
- read_bufsize: int = 2 ** 16,
+ read_bufsize: int = 2**16,
+ timeout_ceil_threshold: float = 5,
+ max_line_size: int = 8190,
+ max_field_size: int = 8190,
) -> None:
self._skip_payload = skip_payload
self._read_timeout = read_timeout
- self._reschedule_timeout()
+
+ self._timeout_ceil_threshold = timeout_ceil_threshold
self._parser = HttpResponseParser(
self,
@@ -158,6 +196,8 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
response_with_body=not skip_payload,
read_until_eof=read_until_eof,
auto_decompress=auto_decompress,
+ max_line_size=max_line_size,
+ max_field_size=max_field_size,
)
if self._tail:
@@ -181,11 +221,14 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
else:
self._read_timeout_handle = None
+ def start_timeout(self) -> None:
+ self._reschedule_timeout()
+
def _on_read_timeout(self) -> None:
exc = ServerTimeoutError("Timeout on reading data from socket")
self.set_exception(exc)
if self._payload is not None:
- self._payload.set_exception(exc)
+ set_exception(self._payload, exc)
def data_received(self, data: bytes) -> None:
self._reschedule_timeout()
@@ -211,14 +254,14 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
# parse http messages
try:
messages, upgraded, tail = self._parser.feed_data(data)
- except BaseException as exc:
+ except BaseException as underlying_exc:
if self.transport is not None:
# connection.release() could be called BEFORE
# data_received(), the transport is already
# closed in this case
self.transport.close()
# should_close is True after the call
- self.set_exception(exc)
+ self.set_exception(HttpProcessingError(), underlying_exc)
return
self._upgraded = upgraded
@@ -230,7 +273,9 @@ class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamRe
self._payload = payload
- if self._skip_payload or message.code in (204, 304):
+ if self._skip_payload or status_code_must_be_empty_body(
+ message.code
+ ):
self.feed_data((message, EMPTY_PAYLOAD), 0)
else:
self.feed_data((message, payload), 0)
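The status_code_must_be_empty_body() helper referenced above replaces the hard-coded (204, 304) tuple. A hedged sketch of such a predicate, assuming it follows the RFC 9112 empty-body rules (the real helper lives in aiohttp.helpers and may differ in detail):

def status_code_must_be_empty_body(code: int) -> bool:
    # 1xx (informational), 204 (No Content) and 304 (Not Modified)
    # responses never carry a message body.
    return code in {204, 304} or 100 <= code < 200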
diff --git a/contrib/python/aiohttp/aiohttp/client_reqrep.py b/contrib/python/aiohttp/aiohttp/client_reqrep.py
index 343002517b..afe719da16 100644
--- a/contrib/python/aiohttp/aiohttp/client_reqrep.py
+++ b/contrib/python/aiohttp/aiohttp/client_reqrep.py
@@ -1,5 +1,6 @@
import asyncio
import codecs
+import contextlib
import functools
import io
import re
@@ -12,6 +13,7 @@ from types import MappingProxyType, TracebackType
from typing import (
TYPE_CHECKING,
Any,
+ Callable,
Dict,
Iterable,
List,
@@ -37,18 +39,27 @@ from .client_exceptions import (
InvalidURL,
ServerFingerprintMismatch,
)
+from .compression_utils import HAS_BROTLI
from .formdata import FormData
from .helpers import (
- PY_36,
BaseTimerContext,
BasicAuth,
HeadersMixin,
TimerNoop,
+ basicauth_from_netrc,
+ netrc_from_env,
noop,
reify,
+ set_exception,
set_result,
)
-from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter
+from .http import (
+ SERVER_SOFTWARE,
+ HttpVersion,
+ HttpVersion10,
+ HttpVersion11,
+ StreamWriter,
+)
from .log import client_logger
from .streams import StreamReader
from .typedefs import (
@@ -66,24 +77,24 @@ except ImportError: # pragma: no cover
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]
-try:
- import cchardet as chardet
-except ImportError: # pragma: no cover
- import charset_normalizer as chardet # type: ignore[no-redef]
-
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .client import ClientSession
from .connector import Connection
from .tracing import Trace
+_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
+def _gen_default_accept_encoding() -> str:
+ return "gzip, deflate, br" if HAS_BROTLI else "gzip, deflate"
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
type: Optional[str]
@@ -140,22 +151,24 @@ class Fingerprint:
if ssl is not None:
SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
else: # pragma: no cover
- SSL_ALLOWED_TYPES = type(None)
+ SSL_ALLOWED_TYPES = (bool, type(None))
def _merge_ssl_params(
- ssl: Union["SSLContext", bool, Fingerprint, None],
+ ssl: Union["SSLContext", bool, Fingerprint],
verify_ssl: Optional[bool],
ssl_context: Optional["SSLContext"],
fingerprint: Optional[bytes],
-) -> Union["SSLContext", bool, Fingerprint, None]:
+) -> Union["SSLContext", bool, Fingerprint]:
+ if ssl is None:
+ ssl = True # Double check for backwards compatibility
if verify_ssl is not None and not verify_ssl:
warnings.warn(
"verify_ssl is deprecated, use ssl=False instead",
DeprecationWarning,
stacklevel=3,
)
- if ssl is not None:
+ if ssl is not True:
raise ValueError(
"verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive"
@@ -168,7 +181,7 @@ def _merge_ssl_params(
DeprecationWarning,
stacklevel=3,
)
- if ssl is not None:
+ if ssl is not True:
raise ValueError(
"verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive"
@@ -181,7 +194,7 @@ def _merge_ssl_params(
DeprecationWarning,
stacklevel=3,
)
- if ssl is not None:
+ if ssl is not True:
raise ValueError(
"verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive"
@@ -203,7 +216,7 @@ class ConnectionKey:
host: str
port: Optional[int]
is_ssl: bool
- ssl: Union[SSLContext, None, bool, Fingerprint]
+ ssl: Union[SSLContext, bool, Fingerprint]
proxy: Optional[URL]
proxy_auth: Optional[BasicAuth]
proxy_headers_hash: Optional[int] # hash(CIMultiDict)
@@ -229,14 +242,14 @@ class ClientRequest:
DEFAULT_HEADERS = {
hdrs.ACCEPT: "*/*",
- hdrs.ACCEPT_ENCODING: "gzip, deflate",
+ hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
}
body = b""
auth = None
response = None
- _writer = None # async task for streaming data
+ __writer = None # async task for streaming data
_continue = None # waiter future for '100 Continue' response
# N.B.
@@ -265,14 +278,22 @@ class ClientRequest:
proxy_auth: Optional[BasicAuth] = None,
timer: Optional[BaseTimerContext] = None,
session: Optional["ClientSession"] = None,
- ssl: Union[SSLContext, bool, Fingerprint, None] = None,
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
proxy_headers: Optional[LooseHeaders] = None,
traces: Optional[List["Trace"]] = None,
+ trust_env: bool = False,
+ server_hostname: Optional[str] = None,
):
-
if loop is None:
loop = asyncio.get_event_loop()
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method)
+ if match:
+ raise ValueError(
+ f"Method cannot contain non-token characters {method!r} "
+ "(found at least {match.group()!r})"
+ )
+
assert isinstance(url, URL), url
assert isinstance(proxy, (URL, type(None))), proxy
# FIXME: session is None in tests only, need to fix tests
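The token check above is what blocks CRLF/header smuggling through the HTTP method. A small demonstration using the same pattern defined earlier in this hunk (the shown behaviour is an assumption of how the regex is meant to be used):

import re

_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")

assert _CONTAINS_CONTROL_CHAR_RE.search("GET") is None          # valid token
assert _CONTAINS_CONTROL_CHAR_RE.search("GET /x HTTP/1.1\r\n")  # injection caught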
@@ -294,9 +315,10 @@ class ClientRequest:
real_response_class = ClientResponse
else:
real_response_class = response_class
- self.response_class = real_response_class # type: Type[ClientResponse]
+ self.response_class: Type[ClientResponse] = real_response_class
self._timer = timer if timer is not None else TimerNoop()
- self._ssl = ssl
+ self._ssl = ssl if ssl is not None else True
+ self.server_hostname = server_hostname
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
@@ -307,7 +329,7 @@ class ClientRequest:
self.update_auto_headers(skip_auto_headers)
self.update_cookies(cookies)
self.update_content_encoding(data)
- self.update_auth(auth)
+ self.update_auth(auth, trust_env)
self.update_proxy(proxy, proxy_auth, proxy_headers)
self.update_body_from_data(data)
@@ -318,20 +340,33 @@ class ClientRequest:
traces = []
self._traces = traces
+ def __reset_writer(self, _: object = None) -> None:
+ self.__writer = None
+
+ @property
+ def _writer(self) -> Optional["asyncio.Task[None]"]:
+ return self.__writer
+
+ @_writer.setter
+ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
+ if self.__writer is not None:
+ self.__writer.remove_done_callback(self.__reset_writer)
+ self.__writer = writer
+ if writer is not None:
+ writer.add_done_callback(self.__reset_writer)
+
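The _writer property above installs a done-callback so the task reference clears itself; close()/terminate() therefore never await or cancel a stale task. A self-contained toy mirroring that pattern (illustrative only):

import asyncio
from typing import Optional


class Holder:
    def __init__(self) -> None:
        self.__writer: Optional["asyncio.Task[None]"] = None

    def __reset(self, _: object = None) -> None:
        self.__writer = None

    @property
    def writer(self) -> Optional["asyncio.Task[None]"]:
        return self.__writer

    @writer.setter
    def writer(self, task: Optional["asyncio.Task[None]"]) -> None:
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset)
        self.__writer = task
        if task is not None:
            task.add_done_callback(self.__reset)


async def main() -> None:
    h = Holder()
    h.writer = asyncio.create_task(asyncio.sleep(0))
    await asyncio.sleep(0.01)  # let the task finish and its callbacks run
    assert h.writer is None    # the reference cleared itself


asyncio.run(main())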
def is_ssl(self) -> bool:
return self.url.scheme in ("https", "wss")
@property
- def ssl(self) -> Union["SSLContext", None, bool, Fingerprint]:
+ def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
return self._ssl
@property
def connection_key(self) -> ConnectionKey:
proxy_headers = self.proxy_headers
if proxy_headers:
- h = hash(
- tuple((k, v) for k, v in proxy_headers.items())
- ) # type: Optional[int]
+ h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items()))
else:
h = None
return ConnectionKey(
@@ -356,7 +391,7 @@ class ClientRequest:
@property
def request_info(self) -> RequestInfo:
- headers = CIMultiDictProxy(self.headers) # type: CIMultiDictProxy[str]
+ headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
return RequestInfo(self.url, self.method, headers, self.original_url)
def update_host(self, url: URL) -> None:
@@ -387,12 +422,14 @@ class ClientRequest:
def update_headers(self, headers: Optional[LooseHeaders]) -> None:
"""Update request headers."""
- self.headers = CIMultiDict() # type: CIMultiDict[str]
+ self.headers: CIMultiDict[str] = CIMultiDict()
# add host
netloc = cast(str, self.url.raw_host)
if helpers.is_ipv6_address(netloc):
netloc = f"[{netloc}]"
+ # See https://github.com/aio-libs/aiohttp/issues/3636.
+ netloc = netloc.rstrip(".")
if self.url.port is not None and not self.url.is_default_port():
netloc += ":" + str(self.url.port)
self.headers[hdrs.HOST] = netloc
@@ -427,7 +464,7 @@ class ClientRequest:
if not cookies:
return
- c = SimpleCookie() # type: SimpleCookie[str]
+ c = SimpleCookie()
if hdrs.COOKIE in self.headers:
c.load(self.headers.get(hdrs.COOKIE, ""))
del self.headers[hdrs.COOKIE]
@@ -486,10 +523,14 @@ class ClientRequest:
if hdrs.CONTENT_LENGTH not in self.headers:
self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
- def update_auth(self, auth: Optional[BasicAuth]) -> None:
+ def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
"""Set basic auth."""
if auth is None:
auth = self.auth
+ if auth is None and trust_env and self.url.host is not None:
+ netrc_obj = netrc_from_env()
+ with contextlib.suppress(LookupError):
+ auth = basicauth_from_netrc(netrc_obj, self.url.host)
if auth is None:
return
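The trust_env branch falls back to ~/.netrc credentials. A hedged sketch of the same lookup using the stdlib netrc module directly (aiohttp wraps this in netrc_from_env/basicauth_from_netrc; the NETRC env-var handling below is an assumption):

import netrc
import os
from typing import Optional, Tuple


def auth_from_netrc(host: str) -> Optional[Tuple[str, str]]:
    path = os.environ.get("NETRC", os.path.expanduser("~/.netrc"))
    try:
        entry = netrc.netrc(path).authenticators(host)
    except (FileNotFoundError, netrc.NetrcParseError):
        return None
    if entry is None:
        return None
    login, _, password = entry
    return (login or "", password or "")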
@@ -573,8 +614,11 @@ class ClientRequest:
"""Support coroutines that yields bytes objects."""
# 100 response
if self._continue is not None:
- await writer.drain()
- await self._continue
+ try:
+ await writer.drain()
+ await self._continue
+ except asyncio.CancelledError:
+ return
protocol = conn.protocol
assert protocol is not None
@@ -587,22 +631,32 @@ class ClientRequest:
for chunk in self.body:
await writer.write(chunk) # type: ignore[arg-type]
+ except OSError as underlying_exc:
+ reraised_exc = underlying_exc
+
+ exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
+ underlying_exc, asyncio.TimeoutError
+ )
+ if exc_is_not_timeout:
+ reraised_exc = ClientOSError(
+ underlying_exc.errno,
+ f"Can not write request body for {self.url !s}",
+ )
+ set_exception(protocol, reraised_exc, underlying_exc)
+ except asyncio.CancelledError:
await writer.write_eof()
- except OSError as exc:
- new_exc = ClientOSError(
- exc.errno, "Can not write request body for %s" % self.url
+ except Exception as underlying_exc:
+ set_exception(
+ protocol,
+ ClientConnectionError(
+ f"Failed to send bytes into the underlying connection {conn !s}",
+ ),
+ underlying_exc,
)
- new_exc.__context__ = exc
- new_exc.__cause__ = exc
- protocol.set_exception(new_exc)
- except asyncio.CancelledError as exc:
- if not conn.closed:
- protocol.set_exception(exc)
- except Exception as exc:
- protocol.set_exception(exc)
- finally:
- self._writer = None
+ else:
+ await writer.write_eof()
+ protocol.start_timeout()
async def send(self, conn: "Connection") -> "ClientResponse":
# Specify request target:
@@ -663,8 +717,8 @@ class ClientRequest:
self.headers[hdrs.CONNECTION] = connection
# status + headers
- status_line = "{0} {1} HTTP/{2[0]}.{2[1]}".format(
- self.method, path, self.version
+ status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format(
+ self.method, path, v=self.version
)
await writer.write_headers(status_line, self.headers)
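For illustration, the reworked status line renders like this (HttpVersion is assumed to be the major/minor namedtuple from aiohttp.http):

from collections import namedtuple

HttpVersion = namedtuple("HttpVersion", ["major", "minor"])  # stand-in for aiohttp.http
line = "{0} {1} HTTP/{v.major}.{v.minor}".format("GET", "/index.html", v=HttpVersion(1, 1))
assert line == "GET /index.html HTTP/1.1"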
@@ -687,15 +741,14 @@ class ClientRequest:
async def close(self) -> None:
if self._writer is not None:
- try:
+ with contextlib.suppress(asyncio.CancelledError):
await self._writer
- finally:
- self._writer = None
def terminate(self) -> None:
if self._writer is not None:
if not self.loop.is_closed():
self._writer.cancel()
+ self._writer.remove_done_callback(self.__reset_writer)
self._writer = None
async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
@@ -711,21 +764,25 @@ class ClientRequest:
class ClientResponse(HeadersMixin):
+ # Some of these attributes are None when created,
+ # but will be set by the start() method.
+ # As the end user will likely never see the None values, we cheat the types below.
# from the Status-Line of the response
- version = None # HTTP-Version
- status = None # type: int # Status-Code
- reason = None # Reason-Phrase
+ version: Optional[HttpVersion] = None # HTTP-Version
+ status: int = None # type: ignore[assignment] # Status-Code
+ reason: Optional[str] = None # Reason-Phrase
- content = None # type: StreamReader # Payload stream
- _headers = None # type: CIMultiDictProxy[str] # Response headers
- _raw_headers = None # type: RawHeaders # Response raw headers
+ content: StreamReader = None # type: ignore[assignment] # Payload stream
+ _headers: CIMultiDictProxy[str] = None # type: ignore[assignment]
+ _raw_headers: RawHeaders = None # type: ignore[assignment]
_connection = None # current connection
- _source_traceback = None
- # setted up by ClientRequest after ClientResponse object creation
+ _source_traceback: Optional[traceback.StackSummary] = None
+ # set up by ClientRequest after ClientResponse object creation
# the post-init stage avoids changing the ctor signature
_closed = True # to allow __del__ for non-initialized properly response
_released = False
+ __writer = None
def __init__(
self,
@@ -743,25 +800,49 @@ class ClientResponse(HeadersMixin):
assert isinstance(url, URL)
self.method = method
- self.cookies = SimpleCookie() # type: SimpleCookie[str]
+ self.cookies = SimpleCookie()
self._real_url = url
self._url = url.with_fragment(None)
- self._body = None # type: Any
- self._writer = writer # type: Optional[asyncio.Task[None]]
+ self._body: Any = None
+ self._writer: Optional[asyncio.Task[None]] = writer
self._continue = continue100 # None by default
self._closed = True
- self._history = () # type: Tuple[ClientResponse, ...]
+ self._history: Tuple[ClientResponse, ...] = ()
self._request_info = request_info
self._timer = timer if timer is not None else TimerNoop()
- self._cache = {} # type: Dict[str, Any]
+ self._cache: Dict[str, Any] = {}
self._traces = traces
self._loop = loop
# store a reference to session #1985
- self._session = session # type: Optional[ClientSession]
+ self._session: Optional[ClientSession] = session
+ # Save reference to _resolve_charset, so that get_encoding() will still
+ # work after the response has finished reading the body.
+ if session is None:
+ # TODO: Fix session=None in tests (see ClientRequest.__init__).
+ self._resolve_charset: Callable[
+ ["ClientResponse", bytes], str
+ ] = lambda *_: "utf-8"
+ else:
+ self._resolve_charset = session._resolve_charset
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
+ def __reset_writer(self, _: object = None) -> None:
+ self.__writer = None
+
+ @property
+ def _writer(self) -> Optional["asyncio.Task[None]"]:
+ return self.__writer
+
+ @_writer.setter
+ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
+ if self.__writer is not None:
+ self.__writer.remove_done_callback(self.__reset_writer)
+ self.__writer = writer
+ if writer is not None:
+ writer.add_done_callback(self.__reset_writer)
+
@reify
def url(self) -> URL:
return self._url
@@ -811,10 +892,7 @@ class ClientResponse(HeadersMixin):
self._cleanup_writer()
if self._loop.get_debug():
- if PY_36:
- kwargs = {"source": self}
- else:
- kwargs = {}
+ kwargs = {"source": self}
_warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
context = {"client_response": self, "message": "Unclosed response"}
if self._source_traceback:
@@ -829,7 +907,7 @@ class ClientResponse(HeadersMixin):
"ascii", "backslashreplace"
).decode("ascii")
else:
- ascii_encodable_reason = self.reason
+ ascii_encodable_reason = "None"
print(
"<ClientResponse({}) [{} {}]>".format(
ascii_encodable_url, self.status, ascii_encodable_reason
@@ -855,7 +933,7 @@ class ClientResponse(HeadersMixin):
if not links_str:
return MultiDictProxy(MultiDict())
- links = MultiDict() # type: MultiDict[MultiDictProxy[Union[str, URL]]]
+ links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
for val in re.split(r",(?=\s*<)", links_str):
match = re.match(r"\s*<(.*)>(.*)", val)
@@ -865,7 +943,7 @@ class ClientResponse(HeadersMixin):
url, params_str = match.groups()
params = params_str.split(";")[1:]
- link = MultiDict() # type: MultiDict[Union[str, URL]]
+ link: MultiDict[Union[str, URL]] = MultiDict()
for param in params:
match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
@@ -876,11 +954,11 @@ class ClientResponse(HeadersMixin):
link.add(key, value)
- key = link.get("rel", url) # type: ignore[assignment]
+ key = link.get("rel", url)
link.add("url", self.url.join(URL(url)))
- links.add(key, MultiDictProxy(link))
+ links.add(str(key), MultiDictProxy(link))
return MultiDictProxy(links)
@@ -939,20 +1017,14 @@ class ClientResponse(HeadersMixin):
if self._closed:
return
- if self._connection is not None:
- # websocket, protocol could be None because
- # connection could be detached
- if (
- self._connection.protocol is not None
- and self._connection.protocol.upgraded
- ):
- return
-
- self._connection.release()
- self._connection = None
+ # protocol could be None because connection could be detached
+ protocol = self._connection and self._connection.protocol
+ if protocol is not None and protocol.upgraded:
+ return
self._closed = True
self._cleanup_writer()
+ self._release_connection()
@property
def closed(self) -> bool:
@@ -961,30 +1033,24 @@ class ClientResponse(HeadersMixin):
def close(self) -> None:
if not self._released:
self._notify_content()
- if self._closed:
- return
self._closed = True
if self._loop is None or self._loop.is_closed():
return
+ self._cleanup_writer()
if self._connection is not None:
self._connection.close()
self._connection = None
- self._cleanup_writer()
def release(self) -> Any:
if not self._released:
self._notify_content()
- if self._closed:
- return noop()
self._closed = True
- if self._connection is not None:
- self._connection.release()
- self._connection = None
self._cleanup_writer()
+ self._release_connection()
return noop()
@property
@@ -1009,24 +1075,33 @@ class ClientResponse(HeadersMixin):
headers=self.headers,
)
+ def _release_connection(self) -> None:
+ if self._connection is not None:
+ if self._writer is None:
+ self._connection.release()
+ self._connection = None
+ else:
+ self._writer.add_done_callback(lambda f: self._release_connection())
+
+ async def _wait_released(self) -> None:
+ if self._writer is not None:
+ await self._writer
+ self._release_connection()
+
def _cleanup_writer(self) -> None:
if self._writer is not None:
self._writer.cancel()
- self._writer = None
self._session = None
def _notify_content(self) -> None:
content = self.content
if content and content.exception() is None:
- content.set_exception(ClientConnectionError("Connection closed"))
+ set_exception(content, ClientConnectionError("Connection closed"))
self._released = True
async def wait_for_close(self) -> None:
if self._writer is not None:
- try:
- await self._writer
- finally:
- self._writer = None
+ await self._writer
self.release()
async def read(self) -> bytes:
@@ -1041,9 +1116,12 @@ class ClientResponse(HeadersMixin):
except BaseException:
self.close()
raise
- elif self._released:
+ elif self._released: # Response explicitly released
raise ClientConnectionError("Connection closed")
+ protocol = self._connection and self._connection.protocol
+ if protocol is None or not protocol.upgraded:
+ await self._wait_released() # Underlying connection released
return self._body # type: ignore[no-any-return]
def get_encoding(self) -> str:
@@ -1052,27 +1130,22 @@ class ClientResponse(HeadersMixin):
encoding = mimetype.parameters.get("charset")
if encoding:
- try:
- codecs.lookup(encoding)
- except LookupError:
- encoding = None
- if not encoding:
- if mimetype.type == "application" and (
- mimetype.subtype == "json" or mimetype.subtype == "rdap"
- ):
- # RFC 7159 states that the default encoding is UTF-8.
- # RFC 7483 defines application/rdap+json
- encoding = "utf-8"
- elif self._body is None:
- raise RuntimeError(
- "Cannot guess the encoding of " "a not yet read body"
- )
- else:
- encoding = chardet.detect(self._body)["encoding"]
- if not encoding:
- encoding = "utf-8"
+ with contextlib.suppress(LookupError):
+ return codecs.lookup(encoding).name
+
+ if mimetype.type == "application" and (
+ mimetype.subtype == "json" or mimetype.subtype == "rdap"
+ ):
+ # RFC 7159 states that the default encoding is UTF-8.
+ # RFC 7483 defines application/rdap+json
+ return "utf-8"
+
+ if self._body is None:
+ raise RuntimeError(
+ "Cannot compute fallback encoding of a not yet read body"
+ )
- return encoding
+ return self._resolve_charset(self, self._body)
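The rewritten get_encoding() resolves the charset in three steps: a recognizable Content-Type charset wins, JSON/RDAP defaults to UTF-8 per RFC 7159/7483, and anything else defers to the session's resolver. A standalone sketch of that order (illustrative names, not the vendored API):

import codecs
import contextlib
from typing import Callable, Optional


def pick_encoding(
    charset: Optional[str],
    mimetype: str,
    body: bytes,
    resolver: Callable[[bytes], str],
) -> str:
    if charset:
        with contextlib.suppress(LookupError):
            return codecs.lookup(charset).name
    if mimetype in ("application/json", "application/rdap+json"):
        return "utf-8"
    return resolver(body)


assert pick_encoding("UTF8", "text/html", b"", lambda b: "ascii") == "utf-8"
assert pick_encoding(None, "application/json", b"", lambda b: "ascii") == "utf-8"
assert pick_encoding(None, "text/plain", b"\xff", lambda b: "latin-1") == "latin-1"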
async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
"""Read response payload and decode."""
@@ -1131,3 +1204,4 @@ class ClientResponse(HeadersMixin):
# for exceptions, response object can close connection
# if state is broken
self.release()
+ await self.wait_for_close()
diff --git a/contrib/python/aiohttp/aiohttp/client_ws.py b/contrib/python/aiohttp/aiohttp/client_ws.py
index 7c8121f659..d9c74a30f5 100644
--- a/contrib/python/aiohttp/aiohttp/client_ws.py
+++ b/contrib/python/aiohttp/aiohttp/client_ws.py
@@ -1,10 +1,9 @@
"""WebSocket client for asyncio."""
import asyncio
+import sys
from typing import Any, Optional, cast
-import async_timeout
-
from .client_exceptions import ClientError
from .client_reqrep import ClientResponse
from .helpers import call_later, set_result
@@ -25,6 +24,11 @@ from .typedefs import (
JSONEncoder,
)
+if sys.version_info >= (3, 11):
+ import asyncio as async_timeout
+else:
+ import async_timeout
+
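The conditional import means async_timeout.timeout(...) resolves to the stdlib asyncio.timeout() on Python >= 3.11 and to the third-party async_timeout package elsewhere, so call sites stay identical:

import asyncio
import sys

if sys.version_info >= (3, 11):
    import asyncio as async_timeout
else:
    import async_timeout


async def fetch_with_deadline() -> None:
    async with async_timeout.timeout(10):  # same spelling on both branches
        await asyncio.sleep(0)             # placeholder for real I/O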
class ClientWebSocketResponse:
def __init__(
@@ -51,7 +55,7 @@ class ClientWebSocketResponse:
self._protocol = protocol
self._closed = False
self._closing = False
- self._close_code = None # type: Optional[int]
+ self._close_code: Optional[int] = None
self._timeout = timeout
self._receive_timeout = receive_timeout
self._autoclose = autoclose
@@ -62,8 +66,8 @@ class ClientWebSocketResponse:
self._pong_heartbeat = heartbeat / 2.0
self._pong_response_cb: Optional[asyncio.TimerHandle] = None
self._loop = loop
- self._waiting = None # type: Optional[asyncio.Future[bool]]
- self._exception = None # type: Optional[BaseException]
+ self._waiting: Optional[asyncio.Future[bool]] = None
+ self._exception: Optional[BaseException] = None
self._compress = compress
self._client_notakeover = client_notakeover
@@ -83,7 +87,12 @@ class ClientWebSocketResponse:
if self._heartbeat is not None:
self._heartbeat_cb = call_later(
- self._send_heartbeat, self._heartbeat, self._loop
+ self._send_heartbeat,
+ self._heartbeat,
+ self._loop,
+ timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold
+ if self._conn is not None
+ else 5,
)
def _send_heartbeat(self) -> None:
@@ -96,7 +105,12 @@ class ClientWebSocketResponse:
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = call_later(
- self._pong_not_received, self._pong_heartbeat, self._loop
+ self._pong_not_received,
+ self._pong_heartbeat,
+ self._loop,
+ timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold
+ if self._conn is not None
+ else 5,
)
def _pong_not_received(self) -> None:
@@ -167,7 +181,8 @@ class ClientWebSocketResponse:
async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
# we need to break `receive()` cycle first,
# `close()` may be called from different task
- if self._waiting is not None and not self._closed:
+ if self._waiting is not None and not self._closing:
+ self._closing = True
self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
await self._waiting
@@ -186,7 +201,7 @@ class ClientWebSocketResponse:
self._response.close()
return True
- if self._closing:
+ if self._close_code:
self._response.close()
return True
diff --git a/contrib/python/aiohttp/aiohttp/compression_utils.py b/contrib/python/aiohttp/aiohttp/compression_utils.py
new file mode 100644
index 0000000000..9631d377e9
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/compression_utils.py
@@ -0,0 +1,157 @@
+import asyncio
+import zlib
+from concurrent.futures import Executor
+from typing import Optional, cast
+
+try:
+ try:
+ import brotlicffi as brotli
+ except ImportError:
+ import brotli
+
+ HAS_BROTLI = True
+except ImportError: # pragma: no cover
+ HAS_BROTLI = False
+
+MAX_SYNC_CHUNK_SIZE = 1024
+
+
+def encoding_to_mode(
+ encoding: Optional[str] = None,
+ suppress_deflate_header: bool = False,
+) -> int:
+ if encoding == "gzip":
+ return 16 + zlib.MAX_WBITS
+
+ return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS
+
+
+class ZlibBaseHandler:
+ def __init__(
+ self,
+ mode: int,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ):
+ self._mode = mode
+ self._executor = executor
+ self._max_sync_chunk_size = max_sync_chunk_size
+
+
+class ZLibCompressor(ZlibBaseHandler):
+ def __init__(
+ self,
+ encoding: Optional[str] = None,
+ suppress_deflate_header: bool = False,
+ level: Optional[int] = None,
+ wbits: Optional[int] = None,
+ strategy: int = zlib.Z_DEFAULT_STRATEGY,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ):
+ super().__init__(
+ mode=encoding_to_mode(encoding, suppress_deflate_header)
+ if wbits is None
+ else wbits,
+ executor=executor,
+ max_sync_chunk_size=max_sync_chunk_size,
+ )
+ if level is None:
+ self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
+ else:
+ self._compressor = zlib.compressobj(
+ wbits=self._mode, strategy=strategy, level=level
+ )
+ self._compress_lock = asyncio.Lock()
+
+ def compress_sync(self, data: bytes) -> bytes:
+ return self._compressor.compress(data)
+
+ async def compress(self, data: bytes) -> bytes:
+ async with self._compress_lock:
+ # To ensure the stream is consistent in the event
+ # there are multiple writers, we need to lock
+ # the compressor so that only one writer can
+ # compress at a time.
+ if (
+ self._max_sync_chunk_size is not None
+ and len(data) > self._max_sync_chunk_size
+ ):
+ return await asyncio.get_event_loop().run_in_executor(
+ self._executor, self.compress_sync, data
+ )
+ return self.compress_sync(data)
+
+ def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
+ return self._compressor.flush(mode)
+
+
+class ZLibDecompressor(ZlibBaseHandler):
+ def __init__(
+ self,
+ encoding: Optional[str] = None,
+ suppress_deflate_header: bool = False,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ):
+ super().__init__(
+ mode=encoding_to_mode(encoding, suppress_deflate_header),
+ executor=executor,
+ max_sync_chunk_size=max_sync_chunk_size,
+ )
+ self._decompressor = zlib.decompressobj(wbits=self._mode)
+
+ def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
+ return self._decompressor.decompress(data, max_length)
+
+ async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
+ if (
+ self._max_sync_chunk_size is not None
+ and len(data) > self._max_sync_chunk_size
+ ):
+ return await asyncio.get_event_loop().run_in_executor(
+ self._executor, self.decompress_sync, data, max_length
+ )
+ return self.decompress_sync(data, max_length)
+
+ def flush(self, length: int = 0) -> bytes:
+ return (
+ self._decompressor.flush(length)
+ if length > 0
+ else self._decompressor.flush()
+ )
+
+ @property
+ def eof(self) -> bool:
+ return self._decompressor.eof
+
+ @property
+ def unconsumed_tail(self) -> bytes:
+ return self._decompressor.unconsumed_tail
+
+ @property
+ def unused_data(self) -> bytes:
+ return self._decompressor.unused_data
+
+
+class BrotliDecompressor:
+ # Supports both 'brotlipy' and 'Brotli' packages
+ # since they share an import name. The top branches
+ # are for 'brotlipy' and bottom branches for 'Brotli'
+ def __init__(self) -> None:
+ if not HAS_BROTLI:
+ raise RuntimeError(
+ "The brotli decompression is not available. "
+ "Please install `Brotli` module"
+ )
+ self._obj = brotli.Decompressor()
+
+ def decompress_sync(self, data: bytes) -> bytes:
+ if hasattr(self._obj, "decompress"):
+ return cast(bytes, self._obj.decompress(data))
+ return cast(bytes, self._obj.process(data))
+
+ def flush(self) -> bytes:
+ if hasattr(self._obj, "flush"):
+ return cast(bytes, self._obj.flush())
+ return b""
diff --git a/contrib/python/aiohttp/aiohttp/connector.py b/contrib/python/aiohttp/aiohttp/connector.py
index 4c9a951d6e..f95ebe84c6 100644
--- a/contrib/python/aiohttp/aiohttp/connector.py
+++ b/contrib/python/aiohttp/aiohttp/connector.py
@@ -6,6 +6,7 @@ import traceback
import warnings
from collections import defaultdict, deque
from contextlib import suppress
+from http import HTTPStatus
from http.cookies import SimpleCookie
from itertools import cycle, islice
from time import monotonic
@@ -19,6 +20,7 @@ from typing import (
Dict,
Iterator,
List,
+ Literal,
Optional,
Set,
Tuple,
@@ -45,15 +47,7 @@ from .client_exceptions import (
)
from .client_proto import ResponseHandler
from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
-from .helpers import (
- PY_36,
- ceil_timeout,
- get_running_loop,
- is_ip_address,
- noop,
- sentinel,
-)
-from .http import RESPONSES
+from .helpers import ceil_timeout, get_running_loop, is_ip_address, noop, sentinel
from .locks import EventResultOrError
from .resolver import DefaultResolver
@@ -69,7 +63,7 @@ except ImportError: # pragma: no cover
__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .client import ClientTimeout
from .client_reqrep import ConnectionKey
from .tracing import Trace
@@ -110,8 +104,8 @@ class Connection:
self._key = key
self._connector = connector
self._loop = loop
- self._protocol = protocol # type: Optional[ResponseHandler]
- self._callbacks = [] # type: List[Callable[[], None]]
+ self._protocol: Optional[ResponseHandler] = protocol
+ self._callbacks: List[Callable[[], None]] = []
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
@@ -121,10 +115,7 @@ class Connection:
def __del__(self, _warnings: Any = warnings) -> None:
if self._protocol is not None:
- if PY_36:
- kwargs = {"source": self}
- else:
- kwargs = {}
+ kwargs = {"source": self}
_warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
if self._loop.is_closed():
return
@@ -136,6 +127,10 @@ class Connection:
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
+ def __bool__(self) -> Literal[True]:
+ """Force subclasses to not be falsy, to make checks simpler."""
+ return True
+
@property
def loop(self) -> asyncio.AbstractEventLoop:
warnings.warn(
@@ -202,6 +197,8 @@ class BaseConnector:
limit_per_host - Number of simultaneous connections to one host.
enable_cleanup_closed - Enables clean-up closed ssl transports.
Disabled by default.
+ timeout_ceil_threshold - Timeout values above this threshold are
+ rounded up ("ceiled") to whole seconds.
loop - Optional event loop.
"""
@@ -220,6 +217,7 @@ class BaseConnector:
limit_per_host: int = 0,
enable_cleanup_closed: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
+ timeout_ceil_threshold: float = 5,
) -> None:
if force_close:
@@ -232,20 +230,19 @@ class BaseConnector:
keepalive_timeout = 15.0
loop = get_running_loop(loop)
+ self._timeout_ceil_threshold = timeout_ceil_threshold
self._closed = False
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
- self._conns = (
- {}
- ) # type: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]]
+ self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {}
self._limit = limit
self._limit_per_host = limit_per_host
- self._acquired = set() # type: Set[ResponseHandler]
- self._acquired_per_host = defaultdict(
- set
- ) # type: DefaultDict[ConnectionKey, Set[ResponseHandler]]
+ self._acquired: Set[ResponseHandler] = set()
+ self._acquired_per_host: DefaultDict[
+ ConnectionKey, Set[ResponseHandler]
+ ] = defaultdict(set)
self._keepalive_timeout = cast(float, keepalive_timeout)
self._force_close = force_close
@@ -255,7 +252,7 @@ class BaseConnector:
self._loop = loop
self._factory = functools.partial(ResponseHandler, loop=loop)
- self.cookies = SimpleCookie() # type: SimpleCookie[str]
+ self.cookies = SimpleCookie()
# start keep-alive connection cleanup task
self._cleanup_handle: Optional[asyncio.TimerHandle] = None
@@ -263,7 +260,7 @@ class BaseConnector:
# start cleanup closed transports task
self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
self._cleanup_closed_disabled = not enable_cleanup_closed
- self._cleanup_closed_transports = [] # type: List[Optional[asyncio.Transport]]
+ self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
self._cleanup_closed()
def __del__(self, _warnings: Any = warnings) -> None:
@@ -276,10 +273,7 @@ class BaseConnector:
self._close()
- if PY_36:
- kwargs = {"source": self}
- else:
- kwargs = {}
+ kwargs = {"source": self}
_warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
context = {
"connector": self,
@@ -292,14 +286,14 @@ class BaseConnector:
def __enter__(self) -> "BaseConnector":
warnings.warn(
- '"witn Connector():" is deprecated, '
+ '"with Connector():" is deprecated, '
'use "async with Connector():" instead',
DeprecationWarning,
)
return self
def __exit__(self, *exc: Any) -> None:
- self.close()
+ self._close()
async def __aenter__(self) -> "BaseConnector":
return self
@@ -373,7 +367,11 @@ class BaseConnector:
if self._conns:
self._cleanup_handle = helpers.weakref_handle(
- self, "_cleanup", timeout, self._loop
+ self,
+ "_cleanup",
+ timeout,
+ self._loop,
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
)
def _drop_acquired_per_host(
@@ -403,7 +401,11 @@ class BaseConnector:
if not self._cleanup_closed_disabled:
self._cleanup_closed_handle = helpers.weakref_handle(
- self, "_cleanup_closed", self._cleanup_closed_period, self._loop
+ self,
+ "_cleanup_closed",
+ self._cleanup_closed_period,
+ self._loop,
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
)
def close(self) -> Awaitable[None]:
@@ -490,7 +492,7 @@ class BaseConnector:
return available
async def connect(
- self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
) -> Connection:
"""Get from pool or create new connection."""
key = req.connection_key
@@ -607,7 +609,7 @@ class BaseConnector:
"""
Iterates over all waiters until one to be released is found.
- The one to be released is not finsihed and
+ The one to be released is not finished and
belongs to a host that has available connections.
"""
if not self._waiters:
@@ -674,21 +676,23 @@ class BaseConnector:
if self._cleanup_handle is None:
self._cleanup_handle = helpers.weakref_handle(
- self, "_cleanup", self._keepalive_timeout, self._loop
+ self,
+ "_cleanup",
+ self._keepalive_timeout,
+ self._loop,
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
)
async def _create_connection(
- self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
raise NotImplementedError()
class _DNSCacheTable:
def __init__(self, ttl: Optional[float] = None) -> None:
- self._addrs_rr = (
- {}
- ) # type: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]]
- self._timestamps = {} # type: Dict[Tuple[str, int], float]
+ self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {}
+ self._timestamps: Dict[Tuple[str, int], float] = {}
self._ttl = ttl
def __contains__(self, host: object) -> bool:
@@ -697,13 +701,13 @@ class _DNSCacheTable:
def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None:
self._addrs_rr[key] = (cycle(addrs), len(addrs))
- if self._ttl:
+ if self._ttl is not None:
self._timestamps[key] = monotonic()
def remove(self, key: Tuple[str, int]) -> None:
self._addrs_rr.pop(key, None)
- if self._ttl:
+ if self._ttl is not None:
self._timestamps.pop(key, None)
def clear(self) -> None:
@@ -758,7 +762,7 @@ class TCPConnector(BaseConnector):
ttl_dns_cache: Optional[int] = 10,
family: int = 0,
ssl_context: Optional[SSLContext] = None,
- ssl: Union[None, bool, Fingerprint, SSLContext] = None,
+ ssl: Union[bool, Fingerprint, SSLContext] = True,
local_addr: Optional[Tuple[str, int]] = None,
resolver: Optional[AbstractResolver] = None,
keepalive_timeout: Union[None, float, object] = sentinel,
@@ -767,6 +771,7 @@ class TCPConnector(BaseConnector):
limit_per_host: int = 0,
enable_cleanup_closed: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
+ timeout_ceil_threshold: float = 5,
):
super().__init__(
keepalive_timeout=keepalive_timeout,
@@ -775,6 +780,7 @@ class TCPConnector(BaseConnector):
limit_per_host=limit_per_host,
enable_cleanup_closed=enable_cleanup_closed,
loop=loop,
+ timeout_ceil_threshold=timeout_ceil_threshold,
)
self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
@@ -784,9 +790,7 @@ class TCPConnector(BaseConnector):
self._use_dns_cache = use_dns_cache
self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
- self._throttle_dns_events = (
- {}
- ) # type: Dict[Tuple[str, int], EventResultOrError]
+ self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {}
self._family = family
self._local_addr = local_addr
@@ -821,6 +825,7 @@ class TCPConnector(BaseConnector):
async def _resolve_host(
self, host: str, port: int, traces: Optional[List["Trace"]] = None
) -> List[Dict[str, Any]]:
+ """Resolve host and return list of addresses."""
if is_ip_address(host):
return [
{
@@ -848,8 +853,7 @@ class TCPConnector(BaseConnector):
return res
key = (host, port)
-
- if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)):
+ if key in self._cached_hosts and not self._cached_hosts.expired(key):
# get result early, before any await (#4014)
result = self._cached_hosts.next_addrs(key)
@@ -858,6 +862,39 @@ class TCPConnector(BaseConnector):
await trace.send_dns_cache_hit(host)
return result
+ #
+ # If multiple connectors are resolving the same host, we wait
+ # for the first one to resolve and then use the result for all of them.
+ # We use a throttle event to ensure that we only resolve the host once
+ # and then use the result for all the waiters.
+ #
+ # In this case we need to create a task to ensure that we can shield
+ # the task from cancellation as cancelling this lookup should not cancel
+ # the underlying lookup or else the cancel event will get broadcast to
+ # all the waiters across all connections.
+ #
+ resolved_host_task = asyncio.create_task(
+ self._resolve_host_with_throttle(key, host, port, traces)
+ )
+ try:
+ return await asyncio.shield(resolved_host_task)
+ except asyncio.CancelledError:
+
+ def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
+ with suppress(Exception, asyncio.CancelledError):
+ fut.result()
+
+ resolved_host_task.add_done_callback(drop_exception)
+ raise
+
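The shield-plus-done-callback pattern above, reduced to its essence: cancelling one awaiting request must neither cancel the shared lookup task nor leave its exception unretrieved. A minimal sketch:

import asyncio
from contextlib import suppress


async def shielded(coro) -> object:
    task = asyncio.ensure_future(coro)
    try:
        return await asyncio.shield(task)
    except asyncio.CancelledError:
        # Keep the shared task alive, but consume its eventual result or
        # exception so the loop never logs "exception was never retrieved".
        def drop_exception(fut: "asyncio.Future[object]") -> None:
            with suppress(Exception, asyncio.CancelledError):
                fut.result()

        task.add_done_callback(drop_exception)
        raise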
+ async def _resolve_host_with_throttle(
+ self,
+ key: Tuple[str, int],
+ host: str,
+ port: int,
+ traces: Optional[List["Trace"]],
+ ) -> List[Dict[str, Any]]:
+ """Resolve host with a dns events throttle."""
if key in self._throttle_dns_events:
# get event early, before any await (#4014)
event = self._throttle_dns_events[key]
@@ -895,7 +932,7 @@ class TCPConnector(BaseConnector):
return self._cached_hosts.next_addrs(key)
async def _create_connection(
- self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
"""Create connection.
@@ -931,7 +968,7 @@ class TCPConnector(BaseConnector):
sslcontext.set_default_verify_paths()
return sslcontext
- def _get_ssl_context(self, req: "ClientRequest") -> Optional[SSLContext]:
+ def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]:
"""Logic to get the correct SSL context
0. if req.ssl is false, return None
@@ -951,20 +988,20 @@ class TCPConnector(BaseConnector):
sslcontext = req.ssl
if isinstance(sslcontext, ssl.SSLContext):
return sslcontext
- if sslcontext is not None:
+ if sslcontext is not True:
# not verified or fingerprinted
return self._make_ssl_context(False)
sslcontext = self._ssl
if isinstance(sslcontext, ssl.SSLContext):
return sslcontext
- if sslcontext is not None:
+ if sslcontext is not True:
# not verified or fingerprinted
return self._make_ssl_context(False)
return self._make_ssl_context(True)
else:
return None
- def _get_fingerprint(self, req: "ClientRequest") -> Optional["Fingerprint"]:
+ def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]:
ret = req.ssl
if isinstance(ret, Fingerprint):
return ret
@@ -976,27 +1013,30 @@ class TCPConnector(BaseConnector):
async def _wrap_create_connection(
self,
*args: Any,
- req: "ClientRequest",
+ req: ClientRequest,
timeout: "ClientTimeout",
client_error: Type[Exception] = ClientConnectorError,
**kwargs: Any,
) -> Tuple[asyncio.Transport, ResponseHandler]:
try:
- async with ceil_timeout(timeout.sock_connect):
- return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa
+ async with ceil_timeout(
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+ ):
+ return await self._loop.create_connection(*args, **kwargs)
except cert_errors as exc:
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
except ssl_errors as exc:
raise ClientConnectorSSLError(req.connection_key, exc) from exc
except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
raise client_error(req.connection_key, exc) from exc
def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
"""Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
- One case is that :py:meth:`asyncio.loop.start_tls` is not yet
- implemented under Python 3.6. It is necessary for TLS-in-TLS so
- that it is possible to send HTTPS queries through HTTPS proxies.
+ It is necessary for TLS-in-TLS so that it is possible to
+ send HTTPS queries through HTTPS proxies.
This doesn't affect regular HTTP requests, though.
"""
@@ -1018,7 +1058,7 @@ class TCPConnector(BaseConnector):
"An HTTPS request is being sent through an HTTPS proxy. "
"This needs support for TLS in TLS but it is not implemented "
"in your runtime for the stdlib asyncio.\n\n"
- "Please upgrade to Python 3.7 or higher. For more details, "
+ "Please upgrade to Python 3.11 or higher. For more details, "
"please see:\n"
"* https://bugs.python.org/issue37179\n"
"* https://github.com/python/cpython/pull/28073\n"
@@ -1038,7 +1078,7 @@ class TCPConnector(BaseConnector):
def _warn_about_tls_in_tls(
self,
underlying_transport: asyncio.Transport,
- req: "ClientRequest",
+ req: ClientRequest,
) -> None:
"""Issue a warning if the requested URL has HTTPS scheme."""
if req.request_info.url.scheme != "https":
@@ -1056,10 +1096,10 @@ class TCPConnector(BaseConnector):
warnings.warn(
"An HTTPS request is being sent through an HTTPS proxy. "
"This support for TLS in TLS is known to be disabled "
- "in the stdlib asyncio. This is why you'll probably see "
+ "in the stdlib asyncio (Python <3.11). This is why you'll probably see "
"an error in the log below.\n\n"
- "It is possible to enable it via monkeypatching under "
- "Python 3.7 or higher. For more details, see:\n"
+ "It is possible to enable it via monkeypatching. "
+ "For more details, see:\n"
"* https://bugs.python.org/issue37179\n"
"* https://github.com/python/cpython/pull/28073\n\n"
"You can temporarily patch this as follows:\n"
@@ -1075,7 +1115,7 @@ class TCPConnector(BaseConnector):
async def _start_tls_connection(
self,
underlying_transport: asyncio.Transport,
- req: "ClientRequest",
+ req: ClientRequest,
timeout: "ClientTimeout",
client_error: Type[Exception] = ClientConnectorError,
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
@@ -1090,13 +1130,15 @@ class TCPConnector(BaseConnector):
sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req))
try:
- async with ceil_timeout(timeout.sock_connect):
+ async with ceil_timeout(
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+ ):
try:
tls_transport = await self._loop.start_tls(
underlying_transport,
tls_proto,
sslcontext,
- server_hostname=req.host,
+ server_hostname=req.server_hostname or req.host,
ssl_handshake_timeout=timeout.total,
)
except BaseException:
@@ -1110,6 +1152,8 @@ class TCPConnector(BaseConnector):
except ssl_errors as exc:
raise ClientConnectorSSLError(req.connection_key, exc) from exc
except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
raise client_error(req.connection_key, exc) from exc
except TypeError as type_err:
# Example cause looks like this:
@@ -1123,6 +1167,9 @@ class TCPConnector(BaseConnector):
f"[{type_err!s}]"
) from type_err
else:
+ if tls_transport is None:
+ msg = "Failed to start TLS (possibly caused by closing transport)"
+ raise client_error(req.connection_key, OSError(msg))
tls_proto.connection_made(
tls_transport
) # Kick the state machine of the new TLS protocol
@@ -1131,7 +1178,7 @@ class TCPConnector(BaseConnector):
async def _create_direct_connection(
self,
- req: "ClientRequest",
+ req: ClientRequest,
traces: List["Trace"],
timeout: "ClientTimeout",
*,
@@ -1142,35 +1189,39 @@ class TCPConnector(BaseConnector):
host = req.url.raw_host
assert host is not None
+ # Replace multiple trailing dots with a single one.
+ # A trailing dot is only present for fully-qualified domain names.
+ # See https://github.com/aio-libs/aiohttp/pull/7364.
+ if host.endswith(".."):
+ host = host.rstrip(".") + "."
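A quick check of that normalization:

assert "example.com..".rstrip(".") + "." == "example.com."  # FQDN keeps one trailing dot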
port = req.port
assert port is not None
- host_resolved = asyncio.ensure_future(
- self._resolve_host(host, port, traces=traces), loop=self._loop
- )
try:
# Cancelling this lookup should not cancel the underlying lookup
# or else the cancel event will get broadcast to all the waiters
# across all connections.
- hosts = await asyncio.shield(host_resolved)
- except asyncio.CancelledError:
-
- def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
- with suppress(Exception, asyncio.CancelledError):
- fut.result()
-
- host_resolved.add_done_callback(drop_exception)
- raise
+ hosts = await self._resolve_host(host, port, traces=traces)
except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
# in case of proxy it is not ClientProxyConnectionError
# it is problem of resolving proxy ip itself
raise ClientConnectorError(req.connection_key, exc) from exc
- last_exc = None # type: Optional[Exception]
+ last_exc: Optional[Exception] = None
for hinfo in hosts:
host = hinfo["host"]
port = hinfo["port"]
+ # Strip trailing dots, certificates contain FQDN without dots.
+ # See https://github.com/aio-libs/aiohttp/issues/3636
+ server_hostname = (
+ (req.server_hostname or hinfo["hostname"]).rstrip(".")
+ if sslcontext
+ else None
+ )
+
try:
transp, proto = await self._wrap_create_connection(
self._factory,
@@ -1181,7 +1232,7 @@ class TCPConnector(BaseConnector):
family=hinfo["family"],
proto=hinfo["proto"],
flags=hinfo["flags"],
- server_hostname=hinfo["hostname"] if sslcontext else None,
+ server_hostname=server_hostname,
local_addr=self._local_addr,
req=req,
client_error=client_error,
@@ -1206,14 +1257,12 @@ class TCPConnector(BaseConnector):
raise last_exc
async def _create_proxy_connection(
- self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
self._fail_on_no_start_tls(req)
runtime_has_start_tls = self._loop_supports_start_tls()
- if req.proxy.scheme != "https":
- runtime_has_start_tls = False
- headers = {} # type: Dict[str, str]
+ headers: Dict[str, str] = {}
if req.proxy_headers is not None:
headers = req.proxy_headers # type: ignore[assignment]
headers[hdrs.HOST] = req.headers[hdrs.HOST]
@@ -1273,7 +1322,10 @@ class TCPConnector(BaseConnector):
# read_until_eof=True will ensure the connection isn't closed
# once the response is received and processed allowing
# START_TLS to work on the connection below.
- protocol.set_response_params(read_until_eof=runtime_has_start_tls)
+ protocol.set_response_params(
+ read_until_eof=runtime_has_start_tls,
+ timeout_ceil_threshold=self._timeout_ceil_threshold,
+ )
resp = await proxy_resp.start(conn)
except BaseException:
proxy_resp.close()
@@ -1286,7 +1338,7 @@ class TCPConnector(BaseConnector):
if resp.status != 200:
message = resp.reason
if message is None:
- message = RESPONSES[resp.status][0]
+ message = HTTPStatus(resp.status).phrase
raise ClientHttpProxyError(
proxy_resp.request_info,
resp.history,
@@ -1373,24 +1425,28 @@ class UnixConnector(BaseConnector):
return self._path
async def _create_connection(
- self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
try:
- async with ceil_timeout(timeout.sock_connect):
+ async with ceil_timeout(
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+ ):
_, proto = await self._loop.create_unix_connection(
self._factory, self._path
)
except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc
- return cast(ResponseHandler, proto)
+ return proto
class NamedPipeConnector(BaseConnector):
"""Named pipe connector.
Only supported by the proactor event loop.
- See also: https://docs.python.org/3.7/library/asyncio-eventloop.html
+ See also: https://docs.python.org/3/library/asyncio-eventloop.html
path - Windows named pipe path.
keepalive_timeout - (optional) Keep-alive timeout.
@@ -1431,11 +1487,13 @@ class NamedPipeConnector(BaseConnector):
return self._path
async def _create_connection(
- self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
try:
- async with ceil_timeout(timeout.sock_connect):
- _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] # noqa: E501
+ async with ceil_timeout(
+ timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+ ):
+ _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined]
self._factory, self._path
)
# the drain is required so that the connection_made is called
@@ -1446,6 +1504,8 @@ class NamedPipeConnector(BaseConnector):
# other option is to manually set transport like
# `proto.transport = trans`
except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
raise ClientConnectorError(req.connection_key, exc) from exc
return cast(ResponseHandler, proto)
diff --git a/contrib/python/aiohttp/aiohttp/cookiejar.py b/contrib/python/aiohttp/aiohttp/cookiejar.py
index fe0ef586a5..a348f112cb 100644
--- a/contrib/python/aiohttp/aiohttp/cookiejar.py
+++ b/contrib/python/aiohttp/aiohttp/cookiejar.py
@@ -1,12 +1,15 @@
import asyncio
+import calendar
import contextlib
import datetime
import os # noqa
import pathlib
import pickle
import re
+import time
from collections import defaultdict
from http.cookies import BaseCookie, Morsel, SimpleCookie
+from math import ceil
from typing import ( # noqa
DefaultDict,
Dict,
@@ -24,7 +27,7 @@ from typing import ( # noqa
from yarl import URL
from .abc import AbstractCookieJar, ClearCookiePredicate
-from .helpers import is_ip_address, next_whole_second
+from .helpers import is_ip_address
from .typedefs import LooseCookies, PathLike, StrOrURL
__all__ = ("CookieJar", "DummyCookieJar")
@@ -52,9 +55,23 @@ class CookieJar(AbstractCookieJar):
DATE_YEAR_RE = re.compile(r"(\d{2,4})")
- MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)
-
- MAX_32BIT_TIME = datetime.datetime(2038, 1, 19, 3, 14, 7)
+ # calendar.timegm() fails for timestamps after datetime.datetime.max
+ # Minus one as a loss of precision occurs when timestamp() is called.
+ MAX_TIME = (
+ int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
+ )
+ try:
+ calendar.timegm(time.gmtime(MAX_TIME))
+ except (OSError, ValueError):
+ # Hit the maximum representable time on Windows
+ # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
+ # Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere
+ MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
+ except OverflowError:
+ # #4515: datetime.max may not be representable on 32-bit platforms
+ MAX_TIME = 2**31 - 1
+ # Pre-subtract one now so later comparisons skip the subtraction (~3x faster)
+ SUB_MAX_TIME = MAX_TIME - 1
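How the clamp is used further down in update_cookies(): an oversized Max-Age saturates at MAX_TIME instead of overflowing on 32-bit platforms (the sketch uses the 32-bit fallback value):

import time

MAX_TIME = 2**31 - 1         # worst-case 32-bit fallback
delta_seconds = 10**12       # absurdly large Max-Age
expiration = min(time.time() + delta_seconds, MAX_TIME)
assert expiration == MAX_TIME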
def __init__(
self,
@@ -65,10 +82,10 @@ class CookieJar(AbstractCookieJar):
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
super().__init__(loop=loop)
- self._cookies = defaultdict(
+ self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
SimpleCookie
- ) # type: DefaultDict[str, SimpleCookie[str]]
- self._host_only_cookies = set() # type: Set[Tuple[str, str]]
+ )
+ self._host_only_cookies: Set[Tuple[str, str]] = set()
self._unsafe = unsafe
self._quote_cookie = quote_cookie
if treat_as_secure_origin is None:
@@ -83,14 +100,8 @@ class CookieJar(AbstractCookieJar):
for url in treat_as_secure_origin
]
self._treat_as_secure_origin = treat_as_secure_origin
- self._next_expiration = next_whole_second()
- self._expirations = {} # type: Dict[Tuple[str, str], datetime.datetime]
- # #4515: datetime.max may not be representable on 32-bit platforms
- self._max_time = self.MAX_TIME
- try:
- self._max_time.timestamp()
- except OverflowError:
- self._max_time = self.MAX_32BIT_TIME
+ self._next_expiration: float = ceil(time.time())
+ self._expirations: Dict[Tuple[str, str, str], float] = {}
def save(self, file_path: PathLike) -> None:
file_path = pathlib.Path(file_path)
@@ -104,36 +115,34 @@ class CookieJar(AbstractCookieJar):
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
if predicate is None:
- self._next_expiration = next_whole_second()
+ self._next_expiration = ceil(time.time())
self._cookies.clear()
self._host_only_cookies.clear()
self._expirations.clear()
return
to_del = []
- now = datetime.datetime.now(datetime.timezone.utc)
- for domain, cookie in self._cookies.items():
+ now = time.time()
+ for (domain, path), cookie in self._cookies.items():
for name, morsel in cookie.items():
- key = (domain, name)
+ key = (domain, path, name)
if (
key in self._expirations and self._expirations[key] <= now
) or predicate(morsel):
to_del.append(key)
- for domain, name in to_del:
- key = (domain, name)
- self._host_only_cookies.discard(key)
+ for domain, path, name in to_del:
+ self._host_only_cookies.discard((domain, name))
+ key = (domain, path, name)
if key in self._expirations:
- del self._expirations[(domain, name)]
- self._cookies[domain].pop(name, None)
+ del self._expirations[(domain, path, name)]
+ self._cookies[(domain, path)].pop(name, None)
- next_expiration = min(self._expirations.values(), default=self._max_time)
- try:
- self._next_expiration = next_expiration.replace(
- microsecond=0
- ) + datetime.timedelta(seconds=1)
- except OverflowError:
- self._next_expiration = self._max_time
+ self._next_expiration = (
+ min(*self._expirations.values(), self.SUB_MAX_TIME) + 1
+ if self._expirations
+ else self.MAX_TIME
+ )
def clear_domain(self, domain: str) -> None:
self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
@@ -149,9 +158,9 @@ class CookieJar(AbstractCookieJar):
def _do_expiration(self) -> None:
self.clear(lambda x: False)
- def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
+ def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None:
self._next_expiration = min(self._next_expiration, when)
- self._expirations[(domain, name)] = when
+ self._expirations[(domain, path, name)] = when
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
"""Update cookies."""
@@ -166,7 +175,7 @@ class CookieJar(AbstractCookieJar):
for name, cookie in cookies:
if not isinstance(cookie, Morsel):
- tmp = SimpleCookie() # type: SimpleCookie[str]
+ tmp = SimpleCookie()
tmp[name] = cookie # type: ignore[assignment]
cookie = tmp[name]
@@ -207,13 +216,8 @@ class CookieJar(AbstractCookieJar):
if max_age:
try:
delta_seconds = int(max_age)
- try:
- max_age_expiration = datetime.datetime.now(
- datetime.timezone.utc
- ) + datetime.timedelta(seconds=delta_seconds)
- except OverflowError:
- max_age_expiration = self._max_time
- self._expire_cookie(max_age_expiration, domain, name)
+ max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME)
+ self._expire_cookie(max_age_expiration, domain, path, name)
except ValueError:
cookie["max-age"] = ""
@@ -222,34 +226,38 @@ class CookieJar(AbstractCookieJar):
if expires:
expire_time = self._parse_date(expires)
if expire_time:
- self._expire_cookie(expire_time, domain, name)
+ self._expire_cookie(expire_time, domain, path, name)
else:
cookie["expires"] = ""
- self._cookies[domain][name] = cookie
+ self._cookies[(domain, path)][name] = cookie
self._do_expiration()
- def filter_cookies(
- self, request_url: URL = URL()
- ) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
+ def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]":
"""Returns this jar's cookies filtered by their attributes."""
- self._do_expiration()
- request_url = URL(request_url)
- filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
+ filtered: Union[SimpleCookie, "BaseCookie[str]"] = (
SimpleCookie() if self._quote_cookie else BaseCookie()
)
+ if not self._cookies:
+ # Skip do_expiration() if there are no cookies.
+ return filtered
+ self._do_expiration()
+ if not self._cookies:
+ # Skip rest of function if no non-expired cookies.
+ return filtered
+ request_url = URL(request_url)
hostname = request_url.raw_host or ""
- request_origin = URL()
- with contextlib.suppress(ValueError):
- request_origin = request_url.origin()
- is_not_secure = (
- request_url.scheme not in ("https", "wss")
- and request_origin not in self._treat_as_secure_origin
- )
+ is_not_secure = request_url.scheme not in ("https", "wss")
+ if is_not_secure and self._treat_as_secure_origin:
+ request_origin = URL()
+ with contextlib.suppress(ValueError):
+ request_origin = request_url.origin()
+ is_not_secure = request_origin not in self._treat_as_secure_origin
- for cookie in self:
+ # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
+ for cookie in sorted(self, key=lambda c: len(c["path"])):
name = cookie.key
domain = cookie["domain"]
@@ -317,7 +325,7 @@ class CookieJar(AbstractCookieJar):
return non_matching.startswith("/")
@classmethod
- def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
+ def _parse_date(cls, date_str: str) -> Optional[int]:
"""Implements date string parsing adhering to RFC 6265."""
if not date_str:
return None
@@ -378,9 +386,7 @@ class CookieJar(AbstractCookieJar):
if year < 1601 or hour > 23 or minute > 59 or second > 59:
return None
- return datetime.datetime(
- year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
- )
+ return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1))
class DummyCookieJar(AbstractCookieJar):
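
The cookiejar hunks above replace datetime-based bookkeeping with plain epoch floats keyed by (domain, path, name). A minimal, self-contained sketch of that scheme (names mirror the diff; this is not aiohttp's public API):

    import time
    from math import ceil
    from typing import Dict, Tuple

    MAX_TIME = 2**31 - 1         # 32-bit fallback, as in the hunk above
    SUB_MAX_TIME = MAX_TIME - 1  # precomputed so the "+ 1" below cannot exceed MAX_TIME

    class ExpirationIndex:
        """Tracks per-cookie deadlines as epoch seconds."""

        def __init__(self) -> None:
            self._next_expiration: float = ceil(time.time())
            self._expirations: Dict[Tuple[str, str, str], float] = {}

        def expire(self, when: float, domain: str, path: str, name: str) -> None:
            # Remember the earliest deadline so a purge can short-circuit
            # when nothing can have expired yet.
            self._next_expiration = min(self._next_expiration, when)
            self._expirations[(domain, path, name)] = when

        def purge(self) -> None:
            now = time.time()
            if self._next_expiration > now:
                return  # nothing can have expired yet
            for key, when in list(self._expirations.items()):
                if when <= now:
                    del self._expirations[key]
            self._next_expiration = (
                min(*self._expirations.values(), SUB_MAX_TIME) + 1
                if self._expirations
                else MAX_TIME
            )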
diff --git a/contrib/python/aiohttp/aiohttp/formdata.py b/contrib/python/aiohttp/aiohttp/formdata.py
index 4857c89856..2b75b3de72 100644
--- a/contrib/python/aiohttp/aiohttp/formdata.py
+++ b/contrib/python/aiohttp/aiohttp/formdata.py
@@ -1,4 +1,5 @@
import io
+import warnings
from typing import Any, Iterable, List, Optional
from urllib.parse import urlencode
@@ -24,7 +25,7 @@ class FormData:
charset: Optional[str] = None,
) -> None:
self._writer = multipart.MultipartWriter("form-data")
- self._fields = [] # type: List[Any]
+ self._fields: List[Any] = []
self._is_multipart = False
self._is_processed = False
self._quote_fields = quote_fields
@@ -53,10 +54,15 @@ class FormData:
if isinstance(value, io.IOBase):
self._is_multipart = True
elif isinstance(value, (bytes, bytearray, memoryview)):
+ msg = (
+ "In v4, passing bytes will no longer create a file field. "
+ "Please explicitly use the filename parameter or pass a BytesIO object."
+ )
if filename is None and content_transfer_encoding is None:
+ warnings.warn(msg, DeprecationWarning)
filename = name
- type_options = MultiDict({"name": name}) # type: MultiDict[str]
+ type_options: MultiDict[str] = MultiDict({"name": name})
if filename is not None and not isinstance(filename, str):
raise TypeError(
"filename must be an instance of str. " "Got: %s" % filename
@@ -81,7 +87,11 @@ class FormData:
"content_transfer_encoding must be an instance"
" of str. Got: %s" % content_transfer_encoding
)
- headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
+ msg = (
+ "content_transfer_encoding is deprecated. "
+ "To maintain compatibility with v4 please pass a BytesPayload."
+ )
+ warnings.warn(msg, DeprecationWarning)
self._is_multipart = True
self._fields.append((type_options, headers, value))
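
For callers, the new FormData warnings surface like this (a usage sketch assuming the vendored aiohttp above; FormData.add_field is the public API):

    import io
    import warnings
    from aiohttp import FormData

    form = FormData()
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # Raw bytes still become a file field here, but now emit a DeprecationWarning.
        form.add_field("upload", b"raw bytes")
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)

    # Forward-compatible spelling: be explicit about the file-ness.
    form.add_field("upload", io.BytesIO(b"raw bytes"), filename="upload.bin")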
diff --git a/contrib/python/aiohttp/aiohttp/hdrs.py b/contrib/python/aiohttp/aiohttp/hdrs.py
index a619f2543e..2f1f5e02b7 100644
--- a/contrib/python/aiohttp/aiohttp/hdrs.py
+++ b/contrib/python/aiohttp/aiohttp/hdrs.py
@@ -2,16 +2,10 @@
# After changing the file content call ./tools/gen.py
# to regenerate the headers parser
-import sys
-from typing import Set
+from typing import Final, Set
from multidict import istr
-if sys.version_info >= (3, 8):
- from typing import Final
-else:
- from typing_extensions import Final
-
METH_ANY: Final[str] = "*"
METH_CONNECT: Final[str] = "CONNECT"
METH_HEAD: Final[str] = "HEAD"
diff --git a/contrib/python/aiohttp/aiohttp/helpers.py b/contrib/python/aiohttp/aiohttp/helpers.py
index f30f76ba41..284033b7a0 100644
--- a/contrib/python/aiohttp/aiohttp/helpers.py
+++ b/contrib/python/aiohttp/aiohttp/helpers.py
@@ -3,7 +3,9 @@
import asyncio
import base64
import binascii
+import contextlib
import datetime
+import enum
import functools
import inspect
import netrc
@@ -34,63 +36,49 @@ from typing import (
Mapping,
Optional,
Pattern,
- Set,
+ Protocol,
Tuple,
Type,
TypeVar,
Union,
- cast,
+ get_args,
+ overload,
)
from urllib.parse import quote
from urllib.request import getproxies, proxy_bypass
-import async_timeout
import attr
-from multidict import MultiDict, MultiDictProxy
+from multidict import MultiDict, MultiDictProxy, MultiMapping
from yarl import URL
from . import hdrs
from .log import client_logger, internal_logger
-from .typedefs import PathLike, Protocol # noqa
+
+if sys.version_info >= (3, 11):
+ import asyncio as async_timeout
+else:
+ import async_timeout
__all__ = ("BasicAuth", "ChainMapProxy", "ETag")
IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"
-PY_36 = sys.version_info >= (3, 6)
-PY_37 = sys.version_info >= (3, 7)
-PY_38 = sys.version_info >= (3, 8)
PY_310 = sys.version_info >= (3, 10)
-
-if sys.version_info < (3, 7):
- import idna_ssl
-
- idna_ssl.patch_match_hostname()
-
- def all_tasks(
- loop: Optional[asyncio.AbstractEventLoop] = None,
- ) -> Set["asyncio.Task[Any]"]:
- tasks = list(asyncio.Task.all_tasks(loop))
- return {t for t in tasks if not t.done()}
-
-
-else:
- all_tasks = asyncio.all_tasks
+PY_311 = sys.version_info >= (3, 11)
_T = TypeVar("_T")
_S = TypeVar("_S")
+_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
+sentinel = _SENTINEL.sentinel
-sentinel = object() # type: Any
-NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) # type: bool
+NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))
-# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
-# for compatibility with older versions
-DEBUG = getattr(sys.flags, "dev_mode", False) or (
+DEBUG = sys.flags.dev_mode or (
not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
-) # type: bool
+)
CHAR = {chr(i) for i in range(0, 128)}
@@ -226,8 +214,11 @@ def netrc_from_env() -> Optional[netrc.netrc]:
except netrc.NetrcParseError as e:
client_logger.warning("Could not parse .netrc file: %s", e)
except OSError as e:
+ netrc_exists = False
+ with contextlib.suppress(OSError):
+ netrc_exists = netrc_path.is_file()
# we couldn't read the file (doesn't exist, permissions, etc.)
- if netrc_env or netrc_path.is_file():
+ if netrc_env or netrc_exists:
# only warn if the environment wanted us to load it,
# or it appears like the default file does actually exist
client_logger.warning("Could not read .netrc file: %s", e)
@@ -241,6 +232,35 @@ class ProxyInfo:
proxy_auth: Optional[BasicAuth]
+def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
+ """
+ Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.
+
+ :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
+ entry is found for the ``host``.
+ """
+ if netrc_obj is None:
+ raise LookupError("No .netrc file found")
+ auth_from_netrc = netrc_obj.authenticators(host)
+
+ if auth_from_netrc is None:
+ raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")
+ login, account, password = auth_from_netrc
+
+ # TODO(PY311): username = login or account
+ # Up to python 3.10, account could be None if not specified,
+ # and login will be empty string if not specified. From 3.11,
+ # login and account will be empty string if not specified.
+ username = login if (login or account is None) else account
+
+ # TODO(PY311): Remove this, as password will be empty string
+ # if not specified
+ if password is None:
+ password = ""
+
+ return BasicAuth(username, password)
+
+
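
A hypothetical call site for the new helper (aiohttp.helpers is an internal module, so treat the import path as an assumption):

    from aiohttp.helpers import basicauth_from_netrc, netrc_from_env

    netrc_obj = netrc_from_env()  # may be None if no readable ~/.netrc
    try:
        auth = basicauth_from_netrc(netrc_obj, "proxy.example.com")
    except LookupError:
        auth = None  # no file or no matching machine entry; proceed unauthenticated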
def proxies_from_env() -> Dict[str, ProxyInfo]:
proxy_urls = {
k: URL(v)
@@ -258,16 +278,11 @@ def proxies_from_env() -> Dict[str, ProxyInfo]:
)
continue
if netrc_obj and auth is None:
- auth_from_netrc = None
if proxy.host is not None:
- auth_from_netrc = netrc_obj.authenticators(proxy.host)
- if auth_from_netrc is not None:
- # auth_from_netrc is a (`user`, `account`, `password`) tuple,
- # `user` and `account` both can be username,
- # if `user` is None, use `account`
- *logins, password = auth_from_netrc
- login = logins[0] if logins[0] else logins[-1]
- auth = BasicAuth(cast(str, login), cast(str, password))
+ try:
+ auth = basicauth_from_netrc(netrc_obj, proxy.host)
+ except LookupError:
+ auth = None
ret[proto] = ProxyInfo(proxy, auth)
return ret
@@ -275,10 +290,7 @@ def proxies_from_env() -> Dict[str, ProxyInfo]:
def current_task(
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
- if sys.version_info >= (3, 7):
- return asyncio.current_task(loop=loop)
- else:
- return asyncio.Task.current_task(loop=loop)
+ return asyncio.current_task(loop=loop)
def get_running_loop(
@@ -350,27 +362,19 @@ def parse_mimetype(mimetype: str) -> MimeType:
)
parts = mimetype.split(";")
- params = MultiDict() # type: MultiDict[str]
+ params: MultiDict[str] = MultiDict()
for item in parts[1:]:
if not item:
continue
- key, value = cast(
- Tuple[str, str], item.split("=", 1) if "=" in item else (item, "")
- )
+ key, _, value = item.partition("=")
params.add(key.lower().strip(), value.strip(' "'))
fulltype = parts[0].strip().lower()
if fulltype == "*":
fulltype = "*/*"
- mtype, stype = (
- cast(Tuple[str, str], fulltype.split("/", 1))
- if "/" in fulltype
- else (fulltype, "")
- )
- stype, suffix = (
- cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "")
- )
+ mtype, _, stype = fulltype.partition("/")
+ stype, _, suffix = stype.partition("+")
return MimeType(
type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
@@ -541,14 +545,7 @@ def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> b
return is_ipv4_address(host) or is_ipv6_address(host)
-def next_whole_second() -> datetime.datetime:
- """Return current time rounded up to the next whole second."""
- return datetime.datetime.now(datetime.timezone.utc).replace(
- microsecond=0
- ) + datetime.timedelta(seconds=0)
-
-
-_cached_current_datetime = None # type: Optional[int]
+_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""
@@ -601,11 +598,15 @@ def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
def weakref_handle(
- ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop
+ ob: object,
+ name: str,
+ timeout: float,
+ loop: asyncio.AbstractEventLoop,
+ timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
if timeout is not None and timeout > 0:
when = loop.time() + timeout
- if timeout >= 5:
+ if timeout >= timeout_ceil_threshold:
when = ceil(when)
return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
@@ -613,11 +614,14 @@ def weakref_handle(
def call_later(
- cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop
+ cb: Callable[[], Any],
+ timeout: float,
+ loop: asyncio.AbstractEventLoop,
+ timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
if timeout is not None and timeout > 0:
when = loop.time() + timeout
- if timeout > 5:
+ if timeout > timeout_ceil_threshold:
when = ceil(when)
return loop.call_at(when, cb)
return None
@@ -627,13 +631,17 @@ class TimeoutHandle:
"""Timeout handle"""
def __init__(
- self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
+ self,
+ loop: asyncio.AbstractEventLoop,
+ timeout: Optional[float],
+ ceil_threshold: float = 5,
) -> None:
self._timeout = timeout
self._loop = loop
- self._callbacks = (
- []
- ) # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]]
+ self._ceil_threshold = ceil_threshold
+ self._callbacks: List[
+ Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
+ ] = []
def register(
self, callback: Callable[..., None], *args: Any, **kwargs: Any
@@ -647,7 +655,7 @@ class TimeoutHandle:
timeout = self._timeout
if timeout is not None and timeout > 0:
when = self._loop.time() + timeout
- if timeout >= 5:
+ if timeout >= self._ceil_threshold:
when = ceil(when)
return self._loop.call_at(when, self.__call__)
else:
@@ -670,7 +678,8 @@ class TimeoutHandle:
class BaseTimerContext(ContextManager["BaseTimerContext"]):
- pass
+ def assert_timeout(self) -> None:
+ """Raise TimeoutError if timeout has been exceeded."""
class TimerNoop(BaseTimerContext):
@@ -691,9 +700,14 @@ class TimerContext(BaseTimerContext):
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
- self._tasks = [] # type: List[asyncio.Task[Any]]
+ self._tasks: List[asyncio.Task[Any]] = []
self._cancelled = False
+ def assert_timeout(self) -> None:
+ """Raise TimeoutError if timer has already been cancelled."""
+ if self._cancelled:
+ raise asyncio.TimeoutError from None
+
def __enter__(self) -> BaseTimerContext:
task = current_task(loop=self._loop)
@@ -729,27 +743,30 @@ class TimerContext(BaseTimerContext):
self._cancelled = True
-def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
+def ceil_timeout(
+ delay: Optional[float], ceil_threshold: float = 5
+) -> async_timeout.Timeout:
if delay is None or delay <= 0:
return async_timeout.timeout(None)
loop = get_running_loop()
now = loop.time()
when = now + delay
- if delay > 5:
+ if delay > ceil_threshold:
when = ceil(when)
return async_timeout.timeout_at(when)
class HeadersMixin:
-
ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])
- _content_type = None # type: Optional[str]
- _content_dict = None # type: Optional[Dict[str, str]]
- _stored_content_type = sentinel
+ _headers: MultiMapping[str]
+
+ _content_type: Optional[str] = None
+ _content_dict: Optional[Dict[str, str]] = None
+ _stored_content_type: Union[str, None, _SENTINEL] = sentinel
- def _parse_content_type(self, raw: str) -> None:
+ def _parse_content_type(self, raw: Optional[str]) -> None:
self._stored_content_type = raw
if raw is None:
# default value according to RFC 2616
@@ -758,13 +775,13 @@ class HeadersMixin:
else:
msg = HeaderParser().parsestr("Content-Type: " + raw)
self._content_type = msg.get_content_type()
- params = msg.get_params()
+ params = msg.get_params(())
self._content_dict = dict(params[1:]) # First element is content type again
@property
def content_type(self) -> str:
"""The value of content part for Content-Type HTTP header."""
- raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
+ raw = self._headers.get(hdrs.CONTENT_TYPE)
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_type # type: ignore[return-value]
@@ -772,7 +789,7 @@ class HeadersMixin:
@property
def charset(self) -> Optional[str]:
"""The value of charset part for Content-Type HTTP header."""
- raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
+ raw = self._headers.get(hdrs.CONTENT_TYPE)
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_dict.get("charset") # type: ignore[union-attr]
@@ -780,9 +797,7 @@ class HeadersMixin:
@property
def content_length(self) -> Optional[int]:
"""The value of Content-Length HTTP header."""
- content_length = self._headers.get( # type: ignore[attr-defined]
- hdrs.CONTENT_LENGTH
- )
+ content_length = self._headers.get(hdrs.CONTENT_LENGTH)
if content_length is not None:
return int(content_length)
@@ -795,15 +810,92 @@ def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
fut.set_result(result)
-def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
- if not fut.done():
- fut.set_exception(exc)
+_EXC_SENTINEL = BaseException()
+
+
+class ErrorableProtocol(Protocol):
+ def set_exception(
+ self,
+ exc: BaseException,
+ exc_cause: BaseException = ...,
+ ) -> None:
+ ... # pragma: no cover
-class ChainMapProxy(Mapping[str, Any]):
+def set_exception(
+ fut: "asyncio.Future[_T] | ErrorableProtocol",
+ exc: BaseException,
+ exc_cause: BaseException = _EXC_SENTINEL,
+) -> None:
+ """Set future exception.
+
+ If the future is marked as complete, this function is a no-op.
+
+ :param exc_cause: An exception that is a direct cause of ``exc``.
+ Only set if provided.
+ """
+ if asyncio.isfuture(fut) and fut.done():
+ return
+
+ exc_is_sentinel = exc_cause is _EXC_SENTINEL
+ exc_causes_itself = exc is exc_cause
+ if not exc_is_sentinel and not exc_causes_itself:
+ exc.__cause__ = exc_cause
+
+ fut.set_exception(exc)
+
+
+@functools.total_ordering
+class AppKey(Generic[_T]):
+ """Keys for static typing support in Application."""
+
+ __slots__ = ("_name", "_t", "__orig_class__")
+
+ # This may be set by Python when instantiating with a generic type. We need to
+ # support this, in order to support types that are not concrete classes,
+ # like Iterable, which can't be passed as the second parameter to __init__.
+ __orig_class__: Type[object]
+
+ def __init__(self, name: str, t: Optional[Type[_T]] = None):
+ # Prefix with module name to help deduplicate key names.
+ frame = inspect.currentframe()
+ while frame:
+ if frame.f_code.co_name == "<module>":
+ module: str = frame.f_globals["__name__"]
+ break
+ frame = frame.f_back
+
+ self._name = module + "." + name
+ self._t = t
+
+ def __lt__(self, other: object) -> bool:
+ if isinstance(other, AppKey):
+ return self._name < other._name
+ return True # Order AppKey above other types.
+
+ def __repr__(self) -> str:
+ t = self._t
+ if t is None:
+ with suppress(AttributeError):
+ # Set to type arg.
+ t = get_args(self.__orig_class__)[0]
+
+ if t is None:
+ t_repr = "<<Unknown>>"
+ elif isinstance(t, type):
+ if t.__module__ == "builtins":
+ t_repr = t.__qualname__
+ else:
+ t_repr = f"{t.__module__}.{t.__qualname__}"
+ else:
+ t_repr = repr(t)
+ return f"<AppKey({self._name}, type={t_repr})>"
+
+
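
How these typed keys are meant to be used downstream, sketched on the assumption that the vendored version re-exports AppKey as web.AppKey, as upstream aiohttp 3.9 does:

    from aiohttp import web

    db_url_key = web.AppKey("db_url_key", str)  # declares the value type up front

    app = web.Application()
    app[db_url_key] = "postgres://localhost/example"
    db_url: str = app[db_url_key]  # type checkers infer str; no cast required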
+class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
__slots__ = ("_maps",)
- def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
+ def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
self._maps = tuple(maps)
def __init_subclass__(cls) -> None:
@@ -812,7 +904,15 @@ class ChainMapProxy(Mapping[str, Any]):
"is forbidden".format(cls.__name__)
)
+ @overload # type: ignore[override]
+ def __getitem__(self, key: AppKey[_T]) -> _T:
+ ...
+
+ @overload
def __getitem__(self, key: str) -> Any:
+ ...
+
+ def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
for mapping in self._maps:
try:
return mapping[key]
@@ -820,15 +920,30 @@ class ChainMapProxy(Mapping[str, Any]):
pass
raise KeyError(key)
- def get(self, key: str, default: Any = None) -> Any:
- return self[key] if key in self else default
+ @overload # type: ignore[override]
+ def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]:
+ ...
+
+ @overload
+ def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]:
+ ...
+
+ @overload
+ def get(self, key: str, default: Any = ...) -> Any:
+ ...
+
+ def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
+ try:
+ return self[key]
+ except KeyError:
+ return default
def __len__(self) -> int:
# reuses stored hash values if possible
- return len(set().union(*self._maps)) # type: ignore[arg-type]
+ return len(set().union(*self._maps))
- def __iter__(self) -> Iterator[str]:
- d = {} # type: Dict[str, Any]
+ def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
+ d: Dict[Union[str, AppKey[Any]], Any] = {}
for mapping in reversed(self._maps):
# reuses stored hash values if possible
d.update(mapping)
@@ -846,11 +961,11 @@ class ChainMapProxy(Mapping[str, Any]):
# https://tools.ietf.org/html/rfc7232#section-2.3
-_ETAGC = r"[!#-}\x80-\xff]+"
+_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
-_QUOTED_ETAG = fr'(W/)?"({_ETAGC})"'
+_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
-LIST_QUOTED_ETAG_RE = re.compile(fr"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
+LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
ETAG_ANY = "*"
@@ -876,3 +991,39 @@ def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
with suppress(ValueError):
return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
return None
+
+
+def must_be_empty_body(method: str, code: int) -> bool:
+ """Check if a request must return an empty body."""
+ return (
+ status_code_must_be_empty_body(code)
+ or method_must_be_empty_body(method)
+ or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
+ )
+
+
+def method_must_be_empty_body(method: str) -> bool:
+ """Check if a method must return an empty body."""
+ # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
+ # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
+ return method.upper() == hdrs.METH_HEAD
+
+
+def status_code_must_be_empty_body(code: int) -> bool:
+ """Check if a status code must return an empty body."""
+ # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
+ return code in {204, 304} or 100 <= code < 200
+
+
+def should_remove_content_length(method: str, code: int) -> bool:
+ """Check if a Content-Length header should be removed.
+
+ This should always be a subset of must_be_empty_body
+ """
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
+ return (
+ code in {204, 304}
+ or 100 <= code < 200
+ or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
+ )
diff --git a/contrib/python/aiohttp/aiohttp/http.py b/contrib/python/aiohttp/aiohttp/http.py
index 415ffbf563..a1feae2d9b 100644
--- a/contrib/python/aiohttp/aiohttp/http.py
+++ b/contrib/python/aiohttp/aiohttp/http.py
@@ -1,5 +1,5 @@
-import http.server
import sys
+from http import HTTPStatus
from typing import Mapping, Tuple
from . import __version__
@@ -63,10 +63,10 @@ __all__ = (
)
-SERVER_SOFTWARE = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
+SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
sys.version_info, __version__
-) # type: str
+)
-RESPONSES = (
- http.server.BaseHTTPRequestHandler.responses
-) # type: Mapping[int, Tuple[str, str]]
+RESPONSES: Mapping[int, Tuple[str, str]] = {
+ v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()
+}
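
Because HTTPStatus is an IntEnum, the rebuilt mapping is still keyed by plain integers:

    from http import HTTPStatus

    RESPONSES = {v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()}
    assert RESPONSES[404][0] == "Not Found"          # int lookup works: members hash as ints
    assert RESPONSES[HTTPStatus.OK] == RESPONSES[200]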
diff --git a/contrib/python/aiohttp/aiohttp/http_exceptions.py b/contrib/python/aiohttp/aiohttp/http_exceptions.py
index c885f80f32..72eac3a3ca 100644
--- a/contrib/python/aiohttp/aiohttp/http_exceptions.py
+++ b/contrib/python/aiohttp/aiohttp/http_exceptions.py
@@ -1,6 +1,7 @@
"""Low-level http related exceptions."""
+from textwrap import indent
from typing import Optional, Union
from .typedefs import _CIMultiDict
@@ -35,10 +36,11 @@ class HttpProcessingError(Exception):
self.message = message
def __str__(self) -> str:
- return f"{self.code}, message={self.message!r}"
+ msg = indent(self.message, " ")
+ return f"{self.code}, message:\n{msg}"
def __repr__(self) -> str:
- return f"<{self.__class__.__name__}: {self}>"
+ return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
class BadHttpMessage(HttpProcessingError):
@@ -85,18 +87,17 @@ class LineTooLong(BadHttpMessage):
class InvalidHeader(BadHttpMessage):
def __init__(self, hdr: Union[bytes, str]) -> None:
- if isinstance(hdr, bytes):
- hdr = hdr.decode("utf-8", "surrogateescape")
- super().__init__(f"Invalid HTTP Header: {hdr}")
- self.hdr = hdr
+ hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr
+ super().__init__(f"Invalid HTTP header: {hdr!r}")
+ self.hdr = hdr_s
self.args = (hdr,)
class BadStatusLine(BadHttpMessage):
- def __init__(self, line: str = "") -> None:
+ def __init__(self, line: str = "", error: Optional[str] = None) -> None:
if not isinstance(line, str):
line = repr(line)
- super().__init__(f"Bad status line {line!r}")
+ super().__init__(error or f"Bad status line {line!r}")
self.args = (line,)
self.line = line
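
The effect of the reworked __str__/__repr__ pair, sketched (constructor signature as in aiohttp's HttpProcessingError, which takes keyword-only code and message):

    from aiohttp.http_exceptions import HttpProcessingError

    err = HttpProcessingError(code=400, message="first line\nsecond line")
    # str() now puts the message body on indented lines of its own:
    #   400, message:
    #    first line
    #    second line
    print(str(err))
    # repr() keeps the compact single-line form:
    #   <HttpProcessingError: 400, message='first line\nsecond line'>
    print(repr(err))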
diff --git a/contrib/python/aiohttp/aiohttp/http_parser.py b/contrib/python/aiohttp/aiohttp/http_parser.py
index 2dc9482f4f..013511917e 100644
--- a/contrib/python/aiohttp/aiohttp/http_parser.py
+++ b/contrib/python/aiohttp/aiohttp/http_parser.py
@@ -1,15 +1,16 @@
import abc
import asyncio
-import collections
import re
import string
-import zlib
from contextlib import suppress
from enum import IntEnum
from typing import (
Any,
+ ClassVar,
+ Final,
Generic,
List,
+ Literal,
NamedTuple,
Optional,
Pattern,
@@ -18,7 +19,6 @@ from typing import (
Type,
TypeVar,
Union,
- cast,
)
from multidict import CIMultiDict, CIMultiDictProxy, istr
@@ -26,28 +26,30 @@ from yarl import URL
from . import hdrs
from .base_protocol import BaseProtocol
-from .helpers import NO_EXTENSIONS, BaseTimerContext
+from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
+from .helpers import (
+ _EXC_SENTINEL,
+ DEBUG,
+ NO_EXTENSIONS,
+ BaseTimerContext,
+ method_must_be_empty_body,
+ set_exception,
+ status_code_must_be_empty_body,
+)
from .http_exceptions import (
BadHttpMessage,
BadStatusLine,
ContentEncodingError,
ContentLengthError,
InvalidHeader,
+ InvalidURLError,
LineTooLong,
TransferEncodingError,
)
from .http_writer import HttpVersion, HttpVersion10
from .log import internal_logger
from .streams import EMPTY_PAYLOAD, StreamReader
-from .typedefs import Final, RawHeaders
-
-try:
- import brotli
-
- HAS_BROTLI = True
-except ImportError: # pragma: no cover
- HAS_BROTLI = False
-
+from .typedefs import RawHeaders
__all__ = (
"HeadersParser",
@@ -58,18 +60,22 @@ __all__ = (
"RawResponseMessage",
)
+_SEP = Literal[b"\r\n", b"\n"]
+
ASCIISET: Final[Set[str]] = set(string.printable)
-# See https://tools.ietf.org/html/rfc7230#section-3.1.1
-# and https://tools.ietf.org/html/rfc7230#appendix-B
+# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
+# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
-METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
-VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+).(\d+)")
-HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
+_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
+TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
+VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
+DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
+HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
class RawRequestMessage(NamedTuple):
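
What the tightened patterns accept and reject, in isolation (a standalone sketch of the same regexes):

    import re

    _TCHAR_SPECIALS = re.escape("!#$%&'*+-.^_`|~")
    TOKENRE = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
    VERSRE = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
    DIGITS = re.compile(r"\d+", re.ASCII)

    assert TOKENRE.fullmatch("GET")
    assert not TOKENRE.fullmatch("GE T")      # whitespace is not a tchar
    assert VERSRE.fullmatch("HTTP/1.1")
    assert not VERSRE.fullmatch("HTTP/1.10")  # exactly one digit per component now
    assert DIGITS.fullmatch("42")
    assert not DIGITS.fullmatch("+42")        # signs and other int() formats rejected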
@@ -85,20 +91,16 @@ class RawRequestMessage(NamedTuple):
url: URL
-RawResponseMessage = collections.namedtuple(
- "RawResponseMessage",
- [
- "version",
- "code",
- "reason",
- "headers",
- "raw_headers",
- "should_close",
- "compression",
- "upgrade",
- "chunked",
- ],
-)
+class RawResponseMessage(NamedTuple):
+ version: HttpVersion
+ code: int
+ reason: str
+ headers: CIMultiDictProxy[str]
+ raw_headers: RawHeaders
+ should_close: bool
+ compression: Optional[str]
+ upgrade: bool
+ chunked: bool
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
@@ -126,15 +128,18 @@ class HeadersParser:
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
+ lax: bool = False,
) -> None:
self.max_line_size = max_line_size
self.max_headers = max_headers
self.max_field_size = max_field_size
+ self._lax = lax
def parse_headers(
self, lines: List[bytes]
) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
- headers = CIMultiDict() # type: CIMultiDict[str]
+ headers: CIMultiDict[str] = CIMultiDict()
+ # note: "raw" does not mean inclusion of OWS before/after the field value
raw_headers = []
lines_idx = 1
@@ -148,18 +153,25 @@ class HeadersParser:
except ValueError:
raise InvalidHeader(line) from None
- bname = bname.strip(b" \t")
- bvalue = bvalue.lstrip()
- if HDRRE.search(bname):
+ if len(bname) == 0:
raise InvalidHeader(bname)
+
+ # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
+ if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"}
+ raise InvalidHeader(line)
+
+ bvalue = bvalue.lstrip(b" \t")
if len(bname) > self.max_field_size:
raise LineTooLong(
"request header name {}".format(
- bname.decode("utf8", "xmlcharrefreplace")
+ bname.decode("utf8", "backslashreplace")
),
str(self.max_field_size),
str(len(bname)),
)
+ name = bname.decode("utf-8", "surrogateescape")
+ if not TOKENRE.fullmatch(name):
+ raise InvalidHeader(bname)
header_length = len(bvalue)
@@ -168,8 +180,9 @@ class HeadersParser:
line = lines[lines_idx]
# consume continuation lines
- continuation = line and line[0] in (32, 9) # (' ', '\t')
+ continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t')
+ # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
if continuation:
bvalue_lst = [bvalue]
while continuation:
@@ -177,7 +190,7 @@ class HeadersParser:
if header_length > self.max_field_size:
raise LineTooLong(
"request header field {}".format(
- bname.decode("utf8", "xmlcharrefreplace")
+ bname.decode("utf8", "backslashreplace")
),
str(self.max_field_size),
str(header_length),
@@ -198,28 +211,38 @@ class HeadersParser:
if header_length > self.max_field_size:
raise LineTooLong(
"request header field {}".format(
- bname.decode("utf8", "xmlcharrefreplace")
+ bname.decode("utf8", "backslashreplace")
),
str(self.max_field_size),
str(header_length),
)
- bvalue = bvalue.strip()
- name = bname.decode("utf-8", "surrogateescape")
+ bvalue = bvalue.strip(b" \t")
value = bvalue.decode("utf-8", "surrogateescape")
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
+ if "\n" in value or "\r" in value or "\x00" in value:
+ raise InvalidHeader(bvalue)
+
headers.add(name, value)
raw_headers.append((bname, bvalue))
return (CIMultiDictProxy(headers), tuple(raw_headers))
+def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
+ """Check if the upgrade header is supported."""
+ return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"}
+
+
class HttpParser(abc.ABC, Generic[_MsgT]):
+ lax: ClassVar[bool] = False
+
def __init__(
self,
protocol: Optional[BaseProtocol] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
- limit: int = 2 ** 16,
+ limit: int = 2**16,
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
@@ -245,14 +268,16 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
self.response_with_body = response_with_body
self.read_until_eof = read_until_eof
- self._lines = [] # type: List[bytes]
+ self._lines: List[bytes] = []
self._tail = b""
self._upgraded = False
self._payload = None
- self._payload_parser = None # type: Optional[HttpPayloadParser]
+ self._payload_parser: Optional[HttpPayloadParser] = None
self._auto_decompress = auto_decompress
self._limit = limit
- self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)
+ self._headers_parser = HeadersParser(
+ max_line_size, max_headers, max_field_size, self.lax
+ )
@abc.abstractmethod
def parse_message(self, lines: List[bytes]) -> _MsgT:
@@ -277,7 +302,7 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
def feed_data(
self,
data: bytes,
- SEP: bytes = b"\r\n",
+ SEP: _SEP = b"\r\n",
EMPTY: bytes = b"",
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
METH_CONNECT: str = hdrs.METH_CONNECT,
@@ -301,13 +326,16 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
pos = data.find(SEP, start_pos)
# consume \r\n
if pos == start_pos and not self._lines:
- start_pos = pos + 2
+ start_pos = pos + len(SEP)
continue
if pos >= start_pos:
# line found
- self._lines.append(data[start_pos:pos])
- start_pos = pos + 2
+ line = data[start_pos:pos]
+ if SEP == b"\n": # For lax response parsing
+ line = line.rstrip(b"\r")
+ self._lines.append(line)
+ start_pos = pos + len(SEP)
# \r\n\r\n found
if self._lines[-1] == EMPTY:
@@ -322,31 +350,35 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
if length_hdr is None:
return None
- try:
- length = int(length_hdr)
- except ValueError:
- raise InvalidHeader(CONTENT_LENGTH)
-
- if length < 0:
+ # Shouldn't allow +/- or other number formats.
+ # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
+ # msg.headers is already stripped of leading/trailing wsp
+ if not DIGITS.fullmatch(length_hdr):
raise InvalidHeader(CONTENT_LENGTH)
- return length
+ return int(length_hdr)
length = get_content_length()
# do not support old websocket spec
if SEC_WEBSOCKET_KEY1 in msg.headers:
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
- self._upgraded = msg.upgrade
+ self._upgraded = msg.upgrade and _is_supported_upgrade(
+ msg.headers
+ )
method = getattr(msg, "method", self.method)
+ # code is only present on responses
+ code = getattr(msg, "code", 0)
assert self.protocol is not None
# calculate payload
- if (
- (length is not None and length > 0)
- or msg.chunked
- and not msg.upgrade
+ empty_body = status_code_must_be_empty_body(code) or bool(
+ method and method_must_be_empty_body(method)
+ )
+ if not empty_body and (
+ ((length is not None and length > 0) or msg.chunked)
+ and not self._upgraded
):
payload = StreamReader(
self.protocol,
@@ -364,6 +396,7 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
readall=self.readall,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
+ lax=self.lax,
)
if not payload_parser.done:
self._payload_parser = payload_parser
@@ -382,34 +415,31 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
compression=msg.compression,
readall=True,
auto_decompress=self._auto_decompress,
+ lax=self.lax,
+ )
+ elif not empty_body and length is None and self.read_until_eof:
+ payload = StreamReader(
+ self.protocol,
+ timer=self.timer,
+ loop=loop,
+ limit=self._limit,
+ )
+ payload_parser = HttpPayloadParser(
+ payload,
+ length=length,
+ chunked=msg.chunked,
+ method=method,
+ compression=msg.compression,
+ code=self.code,
+ readall=True,
+ response_with_body=self.response_with_body,
+ auto_decompress=self._auto_decompress,
+ lax=self.lax,
)
+ if not payload_parser.done:
+ self._payload_parser = payload_parser
else:
- if (
- getattr(msg, "code", 100) >= 199
- and length is None
- and self.read_until_eof
- ):
- payload = StreamReader(
- self.protocol,
- timer=self.timer,
- loop=loop,
- limit=self._limit,
- )
- payload_parser = HttpPayloadParser(
- payload,
- length=length,
- chunked=msg.chunked,
- method=method,
- compression=msg.compression,
- code=self.code,
- readall=True,
- response_with_body=self.response_with_body,
- auto_decompress=self._auto_decompress,
- )
- if not payload_parser.done:
- self._payload_parser = payload_parser
- else:
- payload = EMPTY_PAYLOAD
+ payload = EMPTY_PAYLOAD
messages.append((msg, payload))
else:
@@ -427,14 +457,17 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
assert not self._lines
assert self._payload_parser is not None
try:
- eof, data = self._payload_parser.feed_data(data[start_pos:])
- except BaseException as exc:
+ eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
+ except BaseException as underlying_exc:
+ reraised_exc = underlying_exc
if self.payload_exception is not None:
- self._payload_parser.payload.set_exception(
- self.payload_exception(str(exc))
- )
- else:
- self._payload_parser.payload.set_exception(exc)
+ reraised_exc = self.payload_exception(str(underlying_exc))
+
+ set_exception(
+ self._payload_parser.payload,
+ reraised_exc,
+ underlying_exc,
+ )
eof = True
data = b""
@@ -470,6 +503,24 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
upgrade = False
chunked = False
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
+ # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
+ singletons = (
+ hdrs.CONTENT_LENGTH,
+ hdrs.CONTENT_LOCATION,
+ hdrs.CONTENT_RANGE,
+ hdrs.CONTENT_TYPE,
+ hdrs.ETAG,
+ hdrs.HOST,
+ hdrs.MAX_FORWARDS,
+ hdrs.SERVER,
+ hdrs.TRANSFER_ENCODING,
+ hdrs.USER_AGENT,
+ )
+ bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
+ if bad_hdr is not None:
+ raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")
+
# keep-alive
conn = headers.get(hdrs.CONNECTION)
if conn:
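
The new singleton check closes a request-smuggling vector: duplicated framing headers make message boundaries ambiguous. A minimal illustration of the condition it tests:

    from multidict import CIMultiDict, CIMultiDictProxy

    headers = CIMultiDictProxy(
        CIMultiDict([("Content-Length", "3"), ("Content-Length", "5")])
    )
    # The parser now raises BadHttpMessage when a singleton header is non-singular:
    assert len(headers.getall("Content-Length", ())) > 1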
@@ -478,7 +529,8 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
close_conn = True
elif v == "keep-alive":
close_conn = False
- elif v == "upgrade":
+ # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
+ elif v == "upgrade" and headers.get(hdrs.UPGRADE):
upgrade = True
# encoding
@@ -498,7 +550,7 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
if hdrs.CONTENT_LENGTH in headers:
raise BadHttpMessage(
- "Content-Length can't be present with Transfer-Encoding",
+ "Transfer-Encoding can't be present with Content-Length",
)
return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
@@ -523,7 +575,7 @@ class HttpRequestParser(HttpParser[RawRequestMessage]):
# request line
line = lines[0].decode("utf-8", "surrogateescape")
try:
- method, path, version = line.split(None, 2)
+ method, path, version = line.split(" ", maxsplit=2)
except ValueError:
raise BadStatusLine(line) from None
@@ -532,22 +584,48 @@ class HttpRequestParser(HttpParser[RawRequestMessage]):
"Status line is too long", str(self.max_line_size), str(len(path))
)
- path_part, _hash_separator, url_fragment = path.partition("#")
- path_part, _question_mark_separator, qs_part = path_part.partition("?")
-
# method
- if not METHRE.match(method):
+ if not TOKENRE.fullmatch(method):
raise BadStatusLine(method)
# version
- try:
- if version.startswith("HTTP/"):
- n1, n2 = version[5:].split(".", 1)
- version_o = HttpVersion(int(n1), int(n2))
- else:
- raise BadStatusLine(version)
- except Exception:
- raise BadStatusLine(version)
+ match = VERSRE.fullmatch(version)
+ if match is None:
+ raise BadStatusLine(line)
+ version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
+
+ if method == "CONNECT":
+ # authority-form,
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
+ url = URL.build(authority=path, encoded=True)
+ elif path.startswith("/"):
+ # origin-form,
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
+ path_part, _hash_separator, url_fragment = path.partition("#")
+ path_part, _question_mark_separator, qs_part = path_part.partition("?")
+
+ # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
+ # NOTE: parser does; otherwise the same HTTP Request-Line
+ # NOTE: input would produce different `yarl.URL()` objects
+ url = URL.build(
+ path=path_part,
+ query_string=qs_part,
+ fragment=url_fragment,
+ encoded=True,
+ )
+ elif path == "*" and method == "OPTIONS":
+ # asterisk-form,
+ url = URL(path, encoded=True)
+ else:
+ # absolute-form for proxy maybe,
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
+ url = URL(path, encoded=True)
+ if url.scheme == "":
+ # not absolute-form
+ raise InvalidURLError(
+ path.encode(errors="surrogateescape").decode("latin1")
+ )
# read headers
(
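
The four request-target forms of RFC 7230 section 5.3 are now dispatched explicitly; roughly, the URLs built for each case look like this (a sketch using yarl directly):

    from yarl import URL

    connect_target = URL.build(authority="example.com:443", encoded=True)   # authority-form
    origin_target = URL.build(path="/index.html", query_string="a=1", encoded=True)  # origin-form
    asterisk_target = URL("*", encoded=True)                 # asterisk-form, OPTIONS only
    absolute_target = URL("http://proxy.example/x", encoded=True)           # absolute-form

    # Scheme-less targets that are not origin/asterisk/authority-form now
    # raise InvalidURLError in the parser.
    assert absolute_target.scheme == "http"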
@@ -575,16 +653,7 @@ class HttpRequestParser(HttpParser[RawRequestMessage]):
compression,
upgrade,
chunked,
- # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
- # NOTE: parser does, otherwise it results into the same
- # NOTE: HTTP Request-Line input producing different
- # NOTE: `yarl.URL()` objects
- URL.build(
- path=path_part,
- query_string=qs_part,
- fragment=url_fragment,
- encoded=True,
- ),
+ url,
)
@@ -595,16 +664,31 @@ class HttpResponseParser(HttpParser[RawResponseMessage]):
Returns RawResponseMessage.
"""
+ # Lax mode should only be enabled on response parser.
+ lax = not DEBUG
+
+ def feed_data(
+ self,
+ data: bytes,
+ SEP: Optional[_SEP] = None,
+ *args: Any,
+ **kwargs: Any,
+ ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
+ if SEP is None:
+ SEP = b"\r\n" if DEBUG else b"\n"
+ return super().feed_data(data, SEP, *args, **kwargs)
+
def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
line = lines[0].decode("utf-8", "surrogateescape")
try:
- version, status = line.split(None, 1)
+ version, status = line.split(maxsplit=1)
except ValueError:
raise BadStatusLine(line) from None
try:
- status, reason = status.split(None, 1)
+ status, reason = status.split(maxsplit=1)
except ValueError:
+ status = status.strip()
reason = ""
if len(reason) > self.max_line_size:
@@ -613,19 +697,15 @@ class HttpResponseParser(HttpParser[RawResponseMessage]):
)
# version
- match = VERSRE.match(version)
+ match = VERSRE.fullmatch(version)
if match is None:
raise BadStatusLine(line)
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
- # The status code is a three-digit number
- try:
- status_i = int(status)
- except ValueError:
- raise BadStatusLine(line) from None
-
- if status_i > 999:
+ # The status code is a three-digit ASCII number, no padding
+ if len(status) != 3 or not DIGITS.fullmatch(status):
raise BadStatusLine(line)
+ status_i = int(status)
# read headers
(
@@ -638,7 +718,16 @@ class HttpResponseParser(HttpParser[RawResponseMessage]):
) = self.parse_headers(lines)
if close is None:
- close = version_o <= HttpVersion10
+ if version_o <= HttpVersion10:
+ close = True
+ # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
+ elif 100 <= status_i < 200 or status_i in {204, 304}:
+ close = False
+ elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
+ close = False
+ else:
+ # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
+ close = True
return RawResponseMessage(
version_o,
@@ -665,6 +754,7 @@ class HttpPayloadParser:
readall: bool = False,
response_with_body: bool = True,
auto_decompress: bool = True,
+ lax: bool = False,
) -> None:
self._length = 0
self._type = ParseState.PARSE_NONE
@@ -672,13 +762,14 @@ class HttpPayloadParser:
self._chunk_size = 0
self._chunk_tail = b""
self._auto_decompress = auto_decompress
+ self._lax = lax
self.done = False
# payload decompression wrapper
if response_with_body and compression and self._auto_decompress:
- real_payload = DeflateBuffer(
+ real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
payload, compression
- ) # type: Union[StreamReader, DeflateBuffer]
+ )
else:
real_payload = payload
@@ -723,7 +814,7 @@ class HttpPayloadParser:
)
def feed_data(
- self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
+ self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
) -> Tuple[bool, bytes]:
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
@@ -760,18 +851,22 @@ class HttpPayloadParser:
else:
size_b = chunk[:pos]
- try:
- size = int(bytes(size_b), 16)
- except ValueError:
+ if self._lax: # Allow whitespace in lax mode.
+ size_b = size_b.strip()
+
+ if not re.fullmatch(HEXDIGITS, size_b):
exc = TransferEncodingError(
chunk[:pos].decode("ascii", "surrogateescape")
)
- self.payload.set_exception(exc)
- raise exc from None
+ set_exception(self.payload, exc)
+ raise exc
+ size = int(bytes(size_b), 16)
- chunk = chunk[pos + 2 :]
+ chunk = chunk[pos + len(SEP) :]
if size == 0: # eof marker
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
+ if self._lax and chunk.startswith(b"\r"):
+ chunk = chunk[1:]
else:
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
self._chunk_size = size
@@ -793,13 +888,15 @@ class HttpPayloadParser:
self._chunk_size = 0
self.payload.feed_data(chunk[:required], required)
chunk = chunk[required:]
+ if self._lax and chunk.startswith(b"\r"):
+ chunk = chunk[1:]
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
self.payload.end_http_chunk_receiving()
# toss the CRLF at the end of the chunk
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
- if chunk[:2] == SEP:
- chunk = chunk[2:]
+ if chunk[: len(SEP)] == SEP:
+ chunk = chunk[len(SEP) :]
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
else:
self._chunk_tail = chunk
@@ -807,13 +904,13 @@ class HttpPayloadParser:
# if stream does not contain trailer, after 0\r\n
# we should get another \r\n otherwise
- # trailers needs to be skiped until \r\n\r\n
+ # trailers need to be skipped until \r\n\r\n
if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
- head = chunk[:2]
+ head = chunk[: len(SEP)]
if head == SEP:
# end of stream
self.payload.feed_eof()
- return True, chunk[2:]
+ return True, chunk[len(SEP) :]
# Both CR and LF, or only LF may not be received yet. It is
# expected that CRLF or LF will be shown at the very first
# byte next time, otherwise trailers should come. The last
@@ -831,7 +928,7 @@ class HttpPayloadParser:
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
if pos >= 0:
- chunk = chunk[pos + 2 :]
+ chunk = chunk[pos + len(SEP) :]
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk_tail = chunk
@@ -855,37 +952,23 @@ class DeflateBuffer:
self.encoding = encoding
self._started_decoding = False
+ self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
if encoding == "br":
if not HAS_BROTLI: # pragma: no cover
raise ContentEncodingError(
"Can not decode content-encoding: brotli (br). "
"Please install `Brotli`"
)
-
- class BrotliDecoder:
- # Supports both 'brotlipy' and 'Brotli' packages
- # since they share an import name. The top branches
- # are for 'brotlipy' and bottom branches for 'Brotli'
- def __init__(self) -> None:
- self._obj = brotli.Decompressor()
-
- def decompress(self, data: bytes) -> bytes:
- if hasattr(self._obj, "decompress"):
- return cast(bytes, self._obj.decompress(data))
- return cast(bytes, self._obj.process(data))
-
- def flush(self) -> bytes:
- if hasattr(self._obj, "flush"):
- return cast(bytes, self._obj.flush())
- return b""
-
- self.decompressor = BrotliDecoder()
+ self.decompressor = BrotliDecompressor()
else:
- zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
- self.decompressor = zlib.decompressobj(wbits=zlib_mode)
+ self.decompressor = ZLibDecompressor(encoding=encoding)
- def set_exception(self, exc: BaseException) -> None:
- self.out.set_exception(exc)
+ def set_exception(
+ self,
+ exc: BaseException,
+ exc_cause: BaseException = _EXC_SENTINEL,
+ ) -> None:
+ set_exception(self.out, exc, exc_cause)
def feed_data(self, chunk: bytes, size: int) -> None:
if not size:
@@ -903,10 +986,12 @@ class DeflateBuffer:
):
# Change the decoder to decompress incorrectly compressed data
# Actually we should issue a warning about non-RFC-compliant data.
- self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
+ self.decompressor = ZLibDecompressor(
+ encoding=self.encoding, suppress_deflate_header=True
+ )
try:
- chunk = self.decompressor.decompress(chunk)
+ chunk = self.decompressor.decompress_sync(chunk)
except Exception:
raise ContentEncodingError(
"Can not decode content-encoding: %s" % self.encoding
@@ -941,7 +1026,7 @@ RawResponseMessagePy = RawResponseMessage
try:
if not NO_EXTENSIONS:
- from ._http_parser import ( # type: ignore[import,no-redef]
+ from ._http_parser import ( # type: ignore[import-not-found,no-redef]
HttpRequestParser,
HttpResponseParser,
RawRequestMessage,
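
One more behavioral tightening in this file: chunk-size lines in chunked bodies must now be pure hex digits. A standalone check of the same pattern:

    import re

    HEXDIGITS = re.compile(rb"[0-9a-fA-F]+")
    assert HEXDIGITS.fullmatch(b"1a2f")
    assert not HEXDIGITS.fullmatch(b"0x1A")  # "0x" prefixes no longer parse
    assert not HEXDIGITS.fullmatch(b"-5")    # int(b, 16) would have accepted this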
diff --git a/contrib/python/aiohttp/aiohttp/http_websocket.py b/contrib/python/aiohttp/aiohttp/http_websocket.py
index 991a149d09..39f2e4a5c1 100644
--- a/contrib/python/aiohttp/aiohttp/http_websocket.py
+++ b/contrib/python/aiohttp/aiohttp/http_websocket.py
@@ -1,7 +1,7 @@
"""WebSocket protocol versions 13 and 8."""
import asyncio
-import collections
+import functools
import json
import random
import re
@@ -9,12 +9,24 @@ import sys
import zlib
from enum import IntEnum
from struct import Struct
-from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast
+from typing import (
+ Any,
+ Callable,
+ Final,
+ List,
+ NamedTuple,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ Union,
+ cast,
+)
from .base_protocol import BaseProtocol
-from .helpers import NO_EXTENSIONS
+from .compression_utils import ZLibCompressor, ZLibDecompressor
+from .helpers import NO_EXTENSIONS, set_exception
from .streams import DataQueue
-from .typedefs import Final
__all__ = (
"WS_CLOSED_MESSAGE",
@@ -47,6 +59,15 @@ class WSCloseCode(IntEnum):
ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
+# For websockets, keeping latency low is extremely important as implementations
+# generally expect to be able to send and receive messages quickly. We use a
+# larger chunk size than the default to reduce the number of executor calls
+# since the executor is a significant source of latency and overhead when
+# the chunks are small. A size of 5KiB was chosen because it is also the
+# same value python-zlib-ng chose to use as the threshold to release the GIL.
+
+WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024
+
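
A sketch of how that constant feeds the compressor; this mirrors WebSocketWriter._make_compress_obj added later in this diff, with ZLibCompressor coming from the new compression_utils module:

    import zlib
    from aiohttp.compression_utils import ZLibCompressor

    WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024  # the 5 KiB threshold discussed above

    def make_compress_obj(compress: int) -> ZLibCompressor:
        # Mirrors WebSocketWriter._make_compress_obj from this diff.
        return ZLibCompressor(
            level=zlib.Z_BEST_SPEED,  # favor latency over compression ratio
            wbits=-compress,          # negative wbits: raw deflate frames
            max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
        )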
class WSMsgType(IntEnum):
# websocket spec types
@@ -82,14 +103,16 @@ PACK_LEN1 = Struct("!BB").pack
PACK_LEN2 = Struct("!BBH").pack
PACK_LEN3 = Struct("!BBQ").pack
PACK_CLOSE_CODE = Struct("!H").pack
-MSG_SIZE: Final[int] = 2 ** 14
-DEFAULT_LIMIT: Final[int] = 2 ** 16
-
+MSG_SIZE: Final[int] = 2**14
+DEFAULT_LIMIT: Final[int] = 2**16
-_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
+class WSMessage(NamedTuple):
+ type: WSMsgType
+ # To type correctly, this would need some kind of tagged union for each type.
+ data: Any
+ extra: Optional[str]
-class WSMessage(_WSMessageBase):
def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
"""Return parsed JSON data.
@@ -121,7 +144,9 @@ native_byteorder: Final[str] = sys.byteorder
# Used by _websocket_mask_python
-_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)]
+@functools.lru_cache
+def _xor_table() -> List[bytes]:
+ return [bytes(a ^ b for a in range(256)) for b in range(256)]
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
@@ -141,6 +166,7 @@ def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
assert len(mask) == 4, mask
if data:
+ _XOR_TABLE = _xor_table()
a, b, c, d = (_XOR_TABLE[n] for n in mask)
data[::4] = data[::4].translate(a)
data[1::4] = data[1::4].translate(b)
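
The lru_cache change defers building the XOR tables to first use. The table-based masking is easiest to see in isolation: each of the four mask bytes gets a 256-entry translation table, and bytes.translate applies the XOR in C. A self-contained round-trip:

    from functools import lru_cache
    from typing import List

    @lru_cache
    def xor_table() -> List[bytes]:
        return [bytes(a ^ b for a in range(256)) for b in range(256)]

    def mask_inplace(mask: bytes, data: bytearray) -> None:
        table = xor_table()
        a, b, c, d = (table[n] for n in mask)
        data[::4] = data[::4].translate(a)
        data[1::4] = data[1::4].translate(b)
        data[2::4] = data[2::4].translate(c)
        data[3::4] = data[3::4].translate(d)

    payload = bytearray(b"hello websocket")
    mask_inplace(b"\x01\x02\x03\x04", payload)
    mask_inplace(b"\x01\x02\x03\x04", payload)  # XOR is an involution
    assert payload == bytearray(b"hello websocket")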
@@ -152,7 +178,7 @@ if NO_EXTENSIONS: # pragma: no cover
_websocket_mask = _websocket_mask_python
else:
try:
- from ._websocket import _websocket_mask_cython # type: ignore[import]
+ from ._websocket import _websocket_mask_cython # type: ignore[import-not-found]
_websocket_mask = _websocket_mask_cython
except ImportError: # pragma: no cover
@@ -259,22 +285,22 @@ class WebSocketReader:
self.queue = queue
self._max_msg_size = max_msg_size
- self._exc = None # type: Optional[BaseException]
+ self._exc: Optional[BaseException] = None
self._partial = bytearray()
self._state = WSParserState.READ_HEADER
- self._opcode = None # type: Optional[int]
+ self._opcode: Optional[int] = None
self._frame_fin = False
- self._frame_opcode = None # type: Optional[int]
+ self._frame_opcode: Optional[int] = None
self._frame_payload = bytearray()
self._tail = b""
self._has_mask = False
- self._frame_mask = None # type: Optional[bytes]
+ self._frame_mask: Optional[bytes] = None
self._payload_length = 0
self._payload_length_flag = 0
- self._compressed = None # type: Optional[bool]
- self._decompressobj = None # type: Any # zlib.decompressobj actually
+ self._compressed: Optional[bool] = None
+ self._decompressobj: Optional[ZLibDecompressor] = None
self._compress = compress
def feed_eof(self) -> None:
@@ -288,13 +314,13 @@ class WebSocketReader:
return self._feed_data(data)
except Exception as exc:
self._exc = exc
- self.queue.set_exception(exc)
+ set_exception(self.queue, exc)
return True, b""
def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
for fin, opcode, payload, compressed in self.parse_frame(data):
if compressed and not self._decompressobj:
- self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
+ self._decompressobj = ZLibDecompressor(suppress_deflate_header=True)
if opcode == WSMsgType.CLOSE:
if len(payload) >= 2:
close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
@@ -379,8 +405,9 @@ class WebSocketReader:
# Decompression must be done after all packets
# received.
if compressed:
+ assert self._decompressobj is not None
self._partial.extend(_WS_DEFLATE_TRAILING)
- payload_merged = self._decompressobj.decompress(
+ payload_merged = self._decompressobj.decompress_sync(
self._partial, self._max_msg_size
)
if self._decompressobj.unconsumed_tail:
@@ -578,7 +605,7 @@ class WebSocketWriter:
*,
use_mask: bool = False,
limit: int = DEFAULT_LIMIT,
- random: Any = random.Random(),
+ random: random.Random = random.Random(),
compress: int = 0,
notakeover: bool = False,
) -> None:
@@ -591,7 +618,7 @@ class WebSocketWriter:
self._closing = False
self._limit = limit
self._output_size = 0
- self._compressobj = None # type: Any # actually compressobj
+ self._compressobj: Any = None # actually compressobj
async def _send_frame(
self, message: bytes, opcode: int, compress: Optional[int] = None
@@ -608,16 +635,18 @@ class WebSocketWriter:
if (compress or self.compress) and opcode < 8:
if compress:
# Do not set self._compress if compressing is for this frame
- compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
+ compressobj = self._make_compress_obj(compress)
else: # self.compress
if not self._compressobj:
- self._compressobj = zlib.compressobj(
- level=zlib.Z_BEST_SPEED, wbits=-self.compress
- )
+ self._compressobj = self._make_compress_obj(self.compress)
compressobj = self._compressobj
- message = compressobj.compress(message)
- message = message + compressobj.flush(
+ message = await compressobj.compress(message)
+ # It's critical that we do not return control to the event
+ # loop until we have finished sending all the compressed
+ # data. Otherwise we could end up mixing compressed frames
+ # if there are multiple coroutines compressing data.
+ message += compressobj.flush(
zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
)
if message.endswith(_WS_DEFLATE_TRAILING):
@@ -639,37 +668,47 @@ class WebSocketWriter:
else:
header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
if use_mask:
- mask = self.randrange(0, 0xFFFFFFFF)
- mask = mask.to_bytes(4, "big")
+ mask_int = self.randrange(0, 0xFFFFFFFF)
+ mask = mask_int.to_bytes(4, "big")
message = bytearray(message)
_websocket_mask(mask, message)
self._write(header + mask + message)
- self._output_size += len(header) + len(mask) + len(message)
+ self._output_size += len(header) + len(mask) + msg_length
else:
- if len(message) > MSG_SIZE:
+ if msg_length > MSG_SIZE:
self._write(header)
self._write(message)
else:
self._write(header + message)
- self._output_size += len(header) + len(message)
+ self._output_size += len(header) + msg_length
+
+ # It is safe to return control to the event loop when using compression
+ # after this point as we have already sent or buffered all the data.
if self._output_size > self._limit:
self._output_size = 0
await self.protocol._drain_helper()
+ def _make_compress_obj(self, compress: int) -> ZLibCompressor:
+ return ZLibCompressor(
+ level=zlib.Z_BEST_SPEED,
+ wbits=-compress,
+ max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
+ )
+
def _write(self, data: bytes) -> None:
if self.transport is None or self.transport.is_closing():
raise ConnectionResetError("Cannot write to closing transport")
self.transport.write(data)
- async def pong(self, message: bytes = b"") -> None:
+ async def pong(self, message: Union[bytes, str] = b"") -> None:
"""Send pong message."""
if isinstance(message, str):
message = message.encode("utf-8")
await self._send_frame(message, WSMsgType.PONG)
- async def ping(self, message: bytes = b"") -> None:
+ async def ping(self, message: Union[bytes, str] = b"") -> None:
"""Send ping message."""
if isinstance(message, str):
message = message.encode("utf-8")
@@ -689,7 +728,7 @@ class WebSocketWriter:
else:
await self._send_frame(message, WSMsgType.TEXT, compress)
- async def close(self, code: int = 1000, message: bytes = b"") -> None:
+ async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None:
"""Close the websocket, sending the specified code and message."""
if isinstance(message, str):
message = message.encode("utf-8")
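
The writer now obtains its compressor from `_make_compress_obj`, which wraps the same zlib parameters the old inline `zlib.compressobj` call used. As a minimal sketch of the permessage-deflate framing this implements (stdlib `zlib` only; `deflate_frame_payload` is an illustrative name, not aiohttp API), the following shows why the 0x00 0x00 0xFF 0xFF trailer is stripped on send and re-appended on receive:

```python
import zlib

_WS_DEFLATE_TRAILING = bytes([0x00, 0x00, 0xFF, 0xFF])

def deflate_frame_payload(message: bytes) -> bytes:
    """Compress one WebSocket message per permessage-deflate (RFC 7692)."""
    compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-zlib.MAX_WBITS)
    data = compressobj.compress(message)
    # Z_SYNC_FLUSH terminates the block with an empty stored block, i.e. the
    # 0x00 0x00 0xFF 0xFF trailer, which RFC 7692 says to drop from the frame.
    data += compressobj.flush(zlib.Z_SYNC_FLUSH)
    if data.endswith(_WS_DEFLATE_TRAILING):
        data = data[:-4]
    return data

payload = deflate_frame_payload(b"hello world")
# The receiver re-appends the trailer before inflating, mirroring what
# WebSocketReader does when it extends the buffer with _WS_DEFLATE_TRAILING.
inflater = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
assert inflater.decompress(payload + _WS_DEFLATE_TRAILING) == b"hello world"
```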
diff --git a/contrib/python/aiohttp/aiohttp/http_writer.py b/contrib/python/aiohttp/aiohttp/http_writer.py
index e09144736c..d6b02e6f56 100644
--- a/contrib/python/aiohttp/aiohttp/http_writer.py
+++ b/contrib/python/aiohttp/aiohttp/http_writer.py
@@ -8,6 +8,7 @@ from multidict import CIMultiDict
from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
+from .compression_utils import ZLibCompressor
from .helpers import NO_EXTENSIONS
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
@@ -35,7 +36,6 @@ class StreamWriter(AbstractStreamWriter):
on_headers_sent: _T_OnHeadersSent = None,
) -> None:
self._protocol = protocol
- self._transport = protocol.transport
self.loop = loop
self.length = None
@@ -44,15 +44,15 @@ class StreamWriter(AbstractStreamWriter):
self.output_size = 0
self._eof = False
- self._compress = None # type: Any
+ self._compress: Optional[ZLibCompressor] = None
self._drain_waiter = None
- self._on_chunk_sent = on_chunk_sent # type: _T_OnChunkSent
- self._on_headers_sent = on_headers_sent # type: _T_OnHeadersSent
+ self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
+ self._on_headers_sent: _T_OnHeadersSent = on_headers_sent
@property
def transport(self) -> Optional[asyncio.Transport]:
- return self._transport
+ return self._protocol.transport
@property
def protocol(self) -> BaseProtocol:
@@ -64,17 +64,16 @@ class StreamWriter(AbstractStreamWriter):
def enable_compression(
self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
) -> None:
- zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
- self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)
+ self._compress = ZLibCompressor(encoding=encoding, strategy=strategy)
def _write(self, chunk: bytes) -> None:
size = len(chunk)
self.buffer_size += size
self.output_size += size
-
- if self._transport is None or self._transport.is_closing():
+ transport = self.transport
+ if not self._protocol.connected or transport is None or transport.is_closing():
raise ConnectionResetError("Cannot write to closing transport")
- self._transport.write(chunk)
+ transport.write(chunk)
async def write(
self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
@@ -94,7 +93,7 @@ class StreamWriter(AbstractStreamWriter):
chunk = chunk.cast("c")
if self._compress is not None:
- chunk = self._compress.compress(chunk)
+ chunk = await self._compress.compress(chunk)
if not chunk:
return
@@ -139,9 +138,9 @@ class StreamWriter(AbstractStreamWriter):
if self._compress:
if chunk:
- chunk = self._compress.compress(chunk)
+ chunk = await self._compress.compress(chunk)
- chunk = chunk + self._compress.flush()
+ chunk += self._compress.flush()
if chunk and self.chunked:
chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
@@ -159,7 +158,6 @@ class StreamWriter(AbstractStreamWriter):
await self.drain()
self._eof = True
- self._transport = None
async def drain(self) -> None:
"""Flush the write buffer.
@@ -191,7 +189,7 @@ def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> byte
_serialize_headers = _py_serialize_headers
try:
- import aiohttp._http_writer as _http_writer # type: ignore[import]
+ import aiohttp._http_writer as _http_writer # type: ignore[import-not-found]
_c_serialize_headers = _http_writer._serialize_headers
if not NO_EXTENSIONS:
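
`enable_compression` now delegates the gzip/deflate `wbits` selection to `ZLibCompressor`. A hedged sketch of what that choice amounts to, based only on the zlib parameters visible in the removed lines (the helper name is illustrative, not `ZLibCompressor`'s actual internals):

```python
import zlib

def make_compressor(encoding: str = "deflate",
                    strategy: int = zlib.Z_DEFAULT_STRATEGY):
    # 16 + MAX_WBITS selects a gzip container; plain MAX_WBITS keeps the
    # zlib-wrapped deflate stream the old StreamWriter produced.
    wbits = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
    return zlib.compressobj(wbits=wbits, strategy=strategy)

c = make_compressor("gzip")
body = c.compress(b"chunk of response body") + c.flush()
assert body[:2] == b"\x1f\x8b"  # gzip magic bytes
```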
diff --git a/contrib/python/aiohttp/aiohttp/locks.py b/contrib/python/aiohttp/aiohttp/locks.py
index df65e3e47d..de2dc83d09 100644
--- a/contrib/python/aiohttp/aiohttp/locks.py
+++ b/contrib/python/aiohttp/aiohttp/locks.py
@@ -14,9 +14,9 @@ class EventResultOrError:
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
- self._exc = None # type: Optional[BaseException]
+ self._exc: Optional[BaseException] = None
self._event = asyncio.Event()
- self._waiters = collections.deque() # type: Deque[asyncio.Future[Any]]
+ self._waiters: Deque[asyncio.Future[Any]] = collections.deque()
def set(self, exc: Optional[BaseException] = None) -> None:
self._exc = exc
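
`EventResultOrError` is a small synchronization primitive: waiters block until `set()` is called, and if an exception was supplied they re-raise it instead of resuming normally. A stripped-down sketch of that pattern (illustrative only; it omits the class's per-waiter bookkeeping):

```python
import asyncio
from typing import Optional

class EventOrError:
    """Minimal sketch: wake all waiters, optionally with an error to re-raise."""

    def __init__(self) -> None:
        self._exc: Optional[BaseException] = None
        self._event = asyncio.Event()

    def set(self, exc: Optional[BaseException] = None) -> None:
        self._exc = exc
        self._event.set()

    async def wait(self) -> None:
        await self._event.wait()
        if self._exc is not None:
            raise self._exc

async def main() -> None:
    ev = EventOrError()
    asyncio.get_running_loop().call_later(0.01, ev.set, RuntimeError("boom"))
    try:
        await ev.wait()
    except RuntimeError as exc:
        print("waiter saw:", exc)

asyncio.run(main())
```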
diff --git a/contrib/python/aiohttp/aiohttp/multipart.py b/contrib/python/aiohttp/aiohttp/multipart.py
index c84e20044f..71fc2654a1 100644
--- a/contrib/python/aiohttp/aiohttp/multipart.py
+++ b/contrib/python/aiohttp/aiohttp/multipart.py
@@ -25,8 +25,9 @@ from typing import (
)
from urllib.parse import parse_qsl, unquote, urlencode
-from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping
+from multidict import CIMultiDict, CIMultiDictProxy
+from .compression_utils import ZLibCompressor, ZLibDecompressor
from .hdrs import (
CONTENT_DISPOSITION,
CONTENT_ENCODING,
@@ -58,7 +59,7 @@ __all__ = (
)
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .client_reqrep import ClientResponse
@@ -103,7 +104,7 @@ def parse_content_disposition(
warnings.warn(BadContentDispositionHeader(header))
return None, {}
- params = {} # type: Dict[str, str]
+ params: Dict[str, str] = {}
while parts:
item = parts.pop(0)
@@ -255,20 +256,28 @@ class BodyPartReader:
chunk_size = 8192
def __init__(
- self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
+ self,
+ boundary: bytes,
+ headers: "CIMultiDictProxy[str]",
+ content: StreamReader,
+ *,
+ subtype: str = "mixed",
+ default_charset: Optional[str] = None,
) -> None:
self.headers = headers
self._boundary = boundary
self._content = content
+ self._default_charset = default_charset
self._at_eof = False
- length = self.headers.get(CONTENT_LENGTH, None)
+ self._is_form_data = subtype == "form-data"
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
+ length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None)
self._length = int(length) if length is not None else None
self._read_bytes = 0
- # TODO: typeing.Deque is not supported by Python 3.5
self._unread: Deque[bytes] = deque()
- self._prev_chunk = None # type: Optional[bytes]
+ self._prev_chunk: Optional[bytes] = None
self._content_eof = 0
- self._cache = {} # type: Dict[str, Any]
+ self._cache: Dict[str, Any] = {}
def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
return self # type: ignore[return-value]
@@ -329,6 +338,8 @@ class BodyPartReader:
assert self._length is not None, "Content-Length required for chunked read"
chunk_size = min(size, self._length - self._read_bytes)
chunk = await self._content.read(chunk_size)
+ if self._content.at_eof():
+ self._at_eof = True
return chunk
async def _read_chunk_from_stream(self, size: int) -> bytes:
@@ -404,8 +415,8 @@ class BodyPartReader:
async def text(self, *, encoding: Optional[str] = None) -> str:
"""Like read(), but assumes that body part contains text data."""
data = await self.read(decode=True)
- # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
- # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
+ # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm
+ # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send
encoding = encoding or self.get_charset(default="utf-8")
return data.decode(encoding)
@@ -426,8 +437,13 @@ class BodyPartReader:
real_encoding = encoding
else:
real_encoding = self.get_charset(default="utf-8")
+ try:
+ decoded_data = data.rstrip().decode(real_encoding)
+ except UnicodeDecodeError:
+ raise ValueError("data cannot be decoded with %s encoding" % real_encoding)
+
return parse_qsl(
- data.rstrip().decode(real_encoding),
+ decoded_data,
keep_blank_values=True,
encoding=real_encoding,
)
@@ -444,21 +460,22 @@ class BodyPartReader:
"""
if CONTENT_TRANSFER_ENCODING in self.headers:
data = self._decode_content_transfer(data)
- if CONTENT_ENCODING in self.headers:
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
+ if not self._is_form_data and CONTENT_ENCODING in self.headers:
return self._decode_content(data)
return data
def _decode_content(self, data: bytes) -> bytes:
encoding = self.headers.get(CONTENT_ENCODING, "").lower()
-
- if encoding == "deflate":
- return zlib.decompress(data, -zlib.MAX_WBITS)
- elif encoding == "gzip":
- return zlib.decompress(data, 16 + zlib.MAX_WBITS)
- elif encoding == "identity":
+ if encoding == "identity":
return data
- else:
- raise RuntimeError(f"unknown content encoding: {encoding}")
+ if encoding in {"deflate", "gzip"}:
+ return ZLibDecompressor(
+ encoding=encoding,
+ suppress_deflate_header=True,
+ ).decompress_sync(data)
+
+ raise RuntimeError(f"unknown content encoding: {encoding}")
def _decode_content_transfer(self, data: bytes) -> bytes:
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
@@ -478,7 +495,7 @@ class BodyPartReader:
"""Returns charset parameter from Content-Type header or default."""
ctype = self.headers.get(CONTENT_TYPE, "")
mimetype = parse_mimetype(ctype)
- return mimetype.parameters.get("charset", default)
+ return mimetype.parameters.get("charset", self._default_charset or default)
@reify
def name(self) -> Optional[str]:
@@ -504,7 +521,7 @@ class BodyPartReaderPayload(Payload):
def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
super().__init__(value, *args, **kwargs)
- params = {} # type: Dict[str, str]
+ params: Dict[str, str] = {}
if value.name is not None:
params["name"] = value.name
if value.filename is not None:
@@ -515,10 +532,10 @@ class BodyPartReaderPayload(Payload):
async def write(self, writer: Any) -> None:
field = self._value
- chunk = await field.read_chunk(size=2 ** 16)
+ chunk = await field.read_chunk(size=2**16)
while chunk:
await writer.write(field.decode(chunk))
- chunk = await field.read_chunk(size=2 ** 16)
+ chunk = await field.read_chunk(size=2**16)
class MultipartReader:
@@ -533,15 +550,21 @@ class MultipartReader:
part_reader_cls = BodyPartReader
def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
+ self._mimetype = parse_mimetype(headers[CONTENT_TYPE])
+ assert self._mimetype.type == "multipart", "multipart/* content type expected"
+ if "boundary" not in self._mimetype.parameters:
+ raise ValueError(
+ "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE]
+ )
+
self.headers = headers
self._boundary = ("--" + self._get_boundary()).encode()
self._content = content
- self._last_part = (
- None
- ) # type: Optional[Union['MultipartReader', BodyPartReader]]
+ self._default_charset: Optional[str] = None
+ self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
self._at_eof = False
self._at_bof = True
- self._unread = [] # type: List[bytes]
+ self._unread: List[bytes] = []
def __aiter__(
self,
@@ -589,7 +612,24 @@ class MultipartReader:
await self._read_boundary()
if self._at_eof: # we just read the last boundary, nothing to do there
return None
- self._last_part = await self.fetch_next_part()
+
+ part = await self.fetch_next_part()
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6
+ if (
+ self._last_part is None
+ and self._mimetype.subtype == "form-data"
+ and isinstance(part, BodyPartReader)
+ ):
+ _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION))
+ if params.get("name") == "_charset_":
+ # Longest encoding in https://encoding.spec.whatwg.org/encodings.json
+ # is 19 characters, so 32 should be more than enough for any valid encoding.
+ charset = await part.read_chunk(32)
+ if len(charset) > 31:
+ raise RuntimeError("Invalid default charset")
+ self._default_charset = charset.strip().decode()
+ part = await self.fetch_next_part()
+ self._last_part = part
return self._last_part
async def release(self) -> None:
@@ -625,19 +665,16 @@ class MultipartReader:
return type(self)(headers, self._content)
return self.multipart_reader_cls(headers, self._content)
else:
- return self.part_reader_cls(self._boundary, headers, self._content)
-
- def _get_boundary(self) -> str:
- mimetype = parse_mimetype(self.headers[CONTENT_TYPE])
-
- assert mimetype.type == "multipart", "multipart/* content type expected"
-
- if "boundary" not in mimetype.parameters:
- raise ValueError(
- "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE]
+ return self.part_reader_cls(
+ self._boundary,
+ headers,
+ self._content,
+ subtype=self._mimetype.subtype,
+ default_charset=self._default_charset,
)
- boundary = mimetype.parameters["boundary"]
+ def _get_boundary(self) -> str:
+ boundary = self._mimetype.parameters["boundary"]
if len(boundary) > 70:
raise ValueError("boundary %r is too long (70 chars max)" % boundary)
@@ -727,7 +764,8 @@ class MultipartWriter(Payload):
super().__init__(None, content_type=ctype)
- self._parts = [] # type: List[_Part]
+ self._parts: List[_Part] = []
+ self._is_form_data = subtype == "form-data"
def __enter__(self) -> "MultipartWriter":
return self
@@ -749,14 +787,14 @@ class MultipartWriter(Payload):
def __bool__(self) -> bool:
return True
- _valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z")
- _invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]")
+ _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
+ _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")
@property
def _boundary_value(self) -> str:
"""Wrap boundary parameter value in quotes, if necessary.
- Reads self.boundary and returns a unicode sting.
+ Reads self.boundary and returns a unicode string.
"""
# Refer to RFCs 7231, 7230, 5234.
#
@@ -788,7 +826,7 @@ class MultipartWriter(Payload):
def boundary(self) -> str:
return self._boundary.decode("ascii")
- def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload:
+ def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload:
if headers is None:
headers = CIMultiDict()
@@ -805,38 +843,44 @@ class MultipartWriter(Payload):
def append_payload(self, payload: Payload) -> Payload:
"""Adds a new body part to multipart writer."""
- # compression
- encoding = payload.headers.get(
- CONTENT_ENCODING,
- "",
- ).lower() # type: Optional[str]
- if encoding and encoding not in ("deflate", "gzip", "identity"):
- raise RuntimeError(f"unknown content encoding: {encoding}")
- if encoding == "identity":
- encoding = None
-
- # te encoding
- te_encoding = payload.headers.get(
- CONTENT_TRANSFER_ENCODING,
- "",
- ).lower() # type: Optional[str]
- if te_encoding not in ("", "base64", "quoted-printable", "binary"):
- raise RuntimeError(
- "unknown content transfer encoding: {}" "".format(te_encoding)
+ encoding: Optional[str] = None
+ te_encoding: Optional[str] = None
+ if self._is_form_data:
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
+ assert (
+ not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING}
+ & payload.headers.keys()
)
- if te_encoding == "binary":
- te_encoding = None
-
- # size
- size = payload.size
- if size is not None and not (encoding or te_encoding):
- payload.headers[CONTENT_LENGTH] = str(size)
+ # Set default Content-Disposition in case user doesn't create one
+ if CONTENT_DISPOSITION not in payload.headers:
+ name = f"section-{len(self._parts)}"
+ payload.set_content_disposition("form-data", name=name)
+ else:
+ # compression
+ encoding = payload.headers.get(CONTENT_ENCODING, "").lower()
+ if encoding and encoding not in ("deflate", "gzip", "identity"):
+ raise RuntimeError(f"unknown content encoding: {encoding}")
+ if encoding == "identity":
+ encoding = None
+
+ # te encoding
+ te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
+ if te_encoding not in ("", "base64", "quoted-printable", "binary"):
+ raise RuntimeError(f"unknown content transfer encoding: {te_encoding}")
+ if te_encoding == "binary":
+ te_encoding = None
+
+ # size
+ size = payload.size
+ if size is not None and not (encoding or te_encoding):
+ payload.headers[CONTENT_LENGTH] = str(size)
self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type]
return payload
def append_json(
- self, obj: Any, headers: Optional[MultiMapping[str]] = None
+ self, obj: Any, headers: Optional[Mapping[str, str]] = None
) -> Payload:
"""Helper to append JSON part."""
if headers is None:
@@ -847,7 +891,7 @@ class MultipartWriter(Payload):
def append_form(
self,
obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
- headers: Optional[MultiMapping[str]] = None,
+ headers: Optional[Mapping[str, str]] = None,
) -> Payload:
"""Helper to append form urlencoded part."""
assert isinstance(obj, (Sequence, Mapping))
@@ -888,6 +932,11 @@ class MultipartWriter(Payload):
async def write(self, writer: Any, close_boundary: bool = True) -> None:
"""Write body."""
for part, encoding, te_encoding in self._parts:
+ if self._is_form_data:
+ # https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
+ assert CONTENT_DISPOSITION in part.headers
+ assert "name=" in part.headers[CONTENT_DISPOSITION]
+
await writer.write(b"--" + self._boundary + b"\r\n")
await writer.write(part._binary_headers)
@@ -911,9 +960,9 @@ class MultipartWriter(Payload):
class MultipartPayloadWriter:
def __init__(self, writer: Any) -> None:
self._writer = writer
- self._encoding = None # type: Optional[str]
- self._compress = None # type: Any
- self._encoding_buffer = None # type: Optional[bytearray]
+ self._encoding: Optional[str] = None
+ self._compress: Optional[ZLibCompressor] = None
+ self._encoding_buffer: Optional[bytearray] = None
def enable_encoding(self, encoding: str) -> None:
if encoding == "base64":
@@ -925,8 +974,11 @@ class MultipartPayloadWriter:
def enable_compression(
self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
) -> None:
- zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
- self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)
+ self._compress = ZLibCompressor(
+ encoding=encoding,
+ suppress_deflate_header=True,
+ strategy=strategy,
+ )
async def write_eof(self) -> None:
if self._compress is not None:
@@ -942,7 +994,7 @@ class MultipartPayloadWriter:
async def write(self, chunk: bytes) -> None:
if self._compress is not None:
if chunk:
- chunk = self._compress.compress(chunk)
+ chunk = await self._compress.compress(chunk)
if not chunk:
return
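
The reader now honors RFC 7578 section 4.6: a form-data part named `_charset_` declares the default charset for subsequent parts, which `get_charset` falls back to when a part's own Content-Type carries no charset parameter. A minimal sketch of that fallback rule (the function name is illustrative):

```python
from typing import Dict, Optional

def part_charset(params: Dict[str, str], default_charset: Optional[str]) -> str:
    # A part's own charset parameter wins; otherwise use the form-wide
    # default announced by the "_charset_" part, then fall back to UTF-8.
    return params.get("charset", default_charset or "utf-8")

assert part_charset({}, None) == "utf-8"
assert part_charset({}, "iso-8859-1") == "iso-8859-1"
assert part_charset({"charset": "utf-8"}, "iso-8859-1") == "utf-8"
```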
diff --git a/contrib/python/aiohttp/aiohttp/payload.py b/contrib/python/aiohttp/aiohttp/payload.py
index 2ee90beea8..6593b05c6f 100644
--- a/contrib/python/aiohttp/aiohttp/payload.py
+++ b/contrib/python/aiohttp/aiohttp/payload.py
@@ -13,6 +13,7 @@ from typing import (
Any,
ByteString,
Dict,
+ Final,
Iterable,
Optional,
TextIO,
@@ -26,14 +27,14 @@ from multidict import CIMultiDict
from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import (
- PY_36,
+ _SENTINEL,
content_disposition_header,
guess_filename,
parse_mimetype,
sentinel,
)
from .streams import StreamReader
-from .typedefs import Final, JSONEncoder, _CIMultiDict
+from .typedefs import JSONEncoder, _CIMultiDict
__all__ = (
"PAYLOAD_REGISTRY",
@@ -51,9 +52,9 @@ __all__ = (
"AsyncIterablePayload",
)
-TOO_LARGE_BYTES_BODY: Final[int] = 2 ** 20 # 1 MB
+TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from typing import List
@@ -98,9 +99,9 @@ class PayloadRegistry:
"""
def __init__(self) -> None:
- self._first = [] # type: List[_PayloadRegistryItem]
- self._normal = [] # type: List[_PayloadRegistryItem]
- self._last = [] # type: List[_PayloadRegistryItem]
+ self._first: List[_PayloadRegistryItem] = []
+ self._normal: List[_PayloadRegistryItem] = []
+ self._last: List[_PayloadRegistryItem] = []
def get(
self,
@@ -132,8 +133,8 @@ class PayloadRegistry:
class Payload(ABC):
- _default_content_type = "application/octet-stream" # type: str
- _size = None # type: Optional[int]
+ _default_content_type: str = "application/octet-stream"
+ _size: Optional[int] = None
def __init__(
self,
@@ -141,14 +142,14 @@ class Payload(ABC):
headers: Optional[
Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
] = None,
- content_type: Optional[str] = sentinel,
+ content_type: Union[str, None, _SENTINEL] = sentinel,
filename: Optional[str] = None,
encoding: Optional[str] = None,
**kwargs: Any,
) -> None:
self._encoding = encoding
self._filename = filename
- self._headers = CIMultiDict() # type: _CIMultiDict
+ self._headers: _CIMultiDict = CIMultiDict()
self._value = value
if content_type is not sentinel and content_type is not None:
self._headers[hdrs.CONTENT_TYPE] = content_type
@@ -231,10 +232,7 @@ class BytesPayload(Payload):
self._size = len(value)
if self._size > TOO_LARGE_BYTES_BODY:
- if PY_36:
- kwargs = {"source": self}
- else:
- kwargs = {}
+ kwargs = {"source": self}
warnings.warn(
"Sending a large body directly with raw bytes might"
" lock the event loop. You should probably pass an "
@@ -301,10 +299,10 @@ class IOBasePayload(Payload):
async def write(self, writer: AbstractStreamWriter) -> None:
loop = asyncio.get_event_loop()
try:
- chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+ chunk = await loop.run_in_executor(None, self._value.read, 2**16)
while chunk:
await writer.write(chunk)
- chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+ chunk = await loop.run_in_executor(None, self._value.read, 2**16)
finally:
await loop.run_in_executor(None, self._value.close)
@@ -350,7 +348,7 @@ class TextIOPayload(IOBasePayload):
async def write(self, writer: AbstractStreamWriter) -> None:
loop = asyncio.get_event_loop()
try:
- chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+ chunk = await loop.run_in_executor(None, self._value.read, 2**16)
while chunk:
data = (
chunk.encode(encoding=self._encoding)
@@ -358,7 +356,7 @@ class TextIOPayload(IOBasePayload):
else chunk.encode()
)
await writer.write(data)
- chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+ chunk = await loop.run_in_executor(None, self._value.read, 2**16)
finally:
await loop.run_in_executor(None, self._value.close)
@@ -403,7 +401,7 @@ class JsonPayload(BytesPayload):
)
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from typing import AsyncIterable, AsyncIterator
_AsyncIterator = AsyncIterator[bytes]
@@ -417,13 +415,13 @@ else:
class AsyncIterablePayload(Payload):
- _iter = None # type: Optional[_AsyncIterator]
+ _iter: Optional[_AsyncIterator] = None
def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
if not isinstance(value, AsyncIterable):
raise TypeError(
"value argument must support "
- "collections.abc.AsyncIterablebe interface, "
+ "collections.abc.AsyncIterable interface, "
"got {!r}".format(type(value))
)
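
`BytesPayload` still warns above `TOO_LARGE_BYTES_BODY` (1 MB) because handing one large bytes object to the writer can block the event loop. The usual remedy is to stream the body instead; a sketch using an async generator, which aiohttp accepts as request data (URL and helper names are illustrative):

```python
import aiohttp

async def gen_body(blob: bytes, chunk_size: int = 2**16):
    # Yield the body in 64 KiB chunks so the event loop can run other
    # tasks between writes instead of processing one huge bytes object.
    for i in range(0, len(blob), chunk_size):
        yield blob[i : i + chunk_size]

async def upload(url: str, blob: bytes) -> None:
    async with aiohttp.ClientSession() as session:
        # aiohttp wraps the async generator in AsyncIterablePayload,
        # so the body is streamed chunk by chunk.
        async with session.post(url, data=gen_body(blob)) as resp:
            resp.raise_for_status()
```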
diff --git a/contrib/python/aiohttp/aiohttp/payload_streamer.py b/contrib/python/aiohttp/aiohttp/payload_streamer.py
index 9f8b8bc57c..364f763ae7 100644
--- a/contrib/python/aiohttp/aiohttp/payload_streamer.py
+++ b/contrib/python/aiohttp/aiohttp/payload_streamer.py
@@ -1,5 +1,5 @@
"""
-Payload implemenation for coroutines as data provider.
+Payload implementation for coroutines as data provider.
As a simple case, you can upload data from file::
@@ -44,7 +44,7 @@ class _stream_wrapper:
self.kwargs = kwargs
async def __call__(self, writer: AbstractStreamWriter) -> None:
- await self.coro(writer, *self.args, **self.kwargs) # type: ignore[operator]
+ await self.coro(writer, *self.args, **self.kwargs)
class streamer:
diff --git a/contrib/python/aiohttp/aiohttp/pytest_plugin.py b/contrib/python/aiohttp/aiohttp/pytest_plugin.py
index dd9a9f6179..5754747bf4 100644
--- a/contrib/python/aiohttp/aiohttp/pytest_plugin.py
+++ b/contrib/python/aiohttp/aiohttp/pytest_plugin.py
@@ -1,12 +1,11 @@
import asyncio
import contextlib
import warnings
-from collections.abc import Callable
-from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Union
+from typing import Any, Awaitable, Callable, Dict, Iterator, Optional, Type, Union
import pytest
-from aiohttp.helpers import PY_37, isasyncgenfunction
+from aiohttp.helpers import isasyncgenfunction
from aiohttp.web import Application
from .test_utils import (
@@ -23,14 +22,11 @@ from .test_utils import (
try:
import uvloop
except ImportError: # pragma: no cover
- uvloop = None
-
-try:
- import tokio
-except ImportError: # pragma: no cover
- tokio = None
+ uvloop = None # type: ignore[assignment]
AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]
+AiohttpRawServer = Callable[[Application], Awaitable[RawTestServer]]
+AiohttpServer = Callable[[Application], Awaitable[TestServer]]
def pytest_addoption(parser): # type: ignore[no-untyped-def]
@@ -44,7 +40,7 @@ def pytest_addoption(parser): # type: ignore[no-untyped-def]
"--aiohttp-loop",
action="store",
default="pyloop",
- help="run tests with specific loop: pyloop, uvloop, tokio or all",
+ help="run tests with specific loop: pyloop, uvloop or all",
)
parser.addoption(
"--aiohttp-enable-loop-debug",
@@ -193,16 +189,14 @@ def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def]
return
loops = metafunc.config.option.aiohttp_loop
+ avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]]
avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}
if uvloop is not None: # pragma: no cover
avail_factories["uvloop"] = uvloop.EventLoopPolicy
- if tokio is not None: # pragma: no cover
- avail_factories["tokio"] = tokio.EventLoopPolicy
-
if loops == "all":
- loops = "pyloop,uvloop?,tokio?"
+ loops = "pyloop,uvloop?"
factories = {} # type: ignore[var-annotated]
for name in loops.split(","):
@@ -236,12 +230,8 @@ def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def]
@pytest.fixture
def proactor_loop(): # type: ignore[no-untyped-def]
- if not PY_37:
- policy = asyncio.get_event_loop_policy()
- policy._loop_factory = asyncio.ProactorEventLoop # type: ignore[attr-defined]
- else:
- policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined]
- asyncio.set_event_loop_policy(policy)
+ policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined]
+ asyncio.set_event_loop_policy(policy)
with loop_context(policy.new_event_loop) as _loop:
asyncio.set_event_loop(_loop)
@@ -249,7 +239,7 @@ def proactor_loop(): # type: ignore[no-untyped-def]
@pytest.fixture
-def unused_port(aiohttp_unused_port): # type: ignore[no-untyped-def] # pragma: no cover
+def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
warnings.warn(
"Deprecated, use aiohttp_unused_port fixture instead",
DeprecationWarning,
@@ -259,13 +249,13 @@ def unused_port(aiohttp_unused_port): # type: ignore[no-untyped-def] # pragma:
@pytest.fixture
-def aiohttp_unused_port(): # type: ignore[no-untyped-def]
+def aiohttp_unused_port() -> Callable[[], int]:
"""Return a port that is unused on the current host."""
return _unused_port
@pytest.fixture
-def aiohttp_server(loop): # type: ignore[no-untyped-def]
+def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
"""Factory to create a TestServer instance, given an app.
aiohttp_server(app, **kwargs)
@@ -298,7 +288,7 @@ def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no c
@pytest.fixture
-def aiohttp_raw_server(loop): # type: ignore[no-untyped-def]
+def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
"""Factory to create a RawTestServer instance, given a web handler.
aiohttp_raw_server(handler, **kwargs)
@@ -335,7 +325,7 @@ def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover
@pytest.fixture
def aiohttp_client(
loop: asyncio.AbstractEventLoop,
-) -> Generator[AiohttpClient, None, None]:
+) -> Iterator[AiohttpClient]:
"""Factory to create a TestClient instance.
aiohttp_client(app, **kwargs)
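
The new `AiohttpClient`, `AiohttpServer`, and `AiohttpRawServer` aliases let fixture-based tests be fully annotated. A typical test against the `aiohttp_client` fixture (standard plugin usage, assuming the plugin is enabled in the test environment):

```python
from aiohttp import web
from aiohttp.pytest_plugin import AiohttpClient

async def hello(request: web.Request) -> web.Response:
    return web.Response(text="hello")

async def test_hello(aiohttp_client: AiohttpClient) -> None:
    app = web.Application()
    app.router.add_get("/", hello)
    client = await aiohttp_client(app)  # TestClient bound to a TestServer
    resp = await client.get("/")
    assert resp.status == 200
    assert await resp.text() == "hello"
```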
diff --git a/contrib/python/aiohttp/aiohttp/resolver.py b/contrib/python/aiohttp/aiohttp/resolver.py
index 6668fa80ec..c03230c744 100644
--- a/contrib/python/aiohttp/aiohttp/resolver.py
+++ b/contrib/python/aiohttp/aiohttp/resolver.py
@@ -45,7 +45,7 @@ class ThreadedResolver(AbstractResolver):
# IPv6 is not supported by Python build,
# or IPv6 is not enabled in the host
continue
- if address[3]: # type: ignore[misc]
+ if address[3]:
# This is essential for link-local IPv6 addresses.
# LL IPv6 is a VERY rare case. Strictly speaking, we should use
# getnameinfo() unconditionally, but performance makes sense.
diff --git a/contrib/python/aiohttp/aiohttp/streams.py b/contrib/python/aiohttp/aiohttp/streams.py
index 055848877e..b9b9c3fd96 100644
--- a/contrib/python/aiohttp/aiohttp/streams.py
+++ b/contrib/python/aiohttp/aiohttp/streams.py
@@ -1,12 +1,27 @@
import asyncio
import collections
import warnings
-from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar
+from typing import (
+ Awaitable,
+ Callable,
+ Deque,
+ Final,
+ Generic,
+ List,
+ Optional,
+ Tuple,
+ TypeVar,
+)
from .base_protocol import BaseProtocol
-from .helpers import BaseTimerContext, set_exception, set_result
+from .helpers import (
+ _EXC_SENTINEL,
+ BaseTimerContext,
+ TimerNoop,
+ set_exception,
+ set_result,
+)
from .log import internal_logger
-from .typedefs import Final
__all__ = (
"EMPTY_PAYLOAD",
@@ -59,19 +74,11 @@ class AsyncStreamReaderMixin:
return AsyncStreamIterator(self.readline) # type: ignore[attr-defined]
def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
- """Returns an asynchronous iterator that yields chunks of size n.
-
- Python-3.5 available for Python 3.5+ only
- """
- return AsyncStreamIterator(
- lambda: self.read(n) # type: ignore[attr-defined,no-any-return]
- )
+ """Returns an asynchronous iterator that yields chunks of size n."""
+ return AsyncStreamIterator(lambda: self.read(n)) # type: ignore[attr-defined]
def iter_any(self) -> AsyncStreamIterator[bytes]:
- """Yield all available data as soon as it is received.
-
- Python-3.5 available for Python 3.5+ only
- """
+ """Yield all available data as soon as it is received."""
return AsyncStreamIterator(self.readany) # type: ignore[attr-defined]
def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
@@ -79,8 +86,6 @@ class AsyncStreamReaderMixin:
The yielded objects are tuples
of (bytes, bool) as returned by the StreamReader.readchunk method.
-
- Python-3.5 available for Python 3.5+ only
"""
return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type]
@@ -117,15 +122,15 @@ class StreamReader(AsyncStreamReaderMixin):
self._loop = loop
self._size = 0
self._cursor = 0
- self._http_chunk_splits = None # type: Optional[List[int]]
- self._buffer = collections.deque() # type: Deque[bytes]
+ self._http_chunk_splits: Optional[List[int]] = None
+ self._buffer: Deque[bytes] = collections.deque()
self._buffer_offset = 0
self._eof = False
- self._waiter = None # type: Optional[asyncio.Future[None]]
- self._eof_waiter = None # type: Optional[asyncio.Future[None]]
- self._exception = None # type: Optional[BaseException]
- self._timer = timer
- self._eof_callbacks = [] # type: List[Callable[[], None]]
+ self._waiter: Optional[asyncio.Future[None]] = None
+ self._eof_waiter: Optional[asyncio.Future[None]] = None
+ self._exception: Optional[BaseException] = None
+ self._timer = TimerNoop() if timer is None else timer
+ self._eof_callbacks: List[Callable[[], None]] = []
def __repr__(self) -> str:
info = [self.__class__.__name__]
@@ -133,7 +138,7 @@ class StreamReader(AsyncStreamReaderMixin):
info.append("%d bytes" % self._size)
if self._eof:
info.append("eof")
- if self._low_water != 2 ** 16: # default limit
+ if self._low_water != 2**16: # default limit
info.append("low=%d high=%d" % (self._low_water, self._high_water))
if self._waiter:
info.append("w=%r" % self._waiter)
@@ -147,19 +152,23 @@ class StreamReader(AsyncStreamReaderMixin):
def exception(self) -> Optional[BaseException]:
return self._exception
- def set_exception(self, exc: BaseException) -> None:
+ def set_exception(
+ self,
+ exc: BaseException,
+ exc_cause: BaseException = _EXC_SENTINEL,
+ ) -> None:
self._exception = exc
self._eof_callbacks.clear()
waiter = self._waiter
if waiter is not None:
self._waiter = None
- set_exception(waiter, exc)
+ set_exception(waiter, exc, exc_cause)
waiter = self._eof_waiter
if waiter is not None:
self._eof_waiter = None
- set_exception(waiter, exc)
+ set_exception(waiter, exc, exc_cause)
def on_eof(self, callback: Callable[[], None]) -> None:
if self._eof:
@@ -266,7 +275,7 @@ class StreamReader(AsyncStreamReaderMixin):
# self._http_chunk_splits contains logical byte offsets from start of
# the body transfer. Each offset is the offset of the end of a chunk.
# "Logical" means bytes, accessible for a user.
- # If no chunks containig logical data were received, current position
+ # If no chunks containing logical data were received, current position
# is definitely zero.
pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0
@@ -299,10 +308,7 @@ class StreamReader(AsyncStreamReaderMixin):
waiter = self._waiter = self._loop.create_future()
try:
- if self._timer:
- with self._timer:
- await waiter
- else:
+ with self._timer:
await waiter
finally:
self._waiter = None
@@ -327,7 +333,9 @@ class StreamReader(AsyncStreamReaderMixin):
offset = self._buffer_offset
ichar = self._buffer[0].find(separator, offset) + 1
# Read from current offset to found separator or to the end.
- data = self._read_nowait_chunk(ichar - offset if ichar else -1)
+ data = self._read_nowait_chunk(
+ ichar - offset + seplen - 1 if ichar else -1
+ )
chunk += data
chunk_size += len(data)
if ichar:
@@ -436,7 +444,7 @@ class StreamReader(AsyncStreamReaderMixin):
if self._exception is not None:
raise self._exception
- blocks = [] # type: List[bytes]
+ blocks: List[bytes] = []
while n > 0:
block = await self.read(n)
if not block:
@@ -491,8 +499,9 @@ class StreamReader(AsyncStreamReaderMixin):
def _read_nowait(self, n: int) -> bytes:
"""Read not more than n bytes, or whole buffer if n == -1"""
- chunks = []
+ self._timer.assert_timeout()
+ chunks = []
while self._buffer:
chunk = self._read_nowait_chunk(n)
chunks.append(chunk)
@@ -506,12 +515,19 @@ class StreamReader(AsyncStreamReaderMixin):
class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init]
def __init__(self) -> None:
- pass
+ self._read_eof_chunk = False
+
+ def __repr__(self) -> str:
+ return "<%s>" % self.__class__.__name__
def exception(self) -> Optional[BaseException]:
return None
- def set_exception(self, exc: BaseException) -> None:
+ def set_exception(
+ self,
+ exc: BaseException,
+ exc_cause: BaseException = _EXC_SENTINEL,
+ ) -> None:
pass
def on_eof(self, callback: Callable[[], None]) -> None:
@@ -547,6 +563,10 @@ class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init]
return b""
async def readchunk(self) -> Tuple[bytes, bool]:
+ if not self._read_eof_chunk:
+ self._read_eof_chunk = True
+ return (b"", False)
+
return (b"", True)
async def readexactly(self, n: int) -> bytes:
@@ -565,10 +585,10 @@ class DataQueue(Generic[_T]):
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._eof = False
- self._waiter = None # type: Optional[asyncio.Future[None]]
- self._exception = None # type: Optional[BaseException]
+ self._waiter: Optional[asyncio.Future[None]] = None
+ self._exception: Optional[BaseException] = None
self._size = 0
- self._buffer = collections.deque() # type: Deque[Tuple[_T, int]]
+ self._buffer: Deque[Tuple[_T, int]] = collections.deque()
def __len__(self) -> int:
return len(self._buffer)
@@ -582,14 +602,18 @@ class DataQueue(Generic[_T]):
def exception(self) -> Optional[BaseException]:
return self._exception
- def set_exception(self, exc: BaseException) -> None:
+ def set_exception(
+ self,
+ exc: BaseException,
+ exc_cause: BaseException = _EXC_SENTINEL,
+ ) -> None:
self._eof = True
self._exception = exc
waiter = self._waiter
if waiter is not None:
self._waiter = None
- set_exception(waiter, exc)
+ set_exception(waiter, exc, exc_cause)
def feed_data(self, data: _T, size: int = 0) -> None:
self._size += size
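
The separator arithmetic fixed above matters for multi-byte separators: `ichar` is stored as the index one past the separator's start (so 0 can mean "not found"), hence consuming through the end of the separator needs `ichar - offset + seplen - 1` bytes. A self-contained check of that formula (illustrative helper, not aiohttp API):

```python
def bytes_to_consume(buffer: bytes, separator: bytes, offset: int) -> int:
    # Mirrors StreamReader's bookkeeping: ichar is one past the separator's
    # start, so reaching the end of a multi-byte separator requires the
    # extra len(separator) - 1 bytes the old code left out.
    ichar = buffer.find(separator, offset) + 1
    seplen = len(separator)
    return ichar - offset + seplen - 1 if ichar else -1

buf = b"header\r\nbody"
n = bytes_to_consume(buf, b"\r\n", 0)
assert n == 8 and buf[:n] == b"header\r\n"  # separator fully included
```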
diff --git a/contrib/python/aiohttp/aiohttp/tcp_helpers.py b/contrib/python/aiohttp/aiohttp/tcp_helpers.py
index 0e1dbf1655..88b2442237 100644
--- a/contrib/python/aiohttp/aiohttp/tcp_helpers.py
+++ b/contrib/python/aiohttp/aiohttp/tcp_helpers.py
@@ -15,7 +15,6 @@ if hasattr(socket, "SO_KEEPALIVE"):
if sock is not None:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
-
else:
def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover
diff --git a/contrib/python/aiohttp/aiohttp/test_utils.py b/contrib/python/aiohttp/aiohttp/test_utils.py
index 361dae486c..a36e859968 100644
--- a/contrib/python/aiohttp/aiohttp/test_utils.py
+++ b/contrib/python/aiohttp/aiohttp/test_utils.py
@@ -22,7 +22,7 @@ from typing import (
Union,
cast,
)
-from unittest import mock
+from unittest import IsolatedAsyncioTestCase, mock
from aiosignal import Signal
from multidict import CIMultiDict, CIMultiDictProxy
@@ -35,8 +35,9 @@ from . import ClientSession, hdrs
from .abc import AbstractCookieJar
from .client_reqrep import ClientResponse
from .client_ws import ClientWebSocketResponse
-from .helpers import PY_38, sentinel
+from .helpers import sentinel
from .http import HttpVersion, RawRequestMessage
+from .typedefs import StrOrURL
from .web import (
Application,
AppRunner,
@@ -49,16 +50,11 @@ from .web import (
)
from .web_protocol import _RequestHandler
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from ssl import SSLContext
else:
SSLContext = None
-if PY_38:
- from unittest import IsolatedAsyncioTestCase as TestCase
-else:
- from asynctest import TestCase # type: ignore[no-redef]
-
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
@@ -105,8 +101,8 @@ class BaseTestServer(ABC):
**kwargs: Any,
) -> None:
self._loop = loop
- self.runner = None # type: Optional[BaseRunner]
- self._root = None # type: Optional[URL]
+ self.runner: Optional[BaseRunner] = None
+ self._root: Optional[URL] = None
self.host = host
self.port = port
self._closed = False
@@ -121,7 +117,7 @@ class BaseTestServer(ABC):
return
self._loop = loop
self._ssl = kwargs.pop("ssl", None)
- self.runner = await self._make_runner(**kwargs)
+ self.runner = await self._make_runner(handler_cancellation=True, **kwargs)
await self.runner.setup()
if not self.port:
self.port = 0
@@ -136,7 +132,7 @@ class BaseTestServer(ABC):
await site.start()
server = site._server
assert server is not None
- sockets = server.sockets
+ sockets = server.sockets # type: ignore[attr-defined]
assert sockets is not None
self.port = sockets[0].getsockname()[1]
if self.scheme is sentinel:
@@ -151,14 +147,14 @@ class BaseTestServer(ABC):
async def _make_runner(self, **kwargs: Any) -> BaseRunner:
pass
- def make_url(self, path: str) -> URL:
+ def make_url(self, path: StrOrURL) -> URL:
assert self._root is not None
url = URL(path)
if not self.skip_url_asserts:
assert not url.is_absolute()
return self._root.join(url)
else:
- return URL(str(self._root) + path)
+ return URL(str(self._root) + str(path))
@property
def started(self) -> bool:
@@ -284,8 +280,8 @@ class TestClient:
cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
self._closed = False
- self._responses = [] # type: List[ClientResponse]
- self._websockets = [] # type: List[ClientWebSocketResponse]
+ self._responses: List[ClientResponse] = []
+ self._websockets: List[ClientWebSocketResponse] = []
async def start_server(self) -> None:
await self._server.start_server(loop=self._loop)
@@ -317,16 +313,20 @@ class TestClient:
"""
return self._session
- def make_url(self, path: str) -> URL:
+ def make_url(self, path: StrOrURL) -> URL:
return self._server.make_url(path)
- async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse:
+ async def _request(
+ self, method: str, path: StrOrURL, **kwargs: Any
+ ) -> ClientResponse:
resp = await self._session.request(method, self.make_url(path), **kwargs)
# save it to close later
self._responses.append(resp)
return resp
- def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager:
+ def request(
+ self, method: str, path: StrOrURL, **kwargs: Any
+ ) -> _RequestContextManager:
"""Routes a request to tested http server.
The interface is identical to aiohttp.ClientSession.request,
@@ -336,35 +336,35 @@ class TestClient:
"""
return _RequestContextManager(self._request(method, path, **kwargs))
- def get(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP GET request."""
return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
- def post(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP POST request."""
return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
- def options(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP OPTIONS request."""
return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))
- def head(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP HEAD request."""
return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
- def put(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PUT request."""
return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
- def patch(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PATCH request."""
return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))
- def delete(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PATCH request."""
return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))
- def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager:
+ def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager:
"""Initiate websocket connection.
The api corresponds to aiohttp.ClientSession.ws_connect.
@@ -372,7 +372,9 @@ class TestClient:
"""
return _WSRequestContextManager(self._ws_connect(path, **kwargs))
- async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse:
+ async def _ws_connect(
+ self, path: StrOrURL, **kwargs: Any
+ ) -> ClientWebSocketResponse:
ws = await self._session.ws_connect(self.make_url(path), **kwargs)
self._websockets.append(ws)
return ws
@@ -423,7 +425,7 @@ class TestClient:
await self.close()
-class AioHTTPTestCase(TestCase):
+class AioHTTPTestCase(IsolatedAsyncioTestCase):
"""A base class to allow for unittest web applications using aiohttp.
Provides the following:
@@ -454,13 +456,9 @@ class AioHTTPTestCase(TestCase):
"""
raise RuntimeError("Did you forget to define get_application()?")
- def setUp(self) -> None:
- try:
- self.loop = asyncio.get_running_loop()
- except (AttributeError, RuntimeError): # AttributeError->py36
- self.loop = asyncio.get_event_loop_policy().get_event_loop()
-
- self.loop.run_until_complete(self.setUpAsync())
+ async def asyncSetUp(self) -> None:
+ self.loop = asyncio.get_running_loop()
+ return await self.setUpAsync()
async def setUpAsync(self) -> None:
self.app = await self.get_application()
@@ -469,8 +467,8 @@ class AioHTTPTestCase(TestCase):
await self.client.start_server()
- def tearDown(self) -> None:
- self.loop.run_until_complete(self.tearDownAsync())
+ async def asyncTearDown(self) -> None:
+ return await self.tearDownAsync()
async def tearDownAsync(self) -> None:
await self.client.close()
@@ -523,28 +521,7 @@ def setup_test_loop(
once they are done with the loop.
"""
loop = loop_factory()
- try:
- module = loop.__class__.__module__
- skip_watcher = "uvloop" in module
- except AttributeError: # pragma: no cover
- # Just in case
- skip_watcher = True
asyncio.set_event_loop(loop)
- if sys.platform != "win32" and not skip_watcher:
- policy = asyncio.get_event_loop_policy()
- watcher: asyncio.AbstractChildWatcher
- try: # Python >= 3.8
- # Refs:
- # * https://github.com/pytest-dev/pytest-xdist/issues/620
- # * https://stackoverflow.com/a/58614689/595220
- # * https://bugs.python.org/issue35621
- # * https://github.com/python/cpython/pull/14344
- watcher = asyncio.ThreadedChildWatcher()
- except AttributeError: # Python < 3.8
- watcher = asyncio.SafeChildWatcher()
- watcher.attach_loop(loop)
- with contextlib.suppress(NotImplementedError):
- policy.set_child_watcher(watcher)
return loop
@@ -569,7 +546,7 @@ def _create_app_mock() -> mock.MagicMock:
def set_dict(app: Any, key: str, value: Any) -> None:
app.__app_dict[key] = value
- app = mock.MagicMock()
+ app = mock.MagicMock(spec=Application)
app.__app_dict = {}
app.__getitem__ = get_dict
app.__setitem__ = set_dict
@@ -607,7 +584,7 @@ def make_mocked_request(
transport: Any = sentinel,
payload: Any = sentinel,
sslcontext: Optional[SSLContext] = None,
- client_max_size: int = 1024 ** 2,
+ client_max_size: int = 1024**2,
loop: Any = ...,
) -> Request:
"""Creates mocked web.Request testing purposes.
@@ -617,8 +594,15 @@ def make_mocked_request(
"""
task = mock.Mock()
if loop is ...:
- loop = mock.Mock()
- loop.create_future.return_value = ()
+ # no loop passed, try to get the current one if
+ # it is running, as we need a real loop to create
+ # executor jobs to be able to do testing
+ # with a real executor
+ try:
+ loop = asyncio.get_running_loop()
+ except RuntimeError:
+ loop = mock.Mock()
+ loop.create_future.return_value = ()
if version < HttpVersion(1, 1):
closing = True
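
With the base class switched to `IsolatedAsyncioTestCase`, setup and teardown now run as `asyncSetUp`/`asyncTearDown`, and test methods can be plain coroutines. A typical subclass under that assumption:

```python
import unittest

from aiohttp import web
from aiohttp.test_utils import AioHTTPTestCase

class HelloTestCase(AioHTTPTestCase):
    async def get_application(self) -> web.Application:
        async def hello(request: web.Request) -> web.Response:
            return web.Response(text="hello")

        app = web.Application()
        app.router.add_get("/", hello)
        return app

    async def test_hello(self) -> None:
        # self.client is created in setUpAsync(), which asyncSetUp() awaits.
        resp = await self.client.get("/")
        self.assertEqual(resp.status, 200)
        self.assertEqual(await resp.text(), "hello")

if __name__ == "__main__":
    unittest.main()
```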
diff --git a/contrib/python/aiohttp/aiohttp/tracing.py b/contrib/python/aiohttp/aiohttp/tracing.py
index 0e118a3997..62847a0bf7 100644
--- a/contrib/python/aiohttp/aiohttp/tracing.py
+++ b/contrib/python/aiohttp/aiohttp/tracing.py
@@ -1,5 +1,5 @@
from types import SimpleNamespace
-from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar
+from typing import TYPE_CHECKING, Awaitable, Optional, Protocol, Type, TypeVar
import attr
from aiosignal import Signal
@@ -8,9 +8,8 @@ from yarl import URL
from .client_reqrep import ClientResponse
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .client import ClientSession
- from .typedefs import Protocol
_ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
@@ -51,54 +50,54 @@ class TraceConfig:
def __init__(
self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
) -> None:
- self._on_request_start = Signal(
+ self._on_request_start: Signal[
+ _SignalCallback[TraceRequestStartParams]
+ ] = Signal(self)
+ self._on_request_chunk_sent: Signal[
+ _SignalCallback[TraceRequestChunkSentParams]
+ ] = Signal(self)
+ self._on_response_chunk_received: Signal[
+ _SignalCallback[TraceResponseChunkReceivedParams]
+ ] = Signal(self)
+ self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal(
self
- ) # type: Signal[_SignalCallback[TraceRequestStartParams]]
- self._on_request_chunk_sent = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceRequestChunkSentParams]]
- self._on_response_chunk_received = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceResponseChunkReceivedParams]]
- self._on_request_end = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceRequestEndParams]]
- self._on_request_exception = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceRequestExceptionParams]]
- self._on_request_redirect = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceRequestRedirectParams]]
- self._on_connection_queued_start = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceConnectionQueuedStartParams]]
- self._on_connection_queued_end = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceConnectionQueuedEndParams]]
- self._on_connection_create_start = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceConnectionCreateStartParams]]
- self._on_connection_create_end = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceConnectionCreateEndParams]]
- self._on_connection_reuseconn = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceConnectionReuseconnParams]]
- self._on_dns_resolvehost_start = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceDnsResolveHostStartParams]]
- self._on_dns_resolvehost_end = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceDnsResolveHostEndParams]]
- self._on_dns_cache_hit = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceDnsCacheHitParams]]
- self._on_dns_cache_miss = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceDnsCacheMissParams]]
- self._on_request_headers_sent = Signal(
- self
- ) # type: Signal[_SignalCallback[TraceRequestHeadersSentParams]]
+ )
+ self._on_request_exception: Signal[
+ _SignalCallback[TraceRequestExceptionParams]
+ ] = Signal(self)
+ self._on_request_redirect: Signal[
+ _SignalCallback[TraceRequestRedirectParams]
+ ] = Signal(self)
+ self._on_connection_queued_start: Signal[
+ _SignalCallback[TraceConnectionQueuedStartParams]
+ ] = Signal(self)
+ self._on_connection_queued_end: Signal[
+ _SignalCallback[TraceConnectionQueuedEndParams]
+ ] = Signal(self)
+ self._on_connection_create_start: Signal[
+ _SignalCallback[TraceConnectionCreateStartParams]
+ ] = Signal(self)
+ self._on_connection_create_end: Signal[
+ _SignalCallback[TraceConnectionCreateEndParams]
+ ] = Signal(self)
+ self._on_connection_reuseconn: Signal[
+ _SignalCallback[TraceConnectionReuseconnParams]
+ ] = Signal(self)
+ self._on_dns_resolvehost_start: Signal[
+ _SignalCallback[TraceDnsResolveHostStartParams]
+ ] = Signal(self)
+ self._on_dns_resolvehost_end: Signal[
+ _SignalCallback[TraceDnsResolveHostEndParams]
+ ] = Signal(self)
+ self._on_dns_cache_hit: Signal[
+ _SignalCallback[TraceDnsCacheHitParams]
+ ] = Signal(self)
+ self._on_dns_cache_miss: Signal[
+ _SignalCallback[TraceDnsCacheMissParams]
+ ] = Signal(self)
+ self._on_request_headers_sent: Signal[
+ _SignalCallback[TraceRequestHeadersSentParams]
+ ] = Signal(self)
self._trace_config_ctx_factory = trace_config_ctx_factory
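
The signals are now parameterized with their callback types instead of carrying `# type:` comments, but attaching handlers is unchanged. Standard client tracing usage:

```python
import asyncio
from types import SimpleNamespace

import aiohttp

async def on_request_start(
    session: aiohttp.ClientSession,
    ctx: SimpleNamespace,
    params: aiohttp.TraceRequestStartParams,
) -> None:
    print("starting", params.method, params.url)

async def main() -> None:
    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
        async with session.get("https://example.com") as resp:
            await resp.read()

asyncio.run(main())
```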
diff --git a/contrib/python/aiohttp/aiohttp/typedefs.py b/contrib/python/aiohttp/aiohttp/typedefs.py
index 84283d9a46..5e963e1a10 100644
--- a/contrib/python/aiohttp/aiohttp/typedefs.py
+++ b/contrib/python/aiohttp/aiohttp/typedefs.py
@@ -1,6 +1,5 @@
import json
import os
-import sys
from typing import (
TYPE_CHECKING,
Any,
@@ -15,20 +14,10 @@ from typing import (
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
from yarl import URL
-# These are for other modules to use (to avoid repeating the conditional import).
-if sys.version_info >= (3, 8):
- from typing import Final as Final, Protocol as Protocol, TypedDict as TypedDict
-else:
- from typing_extensions import ( # noqa: F401
- Final,
- Protocol as Protocol,
- TypedDict as TypedDict,
- )
-
DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
_CIMultiDict = CIMultiDict[str]
_CIMultiDictProxy = CIMultiDictProxy[str]
_MultiDict = MultiDict[str]
@@ -60,5 +49,6 @@ LooseCookies = Union[
]
Handler = Callable[["Request"], Awaitable["StreamResponse"]]
+Middleware = Callable[["Request", Handler], Awaitable["StreamResponse"]]
PathLike = Union[str, "os.PathLike[str]"]
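
The new `Middleware` alias spells out the callable shape that `web.Application(middlewares=...)` expects. A middleware matching it, in standard aiohttp style:

```python
from aiohttp import web
from aiohttp.typedefs import Handler

@web.middleware
async def add_server_header(
    request: web.Request, handler: Handler
) -> web.StreamResponse:
    # Matches Middleware = Callable[["Request", Handler], Awaitable["StreamResponse"]]
    response = await handler(request)
    response.headers["Server"] = "example"
    return response

app = web.Application(middlewares=[add_server_header])
```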
diff --git a/contrib/python/aiohttp/aiohttp/web.py b/contrib/python/aiohttp/aiohttp/web.py
index 864428b49b..e9116507f4 100644
--- a/contrib/python/aiohttp/aiohttp/web.py
+++ b/contrib/python/aiohttp/aiohttp/web.py
@@ -1,9 +1,13 @@
import asyncio
import logging
+import os
import socket
import sys
+import warnings
from argparse import ArgumentParser
from collections.abc import Iterable
+from contextlib import suppress
+from functools import partial
from importlib import import_module
from typing import (
Any,
@@ -17,10 +21,12 @@ from typing import (
Union,
cast,
)
+from weakref import WeakSet
from .abc import AbstractAccessLogger
-from .helpers import all_tasks
+from .helpers import AppKey as AppKey
from .log import access_logger
+from .typedefs import PathLike
from .web_app import Application as Application, CleanupError as CleanupError
from .web_exceptions import (
HTTPAccepted as HTTPAccepted,
@@ -42,6 +48,7 @@ from .web_exceptions import (
HTTPLengthRequired as HTTPLengthRequired,
HTTPMethodNotAllowed as HTTPMethodNotAllowed,
HTTPMisdirectedRequest as HTTPMisdirectedRequest,
+ HTTPMove as HTTPMove,
HTTPMovedPermanently as HTTPMovedPermanently,
HTTPMultipleChoices as HTTPMultipleChoices,
HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
@@ -80,6 +87,7 @@ from .web_exceptions import (
HTTPUseProxy as HTTPUseProxy,
HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
HTTPVersionNotSupported as HTTPVersionNotSupported,
+ NotAppKeyWarning as NotAppKeyWarning,
)
from .web_fileresponse import FileResponse as FileResponse
from .web_log import AccessLogger
@@ -136,6 +144,7 @@ from .web_urldispatcher import (
AbstractRoute as AbstractRoute,
DynamicResource as DynamicResource,
PlainResource as PlainResource,
+ PrefixedSubAppResource as PrefixedSubAppResource,
Resource as Resource,
ResourceRoute as ResourceRoute,
StaticResource as StaticResource,
@@ -151,9 +160,11 @@ from .web_ws import (
__all__ = (
# web_app
+ "AppKey",
"Application",
"CleanupError",
# web_exceptions
+ "NotAppKeyWarning",
"HTTPAccepted",
"HTTPBadGateway",
"HTTPBadRequest",
@@ -173,6 +184,7 @@ __all__ = (
"HTTPLengthRequired",
"HTTPMethodNotAllowed",
"HTTPMisdirectedRequest",
+ "HTTPMove",
"HTTPMovedPermanently",
"HTTPMultipleChoices",
"HTTPNetworkAuthenticationRequired",
@@ -261,6 +273,7 @@ __all__ = (
"AbstractRoute",
"DynamicResource",
"PlainResource",
+ "PrefixedSubAppResource",
"Resource",
"ResourceRoute",
"StaticResource",
@@ -281,6 +294,9 @@ try:
except ImportError: # pragma: no cover
SSLContext = Any # type: ignore[misc,assignment]
+# Only display warning when using -Wdefault, -We, -X dev or similar.
+warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True)
+
HostSequence = TypingIterable[str]
@@ -289,12 +305,12 @@ async def _run_app(
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
- path: Optional[str] = None,
- sock: Optional[socket.socket] = None,
+ path: Union[PathLike, TypingIterable[PathLike], None] = None,
+ sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
shutdown_timeout: float = 60.0,
keepalive_timeout: float = 75.0,
ssl_context: Optional[SSLContext] = None,
- print: Callable[..., None] = print,
+ print: Optional[Callable[..., None]] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
@@ -302,10 +318,28 @@ async def _run_app(
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
+ handler_cancellation: bool = False,
) -> None:
- # A internal functio to actually do all dirty job for application running
+ async def wait(
+ starting_tasks: "WeakSet[asyncio.Task[object]]", shutdown_timeout: float
+ ) -> None:
+ # Wait for pending tasks for a given time limit.
+ t = asyncio.current_task()
+ assert t is not None
+ starting_tasks.add(t)
+ with suppress(asyncio.TimeoutError):
+ await asyncio.wait_for(_wait(starting_tasks), timeout=shutdown_timeout)
+
+ async def _wait(exclude: "WeakSet[asyncio.Task[object]]") -> None:
+ t = asyncio.current_task()
+ assert t is not None
+ exclude.add(t)
+ while tasks := asyncio.all_tasks().difference(exclude):
+ await asyncio.wait(tasks)
+
+ # An internal function that does all the dirty work of running the application
if asyncio.iscoroutine(app):
- app = await app # type: ignore[misc]
+ app = await app
app = cast(Application, app)
@@ -316,11 +350,19 @@ async def _run_app(
access_log_format=access_log_format,
access_log=access_log,
keepalive_timeout=keepalive_timeout,
+ shutdown_timeout=shutdown_timeout,
+ handler_cancellation=handler_cancellation,
)
await runner.setup()
+ # On shutdown we want to avoid waiting on tasks which run forever.
+ # It's very likely that all tasks which run forever will have been created by
+ # the time we have completed the application startup (in runner.setup()),
+ # so we just record all running tasks here and exclude them later.
+ starting_tasks: "WeakSet[asyncio.Task[object]]" = WeakSet(asyncio.all_tasks())
+ runner.shutdown_callback = partial(wait, starting_tasks, shutdown_timeout)
- sites = [] # type: List[BaseSite]
+ sites: List[BaseSite] = []
try:
if host is not None:
@@ -330,7 +372,6 @@ async def _run_app(
runner,
host,
port,
- shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
@@ -344,7 +385,6 @@ async def _run_app(
runner,
h,
port,
- shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
@@ -356,7 +396,6 @@ async def _run_app(
TCPSite(
runner,
port=port,
- shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
@@ -365,12 +404,11 @@ async def _run_app(
)
if path is not None:
- if isinstance(path, (str, bytes, bytearray, memoryview)):
+ if isinstance(path, (str, os.PathLike)):
sites.append(
UnixSite(
runner,
path,
- shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
@@ -381,7 +419,6 @@ async def _run_app(
UnixSite(
runner,
p,
- shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
@@ -393,7 +430,6 @@ async def _run_app(
SockSite(
runner,
sock,
- shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
@@ -404,7 +440,6 @@ async def _run_app(
SockSite(
runner,
s,
- shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
@@ -420,15 +455,8 @@ async def _run_app(
)
# sleep forever in 1 hour intervals
- # on Windows before Python 3.8 wake up every 1 second to handle
- # Ctrl+C smoothly
- if sys.platform == "win32" and sys.version_info < (3, 8):
- delay = 1
- else:
- delay = 3600
-
while True:
- await asyncio.sleep(delay)
+ await asyncio.sleep(3600)
finally:
await runner.cleanup()
@@ -462,12 +490,12 @@ def run_app(
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
- path: Optional[str] = None,
- sock: Optional[socket.socket] = None,
+ path: Union[PathLike, TypingIterable[PathLike], None] = None,
+ sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
shutdown_timeout: float = 60.0,
keepalive_timeout: float = 75.0,
ssl_context: Optional[SSLContext] = None,
- print: Callable[..., None] = print,
+ print: Optional[Callable[..., None]] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
@@ -475,6 +503,7 @@ def run_app(
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
+ handler_cancellation: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
"""Run an app locally"""
@@ -506,6 +535,7 @@ def run_app(
handle_signals=handle_signals,
reuse_address=reuse_address,
reuse_port=reuse_port,
+ handler_cancellation=handler_cancellation,
)
)
@@ -516,7 +546,7 @@ def run_app(
pass
finally:
_cancel_tasks({main_task}, loop)
- _cancel_tasks(all_tasks(loop), loop)
+ _cancel_tasks(asyncio.all_tasks(loop), loop)
loop.run_until_complete(loop.shutdown_asyncgens())
loop.close()
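
The web.py hunks above widen run_app()'s path and sock parameters to accept
iterables, make print optional, and expose the new handler_cancellation flag.
A minimal usage sketch (the handler and socket paths are illustrative):

    from aiohttp import web

    async def handle(request: web.Request) -> web.Response:
        return web.Response(text="ok")

    app = web.Application()
    app.router.add_get("/", handle)

    if __name__ == "__main__":
        # Serve the same app on two Unix sockets; cancel handlers on disconnect.
        web.run_app(app, path=["/tmp/a.sock", "/tmp/b.sock"],
                    handler_cancellation=True)
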
diff --git a/contrib/python/aiohttp/aiohttp/web_app.py b/contrib/python/aiohttp/aiohttp/web_app.py
index d5dc90ed42..91bf5fdac6 100644
--- a/contrib/python/aiohttp/aiohttp/web_app.py
+++ b/contrib/python/aiohttp/aiohttp/web_app.py
@@ -18,8 +18,10 @@ from typing import (
Sequence,
Tuple,
Type,
+ TypeVar,
Union,
cast,
+ overload,
)
from aiosignal import Signal
@@ -32,10 +34,12 @@ from .abc import (
AbstractRouter,
AbstractStreamWriter,
)
-from .helpers import DEBUG
+from .helpers import DEBUG, AppKey
from .http_parser import RawRequestMessage
from .log import web_logger
from .streams import StreamReader
+from .typedefs import Middleware
+from .web_exceptions import NotAppKeyWarning
from .web_log import AccessLogger
from .web_middlewares import _fix_request_current_app
from .web_protocol import RequestHandler
@@ -56,29 +60,25 @@ from .web_urldispatcher import (
__all__ = ("Application", "CleanupError")
-if TYPE_CHECKING: # pragma: no cover
- from .typedefs import Handler
-
+if TYPE_CHECKING:
_AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
_RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
- _Middleware = Union[
- Callable[[Request, Handler], Awaitable[StreamResponse]],
- Callable[["Application", Handler], Awaitable[Handler]], # old-style
- ]
- _Middlewares = FrozenList[_Middleware]
- _MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]]
+ _Middlewares = FrozenList[Middleware]
+ _MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]]
_Subapps = List["Application"]
else:
# No type checker mode, skip types
_AppSignal = Signal
_RespPrepareSignal = Signal
- _Middleware = Callable
_Middlewares = FrozenList
_MiddlewaresHandlers = Optional[Sequence]
_Subapps = List
+_T = TypeVar("_T")
+_U = TypeVar("_U")
+
-class Application(MutableMapping[str, Any]):
+class Application(MutableMapping[Union[str, AppKey[Any]], Any]):
ATTRS = frozenset(
[
"logger",
@@ -107,9 +107,9 @@ class Application(MutableMapping[str, Any]):
*,
logger: logging.Logger = web_logger,
router: Optional[UrlDispatcher] = None,
- middlewares: Iterable[_Middleware] = (),
+ middlewares: Iterable[Middleware] = (),
handler_args: Optional[Mapping[str, Any]] = None,
- client_max_size: int = 1024 ** 2,
+ client_max_size: int = 1024**2,
loop: Optional[asyncio.AbstractEventLoop] = None,
debug: Any = ..., # mypy doesn't support ellipsis
) -> None:
@@ -131,27 +131,27 @@ class Application(MutableMapping[str, Any]):
"debug argument is deprecated", DeprecationWarning, stacklevel=2
)
self._debug = debug
- self._router = router # type: UrlDispatcher
+ self._router: UrlDispatcher = router
self._loop = loop
self._handler_args = handler_args
self.logger = logger
- self._middlewares = FrozenList(middlewares) # type: _Middlewares
+ self._middlewares: _Middlewares = FrozenList(middlewares)
# initialized on freezing
- self._middlewares_handlers = None # type: _MiddlewaresHandlers
+ self._middlewares_handlers: _MiddlewaresHandlers = None
# initialized on freezing
- self._run_middlewares = None # type: Optional[bool]
+ self._run_middlewares: Optional[bool] = None
- self._state = {} # type: Dict[str, Any]
+ self._state: Dict[Union[AppKey[Any], str], object] = {}
self._frozen = False
self._pre_frozen = False
- self._subapps = [] # type: _Subapps
+ self._subapps: _Subapps = []
- self._on_response_prepare = Signal(self) # type: _RespPrepareSignal
- self._on_startup = Signal(self) # type: _AppSignal
- self._on_shutdown = Signal(self) # type: _AppSignal
- self._on_cleanup = Signal(self) # type: _AppSignal
+ self._on_response_prepare: _RespPrepareSignal = Signal(self)
+ self._on_startup: _AppSignal = Signal(self)
+ self._on_shutdown: _AppSignal = Signal(self)
+ self._on_cleanup: _AppSignal = Signal(self)
self._cleanup_ctx = CleanupContext()
self._on_startup.append(self._cleanup_ctx._on_startup)
self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
@@ -162,7 +162,7 @@ class Application(MutableMapping[str, Any]):
"Inheritance class {} from web.Application "
"is discouraged".format(cls.__name__),
DeprecationWarning,
- stacklevel=2,
+ stacklevel=3,
)
if DEBUG: # pragma: no cover
@@ -182,7 +182,15 @@ class Application(MutableMapping[str, Any]):
def __eq__(self, other: object) -> bool:
return self is other
+ @overload # type: ignore[override]
+ def __getitem__(self, key: AppKey[_T]) -> _T:
+ ...
+
+ @overload
def __getitem__(self, key: str) -> Any:
+ ...
+
+ def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
return self._state[key]
def _check_frozen(self) -> None:
@@ -193,26 +201,57 @@ class Application(MutableMapping[str, Any]):
stacklevel=3,
)
+ @overload # type: ignore[override]
+ def __setitem__(self, key: AppKey[_T], value: _T) -> None:
+ ...
+
+ @overload
def __setitem__(self, key: str, value: Any) -> None:
+ ...
+
+ def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None:
self._check_frozen()
+ if not isinstance(key, AppKey):
+ warnings.warn(
+ "It is recommended to use web.AppKey instances for keys.\n"
+ + "https://docs.aiohttp.org/en/stable/web_advanced.html"
+ + "#application-s-config",
+ category=NotAppKeyWarning,
+ stacklevel=2,
+ )
self._state[key] = value
- def __delitem__(self, key: str) -> None:
+ def __delitem__(self, key: Union[str, AppKey[_T]]) -> None:
self._check_frozen()
del self._state[key]
def __len__(self) -> int:
return len(self._state)
- def __iter__(self) -> Iterator[str]:
+ def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
return iter(self._state)
+ @overload # type: ignore[override]
+ def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]:
+ ...
+
+ @overload
+ def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]:
+ ...
+
+ @overload
+ def get(self, key: str, default: Any = ...) -> Any:
+ ...
+
+ def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
+ return self._state.get(key, default)
+
########
@property
def loop(self) -> asyncio.AbstractEventLoop:
# Technically the loop can be None,
# but we mask it by explicit type cast
- # to provide more convinient type annotation
+ # to provide more convenient type annotation
warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
return cast(asyncio.AbstractEventLoop, self._loop)
@@ -324,7 +363,7 @@ class Application(MutableMapping[str, Any]):
if not isinstance(domain, str):
raise TypeError("Domain must be str")
elif "*" in domain:
- rule = MaskDomain(domain) # type: Domain
+ rule: Domain = MaskDomain(domain)
else:
rule = Domain(domain)
factory = partial(MatchedSubAppResource, rule, subapp)
@@ -453,7 +492,7 @@ class Application(MutableMapping[str, Any]):
client_max_size=self._client_max_size,
)
- def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]:
+ def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]:
for m in reversed(self._middlewares):
if getattr(m, "__middleware_version__", None) == 1:
yield m, True
@@ -493,13 +532,13 @@ class Application(MutableMapping[str, Any]):
if self._run_middlewares:
for app in match_info.apps[::-1]:
- for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] # noqa
+ for m, new_style in app._middlewares_handlers: # type: ignore[union-attr]
if new_style:
handler = update_wrapper(
partial(m, handler=handler), handler
)
else:
- handler = await m(app, handler) # type: ignore[arg-type]
+ handler = await m(app, handler) # type: ignore[arg-type,assignment]
resp = await handler(request)
@@ -522,7 +561,7 @@ class CleanupError(RuntimeError):
return cast(List[BaseException], self.args[1])
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
_CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
else:
_CleanupContextBase = FrozenList
@@ -531,7 +570,7 @@ else:
class CleanupContext(_CleanupContextBase):
def __init__(self) -> None:
super().__init__()
- self._exits = [] # type: List[AsyncIterator[None]]
+ self._exits: List[AsyncIterator[None]] = []
async def _on_startup(self, app: Application) -> None:
for cb in self:
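
The web_app.py hunks above make Application a mapping over Union[str, AppKey]
with typed overloads, emitting NotAppKeyWarning for plain str keys. A short
sketch of the new pattern (the key name and payload are illustrative):

    from aiohttp import web

    config_key = web.AppKey("config", dict)  # an AppKey[dict]

    app = web.Application()
    app[config_key] = {"debug": True}  # typed access, no NotAppKeyWarning
    app["legacy"] = 1                  # still works, but warns under -W default

    assert app[config_key]["debug"] is True
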
diff --git a/contrib/python/aiohttp/aiohttp/web_exceptions.py b/contrib/python/aiohttp/aiohttp/web_exceptions.py
index 2eadca0386..ee2c1e72d4 100644
--- a/contrib/python/aiohttp/aiohttp/web_exceptions.py
+++ b/contrib/python/aiohttp/aiohttp/web_exceptions.py
@@ -18,6 +18,7 @@ __all__ = (
"HTTPNoContent",
"HTTPResetContent",
"HTTPPartialContent",
+ "HTTPMove",
"HTTPMultipleChoices",
"HTTPMovedPermanently",
"HTTPFound",
@@ -67,6 +68,10 @@ __all__ = (
)
+class NotAppKeyWarning(UserWarning):
+ """Warning when not using AppKey in Application."""
+
+
############################################################
# HTTP Exceptions
############################################################
@@ -160,7 +165,7 @@ class HTTPPartialContent(HTTPSuccessful):
############################################################
-class _HTTPMove(HTTPRedirection):
+class HTTPMove(HTTPRedirection):
def __init__(
self,
location: StrOrURL,
@@ -184,21 +189,21 @@ class _HTTPMove(HTTPRedirection):
self.location = location
-class HTTPMultipleChoices(_HTTPMove):
+class HTTPMultipleChoices(HTTPMove):
status_code = 300
-class HTTPMovedPermanently(_HTTPMove):
+class HTTPMovedPermanently(HTTPMove):
status_code = 301
-class HTTPFound(_HTTPMove):
+class HTTPFound(HTTPMove):
status_code = 302
# This one is safe after a POST (the redirected location will be
# retrieved with GET):
-class HTTPSeeOther(_HTTPMove):
+class HTTPSeeOther(HTTPMove):
status_code = 303
@@ -208,16 +213,16 @@ class HTTPNotModified(HTTPRedirection):
empty_body = True
-class HTTPUseProxy(_HTTPMove):
+class HTTPUseProxy(HTTPMove):
# Not a move, but looks a little like one
status_code = 305
-class HTTPTemporaryRedirect(_HTTPMove):
+class HTTPTemporaryRedirect(HTTPMove):
status_code = 307
-class HTTPPermanentRedirect(_HTTPMove):
+class HTTPPermanentRedirect(HTTPMove):
status_code = 308
@@ -273,7 +278,7 @@ class HTTPMethodNotAllowed(HTTPClientError):
content_type=content_type,
)
self.headers["Allow"] = allow
- self.allowed_methods = set(allowed_methods) # type: Set[str]
+ self.allowed_methods: Set[str] = set(allowed_methods)
self.method = method.upper()
@@ -366,7 +371,7 @@ class HTTPUnavailableForLegalReasons(HTTPClientError):
def __init__(
self,
- link: str,
+ link: Optional[StrOrURL],
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
@@ -381,8 +386,14 @@ class HTTPUnavailableForLegalReasons(HTTPClientError):
text=text,
content_type=content_type,
)
- self.headers["Link"] = '<%s>; rel="blocked-by"' % link
- self.link = link
+ self._link = None
+ if link:
+ self._link = URL(link)
+ self.headers["Link"] = f'<{str(self._link)}>; rel="blocked-by"'
+
+ @property
+ def link(self) -> Optional[URL]:
+ return self._link
############################################################
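
With _HTTPMove renamed to the public HTTPMove, all location-bearing 3xx
exceptions now share a catchable, importable base. A sketch:

    from aiohttp import web

    try:
        raise web.HTTPFound(location="/new-home")
    except web.HTTPMove as exc:  # covers 300, 301, 302, 303, 305, 307, 308
        assert str(exc.location) == "/new-home"
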
diff --git a/contrib/python/aiohttp/aiohttp/web_fileresponse.py b/contrib/python/aiohttp/aiohttp/web_fileresponse.py
index f41ed3fd0a..7dbe50f0a5 100644
--- a/contrib/python/aiohttp/aiohttp/web_fileresponse.py
+++ b/contrib/python/aiohttp/aiohttp/web_fileresponse.py
@@ -2,13 +2,13 @@ import asyncio
import mimetypes
import os
import pathlib
-import sys
from typing import ( # noqa
IO,
TYPE_CHECKING,
Any,
Awaitable,
Callable,
+ Final,
Iterator,
List,
Optional,
@@ -19,8 +19,8 @@ from typing import ( # noqa
from . import hdrs
from .abc import AbstractStreamWriter
-from .helpers import ETAG_ANY, ETag
-from .typedefs import Final, LooseHeaders
+from .helpers import ETAG_ANY, ETag, must_be_empty_body
+from .typedefs import LooseHeaders, PathLike
from .web_exceptions import (
HTTPNotModified,
HTTPPartialContent,
@@ -31,7 +31,7 @@ from .web_response import StreamResponse
__all__ = ("FileResponse",)
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .web_request import BaseRequest
@@ -46,7 +46,7 @@ class FileResponse(StreamResponse):
def __init__(
self,
- path: Union[str, pathlib.Path],
+ path: PathLike,
chunk_size: int = 256 * 1024,
status: int = 200,
reason: Optional[str] = None,
@@ -54,10 +54,7 @@ class FileResponse(StreamResponse):
) -> None:
super().__init__(status=status, reason=reason, headers=headers)
- if isinstance(path, str):
- path = pathlib.Path(path)
-
- self._path = path
+ self._path = pathlib.Path(path)
self._chunk_size = chunk_size
async def _sendfile_fallback(
@@ -88,7 +85,7 @@ class FileResponse(StreamResponse):
writer = await super().prepare(request)
assert writer is not None
- if NOSENDFILE or sys.version_info < (3, 7) or self.compression:
+ if NOSENDFILE or self.compression:
return await self._sendfile_fallback(writer, fobj, offset, count)
loop = request._loop
@@ -127,19 +124,35 @@ class FileResponse(StreamResponse):
self.content_length = 0
return await super().prepare(request)
- async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
- filepath = self._path
+ def _get_file_path_stat_and_gzip(
+ self, check_for_gzipped_file: bool
+ ) -> Tuple[pathlib.Path, os.stat_result, bool]:
+ """Return the file path, stat result, and gzip status.
- gzip = False
- if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""):
+ This method should be called from a thread executor
+ since it calls os.stat which may block.
+ """
+ filepath = self._path
+ if check_for_gzipped_file:
gzip_path = filepath.with_name(filepath.name + ".gz")
+ try:
+ return gzip_path, gzip_path.stat(), True
+ except OSError:
+ # Fall through and try the non-gzipped file
+ pass
- if gzip_path.is_file():
- filepath = gzip_path
- gzip = True
+ return filepath, filepath.stat(), False
+ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
loop = asyncio.get_event_loop()
- st: os.stat_result = await loop.run_in_executor(None, filepath.stat)
+ # Encoding comparisons should be case-insensitive
+ # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
+ check_for_gzipped_file = (
+ "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
+ )
+ filepath, st, gzip = await loop.run_in_executor(
+ None, self._get_file_path_stat_and_gzip, check_for_gzipped_file
+ )
etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
last_modified = st.st_mtime
@@ -258,6 +271,10 @@ class FileResponse(StreamResponse):
self.headers[hdrs.CONTENT_ENCODING] = encoding
if gzip:
self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
+ # Disable compression if we are already sending
+ # a compressed file since we don't want to double
+ # compress.
+ self._compression = False
self.etag = etag_value # type: ignore[assignment]
self.last_modified = st.st_mtime # type: ignore[assignment]
@@ -273,7 +290,7 @@ class FileResponse(StreamResponse):
)
# If we are sending 0 bytes calling sendfile() will throw a ValueError
- if count == 0 or request.method == hdrs.METH_HEAD or self.status in [204, 304]:
+ if count == 0 or must_be_empty_body(request.method, self.status):
return await super().prepare(request)
fobj = await loop.run_in_executor(None, filepath.open, "rb")
@@ -285,4 +302,4 @@ class FileResponse(StreamResponse):
try:
return await self._sendfile(request, fobj, offset, count)
finally:
- await loop.run_in_executor(None, fobj.close)
+ await asyncio.shield(loop.run_in_executor(None, fobj.close))
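
The web_fileresponse.py hunks above move the stat() call and the gzip-sibling
lookup into a single executor job and accept any os.PathLike. Usage is
unchanged; a sketch (paths are illustrative):

    from pathlib import Path
    from aiohttp import web

    async def index(request: web.Request) -> web.FileResponse:
        # If the client sends Accept-Encoding: gzip (any letter case) and
        # static/index.html.gz exists, the precompressed sibling is served.
        return web.FileResponse(Path("static") / "index.html")
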
diff --git a/contrib/python/aiohttp/aiohttp/web_log.py b/contrib/python/aiohttp/aiohttp/web_log.py
index a977c1ba5c..633e9e3ae6 100644
--- a/contrib/python/aiohttp/aiohttp/web_log.py
+++ b/contrib/python/aiohttp/aiohttp/web_log.py
@@ -3,6 +3,7 @@ import functools
import logging
import os
import re
+import time as time_mod
from collections import namedtuple
from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa
@@ -57,7 +58,7 @@ class AccessLogger(AbstractAccessLogger):
LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
CLEANUP_RE = re.compile(r"(%[^s])")
- _FORMAT_CACHE = {} # type: Dict[str, Tuple[str, List[KeyMethod]]]
+ _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {}
def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
"""Initialise the logger.
@@ -142,9 +143,10 @@ class AccessLogger(AbstractAccessLogger):
@staticmethod
def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str:
- now = datetime.datetime.utcnow()
+ tz = datetime.timezone(datetime.timedelta(seconds=-time_mod.timezone))
+ now = datetime.datetime.now(tz)
start_time = now - datetime.timedelta(seconds=time)
- return start_time.strftime("[%d/%b/%Y:%H:%M:%S +0000]")
+ return start_time.strftime("[%d/%b/%Y:%H:%M:%S %z]")
@staticmethod
def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str:
@@ -187,6 +189,9 @@ class AccessLogger(AbstractAccessLogger):
return [(key, method(request, response, time)) for key, method in self._methods]
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
+ if not self.logger.isEnabledFor(logging.INFO):
+ # Avoid formatting the log line if it will not be emitted.
+ return
try:
fmt_info = self._format_line(request, response, time)
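
The access-log hunks above stamp %t with the server's real UTC offset instead
of a hard-coded +0000 and skip formatting when INFO is disabled. The latter
can be exploited directly:

    import logging

    # Raising the level above INFO now short-circuits AccessLogger.log(),
    # so per-request format work is skipped rather than formatted and dropped.
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
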
diff --git a/contrib/python/aiohttp/aiohttp/web_middlewares.py b/contrib/python/aiohttp/aiohttp/web_middlewares.py
index fabcc449a2..5da1533c0d 100644
--- a/contrib/python/aiohttp/aiohttp/web_middlewares.py
+++ b/contrib/python/aiohttp/aiohttp/web_middlewares.py
@@ -1,8 +1,8 @@
import re
-from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar
+from typing import TYPE_CHECKING, Tuple, Type, TypeVar
-from .typedefs import Handler
-from .web_exceptions import HTTPPermanentRedirect, _HTTPMove
+from .typedefs import Handler, Middleware
+from .web_exceptions import HTTPMove, HTTPPermanentRedirect
from .web_request import Request
from .web_response import StreamResponse
from .web_urldispatcher import SystemRoute
@@ -12,7 +12,7 @@ __all__ = (
"normalize_path_middleware",
)
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .web_app import Application
_Func = TypeVar("_Func")
@@ -35,16 +35,13 @@ def middleware(f: _Func) -> _Func:
return f
-_Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]]
-
-
def normalize_path_middleware(
*,
append_slash: bool = True,
remove_slash: bool = False,
merge_slashes: bool = True,
- redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect,
-) -> _Middleware:
+ redirect_class: Type[HTTPMove] = HTTPPermanentRedirect,
+) -> Middleware:
"""Factory for producing a middleware that normalizes the path of a request.
Normalizing means:
@@ -110,7 +107,7 @@ def normalize_path_middleware(
return impl
-def _fix_request_current_app(app: "Application") -> _Middleware:
+def _fix_request_current_app(app: "Application") -> Middleware:
@middleware
async def impl(request: Request, handler: Handler) -> StreamResponse:
with request.match_info.set_current_app(app):
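
Because redirect_class is now typed as Type[HTTPMove], any public redirect
exception can be plugged into normalize_path_middleware. A sketch:

    from aiohttp import web

    app = web.Application(
        middlewares=[
            web.normalize_path_middleware(
                append_slash=True,
                redirect_class=web.HTTPTemporaryRedirect,  # any HTTPMove subclass
            )
        ]
    )
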
diff --git a/contrib/python/aiohttp/aiohttp/web_protocol.py b/contrib/python/aiohttp/aiohttp/web_protocol.py
index ad0c0498e3..f083b13eb0 100644
--- a/contrib/python/aiohttp/aiohttp/web_protocol.py
+++ b/contrib/python/aiohttp/aiohttp/web_protocol.py
@@ -26,7 +26,7 @@ import yarl
from .abc import AbstractAccessLogger, AbstractStreamWriter
from .base_protocol import BaseProtocol
-from .helpers import ceil_timeout
+from .helpers import ceil_timeout, set_exception
from .http import (
HttpProcessingError,
HttpRequestParser,
@@ -44,7 +44,7 @@ from .web_response import Response, StreamResponse
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .web_server import Server
@@ -127,6 +127,10 @@ class RequestHandler(BaseProtocol):
max_headers -- Optional maximum header size
+ timeout_ceil_threshold -- Optional threshold above which
+ timeout values are rounded up
+ with ceil()
+
"""
KEEPALIVE_RESCHEDULE_DELAY = 1
@@ -157,6 +161,7 @@ class RequestHandler(BaseProtocol):
"_close",
"_force_close",
"_current_request",
+ "_timeout_ceil_threshold",
)
def __init__(
@@ -175,34 +180,35 @@ class RequestHandler(BaseProtocol):
max_headers: int = 32768,
max_field_size: int = 8190,
lingering_time: float = 10.0,
- read_bufsize: int = 2 ** 16,
+ read_bufsize: int = 2**16,
auto_decompress: bool = True,
+ timeout_ceil_threshold: float = 5,
):
super().__init__(loop)
self._request_count = 0
self._keepalive = False
- self._current_request = None # type: Optional[BaseRequest]
- self._manager = manager # type: Optional[Server]
+ self._current_request: Optional[BaseRequest] = None
+ self._manager: Optional[Server] = manager
self._request_handler: Optional[_RequestHandler] = manager.request_handler
self._request_factory: Optional[_RequestFactory] = manager.request_factory
self._tcp_keepalive = tcp_keepalive
# placeholder to be replaced on keepalive timeout setup
self._keepalive_time = 0.0
- self._keepalive_handle = None # type: Optional[asyncio.Handle]
+ self._keepalive_handle: Optional[asyncio.Handle] = None
self._keepalive_timeout = keepalive_timeout
self._lingering_time = float(lingering_time)
self._messages: Deque[_MsgType] = deque()
self._message_tail = b""
- self._waiter = None # type: Optional[asyncio.Future[None]]
- self._task_handler = None # type: Optional[asyncio.Task[None]]
+ self._waiter: Optional[asyncio.Future[None]] = None
+ self._task_handler: Optional[asyncio.Task[None]] = None
self._upgrade = False
- self._payload_parser = None # type: Any
- self._request_parser = HttpRequestParser(
+ self._payload_parser: Any = None
+ self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
self,
loop,
read_bufsize,
@@ -211,15 +217,21 @@ class RequestHandler(BaseProtocol):
max_headers=max_headers,
payload_exception=RequestPayloadError,
auto_decompress=auto_decompress,
- ) # type: Optional[HttpRequestParser]
+ )
+
+ self._timeout_ceil_threshold: float = 5
+ try:
+ self._timeout_ceil_threshold = float(timeout_ceil_threshold)
+ except (TypeError, ValueError):
+ pass
self.logger = logger
self.debug = debug
self.access_log = access_log
if access_log:
- self.access_logger = access_log_class(
+ self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
access_log, access_log_format
- ) # type: Optional[AbstractAccessLogger]
+ )
else:
self.access_logger = None
@@ -285,6 +297,9 @@ class RequestHandler(BaseProtocol):
super().connection_lost(exc)
+ # Grab value before setting _manager to None.
+ handler_cancellation = self._manager.handler_cancellation
+
self._manager = None
self._force_close = True
self._request_factory = None
@@ -299,11 +314,12 @@ class RequestHandler(BaseProtocol):
exc = ConnectionResetError("Connection lost")
self._current_request._cancel(exc)
- if self._task_handler is not None:
- self._task_handler.cancel()
if self._waiter is not None:
self._waiter.cancel()
+ if handler_cancellation and self._task_handler is not None:
+ self._task_handler.cancel()
+
self._task_handler = None
if self._payload_parser is not None:
@@ -419,7 +435,8 @@ class RequestHandler(BaseProtocol):
# not all request handlers are done,
# reschedule itself to next second
self._keepalive_handle = self._loop.call_later(
- self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive
+ self.KEEPALIVE_RESCHEDULE_DELAY,
+ self._process_keepalive,
)
async def _handle_request(
@@ -548,7 +565,7 @@ class RequestHandler(BaseProtocol):
self.log_debug("Uncompleted request.")
self.close()
- payload.set_exception(PayloadAccessError())
+ set_exception(payload, PayloadAccessError())
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection ")
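
Per the connection_lost() hunk above, the handler task is now cancelled on
client disconnect only when the server was created with
handler_cancellation=True. A sketch of the resulting default behaviour:

    import asyncio
    from aiohttp import web

    async def slow(request: web.Request) -> web.Response:
        # With handler_cancellation left at the default (False), this sleep
        # keeps running even if the client disconnects mid-request.
        await asyncio.sleep(10)
        return web.Response(text="done")
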
diff --git a/contrib/python/aiohttp/aiohttp/web_request.py b/contrib/python/aiohttp/aiohttp/web_request.py
index b3574cafb3..4bc670a798 100644
--- a/contrib/python/aiohttp/aiohttp/web_request.py
+++ b/contrib/python/aiohttp/aiohttp/web_request.py
@@ -13,6 +13,7 @@ from typing import (
TYPE_CHECKING,
Any,
Dict,
+ Final,
Iterator,
Mapping,
MutableMapping,
@@ -25,12 +26,19 @@ from typing import (
from urllib.parse import parse_qsl
import attr
-from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
+from multidict import (
+ CIMultiDict,
+ CIMultiDictProxy,
+ MultiDict,
+ MultiDictProxy,
+ MultiMapping,
+)
from yarl import URL
from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import (
+ _SENTINEL,
DEBUG,
ETAG_ANY,
LIST_QUOTED_ETAG_RE,
@@ -40,6 +48,7 @@ from .helpers import (
parse_http_date,
reify,
sentinel,
+ set_exception,
)
from .http_parser import RawRequestMessage
from .http_writer import HttpVersion
@@ -47,7 +56,6 @@ from .multipart import BodyPartReader, MultipartReader
from .streams import EmptyStreamReader, StreamReader
from .typedefs import (
DEFAULT_JSON_DECODER,
- Final,
JSONDecoder,
LooseHeaders,
RawHeaders,
@@ -59,7 +67,7 @@ from .web_response import StreamResponse
__all__ = ("BaseRequest", "FileField", "Request")
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .web_app import Application
from .web_protocol import RequestHandler
from .web_urldispatcher import UrlMappingMatchInfo
@@ -77,7 +85,7 @@ class FileField:
_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
# '-' at the end to prevent interpretation as range in a char class
-_TOKEN: Final[str] = fr"[{_TCHAR}]+"
+_TOKEN: Final[str] = rf"[{_TCHAR}]+"
_QDTEXT: Final[str] = r"[{}]".format(
r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
@@ -148,7 +156,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
task: "asyncio.Task[None]",
loop: asyncio.AbstractEventLoop,
*,
- client_max_size: int = 1024 ** 2,
+ client_max_size: int = 1024**2,
state: Optional[Dict[str, Any]] = None,
scheme: Optional[str] = None,
host: Optional[str] = None,
@@ -164,14 +172,22 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
self._headers = message.headers
self._method = message.method
self._version = message.version
- self._rel_url = message.url
- self._post = (
- None
- ) # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]]
- self._read_bytes = None # type: Optional[bytes]
+ self._cache: Dict[str, Any] = {}
+ url = message.url
+ if url.is_absolute():
+ # an absolute URL was given, so
+ # override the auto-calculated url, host, and scheme;
+ # all other properties derive from those correctly
+ self._cache["url"] = url
+ self._cache["host"] = url.host
+ self._cache["scheme"] = url.scheme
+ self._rel_url = url.relative()
+ else:
+ self._rel_url = message.url
+ self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None
+ self._read_bytes: Optional[bytes] = None
self._state = state
- self._cache = {} # type: Dict[str, Any]
self._task = task
self._client_max_size = client_max_size
self._loop = loop
@@ -191,12 +207,13 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
def clone(
self,
*,
- method: str = sentinel,
- rel_url: StrOrURL = sentinel,
- headers: LooseHeaders = sentinel,
- scheme: str = sentinel,
- host: str = sentinel,
- remote: str = sentinel,
+ method: Union[str, _SENTINEL] = sentinel,
+ rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
+ headers: Union[LooseHeaders, _SENTINEL] = sentinel,
+ scheme: Union[str, _SENTINEL] = sentinel,
+ host: Union[str, _SENTINEL] = sentinel,
+ remote: Union[str, _SENTINEL] = sentinel,
+ client_max_size: Union[int, _SENTINEL] = sentinel,
) -> "BaseRequest":
"""Clone itself with replacement some attributes.
@@ -207,11 +224,11 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
if self._read_bytes:
raise RuntimeError("Cannot clone request " "after reading its content")
- dct = {} # type: Dict[str, Any]
+ dct: Dict[str, Any] = {}
if method is not sentinel:
dct["method"] = method
if rel_url is not sentinel:
- new_url = URL(rel_url)
+ new_url: URL = URL(rel_url)
dct["url"] = new_url
dct["path"] = str(new_url)
if headers is not sentinel:
@@ -230,6 +247,8 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
kwargs["host"] = host
if remote is not sentinel:
kwargs["remote"] = remote
+ if client_max_size is sentinel:
+ client_max_size = self._client_max_size
return self.__class__(
message,
@@ -238,7 +257,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
self._payload_writer,
self._task,
self._loop,
- client_max_size=self._client_max_size,
+ client_max_size=client_max_size,
state=self._state.copy(),
**kwargs,
)
@@ -261,6 +280,10 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
def writer(self) -> AbstractStreamWriter:
return self._payload_writer
+ @property
+ def client_max_size(self) -> int:
+ return self._client_max_size
+
@reify
def message(self) -> RawRequestMessage:
warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
@@ -326,7 +349,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
length = len(field_value)
pos = 0
need_separator = False
- elem = {} # type: Dict[str, str]
+ elem: Dict[str, str] = {}
elems.append(types.MappingProxyType(elem))
while 0 <= pos < length:
match = _FORWARDED_PAIR_RE.match(field_value, pos)
@@ -456,7 +479,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._message.path
@reify
- def query(self) -> "MultiDictProxy[str]":
+ def query(self) -> "MultiMapping[str]":
"""A multidict with all the variables in the query string."""
return MultiDictProxy(self._rel_url.query)
@@ -469,7 +492,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return self._rel_url.query_string
@reify
- def headers(self) -> "CIMultiDictProxy[str]":
+ def headers(self) -> "MultiMapping[str]":
"""A case-insensitive multidict proxy with all headers."""
return self._headers
@@ -560,7 +583,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
A read-only dictionary-like object.
"""
raw = self.headers.get(hdrs.COOKIE, "")
- parsed = SimpleCookie(raw) # type: SimpleCookie[str]
+ parsed = SimpleCookie(raw)
return MappingProxyType({key: val.value for key, val in parsed.items()})
@reify
@@ -683,7 +706,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
self._post = MultiDictProxy(MultiDict())
return self._post
- out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]]
+ out: MultiDict[Union[str, bytes, FileField]] = MultiDict()
if content_type == "multipart/form-data":
multipart = await self.multipart()
@@ -703,19 +726,21 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
# https://tools.ietf.org/html/rfc7578#section-4.4
if field.filename:
# store file in temp file
- tmp = tempfile.TemporaryFile()
- chunk = await field.read_chunk(size=2 ** 16)
+ tmp = await self._loop.run_in_executor(
+ None, tempfile.TemporaryFile
+ )
+ chunk = await field.read_chunk(size=2**16)
while chunk:
chunk = field.decode(chunk)
- tmp.write(chunk)
+ await self._loop.run_in_executor(None, tmp.write, chunk)
size += len(chunk)
if 0 < max_size < size:
- tmp.close()
+ await self._loop.run_in_executor(None, tmp.close)
raise HTTPRequestEntityTooLarge(
max_size=max_size, actual_size=size
)
- chunk = await field.read_chunk(size=2 ** 16)
- tmp.seek(0)
+ chunk = await field.read_chunk(size=2**16)
+ await self._loop.run_in_executor(None, tmp.seek, 0)
if field_ct is None:
field_ct = "application/octet-stream"
@@ -792,7 +817,7 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin):
return
def _cancel(self, exc: BaseException) -> None:
- self._payload.set_exception(exc)
+ set_exception(self._payload, exc)
class Request(BaseRequest):
@@ -806,7 +831,7 @@ class Request(BaseRequest):
# or information about traversal lookup
# initialized after route resolving
- self._match_info = None # type: Optional[UrlMappingMatchInfo]
+ self._match_info: Optional[UrlMappingMatchInfo] = None
if DEBUG:
@@ -823,12 +848,13 @@ class Request(BaseRequest):
def clone(
self,
*,
- method: str = sentinel,
- rel_url: StrOrURL = sentinel,
- headers: LooseHeaders = sentinel,
- scheme: str = sentinel,
- host: str = sentinel,
- remote: str = sentinel,
+ method: Union[str, _SENTINEL] = sentinel,
+ rel_url: Union[StrOrURL, _SENTINEL] = sentinel,
+ headers: Union[LooseHeaders, _SENTINEL] = sentinel,
+ scheme: Union[str, _SENTINEL] = sentinel,
+ host: Union[str, _SENTINEL] = sentinel,
+ remote: Union[str, _SENTINEL] = sentinel,
+ client_max_size: Union[int, _SENTINEL] = sentinel,
) -> "Request":
ret = super().clone(
method=method,
@@ -837,6 +863,7 @@ class Request(BaseRequest):
scheme=scheme,
host=host,
remote=remote,
+ client_max_size=client_max_size,
)
new_ret = cast(Request, ret)
new_ret._match_info = self._match_info
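
clone() gains a client_max_size override, which makes per-route upload limits
expressible in a middleware. A sketch (the path prefix and limit are
illustrative):

    from aiohttp import web

    @web.middleware
    async def bigger_uploads(request: web.Request, handler):
        if request.path.startswith("/upload"):
            request = request.clone(client_max_size=50 * 1024**2)
        return await handler(request)
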
diff --git a/contrib/python/aiohttp/aiohttp/web_response.py b/contrib/python/aiohttp/aiohttp/web_response.py
index 7880ab2d02..40d6f01eca 100644
--- a/contrib/python/aiohttp/aiohttp/web_response.py
+++ b/contrib/python/aiohttp/aiohttp/web_response.py
@@ -6,18 +6,16 @@ import json
import math
import time
import warnings
-import zlib
from concurrent.futures import Executor
-from http.cookies import Morsel, SimpleCookie
+from http import HTTPStatus
+from http.cookies import SimpleCookie
from typing import (
TYPE_CHECKING,
Any,
Dict,
Iterator,
- Mapping,
MutableMapping,
Optional,
- Tuple,
Union,
cast,
)
@@ -26,25 +24,27 @@ from multidict import CIMultiDict, istr
from . import hdrs, payload
from .abc import AbstractStreamWriter
+from .compression_utils import ZLibCompressor
from .helpers import (
ETAG_ANY,
- PY_38,
QUOTED_ETAG_RE,
ETag,
HeadersMixin,
+ must_be_empty_body,
parse_http_date,
rfc822_formatted_time,
sentinel,
+ should_remove_content_length,
validate_etag_value,
)
-from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11
+from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11
from .payload import Payload
from .typedefs import JSONEncoder, LooseHeaders
__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .web_request import BaseRequest
BaseClass = MutableMapping[str, Any]
@@ -52,12 +52,6 @@ else:
BaseClass = collections.abc.MutableMapping
-if not PY_38:
- # allow samesite to be used in python < 3.8
- # already permitted in python 3.8, see https://bugs.python.org/issue29613
- Morsel._reserved["samesite"] = "SameSite" # type: ignore[attr-defined]
-
-
class ContentCoding(enum.Enum):
# The content codings that we have support for.
#
@@ -85,20 +79,21 @@ class StreamResponse(BaseClass, HeadersMixin):
headers: Optional[LooseHeaders] = None,
) -> None:
self._body = None
- self._keep_alive = None # type: Optional[bool]
+ self._keep_alive: Optional[bool] = None
self._chunked = False
self._compression = False
- self._compression_force = None # type: Optional[ContentCoding]
- self._cookies = SimpleCookie() # type: SimpleCookie[str]
+ self._compression_force: Optional[ContentCoding] = None
+ self._cookies = SimpleCookie()
- self._req = None # type: Optional[BaseRequest]
- self._payload_writer = None # type: Optional[AbstractStreamWriter]
+ self._req: Optional[BaseRequest] = None
+ self._payload_writer: Optional[AbstractStreamWriter] = None
self._eof_sent = False
+ self._must_be_empty_body: Optional[bool] = None
self._body_length = 0
- self._state = {} # type: Dict[str, Any]
+ self._state: Dict[str, Any] = {}
if headers is not None:
- self._headers = CIMultiDict(headers) # type: CIMultiDict[str]
+ self._headers: CIMultiDict[str] = CIMultiDict(headers)
else:
self._headers = CIMultiDict()
@@ -135,7 +130,6 @@ class StreamResponse(BaseClass, HeadersMixin):
self,
status: int,
reason: Optional[str] = None,
- _RESPONSES: Mapping[int, Tuple[str, str]] = RESPONSES,
) -> None:
assert not self.prepared, (
"Cannot change the response status code after " "the headers have been sent"
@@ -143,8 +137,8 @@ class StreamResponse(BaseClass, HeadersMixin):
self._status = int(status)
if reason is None:
try:
- reason = _RESPONSES[self._status][0]
- except Exception:
+ reason = HTTPStatus(self._status).phrase
+ except ValueError:
reason = ""
self._reason = reason
@@ -199,7 +193,7 @@ class StreamResponse(BaseClass, HeadersMixin):
return self._headers
@property
- def cookies(self) -> "SimpleCookie[str]":
+ def cookies(self) -> SimpleCookie:
return self._cookies
def set_cookie(
@@ -406,6 +400,8 @@ class StreamResponse(BaseClass, HeadersMixin):
if self._compression_force:
await self._do_start_compression(self._compression_force)
else:
+ # Encoding comparisons should be case-insensitive
+ # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
for coding in ContentCoding:
if coding.value in accept_encoding:
@@ -417,7 +413,7 @@ class StreamResponse(BaseClass, HeadersMixin):
return None
if self._payload_writer is not None:
return self._payload_writer
-
+ self._must_be_empty_body = must_be_empty_body(request.method, self.status)
return await self._start(request)
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
@@ -456,26 +452,33 @@ class StreamResponse(BaseClass, HeadersMixin):
"Using chunked encoding is forbidden "
"for HTTP/{0.major}.{0.minor}".format(request.version)
)
- writer.enable_chunking()
- headers[hdrs.TRANSFER_ENCODING] = "chunked"
+ if not self._must_be_empty_body:
+ writer.enable_chunking()
+ headers[hdrs.TRANSFER_ENCODING] = "chunked"
if hdrs.CONTENT_LENGTH in headers:
del headers[hdrs.CONTENT_LENGTH]
elif self._length_check:
writer.length = self.content_length
if writer.length is None:
- if version >= HttpVersion11 and self.status != 204:
- writer.enable_chunking()
- headers[hdrs.TRANSFER_ENCODING] = "chunked"
- if hdrs.CONTENT_LENGTH in headers:
- del headers[hdrs.CONTENT_LENGTH]
- else:
+ if version >= HttpVersion11:
+ if not self._must_be_empty_body:
+ writer.enable_chunking()
+ headers[hdrs.TRANSFER_ENCODING] = "chunked"
+ elif not self._must_be_empty_body:
keep_alive = False
- # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
- # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
- elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204):
- del headers[hdrs.CONTENT_LENGTH]
- if self.status not in (204, 304):
+ # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
+ # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
+ if self._must_be_empty_body:
+ if hdrs.CONTENT_LENGTH in headers and should_remove_content_length(
+ request.method, self.status
+ ):
+ del headers[hdrs.CONTENT_LENGTH]
+ # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10
+ # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13
+ if hdrs.TRANSFER_ENCODING in headers:
+ del headers[hdrs.TRANSFER_ENCODING]
+ else:
headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
headers.setdefault(hdrs.DATE, rfc822_formatted_time())
headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)
@@ -589,7 +592,7 @@ class Response(StreamResponse):
raise ValueError("body and text are not allowed together")
if headers is None:
- real_headers = CIMultiDict() # type: CIMultiDict[str]
+ real_headers: CIMultiDict[str] = CIMultiDict()
elif not isinstance(headers, CIMultiDict):
real_headers = CIMultiDict(headers)
else:
@@ -638,7 +641,7 @@ class Response(StreamResponse):
else:
self.body = body
- self._compressed_body = None # type: Optional[bytes]
+ self._compressed_body: Optional[bytes] = None
self._zlib_executor_size = zlib_executor_size
self._zlib_executor = zlib_executor
@@ -647,15 +650,10 @@ class Response(StreamResponse):
return self._body
@body.setter
- def body(
- self,
- body: bytes,
- CONTENT_TYPE: istr = hdrs.CONTENT_TYPE,
- CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
- ) -> None:
+ def body(self, body: bytes) -> None:
if body is None:
- self._body = None # type: Optional[bytes]
- self._body_payload = False # type: bool
+ self._body: Optional[bytes] = None
+ self._body_payload: bool = False
elif isinstance(body, (bytes, bytearray)):
self._body = body
self._body_payload = False
@@ -669,15 +667,9 @@ class Response(StreamResponse):
headers = self._headers
- # set content-length header if needed
- if not self._chunked and CONTENT_LENGTH not in headers:
- size = body.size
- if size is not None:
- headers[CONTENT_LENGTH] = str(size)
-
# set content-type
- if CONTENT_TYPE not in headers:
- headers[CONTENT_TYPE] = body.content_type
+ if hdrs.CONTENT_TYPE not in headers:
+ headers[hdrs.CONTENT_TYPE] = body.content_type
# copy payload headers
if body.headers:
@@ -735,14 +727,14 @@ class Response(StreamResponse):
if self._eof_sent:
return
if self._compressed_body is None:
- body = self._body # type: Optional[Union[bytes, Payload]]
+ body: Optional[Union[bytes, Payload]] = self._body
else:
body = self._compressed_body
assert not data, f"data arg is not supported, got {data!r}"
assert self._req is not None
assert self._payload_writer is not None
if body is not None:
- if self._req._method == hdrs.METH_HEAD or self._status in [204, 304]:
+ if self._must_be_empty_body:
await super().write_eof()
elif self._body_payload:
payload = cast(Payload, body)
@@ -754,22 +746,24 @@ class Response(StreamResponse):
await super().write_eof()
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
- if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
- if not self._body_payload:
- if self._body is not None:
- self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body))
- else:
- self._headers[hdrs.CONTENT_LENGTH] = "0"
+ if should_remove_content_length(request.method, self.status):
+ if hdrs.CONTENT_LENGTH in self._headers:
+ del self._headers[hdrs.CONTENT_LENGTH]
+ elif not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
+ if self._body_payload:
+ size = cast(Payload, self._body).size
+ if size is not None:
+ self._headers[hdrs.CONTENT_LENGTH] = str(size)
+ else:
+ body_len = len(self._body) if self._body else "0"
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7
+ if body_len != "0" or (
+ self.status != 304 and request.method.upper() != hdrs.METH_HEAD
+ ):
+ self._headers[hdrs.CONTENT_LENGTH] = str(body_len)
return await super()._start(request)
- def _compress_body(self, zlib_mode: int) -> None:
- assert zlib_mode > 0
- compressobj = zlib.compressobj(wbits=zlib_mode)
- body_in = self._body
- assert body_in is not None
- self._compressed_body = compressobj.compress(body_in) + compressobj.flush()
-
async def _do_start_compression(self, coding: ContentCoding) -> None:
if self._body_payload or self._chunked:
return await super()._do_start_compression(coding)
@@ -777,26 +771,26 @@ class Response(StreamResponse):
if coding != ContentCoding.identity:
# Instead of using _payload_writer.enable_compression,
# compress the whole body
- zlib_mode = (
- 16 + zlib.MAX_WBITS if coding == ContentCoding.gzip else zlib.MAX_WBITS
+ compressor = ZLibCompressor(
+ encoding=str(coding.value),
+ max_sync_chunk_size=self._zlib_executor_size,
+ executor=self._zlib_executor,
)
- body_in = self._body
- assert body_in is not None
- if (
- self._zlib_executor_size is not None
- and len(body_in) > self._zlib_executor_size
- ):
- await asyncio.get_event_loop().run_in_executor(
- self._zlib_executor, self._compress_body, zlib_mode
+ assert self._body is not None
+ if self._zlib_executor_size is None and len(self._body) > 1024 * 1024:
+ warnings.warn(
+ "Synchronous compression of large response bodies "
+ f"({len(self._body)} bytes) might block the async event loop. "
+ "Consider providing a custom value to zlib_executor_size/"
+ "zlib_executor response properties or disabling compression on it."
)
- else:
- self._compress_body(zlib_mode)
-
- body_out = self._compressed_body
- assert body_out is not None
+ self._compressed_body = (
+ await compressor.compress(self._body) + compressor.flush()
+ )
+ assert self._compressed_body is not None
self._headers[hdrs.CONTENT_ENCODING] = coding.value
- self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out))
+ self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
def json_response(
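
The web_response.py hunks above delegate whole-body compression to
ZLibCompressor and warn when a body over 1 MiB is compressed synchronously.
Offloading to an executor avoids the warning; a sketch (sizes are
illustrative):

    from concurrent.futures import ThreadPoolExecutor
    from aiohttp import web

    pool = ThreadPoolExecutor()

    async def big(request: web.Request) -> web.Response:
        resp = web.Response(
            body=b"x" * 2_000_000,
            zlib_executor_size=1024 * 1024,  # compress bodies > 1 MiB in the pool
            zlib_executor=pool,
        )
        resp.enable_compression()
        return resp
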
diff --git a/contrib/python/aiohttp/aiohttp/web_routedef.py b/contrib/python/aiohttp/aiohttp/web_routedef.py
index 671e5c7f46..d79cd32a14 100644
--- a/contrib/python/aiohttp/aiohttp/web_routedef.py
+++ b/contrib/python/aiohttp/aiohttp/web_routedef.py
@@ -20,7 +20,7 @@ from . import hdrs
from .abc import AbstractView
from .typedefs import Handler, PathLike
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .web_request import Request
from .web_response import StreamResponse
from .web_urldispatcher import AbstractRoute, UrlDispatcher
@@ -156,7 +156,7 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
"""Route definition table"""
def __init__(self) -> None:
- self._items = [] # type: List[AbstractRouteDef]
+ self._items: List[AbstractRouteDef] = []
def __repr__(self) -> str:
return f"<RouteTableDef count={len(self._items)}>"
@@ -206,6 +206,9 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
def delete(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_DELETE, path, **kwargs)
+ def options(self, path: str, **kwargs: Any) -> _Deco:
+ return self.route(hdrs.METH_OPTIONS, path, **kwargs)
+
def view(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_ANY, path, **kwargs)
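
RouteTableDef gains an options() shortcut alongside the existing verb helpers:

    from aiohttp import web

    routes = web.RouteTableDef()

    @routes.options("/items")  # the newly added shortcut
    async def preflight(request: web.Request) -> web.Response:
        return web.Response(headers={"Allow": "GET, OPTIONS"})

    app = web.Application()
    app.add_routes(routes)
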
diff --git a/contrib/python/aiohttp/aiohttp/web_runner.py b/contrib/python/aiohttp/aiohttp/web_runner.py
index f4a64bff66..19a4441658 100644
--- a/contrib/python/aiohttp/aiohttp/web_runner.py
+++ b/contrib/python/aiohttp/aiohttp/web_runner.py
@@ -1,11 +1,13 @@
import asyncio
import signal
import socket
+import warnings
from abc import ABC, abstractmethod
-from typing import Any, List, Optional, Set
+from typing import Any, Awaitable, Callable, List, Optional, Set
from yarl import URL
+from .typedefs import PathLike
from .web_app import Application
from .web_server import Server
@@ -37,7 +39,7 @@ def _raise_graceful_exit() -> None:
class BaseSite(ABC):
- __slots__ = ("_runner", "_shutdown_timeout", "_ssl_context", "_backlog", "_server")
+ __slots__ = ("_runner", "_ssl_context", "_backlog", "_server")
def __init__(
self,
@@ -49,11 +51,14 @@ class BaseSite(ABC):
) -> None:
if runner.server is None:
raise RuntimeError("Call runner.setup() before making a site")
+ if shutdown_timeout != 60.0:
+ msg = "shutdown_timeout should be set on BaseRunner"
+ warnings.warn(msg, DeprecationWarning, stacklevel=2)
+ runner._shutdown_timeout = shutdown_timeout
self._runner = runner
- self._shutdown_timeout = shutdown_timeout
self._ssl_context = ssl_context
self._backlog = backlog
- self._server = None # type: Optional[asyncio.AbstractServer]
+ self._server: Optional[asyncio.AbstractServer] = None
@property
@abstractmethod
@@ -66,16 +71,9 @@ class BaseSite(ABC):
async def stop(self) -> None:
self._runner._check_site(self)
- if self._server is None:
- self._runner._unreg_site(self)
- return # not started yet
- self._server.close()
- # named pipes do not have wait_closed property
- if hasattr(self._server, "wait_closed"):
- await self._server.wait_closed()
- await self._runner.shutdown()
- assert self._runner.server
- await self._runner.server.shutdown(self._shutdown_timeout)
+ if self._server is not None: # Maybe not started yet
+ self._server.close()
+
self._runner._unreg_site(self)
@@ -135,7 +133,7 @@ class UnixSite(BaseSite):
def __init__(
self,
runner: "BaseRunner",
- path: str,
+ path: PathLike,
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
@@ -160,7 +158,10 @@ class UnixSite(BaseSite):
server = self._runner.server
assert server is not None
self._server = await loop.create_unix_server(
- server, self._path, ssl=self._ssl_context, backlog=self._backlog
+ server,
+ self._path,
+ ssl=self._ssl_context,
+ backlog=self._backlog,
)
@@ -237,13 +238,28 @@ class SockSite(BaseSite):
class BaseRunner(ABC):
- __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites")
+ __slots__ = (
+ "shutdown_callback",
+ "_handle_signals",
+ "_kwargs",
+ "_server",
+ "_sites",
+ "_shutdown_timeout",
+ )
- def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None:
+ def __init__(
+ self,
+ *,
+ handle_signals: bool = False,
+ shutdown_timeout: float = 60.0,
+ **kwargs: Any,
+ ) -> None:
+ self.shutdown_callback: Optional[Callable[[], Awaitable[None]]] = None
self._handle_signals = handle_signals
self._kwargs = kwargs
- self._server = None # type: Optional[Server]
- self._sites = [] # type: List[BaseSite]
+ self._server: Optional[Server] = None
+ self._sites: List[BaseSite] = []
+ self._shutdown_timeout = shutdown_timeout
@property
def server(self) -> Optional[Server]:
@@ -251,11 +267,11 @@ class BaseRunner(ABC):
@property
def addresses(self) -> List[Any]:
- ret = [] # type: List[Any]
+ ret: List[Any] = []
for site in self._sites:
server = site._server
if server is not None:
- sockets = server.sockets
+ sockets = server.sockets # type: ignore[attr-defined]
if sockets is not None:
for sock in sockets:
ret.append(sock.getsockname())
@@ -280,20 +296,32 @@ class BaseRunner(ABC):
@abstractmethod
async def shutdown(self) -> None:
- pass # pragma: no cover
+ """Call any shutdown hooks to help server close gracefully."""
async def cleanup(self) -> None:
- loop = asyncio.get_event_loop()
-
# The loop over sites is intentional: an exception from gather()
# leaves self._sites in an unpredictable state.
# The loop guarantees that a site is either deleted on success or
# still present on failure
for site in list(self._sites):
await site.stop()
+
+ if self._server: # If setup succeeded
+ # Yield to the event loop so that requests which arrived before the sites
+ # stopped have all started being handled before we close idle connections.
+ await asyncio.sleep(0)
+ self._server.pre_shutdown()
+ await self.shutdown()
+
+ if self.shutdown_callback:
+ await self.shutdown_callback()
+
+ await self._server.shutdown(self._shutdown_timeout)
await self._cleanup_server()
+
self._server = None
if self._handle_signals:
+ loop = asyncio.get_running_loop()
try:
loop.remove_signal_handler(signal.SIGINT)
loop.remove_signal_handler(signal.SIGTERM)
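
With shutdown_timeout moved from the sites onto the runner (site-level values
now only warn and forward), the new spelling looks like this sketch:

    from aiohttp import web

    async def serve(app: web.Application) -> None:
        runner = web.AppRunner(app, shutdown_timeout=30.0)  # set here, not per site
        await runner.setup()
        site = web.TCPSite(runner, "0.0.0.0", 8080)
        await site.start()
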
diff --git a/contrib/python/aiohttp/aiohttp/web_server.py b/contrib/python/aiohttp/aiohttp/web_server.py
index 5657ed9c80..52faacb164 100644
--- a/contrib/python/aiohttp/aiohttp/web_server.py
+++ b/contrib/python/aiohttp/aiohttp/web_server.py
@@ -18,15 +18,17 @@ class Server:
handler: _RequestHandler,
*,
request_factory: Optional[_RequestFactory] = None,
+ handler_cancellation: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
**kwargs: Any
) -> None:
self._loop = get_running_loop(loop)
- self._connections = {} # type: Dict[RequestHandler, asyncio.Transport]
+ self._connections: Dict[RequestHandler, asyncio.Transport] = {}
self._kwargs = kwargs
self.requests_count = 0
self.request_handler = handler
self.request_factory = request_factory or self._make_request
+ self.handler_cancellation = handler_cancellation
@property
def connections(self) -> List[RequestHandler]:
@@ -53,10 +55,23 @@ class Server:
) -> BaseRequest:
return BaseRequest(message, payload, protocol, writer, task, self._loop)
+ def pre_shutdown(self) -> None:
+ for conn in self._connections:
+ conn.close()
+
async def shutdown(self, timeout: Optional[float] = None) -> None:
- coros = [conn.shutdown(timeout) for conn in self._connections]
+ coros = (conn.shutdown(timeout) for conn in self._connections)
await asyncio.gather(*coros)
self._connections.clear()
def __call__(self) -> RequestHandler:
- return RequestHandler(self, loop=self._loop, **self._kwargs)
+ try:
+ return RequestHandler(self, loop=self._loop, **self._kwargs)
+ except TypeError:
+ # Failsafe creation: remove all custom handler_args
+ kwargs = {
+ k: v
+ for k, v in self._kwargs.items()
+ if k in ["debug", "access_log_class"]
+ }
+ return RequestHandler(self, loop=self._loop, **kwargs)
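
The low-level Server grows the matching handler_cancellation flag and a
pre_shutdown() hook that closes idle connections before the graceful wait in
cleanup(). A sketch of the low-level API:

    import asyncio
    from aiohttp import web

    async def handler(request: web.BaseRequest) -> web.Response:
        return web.Response(text="hello")

    async def main() -> None:
        server = web.Server(handler, handler_cancellation=True)
        runner = web.ServerRunner(server)
        await runner.setup()
        site = web.TCPSite(runner, "localhost", 8080)
        await site.start()
        await asyncio.sleep(3600)  # serve for an hour
        await runner.cleanup()     # pre_shutdown() + graceful shutdown happen here

    asyncio.run(main())
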
diff --git a/contrib/python/aiohttp/aiohttp/web_urldispatcher.py b/contrib/python/aiohttp/aiohttp/web_urldispatcher.py
index 73ec4c05d0..954291f644 100644
--- a/contrib/python/aiohttp/aiohttp/web_urldispatcher.py
+++ b/contrib/python/aiohttp/aiohttp/web_urldispatcher.py
@@ -1,7 +1,9 @@
import abc
import asyncio
import base64
+import functools
import hashlib
+import html
import inspect
import keyword
import os
@@ -18,17 +20,20 @@ from typing import (
Callable,
Container,
Dict,
+ Final,
Generator,
Iterable,
Iterator,
List,
Mapping,
+ NoReturn,
Optional,
Pattern,
Set,
Sized,
Tuple,
Type,
+ TypedDict,
Union,
cast,
)
@@ -39,7 +44,7 @@ from . import hdrs
from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
from .helpers import DEBUG
from .http import HttpVersion11
-from .typedefs import Final, Handler, PathLike, TypedDict
+from .typedefs import Handler, PathLike
from .web_exceptions import (
HTTPException,
HTTPExpectationFailed,
@@ -66,7 +71,7 @@ __all__ = (
)
-if TYPE_CHECKING: # pragma: no cover
+if TYPE_CHECKING:
from .web_app import Application
BaseDict = Dict[str, str]
@@ -84,9 +89,11 @@ ROUTE_RE: Final[Pattern[str]] = re.compile(
PATH_SEP: Final[str] = re.escape("/")
-_ExpectHandler = Callable[[Request], Awaitable[None]]
+_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]]
_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]]
+html_escape = functools.partial(html.escape, quote=True)
+
class _InfoDict(TypedDict, total=False):
path: str
@@ -194,8 +201,9 @@ class AbstractRoute(abc.ABC):
async def handler_wrapper(request: Request) -> StreamResponse:
result = old_handler(request)
if asyncio.iscoroutine(result):
- return await result
- return result # type: ignore[return-value]
+ result = await result
+ assert isinstance(result, StreamResponse)
+ return result
old_handler = handler
handler = handler_wrapper
@@ -230,16 +238,16 @@ class AbstractRoute(abc.ABC):
def url_for(self, *args: str, **kwargs: str) -> URL:
"""Construct url for route with additional params."""
- async def handle_expect_header(self, request: Request) -> None:
- await self._expect_handler(request)
+ async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]:
+ return await self._expect_handler(request)
class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
def __init__(self, match_dict: Dict[str, str], route: AbstractRoute):
super().__init__(match_dict)
self._route = route
- self._apps = [] # type: List[Application]
- self._current_app = None # type: Optional[Application]
+ self._apps: List[Application] = []
+ self._current_app: Optional[Application] = None
self._frozen = False
@property
@@ -333,7 +341,7 @@ async def _default_expect_handler(request: Request) -> None:
class Resource(AbstractResource):
def __init__(self, *, name: Optional[str] = None) -> None:
super().__init__(name=name)
- self._routes = [] # type: List[ResourceRoute]
+ self._routes: List[ResourceRoute] = []
def add_route(
self,
@@ -362,7 +370,7 @@ class Resource(AbstractResource):
self._routes.append(route)
async def resolve(self, request: Request) -> _Resolve:
- allowed_methods = set() # type: Set[str]
+ allowed_methods: Set[str] = set()
match_dict = self._match(request.rel_url.raw_path)
if match_dict is None:
@@ -384,7 +392,7 @@ class Resource(AbstractResource):
def __len__(self) -> int:
return len(self._routes)
- def __iter__(self) -> Iterator[AbstractRoute]:
+ def __iter__(self) -> Iterator["ResourceRoute"]:
return iter(self._routes)
# TODO: implement all abstract methods
@@ -576,14 +584,12 @@ class StaticResource(PrefixResource):
def url_for( # type: ignore[override]
self,
*,
- filename: Union[str, Path],
+ filename: PathLike,
append_version: Optional[bool] = None,
) -> URL:
if append_version is None:
append_version = self._append_version
- if isinstance(filename, Path):
- filename = str(filename)
- filename = filename.lstrip("/")
+ filename = str(filename).lstrip("/")
url = URL.build(path=self._prefix, encoded=True)
# filename is not encoded
@@ -593,9 +599,14 @@ class StaticResource(PrefixResource):
url = url / filename
if append_version:
+ unresolved_path = self._directory.joinpath(filename)
try:
- filepath = self._directory.joinpath(filename).resolve()
- if not self._follow_symlinks:
+ if self._follow_symlinks:
+ normalized_path = Path(os.path.normpath(unresolved_path))
+ normalized_path.relative_to(self._directory)
+ filepath = normalized_path.resolve()
+ else:
+ filepath = unresolved_path.resolve()
filepath.relative_to(self._directory)
except (ValueError, FileNotFoundError):
            # ValueError for the case when the path points to a symlink
@@ -660,8 +671,13 @@ class StaticResource(PrefixResource):
# /static/\\machine_name\c$ or /static/D:\path
# where the static dir is totally different
raise HTTPForbidden()
- filepath = self._directory.joinpath(filename).resolve()
- if not self._follow_symlinks:
+ unresolved_path = self._directory.joinpath(filename)
+ if self._follow_symlinks:
+ normalized_path = Path(os.path.normpath(unresolved_path))
+ normalized_path.relative_to(self._directory)
+ filepath = normalized_path.resolve()
+ else:
+ filepath = unresolved_path.resolve()
filepath.relative_to(self._directory)
except (ValueError, FileNotFoundError) as error:
# relatively safe
@@ -696,7 +712,7 @@ class StaticResource(PrefixResource):
assert filepath.is_dir()
relative_path_to_dir = filepath.relative_to(self._directory).as_posix()
- index_of = f"Index of /{relative_path_to_dir}"
+ index_of = f"Index of /{html_escape(relative_path_to_dir)}"
h1 = f"<h1>{index_of}</h1>"
index_list = []
@@ -704,7 +720,7 @@ class StaticResource(PrefixResource):
for _file in sorted(dir_index):
# show file url as relative to static path
rel_path = _file.relative_to(self._directory).as_posix()
- file_url = self._prefix + "/" + rel_path
+ quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}")
# if file is a directory, add '/' to the end of the name
if _file.is_dir():
@@ -713,9 +729,7 @@ class StaticResource(PrefixResource):
file_name = _file.name
index_list.append(
- '<li><a href="{url}">{name}</a></li>'.format(
- url=file_url, name=file_name
- )
+ f'<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>'
)
ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
body = f"<body>\n{h1}\n{ul}\n</body>"
@@ -946,18 +960,18 @@ class View(AbstractView):
async def _iter(self) -> StreamResponse:
if self.request.method not in hdrs.METH_ALL:
self._raise_allowed_methods()
- method: Callable[[], Awaitable[StreamResponse]] = getattr(
- self, self.request.method.lower(), None
- )
+ method: Optional[Callable[[], Awaitable[StreamResponse]]]
+ method = getattr(self, self.request.method.lower(), None)
if method is None:
self._raise_allowed_methods()
- resp = await method()
- return resp
+ ret = await method()
+ assert isinstance(ret, StreamResponse)
+ return ret
def __await__(self) -> Generator[Any, None, StreamResponse]:
return self._iter().__await__()
- def _raise_allowed_methods(self) -> None:
+ def _raise_allowed_methods(self) -> NoReturn:
allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
raise HTTPMethodNotAllowed(self.request.method, allowed_methods)
@@ -978,7 +992,7 @@ class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResourc
class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
def __init__(self, resources: List[AbstractResource]):
- self._routes = [] # type: List[AbstractRoute]
+ self._routes: List[AbstractRoute] = []
for resource in resources:
for route in resource:
self._routes.append(route)
@@ -999,12 +1013,12 @@ class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
def __init__(self) -> None:
super().__init__()
- self._resources = [] # type: List[AbstractResource]
- self._named_resources = {} # type: Dict[str, AbstractResource]
+ self._resources: List[AbstractResource] = []
+ self._named_resources: Dict[str, AbstractResource] = {}
async def resolve(self, request: Request) -> UrlMappingMatchInfo:
method = request.method
- allowed_methods = set() # type: Set[str]
+ allowed_methods: Set[str] = set()
for resource in self._resources:
match_dict, allowed = await resource.resolve(request)
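Note: the ``StaticResource`` changes do two things: directory-traversal checks now normalize the path before resolving when ``follow_symlinks`` is enabled, and the generated directory index HTML-escapes the heading and each file name while URL-quoting the href, closing an XSS vector for hostile file names. A sketch of the escaping (``_quote_path`` is aiohttp-internal; ``urllib.parse.quote`` stands in for it here)::

    import functools
    import html
    from urllib.parse import quote

    html_escape = functools.partial(html.escape, quote=True)

    name = '<img src=x onerror=alert(1)>.txt'
    href = quote(f"/static/{name}", safe="/")
    entry = f'<li><a href="{href}">{html_escape(name)}</a></li>'
    print(entry)  # the name renders as inert text, the href is percent-encoded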
diff --git a/contrib/python/aiohttp/aiohttp/web_ws.py b/contrib/python/aiohttp/aiohttp/web_ws.py
index 16b0a1747c..9fe6652753 100644
--- a/contrib/python/aiohttp/aiohttp/web_ws.py
+++ b/contrib/python/aiohttp/aiohttp/web_ws.py
@@ -3,15 +3,15 @@ import base64
import binascii
import hashlib
import json
-from typing import Any, Iterable, Optional, Tuple, cast
+import sys
+from typing import Any, Final, Iterable, Optional, Tuple, cast
-import async_timeout
import attr
from multidict import CIMultiDict
from . import hdrs
from .abc import AbstractStreamWriter
-from .helpers import call_later, set_result
+from .helpers import call_later, set_exception, set_result
from .http import (
WS_CLOSED_MESSAGE,
WS_CLOSING_MESSAGE,
@@ -27,11 +27,16 @@ from .http import (
)
from .log import ws_logger
from .streams import EofStream, FlowControlDataQueue
-from .typedefs import Final, JSONDecoder, JSONEncoder
+from .typedefs import JSONDecoder, JSONEncoder
from .web_exceptions import HTTPBadRequest, HTTPException
from .web_request import BaseRequest
from .web_response import StreamResponse
+if sys.version_info >= (3, 11):
+ import asyncio as async_timeout
+else:
+ import async_timeout
+
__all__ = (
"WebSocketResponse",
"WebSocketReady",
@@ -68,16 +73,16 @@ class WebSocketResponse(StreamResponse):
) -> None:
super().__init__(status=101)
self._protocols = protocols
- self._ws_protocol = None # type: Optional[str]
- self._writer = None # type: Optional[WebSocketWriter]
- self._reader = None # type: Optional[FlowControlDataQueue[WSMessage]]
+ self._ws_protocol: Optional[str] = None
+ self._writer: Optional[WebSocketWriter] = None
+ self._reader: Optional[FlowControlDataQueue[WSMessage]] = None
self._closed = False
self._closing = False
self._conn_lost = 0
- self._close_code = None # type: Optional[int]
- self._loop = None # type: Optional[asyncio.AbstractEventLoop]
- self._waiting = None # type: Optional[asyncio.Future[bool]]
- self._exception = None # type: Optional[BaseException]
+ self._close_code: Optional[int] = None
+ self._loop: Optional[asyncio.AbstractEventLoop] = None
+ self._waiting: Optional[asyncio.Future[bool]] = None
+ self._exception: Optional[BaseException] = None
self._timeout = timeout
self._receive_timeout = receive_timeout
self._autoclose = autoclose
@@ -105,7 +110,12 @@ class WebSocketResponse(StreamResponse):
if self._heartbeat is not None:
assert self._loop is not None
self._heartbeat_cb = call_later(
- self._send_heartbeat, self._heartbeat, self._loop
+ self._send_heartbeat,
+ self._heartbeat,
+ self._loop,
+ timeout_ceil_threshold=self._req._protocol._timeout_ceil_threshold
+ if self._req is not None
+ else 5,
)
def _send_heartbeat(self) -> None:
@@ -119,15 +129,19 @@ class WebSocketResponse(StreamResponse):
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = call_later(
- self._pong_not_received, self._pong_heartbeat, self._loop
+ self._pong_not_received,
+ self._pong_heartbeat,
+ self._loop,
+ timeout_ceil_threshold=self._req._protocol._timeout_ceil_threshold
+ if self._req is not None
+ else 5,
)
def _pong_not_received(self) -> None:
if self._req is not None and self._req.transport is not None:
self._closed = True
- self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
self._exception = asyncio.TimeoutError()
- self._req.transport.close()
async def prepare(self, request: BaseRequest) -> AbstractStreamWriter:
        # make a pre-check so errors are not hidden by do_handshake() exceptions
@@ -196,9 +210,9 @@ class WebSocketResponse(StreamResponse):
accept_val = base64.b64encode(
hashlib.sha1(key.encode() + WS_KEY).digest()
).decode()
- response_headers = CIMultiDict( # type: ignore[var-annotated]
+ response_headers = CIMultiDict(
{
- hdrs.UPGRADE: "websocket", # type: ignore[arg-type]
+ hdrs.UPGRADE: "websocket",
hdrs.CONNECTION: "upgrade",
hdrs.SEC_WEBSOCKET_ACCEPT: accept_val,
}
@@ -253,7 +267,7 @@ class WebSocketResponse(StreamResponse):
loop = self._loop
assert loop is not None
- self._reader = FlowControlDataQueue(request._protocol, 2 ** 16, loop=loop)
+ self._reader = FlowControlDataQueue(request._protocol, 2**16, loop=loop)
request.protocol.set_parser(
WebSocketReader(self._reader, self._max_msg_size, compress=self._compress)
)
@@ -286,6 +300,19 @@ class WebSocketResponse(StreamResponse):
def compress(self) -> bool:
return self._compress
+ def get_extra_info(self, name: str, default: Any = None) -> Any:
+ """Get optional transport information.
+
+ If no value associated with ``name`` is found, ``default`` is returned.
+ """
+ writer = self._writer
+ if writer is None:
+ return default
+ transport = writer.transport
+ if transport is None:
+ return default
+ return transport.get_extra_info(name, default)
+
def exception(self) -> Optional[BaseException]:
return self._exception
@@ -332,7 +359,10 @@ class WebSocketResponse(StreamResponse):
await self.close()
self._eof_sent = True
- async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
+ async def close(
+ self, *, code: int = WSCloseCode.OK, message: bytes = b"", drain: bool = True
+ ) -> bool:
+ """Close websocket connection."""
if self._writer is None:
raise RuntimeError("Call .prepare() first")
@@ -346,46 +376,63 @@ class WebSocketResponse(StreamResponse):
reader.feed_data(WS_CLOSING_MESSAGE, 0)
await self._waiting
- if not self._closed:
- self._closed = True
- try:
- await self._writer.close(code, message)
- writer = self._payload_writer
- assert writer is not None
- await writer.drain()
- except (asyncio.CancelledError, asyncio.TimeoutError):
- self._close_code = WSCloseCode.ABNORMAL_CLOSURE
- raise
- except Exception as exc:
- self._close_code = WSCloseCode.ABNORMAL_CLOSURE
- self._exception = exc
- return True
+ if self._closed:
+ return False
- if self._closing:
- return True
+ self._closed = True
+ try:
+ await self._writer.close(code, message)
+ writer = self._payload_writer
+ assert writer is not None
+ if drain:
+ await writer.drain()
+ except (asyncio.CancelledError, asyncio.TimeoutError):
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
+ raise
+ except Exception as exc:
+ self._exception = exc
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
+ return True
- reader = self._reader
- assert reader is not None
- try:
- async with async_timeout.timeout(self._timeout):
- msg = await reader.read()
- except asyncio.CancelledError:
- self._close_code = WSCloseCode.ABNORMAL_CLOSURE
- raise
- except Exception as exc:
- self._close_code = WSCloseCode.ABNORMAL_CLOSURE
- self._exception = exc
- return True
+ if self._closing:
+ self._close_transport()
+ return True
- if msg.type == WSMsgType.CLOSE:
- self._close_code = msg.data
- return True
+ reader = self._reader
+ assert reader is not None
+ try:
+ async with async_timeout.timeout(self._timeout):
+ msg = await reader.read()
+ except asyncio.CancelledError:
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
+ raise
+ except Exception as exc:
+ self._exception = exc
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
+ return True
- self._close_code = WSCloseCode.ABNORMAL_CLOSURE
- self._exception = asyncio.TimeoutError()
+ if msg.type == WSMsgType.CLOSE:
+ self._set_code_close_transport(msg.data)
return True
- else:
- return False
+
+ self._set_code_close_transport(WSCloseCode.ABNORMAL_CLOSURE)
+ self._exception = asyncio.TimeoutError()
+ return True
+
+ def _set_closing(self, code: WSCloseCode) -> None:
+ """Set the close code and mark the connection as closing."""
+ self._closing = True
+ self._close_code = code
+
+ def _set_code_close_transport(self, code: WSCloseCode) -> None:
+ """Set the close code and close the transport."""
+ self._close_code = code
+ self._close_transport()
+
+ def _close_transport(self) -> None:
+ """Close the transport."""
+ if self._req is not None and self._req.transport is not None:
+ self._req.transport.close()
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
if self._reader is None:
@@ -415,8 +462,7 @@ class WebSocketResponse(StreamResponse):
waiter = self._waiting
set_result(waiter, True)
self._waiting = None
- except (asyncio.CancelledError, asyncio.TimeoutError):
- self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ except asyncio.TimeoutError:
raise
except EofStream:
self._close_code = WSCloseCode.OK
@@ -428,18 +474,21 @@ class WebSocketResponse(StreamResponse):
return WSMessage(WSMsgType.ERROR, exc, None)
except Exception as exc:
self._exception = exc
- self._closing = True
- self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._set_closing(WSCloseCode.ABNORMAL_CLOSURE)
await self.close()
return WSMessage(WSMsgType.ERROR, exc, None)
if msg.type == WSMsgType.CLOSE:
- self._closing = True
- self._close_code = msg.data
+ self._set_closing(msg.data)
+ # Could be closed while awaiting reader.
if not self._closed and self._autoclose:
- await self.close()
+ # The client is likely going to close the
+ # connection out from under us so we do not
+ # want to drain any pending writes as it will
+                # likely result in writing to a broken pipe.
+ await self.close(drain=False)
elif msg.type == WSMsgType.CLOSING:
- self._closing = True
+ self._set_closing(WSCloseCode.OK)
elif msg.type == WSMsgType.PING and self._autoping:
await self.pong(msg.data)
continue
@@ -483,5 +532,8 @@ class WebSocketResponse(StreamResponse):
return msg
def _cancel(self, exc: BaseException) -> None:
+ # web_protocol calls this from connection_lost
+ # or when the server is shutting down.
+ self._closing = True
if self._reader is not None:
- self._reader.set_exception(exc)
+ set_exception(self._reader, exc)
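Note: on Python 3.11+ the stdlib ``asyncio.timeout`` context manager is API-compatible with the third-party ``async_timeout.timeout``, so the module aliases one to the other instead of importing the backport. The same shim in isolation, with a hypothetical consumer::

    import asyncio
    import sys

    if sys.version_info >= (3, 11):
        import asyncio as async_timeout  # stdlib asyncio.timeout()
    else:
        import async_timeout  # third-party backport

    async def read_with_deadline(
        reader: asyncio.StreamReader, seconds: float
    ) -> bytes:
        # Raises TimeoutError if read() does not complete in time.
        async with async_timeout.timeout(seconds):
            return await reader.read()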
diff --git a/contrib/python/aiohttp/aiohttp/worker.py b/contrib/python/aiohttp/aiohttp/worker.py
index 08945bcb4b..9b30769733 100644
--- a/contrib/python/aiohttp/aiohttp/worker.py
+++ b/contrib/python/aiohttp/aiohttp/worker.py
@@ -26,7 +26,7 @@ except ImportError: # pragma: no cover
SSLContext = object # type: ignore[misc,assignment]
-__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker")
+__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker")
class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
@@ -37,9 +37,9 @@ class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover
super().__init__(*args, **kw)
- self._task = None # type: Optional[asyncio.Task[None]]
+ self._task: Optional[asyncio.Task[None]] = None
self.exit_code = 0
- self._notify_waiter = None # type: Optional[asyncio.Future[bool]]
+ self._notify_waiter: Optional[asyncio.Future[bool]] = None
def init_process(self) -> None:
# create new event_loop after fork
@@ -89,6 +89,7 @@ class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
access_log_format=self._get_valid_log_format(
self.cfg.access_log_format
),
+ shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
)
await runner.setup()
@@ -103,7 +104,6 @@ class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
runner,
sock,
ssl_context=ctx,
- shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
)
await site.start()
@@ -114,7 +114,7 @@ class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
self.notify()
cnt = server.requests_count
- if self.cfg.max_requests and cnt > self.cfg.max_requests:
+ if self.max_requests and cnt > self.max_requests:
self.alive = False
self.log.info("Max requests, shutting down: %s", self)
@@ -182,14 +182,8 @@ class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
signal.siginterrupt(signal.SIGUSR1, False)
# Reset signals so Gunicorn doesn't swallow subprocess return codes
# See: https://github.com/aio-libs/aiohttp/issues/6130
- if sys.version_info < (3, 8):
- # Starting from Python 3.8,
- # the default child watcher is ThreadedChildWatcher.
- # The watcher doesn't depend on SIGCHLD signal,
- # there is no need to reset it.
- signal.signal(signal.SIGCHLD, signal.SIG_DFL)
-
- def handle_quit(self, sig: int, frame: FrameType) -> None:
+
+ def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None:
self.alive = False
# worker_int callback
@@ -198,7 +192,7 @@ class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
# wakeup closing process
self._notify_waiter_done()
- def handle_abort(self, sig: int, frame: FrameType) -> None:
+ def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None:
self.alive = False
self.exit_code = 1
self.cfg.worker_abort(self)
@@ -251,19 +245,3 @@ class GunicornUVLoopWebWorker(GunicornWebWorker):
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
super().init_process()
-
-
-class GunicornTokioWebWorker(GunicornWebWorker):
- def init_process(self) -> None: # pragma: no cover
- import tokio
-
- # Close any existing event loop before setting a
- # new policy.
- asyncio.get_event_loop().close()
-
- # Setup tokio policy, so that every
- # asyncio.get_event_loop() will create an instance
- # of tokio event loop.
- asyncio.set_event_loop_policy(tokio.EventLoopPolicy())
-
- super().init_process()
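Note: ``shutdown_timeout`` now goes to the ``AppRunner`` rather than the ``TCPSite``, and the max-requests check reads ``self.max_requests``, which Gunicorn's base worker derives from ``max_requests`` plus ``max_requests_jitter``; reading the raw config value ignored the jitter. The timeout arithmetic, for reference::

    graceful_timeout = 30.0                          # gunicorn --graceful-timeout
    shutdown_timeout = graceful_timeout / 100 * 95   # 28.5s handed to AppRunner
    # The remaining 5% is headroom for the worker loop itself to exit.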
diff --git a/contrib/python/aiohttp/ya.make b/contrib/python/aiohttp/ya.make
index 9fd0c0df74..40b3b6faab 100644
--- a/contrib/python/aiohttp/ya.make
+++ b/contrib/python/aiohttp/ya.make
@@ -2,15 +2,13 @@
PY3_LIBRARY()
-VERSION(3.8.1)
+VERSION(3.9.5)
LICENSE(Apache-2.0)
PEERDIR(
contrib/python/aiosignal
- contrib/python/async-timeout
contrib/python/attrs
- contrib/python/charset-normalizer
contrib/python/frozenlist
contrib/python/multidict
contrib/python/yarl
@@ -47,6 +45,7 @@ PY_SRCS(
aiohttp/client_proto.py
aiohttp/client_reqrep.py
aiohttp/client_ws.py
+ aiohttp/compression_utils.py
aiohttp/connector.py
aiohttp/cookiejar.py
aiohttp/formdata.py
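Note: the bump to 3.9.5 drops the ``async-timeout`` and ``charset-normalizer`` dependencies and adds the new ``compression_utils.py``, which centralizes zlib handling and offloads large payloads to an executor so compression does not block the event loop. A rough approximation of that idea (class name and threshold are illustrative, not the exact aiohttp API)::

    import asyncio
    import zlib

    MAX_SYNC_CHUNK = 1024  # compress bigger chunks off the event loop

    class ZLibCompressorSketch:
        def __init__(self, wbits: int = zlib.MAX_WBITS) -> None:
            self._compress = zlib.compressobj(wbits=wbits)

        async def compress(self, data: bytes) -> bytes:
            if len(data) > MAX_SYNC_CHUNK:
                loop = asyncio.get_running_loop()
                return await loop.run_in_executor(
                    None, self._compress.compress, data
                )
            return self._compress.compress(data)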
diff --git a/contrib/python/appnope/ya.make b/contrib/python/appnope/ya.make
index 83f590fdf4..3134d715e3 100644
--- a/contrib/python/appnope/ya.make
+++ b/contrib/python/appnope/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/appnope/py2)
ELSE()
diff --git a/contrib/python/argcomplete/ya.make b/contrib/python/argcomplete/ya.make
index 1692542636..146bbb77b4 100644
--- a/contrib/python/argcomplete/ya.make
+++ b/contrib/python/argcomplete/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/argcomplete/py2)
ELSE()
diff --git a/contrib/python/asn1crypto/ya.make b/contrib/python/asn1crypto/ya.make
index 75419d4566..e5c947aac5 100644
--- a/contrib/python/asn1crypto/ya.make
+++ b/contrib/python/asn1crypto/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/asn1crypto/py2)
ELSE()
diff --git a/contrib/python/async-timeout/.dist-info/METADATA b/contrib/python/async-timeout/.dist-info/METADATA
deleted file mode 100644
index d8dd6d12d6..0000000000
--- a/contrib/python/async-timeout/.dist-info/METADATA
+++ /dev/null
@@ -1,131 +0,0 @@
-Metadata-Version: 2.1
-Name: async-timeout
-Version: 4.0.3
-Summary: Timeout context manager for asyncio programs
-Home-page: https://github.com/aio-libs/async-timeout
-Author: Andrew Svetlov <andrew.svetlov@gmail.com>
-Author-email: andrew.svetlov@gmail.com
-License: Apache 2
-Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
-Project-URL: CI: GitHub Actions, https://github.com/aio-libs/async-timeout/actions
-Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/async-timeout
-Project-URL: GitHub: issues, https://github.com/aio-libs/async-timeout/issues
-Project-URL: GitHub: repo, https://github.com/aio-libs/async-timeout
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Topic :: Software Development :: Libraries
-Classifier: Framework :: AsyncIO
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Requires-Python: >=3.7
-Description-Content-Type: text/x-rst
-License-File: LICENSE
-Requires-Dist: typing-extensions >=3.6.5 ; python_version < "3.8"
-
-async-timeout
-=============
-.. image:: https://travis-ci.com/aio-libs/async-timeout.svg?branch=master
- :target: https://travis-ci.com/aio-libs/async-timeout
-.. image:: https://codecov.io/gh/aio-libs/async-timeout/branch/master/graph/badge.svg
- :target: https://codecov.io/gh/aio-libs/async-timeout
-.. image:: https://img.shields.io/pypi/v/async-timeout.svg
- :target: https://pypi.python.org/pypi/async-timeout
-.. image:: https://badges.gitter.im/Join%20Chat.svg
- :target: https://gitter.im/aio-libs/Lobby
- :alt: Chat on Gitter
-
-asyncio-compatible timeout context manager.
-
-
-Usage example
--------------
-
-
-The context manager is useful in cases when you want to apply timeout
-logic around block of code or in cases when ``asyncio.wait_for()`` is
-not suitable. Also it's much faster than ``asyncio.wait_for()``
-because ``timeout`` doesn't create a new task.
-
-The ``timeout(delay, *, loop=None)`` call returns a context manager
-that cancels a block on *timeout* expiring::
-
- from async_timeout import timeout
- async with timeout(1.5):
- await inner()
-
-1. If ``inner()`` is executed faster than in ``1.5`` seconds nothing
- happens.
-2. Otherwise ``inner()`` is cancelled internally by sending
- ``asyncio.CancelledError`` into but ``asyncio.TimeoutError`` is
- raised outside of context manager scope.
-
-*timeout* parameter could be ``None`` for skipping timeout functionality.
-
-
-Alternatively, ``timeout_at(when)`` can be used for scheduling
-at the absolute time::
-
- loop = asyncio.get_event_loop()
- now = loop.time()
-
- async with timeout_at(now + 1.5):
- await inner()
-
-
-Please note: it is not POSIX time but a time with
-undefined starting base, e.g. the time of the system power on.
-
-
-Context manager has ``.expired`` property for check if timeout happens
-exactly in context manager::
-
- async with timeout(1.5) as cm:
- await inner()
- print(cm.expired)
-
-The property is ``True`` if ``inner()`` execution is cancelled by
-timeout context manager.
-
-If ``inner()`` call explicitly raises ``TimeoutError`` ``cm.expired``
-is ``False``.
-
-The scheduled deadline time is available as ``.deadline`` property::
-
- async with timeout(1.5) as cm:
- cm.deadline
-
-Not finished yet timeout can be rescheduled by ``shift_by()``
-or ``shift_to()`` methods::
-
- async with timeout(1.5) as cm:
- cm.shift(1) # add another second on waiting
- cm.update(loop.time() + 5) # reschedule to now+5 seconds
-
-Rescheduling is forbidden if the timeout is expired or after exit from ``async with``
-code block.
-
-
-Installation
-------------
-
-::
-
- $ pip install async-timeout
-
-The library is Python 3 only!
-
-
-
-Authors and License
--------------------
-
-The module is written by Andrew Svetlov.
-
-It's *Apache 2* licensed and freely available.
diff --git a/contrib/python/async-timeout/.dist-info/top_level.txt b/contrib/python/async-timeout/.dist-info/top_level.txt
deleted file mode 100644
index ad29955ef9..0000000000
--- a/contrib/python/async-timeout/.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-async_timeout
diff --git a/contrib/python/async-timeout/LICENSE b/contrib/python/async-timeout/LICENSE
deleted file mode 100644
index 033c86b7a4..0000000000
--- a/contrib/python/async-timeout/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright 2016-2020 aio-libs collaboration.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/contrib/python/async-timeout/README.rst b/contrib/python/async-timeout/README.rst
deleted file mode 100644
index 5ed02e4e93..0000000000
--- a/contrib/python/async-timeout/README.rst
+++ /dev/null
@@ -1,100 +0,0 @@
-async-timeout
-=============
-.. image:: https://travis-ci.com/aio-libs/async-timeout.svg?branch=master
- :target: https://travis-ci.com/aio-libs/async-timeout
-.. image:: https://codecov.io/gh/aio-libs/async-timeout/branch/master/graph/badge.svg
- :target: https://codecov.io/gh/aio-libs/async-timeout
-.. image:: https://img.shields.io/pypi/v/async-timeout.svg
- :target: https://pypi.python.org/pypi/async-timeout
-.. image:: https://badges.gitter.im/Join%20Chat.svg
- :target: https://gitter.im/aio-libs/Lobby
- :alt: Chat on Gitter
-
-asyncio-compatible timeout context manager.
-
-
-Usage example
--------------
-
-
-The context manager is useful in cases when you want to apply timeout
-logic around block of code or in cases when ``asyncio.wait_for()`` is
-not suitable. Also it's much faster than ``asyncio.wait_for()``
-because ``timeout`` doesn't create a new task.
-
-The ``timeout(delay, *, loop=None)`` call returns a context manager
-that cancels a block on *timeout* expiring::
-
- from async_timeout import timeout
- async with timeout(1.5):
- await inner()
-
-1. If ``inner()`` is executed faster than in ``1.5`` seconds nothing
- happens.
-2. Otherwise ``inner()`` is cancelled internally by sending
- ``asyncio.CancelledError`` into but ``asyncio.TimeoutError`` is
- raised outside of context manager scope.
-
-*timeout* parameter could be ``None`` for skipping timeout functionality.
-
-
-Alternatively, ``timeout_at(when)`` can be used for scheduling
-at the absolute time::
-
- loop = asyncio.get_event_loop()
- now = loop.time()
-
- async with timeout_at(now + 1.5):
- await inner()
-
-
-Please note: it is not POSIX time but a time with
-undefined starting base, e.g. the time of the system power on.
-
-
-Context manager has ``.expired`` property for check if timeout happens
-exactly in context manager::
-
- async with timeout(1.5) as cm:
- await inner()
- print(cm.expired)
-
-The property is ``True`` if ``inner()`` execution is cancelled by
-timeout context manager.
-
-If ``inner()`` call explicitly raises ``TimeoutError`` ``cm.expired``
-is ``False``.
-
-The scheduled deadline time is available as ``.deadline`` property::
-
- async with timeout(1.5) as cm:
- cm.deadline
-
-Not finished yet timeout can be rescheduled by ``shift_by()``
-or ``shift_to()`` methods::
-
- async with timeout(1.5) as cm:
- cm.shift(1) # add another second on waiting
- cm.update(loop.time() + 5) # reschedule to now+5 seconds
-
-Rescheduling is forbidden if the timeout is expired or after exit from ``async with``
-code block.
-
-
-Installation
-------------
-
-::
-
- $ pip install async-timeout
-
-The library is Python 3 only!
-
-
-
-Authors and License
--------------------
-
-The module is written by Andrew Svetlov.
-
-It's *Apache 2* licensed and freely available.
diff --git a/contrib/python/async-timeout/async_timeout/__init__.py b/contrib/python/async-timeout/async_timeout/__init__.py
deleted file mode 100644
index 1ffb069fce..0000000000
--- a/contrib/python/async-timeout/async_timeout/__init__.py
+++ /dev/null
@@ -1,239 +0,0 @@
-import asyncio
-import enum
-import sys
-import warnings
-from types import TracebackType
-from typing import Optional, Type
-
-
-if sys.version_info >= (3, 8):
- from typing import final
-else:
- from typing_extensions import final
-
-
-if sys.version_info >= (3, 11):
-
- def _uncancel_task(task: "asyncio.Task[object]") -> None:
- task.uncancel()
-
-else:
-
- def _uncancel_task(task: "asyncio.Task[object]") -> None:
- pass
-
-
-__version__ = "4.0.3"
-
-
-__all__ = ("timeout", "timeout_at", "Timeout")
-
-
-def timeout(delay: Optional[float]) -> "Timeout":
- """timeout context manager.
-
- Useful in cases when you want to apply timeout logic around block
- of code or in cases when asyncio.wait_for is not suitable. For example:
-
- >>> async with timeout(0.001):
- ... async with aiohttp.get('https://github.com') as r:
- ... await r.text()
-
-
- delay - value in seconds or None to disable timeout logic
- """
- loop = asyncio.get_running_loop()
- if delay is not None:
- deadline = loop.time() + delay # type: Optional[float]
- else:
- deadline = None
- return Timeout(deadline, loop)
-
-
-def timeout_at(deadline: Optional[float]) -> "Timeout":
- """Schedule the timeout at absolute time.
-
- deadline argument points on the time in the same clock system
- as loop.time().
-
- Please note: it is not POSIX time but a time with
- undefined starting base, e.g. the time of the system power on.
-
- >>> async with timeout_at(loop.time() + 10):
- ... async with aiohttp.get('https://github.com') as r:
- ... await r.text()
-
-
- """
- loop = asyncio.get_running_loop()
- return Timeout(deadline, loop)
-
-
-class _State(enum.Enum):
- INIT = "INIT"
- ENTER = "ENTER"
- TIMEOUT = "TIMEOUT"
- EXIT = "EXIT"
-
-
-@final
-class Timeout:
- # Internal class, please don't instantiate it directly
- # Use timeout() and timeout_at() public factories instead.
- #
- # Implementation note: `async with timeout()` is preferred
- # over `with timeout()`.
- # While technically the Timeout class implementation
- # doesn't need to be async at all,
- # the `async with` statement explicitly points that
- # the context manager should be used from async function context.
- #
- # This design allows to avoid many silly misusages.
- #
- # TimeoutError is raised immediately when scheduled
- # if the deadline is passed.
- # The purpose is to time out as soon as possible
- # without waiting for the next await expression.
-
- __slots__ = ("_deadline", "_loop", "_state", "_timeout_handler", "_task")
-
- def __init__(
- self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
- ) -> None:
- self._loop = loop
- self._state = _State.INIT
-
- self._task: Optional["asyncio.Task[object]"] = None
- self._timeout_handler = None # type: Optional[asyncio.Handle]
- if deadline is None:
- self._deadline = None # type: Optional[float]
- else:
- self.update(deadline)
-
- def __enter__(self) -> "Timeout":
- warnings.warn(
- "with timeout() is deprecated, use async with timeout() instead",
- DeprecationWarning,
- stacklevel=2,
- )
- self._do_enter()
- return self
-
- def __exit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional[TracebackType],
- ) -> Optional[bool]:
- self._do_exit(exc_type)
- return None
-
- async def __aenter__(self) -> "Timeout":
- self._do_enter()
- return self
-
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional[TracebackType],
- ) -> Optional[bool]:
- self._do_exit(exc_type)
- return None
-
- @property
- def expired(self) -> bool:
- """Is timeout expired during execution?"""
- return self._state == _State.TIMEOUT
-
- @property
- def deadline(self) -> Optional[float]:
- return self._deadline
-
- def reject(self) -> None:
- """Reject scheduled timeout if any."""
- # cancel is maybe better name but
- # task.cancel() raises CancelledError in asyncio world.
- if self._state not in (_State.INIT, _State.ENTER):
- raise RuntimeError(f"invalid state {self._state.value}")
- self._reject()
-
- def _reject(self) -> None:
- self._task = None
- if self._timeout_handler is not None:
- self._timeout_handler.cancel()
- self._timeout_handler = None
-
- def shift(self, delay: float) -> None:
- """Advance timeout on delay seconds.
-
- The delay can be negative.
-
- Raise RuntimeError if shift is called when deadline is not scheduled
- """
- deadline = self._deadline
- if deadline is None:
- raise RuntimeError("cannot shift timeout if deadline is not scheduled")
- self.update(deadline + delay)
-
- def update(self, deadline: float) -> None:
- """Set deadline to absolute value.
-
- deadline argument points on the time in the same clock system
- as loop.time().
-
- If new deadline is in the past the timeout is raised immediately.
-
- Please note: it is not POSIX time but a time with
- undefined starting base, e.g. the time of the system power on.
- """
- if self._state == _State.EXIT:
- raise RuntimeError("cannot reschedule after exit from context manager")
- if self._state == _State.TIMEOUT:
- raise RuntimeError("cannot reschedule expired timeout")
- if self._timeout_handler is not None:
- self._timeout_handler.cancel()
- self._deadline = deadline
- if self._state != _State.INIT:
- self._reschedule()
-
- def _reschedule(self) -> None:
- assert self._state == _State.ENTER
- deadline = self._deadline
- if deadline is None:
- return
-
- now = self._loop.time()
- if self._timeout_handler is not None:
- self._timeout_handler.cancel()
-
- self._task = asyncio.current_task()
- if deadline <= now:
- self._timeout_handler = self._loop.call_soon(self._on_timeout)
- else:
- self._timeout_handler = self._loop.call_at(deadline, self._on_timeout)
-
- def _do_enter(self) -> None:
- if self._state != _State.INIT:
- raise RuntimeError(f"invalid state {self._state.value}")
- self._state = _State.ENTER
- self._reschedule()
-
- def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None:
- if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT:
- assert self._task is not None
- _uncancel_task(self._task)
- self._timeout_handler = None
- self._task = None
- raise asyncio.TimeoutError
- # timeout has not expired
- self._state = _State.EXIT
- self._reject()
- return None
-
- def _on_timeout(self) -> None:
- assert self._task is not None
- self._task.cancel()
- self._state = _State.TIMEOUT
- # drop the reference early
- self._timeout_handler = None
diff --git a/contrib/python/async-timeout/async_timeout/py.typed b/contrib/python/async-timeout/async_timeout/py.typed
deleted file mode 100644
index 3b94f91573..0000000000
--- a/contrib/python/async-timeout/async_timeout/py.typed
+++ /dev/null
@@ -1 +0,0 @@
-Placeholder
diff --git a/contrib/python/async-timeout/ya.make b/contrib/python/async-timeout/ya.make
deleted file mode 100644
index 6bc2c940a1..0000000000
--- a/contrib/python/async-timeout/ya.make
+++ /dev/null
@@ -1,23 +0,0 @@
-# Generated by devtools/yamaker (pypi).
-
-PY3_LIBRARY()
-
-VERSION(4.0.3)
-
-LICENSE(Apache-2.0)
-
-NO_LINT()
-
-PY_SRCS(
- TOP_LEVEL
- async_timeout/__init__.py
-)
-
-RESOURCE_FILES(
- PREFIX contrib/python/async-timeout/
- .dist-info/METADATA
- .dist-info/top_level.txt
- async_timeout/py.typed
-)
-
-END()
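Note: with aiohttp 3.9.x no longer importing it (presumably because the toolchain targets Python 3.11+), the vendored async-timeout package is removed outright. Remaining users can migrate to the stdlib equivalent; a sketch::

    import asyncio

    async def with_deadline(inner, seconds: float = 1.5):
        # asyncio.timeout() (Python 3.11+) mirrors async_timeout.timeout():
        # the block is cancelled on expiry and TimeoutError escapes the manager.
        async with asyncio.timeout(seconds):
            return await inner()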
diff --git a/contrib/python/atomicwrites/ya.make b/contrib/python/atomicwrites/ya.make
index 212e0edaa8..08fa00207e 100644
--- a/contrib/python/atomicwrites/ya.make
+++ b/contrib/python/atomicwrites/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/atomicwrites/py2)
ELSE()
diff --git a/contrib/python/attrs/ya.make b/contrib/python/attrs/ya.make
index 0fc12ef17e..92a5a1decf 100644
--- a/contrib/python/attrs/ya.make
+++ b/contrib/python/attrs/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/attrs/py2)
ELSE()
diff --git a/contrib/python/blinker/ya.make b/contrib/python/blinker/ya.make
index 269c8ae2e4..f2b2e942e3 100644
--- a/contrib/python/blinker/ya.make
+++ b/contrib/python/blinker/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/blinker/py2)
ELSE()
diff --git a/contrib/python/boto/ya.make b/contrib/python/boto/ya.make
index 0fc5a65079..3377040b77 100644
--- a/contrib/python/boto/ya.make
+++ b/contrib/python/boto/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/boto/py2)
ELSE()
diff --git a/contrib/python/boto3/ya.make b/contrib/python/boto3/ya.make
index 261d805b39..1d9d2aac51 100644
--- a/contrib/python/boto3/ya.make
+++ b/contrib/python/boto3/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/boto3/py2)
ELSE()
diff --git a/contrib/python/botocore/ya.make b/contrib/python/botocore/ya.make
index 719bdbb6b8..d6e4c1661e 100644
--- a/contrib/python/botocore/ya.make
+++ b/contrib/python/botocore/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/botocore/py2)
ELSE()
diff --git a/contrib/python/cachetools/ya.make b/contrib/python/cachetools/ya.make
index 3a8f917778..a263693d4d 100644
--- a/contrib/python/cachetools/ya.make
+++ b/contrib/python/cachetools/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/cachetools/py2)
ELSE()
diff --git a/contrib/python/certifi/ya.make b/contrib/python/certifi/ya.make
index 0835df974c..52dc685635 100644
--- a/contrib/python/certifi/ya.make
+++ b/contrib/python/certifi/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/certifi/py2)
ELSE()
diff --git a/contrib/python/cffi/ya.make b/contrib/python/cffi/ya.make
index 8fd78ed037..392e99cc81 100644
--- a/contrib/python/cffi/ya.make
+++ b/contrib/python/cffi/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/cffi/py2)
ELSE()
diff --git a/contrib/python/chardet/ya.make b/contrib/python/chardet/ya.make
index bab1b7a17c..cb609b3993 100644
--- a/contrib/python/chardet/ya.make
+++ b/contrib/python/chardet/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/chardet/py2)
ELSE()
diff --git a/contrib/python/click/ya.make b/contrib/python/click/ya.make
index d1c392abab..22ae29fcaf 100644
--- a/contrib/python/click/ya.make
+++ b/contrib/python/click/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/click/py2)
ELSE()
diff --git a/contrib/python/colorama/ya.make b/contrib/python/colorama/ya.make
index 81a8cac270..7343ae0fd8 100644
--- a/contrib/python/colorama/ya.make
+++ b/contrib/python/colorama/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/colorama/py2)
ELSE()
diff --git a/contrib/python/constantly/ya.make b/contrib/python/constantly/ya.make
index 9302f732d6..816e11fbba 100644
--- a/contrib/python/constantly/ya.make
+++ b/contrib/python/constantly/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/constantly/py2)
ELSE()
diff --git a/contrib/python/contextlib2/ya.make b/contrib/python/contextlib2/ya.make
index 00b1b55ba1..032690d1ad 100644
--- a/contrib/python/contextlib2/ya.make
+++ b/contrib/python/contextlib2/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/contextlib2/py2)
ELSE()
diff --git a/contrib/python/cookies/ya.make b/contrib/python/cookies/ya.make
index ebf6822d43..fcdd572935 100644
--- a/contrib/python/cookies/ya.make
+++ b/contrib/python/cookies/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/cookies/py2)
ELSE()
diff --git a/contrib/python/cryptography/ya.make b/contrib/python/cryptography/ya.make
index ce070b8473..e5a104b6e3 100644
--- a/contrib/python/cryptography/ya.make
+++ b/contrib/python/cryptography/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/cryptography/py2)
ELSE()
diff --git a/contrib/python/cycler/ya.make b/contrib/python/cycler/ya.make
index c8bf71a00e..63fda85c51 100644
--- a/contrib/python/cycler/ya.make
+++ b/contrib/python/cycler/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/cycler/py2)
ELSE()
diff --git a/contrib/python/decorator/ya.make b/contrib/python/decorator/ya.make
index 7431bc5c75..4a90f60cde 100644
--- a/contrib/python/decorator/ya.make
+++ b/contrib/python/decorator/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/decorator/py2)
ELSE()
diff --git a/contrib/python/freezegun/ya.make b/contrib/python/freezegun/ya.make
index 55e8f29de3..bd8c6b7a45 100644
--- a/contrib/python/freezegun/ya.make
+++ b/contrib/python/freezegun/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/freezegun/py2)
ELSE()
diff --git a/contrib/python/funcsigs/ya.make b/contrib/python/funcsigs/ya.make
index d8187f918d..b26d24ca15 100644
--- a/contrib/python/funcsigs/ya.make
+++ b/contrib/python/funcsigs/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/funcsigs/py2)
ELSE()
diff --git a/contrib/python/future/ya.make b/contrib/python/future/ya.make
index e2d0bf250f..8ebf9a27ff 100644
--- a/contrib/python/future/ya.make
+++ b/contrib/python/future/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/future/py2)
ELSE()
diff --git a/contrib/python/google-auth/ya.make b/contrib/python/google-auth/ya.make
index c67baa124d..a21068ca5f 100644
--- a/contrib/python/google-auth/ya.make
+++ b/contrib/python/google-auth/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/google-auth/py2)
ELSE()
diff --git a/contrib/python/grpcio/ya.make b/contrib/python/grpcio/ya.make
index 90333288f2..68b010dbcf 100644
--- a/contrib/python/grpcio/ya.make
+++ b/contrib/python/grpcio/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/grpcio/py2)
ELSE()
diff --git a/contrib/python/httplib2/ya.make b/contrib/python/httplib2/ya.make
index e53114c90c..ec7dbe35f0 100644
--- a/contrib/python/httplib2/ya.make
+++ b/contrib/python/httplib2/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/httplib2/py2)
ELSE()
diff --git a/contrib/python/hyperlink/ya.make b/contrib/python/hyperlink/ya.make
index 64a73ff34e..94d193270b 100644
--- a/contrib/python/hyperlink/ya.make
+++ b/contrib/python/hyperlink/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/hyperlink/py2)
ELSE()
diff --git a/contrib/python/hypothesis/ya.make b/contrib/python/hypothesis/ya.make
index e7d3c57903..cc840bd2ee 100644
--- a/contrib/python/hypothesis/ya.make
+++ b/contrib/python/hypothesis/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/hypothesis/py2)
ELSE()
diff --git a/contrib/python/idna/ya.make b/contrib/python/idna/ya.make
index 14c5524491..cce7d9e67f 100644
--- a/contrib/python/idna/ya.make
+++ b/contrib/python/idna/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/idna/py2)
ELSE()
diff --git a/contrib/python/importlib-metadata/ya.make b/contrib/python/importlib-metadata/ya.make
index 1243f06e83..95331c307f 100644
--- a/contrib/python/importlib-metadata/ya.make
+++ b/contrib/python/importlib-metadata/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/importlib-metadata/py2)
ELSE()
diff --git a/contrib/python/incremental/ya.make b/contrib/python/incremental/ya.make
index ad23ac6b27..0774b5030c 100644
--- a/contrib/python/incremental/ya.make
+++ b/contrib/python/incremental/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/incremental/py2)
ELSE()
diff --git a/contrib/python/ipdb/ya.make b/contrib/python/ipdb/ya.make
index 0591bbd5a1..36619f1d82 100644
--- a/contrib/python/ipdb/ya.make
+++ b/contrib/python/ipdb/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/ipdb/py2)
ELSE()
diff --git a/contrib/python/ipython-genutils/ya.make b/contrib/python/ipython-genutils/ya.make
index c4a9acb9f5..80d445eb64 100644
--- a/contrib/python/ipython-genutils/ya.make
+++ b/contrib/python/ipython-genutils/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/ipython-genutils/py2)
ELSE()
diff --git a/contrib/python/ipython/ya.make b/contrib/python/ipython/ya.make
index 42898ad604..20ea55e68c 100644
--- a/contrib/python/ipython/ya.make
+++ b/contrib/python/ipython/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/ipython/py2)
ELSE()
diff --git a/contrib/python/itsdangerous/ya.make b/contrib/python/itsdangerous/ya.make
index 7d300a116f..1c24b8c8c2 100644
--- a/contrib/python/itsdangerous/ya.make
+++ b/contrib/python/itsdangerous/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/itsdangerous/py2)
ELSE()
diff --git a/contrib/python/jedi/ya.make b/contrib/python/jedi/ya.make
index eb2e395b56..6132d3d958 100644
--- a/contrib/python/jedi/ya.make
+++ b/contrib/python/jedi/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/jedi/py2)
ELSE()
diff --git a/contrib/python/jmespath/ya.make b/contrib/python/jmespath/ya.make
index 092c23535c..84fcdc3826 100644
--- a/contrib/python/jmespath/ya.make
+++ b/contrib/python/jmespath/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/jmespath/py2)
ELSE()
diff --git a/contrib/python/jsonschema/ya.make b/contrib/python/jsonschema/ya.make
index 7b62c0d5ca..f3323c0dff 100644
--- a/contrib/python/jsonschema/ya.make
+++ b/contrib/python/jsonschema/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/jsonschema/py2)
ELSE()
diff --git a/contrib/python/kiwisolver/ya.make b/contrib/python/kiwisolver/ya.make
index 773d6b13bb..f76e1f0f98 100644
--- a/contrib/python/kiwisolver/ya.make
+++ b/contrib/python/kiwisolver/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/kiwisolver/py2)
ELSE()
diff --git a/contrib/python/lz4/ya.make b/contrib/python/lz4/ya.make
index 977908d228..2e06efcfba 100644
--- a/contrib/python/lz4/ya.make
+++ b/contrib/python/lz4/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/lz4/py2)
ELSE()
diff --git a/contrib/python/matplotlib/ya.make b/contrib/python/matplotlib/ya.make
index a4eb5c60c2..d892b3097a 100644
--- a/contrib/python/matplotlib/ya.make
+++ b/contrib/python/matplotlib/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/matplotlib/py2)
ELSE()
diff --git a/contrib/python/mock/ya.make b/contrib/python/mock/ya.make
index d00206341c..20980cf101 100644
--- a/contrib/python/mock/ya.make
+++ b/contrib/python/mock/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/mock/py2)
ELSE()
diff --git a/contrib/python/monotonic/ya.make b/contrib/python/monotonic/ya.make
index 8cdec1c354..f2e11382fa 100644
--- a/contrib/python/monotonic/ya.make
+++ b/contrib/python/monotonic/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/monotonic/py2)
ELSE()
diff --git a/contrib/python/more-itertools/ya.make b/contrib/python/more-itertools/ya.make
index 1e4b6a812a..bf1372cadd 100644
--- a/contrib/python/more-itertools/ya.make
+++ b/contrib/python/more-itertools/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/more-itertools/py2)
ELSE()
diff --git a/contrib/python/moto/ya.make b/contrib/python/moto/ya.make
index f093931e91..e1bceb1c78 100644
--- a/contrib/python/moto/ya.make
+++ b/contrib/python/moto/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/moto/py2)
ELSE()
diff --git a/contrib/python/numpy/ya.make b/contrib/python/numpy/ya.make
index 1c86272a78..56f7a597ae 100644
--- a/contrib/python/numpy/ya.make
+++ b/contrib/python/numpy/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
ADDINCL(
GLOBAL contrib/python/numpy/include/numpy/core/include
GLOBAL contrib/python/numpy/include/numpy/core/include/numpy
diff --git a/contrib/python/oauth2client/ya.make b/contrib/python/oauth2client/ya.make
index 8934e6dd84..e3919bf3f8 100644
--- a/contrib/python/oauth2client/ya.make
+++ b/contrib/python/oauth2client/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/oauth2client/py2)
ELSE()
diff --git a/contrib/python/olefile/ya.make b/contrib/python/olefile/ya.make
index b63b7a6123..32c9140777 100644
--- a/contrib/python/olefile/ya.make
+++ b/contrib/python/olefile/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/olefile/py2)
ELSE()
diff --git a/contrib/python/packaging/ya.make b/contrib/python/packaging/ya.make
index 02f0a77d8c..771b2849b6 100644
--- a/contrib/python/packaging/ya.make
+++ b/contrib/python/packaging/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/packaging/py2)
ELSE()
diff --git a/contrib/python/pandas/ya.make b/contrib/python/pandas/ya.make
index c372eeaacc..0e8d7b7f78 100644
--- a/contrib/python/pandas/ya.make
+++ b/contrib/python/pandas/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/pandas/py2)
ELSE()
diff --git a/contrib/python/parameterized/ya.make b/contrib/python/parameterized/ya.make
index 8a6d8706c0..487ef45697 100644
--- a/contrib/python/parameterized/ya.make
+++ b/contrib/python/parameterized/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/parameterized/py2)
ELSE()
diff --git a/contrib/python/parso/ya.make b/contrib/python/parso/ya.make
index b47642bc39..4bc2dfe6b2 100644
--- a/contrib/python/parso/ya.make
+++ b/contrib/python/parso/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/parso/py2)
ELSE()
diff --git a/contrib/python/pathlib2/ya.make b/contrib/python/pathlib2/ya.make
index 4b633231b1..384ae8b4cd 100644
--- a/contrib/python/pathlib2/ya.make
+++ b/contrib/python/pathlib2/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/pathlib2/py2)
ELSE()
diff --git a/contrib/python/pexpect/ya.make b/contrib/python/pexpect/ya.make
index ee4158641e..e045efe9c4 100644
--- a/contrib/python/pexpect/ya.make
+++ b/contrib/python/pexpect/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/pexpect/py2)
ELSE()
diff --git a/contrib/python/pickleshare/ya.make b/contrib/python/pickleshare/ya.make
index 7933003fc5..f6ec953779 100644
--- a/contrib/python/pickleshare/ya.make
+++ b/contrib/python/pickleshare/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/pickleshare/py2)
ELSE()
diff --git a/contrib/python/pluggy/ya.make b/contrib/python/pluggy/ya.make
index 22564cc847..52b039496f 100644
--- a/contrib/python/pluggy/ya.make
+++ b/contrib/python/pluggy/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/pluggy/py2)
ELSE()
diff --git a/contrib/python/ply/ya.make b/contrib/python/ply/ya.make
index 1aa09b48b2..2f24ea8f9b 100644
--- a/contrib/python/ply/ya.make
+++ b/contrib/python/ply/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/ply/py2)
ELSE()
diff --git a/contrib/python/prettytable/ya.make b/contrib/python/prettytable/ya.make
index 32e12da22d..6ef2e997cc 100644
--- a/contrib/python/prettytable/ya.make
+++ b/contrib/python/prettytable/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/prettytable/py2)
ELSE()
diff --git a/contrib/python/prompt-toolkit/ya.make b/contrib/python/prompt-toolkit/ya.make
index ea3e9c8ee4..510cd099ce 100644
--- a/contrib/python/prompt-toolkit/ya.make
+++ b/contrib/python/prompt-toolkit/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
LICENSE(Service-Py23-Proxy)
+VERSION(Service-proxy-version)
+
IF (PYTHON2)
PEERDIR(contrib/python/prompt-toolkit/py2)
ELSE()
diff --git a/contrib/python/protobuf/ya.make b/contrib/python/protobuf/ya.make
index 02d6c4459f..5d601b5842 100644
--- a/contrib/python/protobuf/ya.make
+++ b/contrib/python/protobuf/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/protobuf/py2)
 ELSE()
diff --git a/contrib/python/psutil/ya.make b/contrib/python/psutil/ya.make
index f5946cffcc..cdf432a7e5 100644
--- a/contrib/python/psutil/ya.make
+++ b/contrib/python/psutil/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/psutil/py2)
 ELSE()
diff --git a/contrib/python/ptyprocess/ya.make b/contrib/python/ptyprocess/ya.make
index 66ba24f281..eca414111a 100644
--- a/contrib/python/ptyprocess/ya.make
+++ b/contrib/python/ptyprocess/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/ptyprocess/py2)
 ELSE()
diff --git a/contrib/python/py/ya.make b/contrib/python/py/ya.make
index 6ad9a124d4..26ae00e073 100644
--- a/contrib/python/py/ya.make
+++ b/contrib/python/py/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/py/py2)
 ELSE()
diff --git a/contrib/python/pyOpenSSL/ya.make b/contrib/python/pyOpenSSL/ya.make
index 62a5fd852f..d21ff85dff 100644
--- a/contrib/python/pyOpenSSL/ya.make
+++ b/contrib/python/pyOpenSSL/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/pyOpenSSL/py2)
 ELSE()
diff --git a/contrib/python/pyasn1-modules/ya.make b/contrib/python/pyasn1-modules/ya.make
index 7d30eadc76..bf11ad23a7 100644
--- a/contrib/python/pyasn1-modules/ya.make
+++ b/contrib/python/pyasn1-modules/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/pyasn1-modules/py2)
 ELSE()
diff --git a/contrib/python/pyasn1/ya.make b/contrib/python/pyasn1/ya.make
index 1601f9047f..cc687a8393 100644
--- a/contrib/python/pyasn1/ya.make
+++ b/contrib/python/pyasn1/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/pyasn1/py2)
 ELSE()
diff --git a/contrib/python/pycparser/ya.make b/contrib/python/pycparser/ya.make
index edadfa0934..0e3e8ca21d 100644
--- a/contrib/python/pycparser/ya.make
+++ b/contrib/python/pycparser/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/pycparser/py2)
 ELSE()
diff --git a/contrib/python/pyparsing/ya.make b/contrib/python/pyparsing/ya.make
index ab2069fea8..7181093309 100644
--- a/contrib/python/pyparsing/ya.make
+++ b/contrib/python/pyparsing/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/pyparsing/py2)
 ELSE()
diff --git a/contrib/python/pyrsistent/ya.make b/contrib/python/pyrsistent/ya.make
index 41b7591417..9672a469d7 100644
--- a/contrib/python/pyrsistent/ya.make
+++ b/contrib/python/pyrsistent/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/pyrsistent/py2)
 ELSE()
diff --git a/contrib/python/pytest-localserver/ya.make b/contrib/python/pytest-localserver/ya.make
index 4e3ec10057..fee61da058 100644
--- a/contrib/python/pytest-localserver/ya.make
+++ b/contrib/python/pytest-localserver/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/pytest-localserver/py2)
 ELSE()
diff --git a/contrib/python/pytest-mock/ya.make b/contrib/python/pytest-mock/ya.make
index 1f41fd1bfb..0b760bd30f 100644
--- a/contrib/python/pytest-mock/ya.make
+++ b/contrib/python/pytest-mock/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/pytest-mock/py2)
 ELSE()
diff --git a/contrib/python/pytest/ya.make b/contrib/python/pytest/ya.make
index 3a2f78bc8a..dc4f9dc5d6 100644
--- a/contrib/python/pytest/ya.make
+++ b/contrib/python/pytest/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/pytest/py2)
 ELSE()
diff --git a/contrib/python/python-dateutil/ya.make b/contrib/python/python-dateutil/ya.make
index ed3aac7d91..2f2a47bc86 100644
--- a/contrib/python/python-dateutil/ya.make
+++ b/contrib/python/python-dateutil/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/python-dateutil/py2)
 ELSE()
diff --git a/contrib/python/pytz/ya.make b/contrib/python/pytz/ya.make
index 36ca67d462..b55102e733 100644
--- a/contrib/python/pytz/ya.make
+++ b/contrib/python/pytz/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/pytz/py2)
 ELSE()
diff --git a/contrib/python/requests-mock/ya.make b/contrib/python/requests-mock/ya.make
index ed5ea40daa..9d59772d15 100644
--- a/contrib/python/requests-mock/ya.make
+++ b/contrib/python/requests-mock/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/requests-mock/py2)
 ELSE()
diff --git a/contrib/python/requests/ya.make b/contrib/python/requests/ya.make
index 75788d078c..c15c0a43c5 100644
--- a/contrib/python/requests/ya.make
+++ b/contrib/python/requests/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/requests/py2)
 ELSE()
diff --git a/contrib/python/responses/ya.make b/contrib/python/responses/ya.make
index b175e9d55e..f0c1930c4f 100644
--- a/contrib/python/responses/ya.make
+++ b/contrib/python/responses/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/responses/py2)
 ELSE()
diff --git a/contrib/python/retry/ya.make b/contrib/python/retry/ya.make
index d9090c7d9c..e28099c0bb 100644
--- a/contrib/python/retry/ya.make
+++ b/contrib/python/retry/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/retry/py2)
 ELSE()
diff --git a/contrib/python/rsa/ya.make b/contrib/python/rsa/ya.make
index 0deffc020e..977f87cfae 100644
--- a/contrib/python/rsa/ya.make
+++ b/contrib/python/rsa/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/rsa/py2)
 ELSE()
diff --git a/contrib/python/ruamel.yaml.clib/ya.make b/contrib/python/ruamel.yaml.clib/ya.make
index acaec752ef..5329be28a2 100644
--- a/contrib/python/ruamel.yaml.clib/ya.make
+++ b/contrib/python/ruamel.yaml.clib/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/ruamel.yaml.clib/py2)
 ELSE()
diff --git a/contrib/python/ruamel.yaml/ya.make b/contrib/python/ruamel.yaml/ya.make
index 97670cf32f..4b07382118 100644
--- a/contrib/python/ruamel.yaml/ya.make
+++ b/contrib/python/ruamel.yaml/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/ruamel.yaml/py2)
 ELSE()
diff --git a/contrib/python/s3transfer/ya.make b/contrib/python/s3transfer/ya.make
index 6bbb3db096..c9b8a7b33e 100644
--- a/contrib/python/s3transfer/ya.make
+++ b/contrib/python/s3transfer/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/s3transfer/py2)
 ELSE()
diff --git a/contrib/python/scipy/ya.make b/contrib/python/scipy/ya.make
index 1ef565e751..7c0c17d32c 100644
--- a/contrib/python/scipy/ya.make
+++ b/contrib/python/scipy/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/scipy/py2)
 ELSE()
diff --git a/contrib/python/setuptools/ya.make b/contrib/python/setuptools/ya.make
index 7778741857..1d37c3f31f 100644
--- a/contrib/python/setuptools/ya.make
+++ b/contrib/python/setuptools/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/setuptools/py2)
 ELSE()
diff --git a/contrib/python/simplegeneric/ya.make b/contrib/python/simplegeneric/ya.make
index ca3b1a8ede..7888fa9962 100644
--- a/contrib/python/simplegeneric/ya.make
+++ b/contrib/python/simplegeneric/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/simplegeneric/py2)
 ELSE()
diff --git a/contrib/python/simplejson/ya.make b/contrib/python/simplejson/ya.make
index f20a130eb4..8ba230decd 100644
--- a/contrib/python/simplejson/ya.make
+++ b/contrib/python/simplejson/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/simplejson/py2)
 ELSE()
diff --git a/contrib/python/six/ya.make b/contrib/python/six/ya.make
index ad952a7eb5..eec3680ba1 100644
--- a/contrib/python/six/ya.make
+++ b/contrib/python/six/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/six/py2)
 ELSE()
diff --git a/contrib/python/sortedcontainers/ya.make b/contrib/python/sortedcontainers/ya.make
index 617c0fd110..467600c522 100644
--- a/contrib/python/sortedcontainers/ya.make
+++ b/contrib/python/sortedcontainers/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/sortedcontainers/py2)
 ELSE()
diff --git a/contrib/python/tenacity/ya.make b/contrib/python/tenacity/ya.make
index ea622da569..afea4fa1b7 100644
--- a/contrib/python/tenacity/ya.make
+++ b/contrib/python/tenacity/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/tenacity/py2)
 ELSE()
diff --git a/contrib/python/toml/ya.make b/contrib/python/toml/ya.make
index 4677dd8f68..e41a5dc3f0 100644
--- a/contrib/python/toml/ya.make
+++ b/contrib/python/toml/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/toml/py2)
 ELSE()
diff --git a/contrib/python/traitlets/ya.make b/contrib/python/traitlets/ya.make
index e18d19dd2f..72c990aac2 100644
--- a/contrib/python/traitlets/ya.make
+++ b/contrib/python/traitlets/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/traitlets/py2)
 ELSE()
diff --git a/contrib/python/typing-extensions/ya.make b/contrib/python/typing-extensions/ya.make
index dba3ff4f97..e3088e98ed 100644
--- a/contrib/python/typing-extensions/ya.make
+++ b/contrib/python/typing-extensions/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/typing-extensions/py2)
 ELSE()
diff --git a/contrib/python/urllib3/ya.make b/contrib/python/urllib3/ya.make
index a581d74047..ae285563b7 100644
--- a/contrib/python/urllib3/ya.make
+++ b/contrib/python/urllib3/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/urllib3/py2)
 ELSE()
diff --git a/contrib/python/wcwidth/ya.make b/contrib/python/wcwidth/ya.make
index 4931812cf7..72e3cb813a 100644
--- a/contrib/python/wcwidth/ya.make
+++ b/contrib/python/wcwidth/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/wcwidth/py2)
 ELSE()
diff --git a/contrib/python/websocket-client/ya.make b/contrib/python/websocket-client/ya.make
index f121a58fed..cb3ea7664b 100644
--- a/contrib/python/websocket-client/ya.make
+++ b/contrib/python/websocket-client/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/websocket-client/py2)
 ELSE()
diff --git a/contrib/python/xmltodict/ya.make b/contrib/python/xmltodict/ya.make
index ee41fa80bc..1198b1fe48 100644
--- a/contrib/python/xmltodict/ya.make
+++ b/contrib/python/xmltodict/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/xmltodict/py2)
 ELSE()
diff --git a/contrib/python/zope.interface/ya.make b/contrib/python/zope.interface/ya.make
index 1987d80896..42f01e3e49 100644
--- a/contrib/python/zope.interface/ya.make
+++ b/contrib/python/zope.interface/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/zope.interface/py2)
 ELSE()
diff --git a/contrib/python/zstandard/ya.make b/contrib/python/zstandard/ya.make
index 75cf08f357..2f4aeb7fcd 100644
--- a/contrib/python/zstandard/ya.make
+++ b/contrib/python/zstandard/ya.make
@@ -2,6 +2,8 @@ PY23_LIBRARY()
 
 LICENSE(Service-Py23-Proxy)
 
+VERSION(Service-proxy-version)
+
 IF (PYTHON2)
     PEERDIR(contrib/python/zstandard/py2)
 ELSE()
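
Every hunk in this series applies the same two-line edit: a VERSION() stanza inserted after the LICENSE() line of a PY23_LIBRARY proxy. As a sketch of the net effect, the resulting proxy ya.make (six used as the example; the ENDIF()/NO_LINT()/END() tail is assumed from the common contrib/python proxy layout and is not visible in the hunks above) would look like:

PY23_LIBRARY()

LICENSE(Service-Py23-Proxy)

VERSION(Service-proxy-version)

IF (PYTHON2)
    PEERDIR(contrib/python/six/py2)
ELSE()
    PEERDIR(contrib/python/six/py3)
ENDIF()

NO_LINT()

END()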