path: root/contrib/python/aiohttp
authorrekby <rekby@ydb.tech>2023-12-14 16:56:50 +0300
committerrekby <rekby@ydb.tech>2023-12-14 18:09:44 +0300
commitb2b2bb5997507072ca64548efe64447dd6395426 (patch)
treebbfbf77d11f1972c93ae4101fe561fd440d6ad6a /contrib/python/aiohttp
parent8b8678a6a4f57c62e348cdad8afd3849011a5f11 (diff)
downloadydb-b2b2bb5997507072ca64548efe64447dd6395426.tar.gz
KIKIMR-19900 switch arcadia to python ydb sdk from contrib
This PR was generated by a script. It switches dependencies on the Python YDB SDK from the copy living inside the ydb tree to the code that arrives via contrib. The code is identical in both versions, so no breakage or behavior changes are expected. Just in case, please look over your projects, and if you have objections, write here in the issues or in the KIKIMR-19900 ticket. If everything looks fine, give it a ship-it so we know. If there are no blockers, the PR will be regenerated and force-merged on Thursday, December 14.
Diffstat (limited to 'contrib/python/aiohttp')
-rw-r--r--  contrib/python/aiohttp/.dist-info/METADATA  255
-rw-r--r--  contrib/python/aiohttp/.dist-info/top_level.txt  1
-rw-r--r--  contrib/python/aiohttp/LICENSE.txt  13
-rw-r--r--  contrib/python/aiohttp/README.rst  204
-rw-r--r--  contrib/python/aiohttp/aiohttp/__init__.py  216
-rw-r--r--  contrib/python/aiohttp/aiohttp/_cparser.pxd  190
-rw-r--r--  contrib/python/aiohttp/aiohttp/_find_header.c  9870
-rw-r--r--  contrib/python/aiohttp/aiohttp/_find_header.h  14
-rw-r--r--  contrib/python/aiohttp/aiohttp/_find_header.pxd  2
-rw-r--r--  contrib/python/aiohttp/aiohttp/_headers.pxi  83
-rw-r--r--  contrib/python/aiohttp/aiohttp/_helpers.pyx  35
-rw-r--r--  contrib/python/aiohttp/aiohttp/_http_parser.pyx  818
-rw-r--r--  contrib/python/aiohttp/aiohttp/_http_writer.pyx  163
-rw-r--r--  contrib/python/aiohttp/aiohttp/_websocket.pyx  56
-rw-r--r--  contrib/python/aiohttp/aiohttp/abc.py  207
-rw-r--r--  contrib/python/aiohttp/aiohttp/base_protocol.py  87
-rw-r--r--  contrib/python/aiohttp/aiohttp/client.py  1304
-rw-r--r--  contrib/python/aiohttp/aiohttp/client_exceptions.py  342
-rw-r--r--  contrib/python/aiohttp/aiohttp/client_proto.py  251
-rw-r--r--  contrib/python/aiohttp/aiohttp/client_reqrep.py  1133
-rw-r--r--  contrib/python/aiohttp/aiohttp/client_ws.py  300
-rw-r--r--  contrib/python/aiohttp/aiohttp/connector.py  1451
-rw-r--r--  contrib/python/aiohttp/aiohttp/cookiejar.py  413
-rw-r--r--  contrib/python/aiohttp/aiohttp/formdata.py  172
-rw-r--r--  contrib/python/aiohttp/aiohttp/hdrs.py  114
-rw-r--r--  contrib/python/aiohttp/aiohttp/helpers.py  878
-rw-r--r--  contrib/python/aiohttp/aiohttp/http.py  72
-rw-r--r--  contrib/python/aiohttp/aiohttp/http_exceptions.py  105
-rw-r--r--  contrib/python/aiohttp/aiohttp/http_parser.py  956
-rw-r--r--  contrib/python/aiohttp/aiohttp/http_websocket.py  701
-rw-r--r--  contrib/python/aiohttp/aiohttp/http_writer.py  200
-rw-r--r--  contrib/python/aiohttp/aiohttp/locks.py  41
-rw-r--r--  contrib/python/aiohttp/aiohttp/log.py  8
-rw-r--r--  contrib/python/aiohttp/aiohttp/multipart.py  963
-rw-r--r--  contrib/python/aiohttp/aiohttp/payload.py  465
-rw-r--r--  contrib/python/aiohttp/aiohttp/payload_streamer.py  75
-rw-r--r--  contrib/python/aiohttp/aiohttp/py.typed  1
-rw-r--r--  contrib/python/aiohttp/aiohttp/pytest_plugin.py  391
-rw-r--r--  contrib/python/aiohttp/aiohttp/resolver.py  160
-rw-r--r--  contrib/python/aiohttp/aiohttp/streams.py  660
-rw-r--r--  contrib/python/aiohttp/aiohttp/tcp_helpers.py  38
-rw-r--r--  contrib/python/aiohttp/aiohttp/test_utils.py  698
-rw-r--r--  contrib/python/aiohttp/aiohttp/tracing.py  472
-rw-r--r--  contrib/python/aiohttp/aiohttp/typedefs.py  64
-rw-r--r--  contrib/python/aiohttp/aiohttp/web.py  586
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_app.py  557
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_exceptions.py  441
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_fileresponse.py  288
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_log.py  208
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_middlewares.py  119
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_protocol.py  681
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_request.py  874
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_response.py  825
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_routedef.py  213
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_runner.py  381
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_server.py  62
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_urldispatcher.py  1220
-rw-r--r--  contrib/python/aiohttp/aiohttp/web_ws.py  487
-rw-r--r--  contrib/python/aiohttp/aiohttp/worker.py  269
-rw-r--r--  contrib/python/aiohttp/ya.make  101
60 files changed, 31954 insertions, 0 deletions
diff --git a/contrib/python/aiohttp/.dist-info/METADATA b/contrib/python/aiohttp/.dist-info/METADATA
new file mode 100644
index 0000000000..c2f6befe9e
--- /dev/null
+++ b/contrib/python/aiohttp/.dist-info/METADATA
@@ -0,0 +1,255 @@
+Metadata-Version: 2.1
+Name: aiohttp
+Version: 3.8.1
+Summary: Async http client/server framework (asyncio)
+Home-page: https://github.com/aio-libs/aiohttp
+Maintainer: aiohttp team <team@aiohttp.org>
+Maintainer-email: team@aiohttp.org
+License: Apache 2
+Project-URL: Chat: Gitter, https://gitter.im/aio-libs/Lobby
+Project-URL: CI: GitHub Actions, https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
+Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/aiohttp
+Project-URL: Docs: Changelog, https://docs.aiohttp.org/en/stable/changes.html
+Project-URL: Docs: RTD, https://docs.aiohttp.org
+Project-URL: GitHub: issues, https://github.com/aio-libs/aiohttp/issues
+Project-URL: GitHub: repo, https://github.com/aio-libs/aiohttp
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Framework :: AsyncIO
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Topic :: Internet :: WWW/HTTP
+Requires-Python: >=3.6
+Description-Content-Type: text/x-rst
+License-File: LICENSE.txt
+Requires-Dist: attrs (>=17.3.0)
+Requires-Dist: charset-normalizer (<3.0,>=2.0)
+Requires-Dist: multidict (<7.0,>=4.5)
+Requires-Dist: async-timeout (<5.0,>=4.0.0a3)
+Requires-Dist: yarl (<2.0,>=1.0)
+Requires-Dist: frozenlist (>=1.1.1)
+Requires-Dist: aiosignal (>=1.1.2)
+Requires-Dist: idna-ssl (>=1.0) ; python_version < "3.7"
+Requires-Dist: asynctest (==0.13.0) ; python_version < "3.8"
+Requires-Dist: typing-extensions (>=3.7.4) ; python_version < "3.8"
+Provides-Extra: speedups
+Requires-Dist: aiodns ; extra == 'speedups'
+Requires-Dist: Brotli ; extra == 'speedups'
+Requires-Dist: cchardet ; extra == 'speedups'
+
+==================================
+Async http client/server framework
+==================================
+
+.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg
+ :height: 64px
+ :width: 64px
+ :alt: aiohttp logo
+
+|
+
+.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
+ :alt: GitHub Actions status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/aiohttp
+ :alt: codecov.io status for master branch
+
+.. image:: https://badge.fury.io/py/aiohttp.svg
+ :target: https://pypi.org/project/aiohttp
+ :alt: Latest PyPI package version
+
+.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
+ :target: https://docs.aiohttp.org/
+ :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
+ :target: https://aio-libs.discourse.group
+ :alt: Discourse status
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+
+Key Features
+============
+
+- Supports both the client and server sides of the HTTP protocol.
+- Supports both client and server Web-Sockets out-of-the-box and avoids
+ Callback Hell.
+- Provides a web server with middlewares and pluggable routing.
+
+
+Getting started
+===============
+
+Client
+------
+
+To get something from the web:
+
+.. code-block:: python
+
+ import aiohttp
+ import asyncio
+
+ async def main():
+
+ async with aiohttp.ClientSession() as session:
+ async with session.get('http://python.org') as response:
+
+ print("Status:", response.status)
+ print("Content-type:", response.headers['content-type'])
+
+ html = await response.text()
+ print("Body:", html[:15], "...")
+
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(main())
+
+This prints:
+
+.. code-block::
+
+ Status: 200
+ Content-type: text/html; charset=utf-8
+ Body: <!doctype html> ...
+
+Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
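+
+On Python 3.7 and newer the same request can also be driven with
+``asyncio.run``, which creates and closes the event loop for you; a
+minimal sketch of the equivalent call (no new APIs assumed):
+
+.. code-block:: python
+
+   import asyncio
+
+   import aiohttp
+
+   async def main():
+       # Both objects are async context managers, so the session and
+       # the response connection are released automatically.
+       async with aiohttp.ClientSession() as session:
+           async with session.get('http://python.org') as response:
+               print("Status:", response.status)
+
+   asyncio.run(main())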
+
+Server
+------
+
+An example using a simple server:
+
+.. code-block:: python
+
+ # examples/server_simple.py
+ from aiohttp import web
+
+ async def handle(request):
+ name = request.match_info.get('name', "Anonymous")
+ text = "Hello, " + name
+ return web.Response(text=text)
+
+ async def wshandle(request):
+ ws = web.WebSocketResponse()
+ await ws.prepare(request)
+
+ async for msg in ws:
+ if msg.type == web.WSMsgType.text:
+ await ws.send_str("Hello, {}".format(msg.data))
+ elif msg.type == web.WSMsgType.binary:
+ await ws.send_bytes(msg.data)
+ elif msg.type == web.WSMsgType.close:
+ break
+
+ return ws
+
+
+ app = web.Application()
+ app.add_routes([web.get('/', handle),
+ web.get('/echo', wshandle),
+ web.get('/{name}', handle)])
+
+ if __name__ == '__main__':
+ web.run_app(app)
+
+
+Documentation
+=============
+
+https://aiohttp.readthedocs.io/
+
+
+Demos
+=====
+
+https://github.com/aio-libs/aiohttp-demos
+
+
+External links
+==============
+
+* `Third party libraries
+ <http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
+* `Built with aiohttp
+ <http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
+* `Powered by aiohttp
+ <http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
+
+Feel free to make a Pull Request for adding your link to these pages!
+
+
+Communication channels
+======================
+
+*aio-libs discourse group*: https://aio-libs.discourse.group
+
+*gitter chat*: https://gitter.im/aio-libs/Lobby
+
+We support `Stack Overflow
+<https://stackoverflow.com/questions/tagged/aiohttp>`_.
+Please add the *aiohttp* tag to your question there.
+
+Requirements
+============
+
+- Python >= 3.6
+- async-timeout_
+- attrs_
+- charset-normalizer_
+- multidict_
+- yarl_
+
+Optionally you may install the cChardet_ and aiodns_ libraries (highly
+recommended for the sake of speed).
+
+.. _charset-normalizer: https://pypi.org/project/charset-normalizer
+.. _aiodns: https://pypi.python.org/pypi/aiodns
+.. _attrs: https://github.com/python-attrs/attrs
+.. _multidict: https://pypi.python.org/pypi/multidict
+.. _yarl: https://pypi.python.org/pypi/yarl
+.. _async-timeout: https://pypi.python.org/pypi/async_timeout
+.. _cChardet: https://pypi.python.org/pypi/cchardet
+
+License
+=======
+
+``aiohttp`` is offered under the Apache 2 license.
+
+
+Keepsafe
+========
+
+The aiohttp community would like to thank Keepsafe
+(https://www.getkeepsafe.com) for its support in the early days of
+the project.
+
+
+Source code
+===========
+
+The latest developer version is available in a GitHub repository:
+https://github.com/aio-libs/aiohttp
+
+Benchmarks
+==========
+
+If you are interested in efficiency, the AsyncIO community maintains a
+list of benchmarks on the official wiki:
+https://github.com/python/asyncio/wiki/Benchmarks
+
+
diff --git a/contrib/python/aiohttp/.dist-info/top_level.txt b/contrib/python/aiohttp/.dist-info/top_level.txt
new file mode 100644
index 0000000000..ee4ba4f3d7
--- /dev/null
+++ b/contrib/python/aiohttp/.dist-info/top_level.txt
@@ -0,0 +1 @@
+aiohttp
diff --git a/contrib/python/aiohttp/LICENSE.txt b/contrib/python/aiohttp/LICENSE.txt
new file mode 100644
index 0000000000..054102f2db
--- /dev/null
+++ b/contrib/python/aiohttp/LICENSE.txt
@@ -0,0 +1,13 @@
+ Copyright 2013-2020 aio-libs collaboration.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/aiohttp/README.rst b/contrib/python/aiohttp/README.rst
new file mode 100644
index 0000000000..d057acbe2f
--- /dev/null
+++ b/contrib/python/aiohttp/README.rst
@@ -0,0 +1,204 @@
+==================================
+Async http client/server framework
+==================================
+
+.. image:: https://raw.githubusercontent.com/aio-libs/aiohttp/master/docs/aiohttp-plain.svg
+ :height: 64px
+ :width: 64px
+ :alt: aiohttp logo
+
+|
+
+.. image:: https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg
+ :target: https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI
+ :alt: GitHub Actions status for master branch
+
+.. image:: https://codecov.io/gh/aio-libs/aiohttp/branch/master/graph/badge.svg
+ :target: https://codecov.io/gh/aio-libs/aiohttp
+ :alt: codecov.io status for master branch
+
+.. image:: https://badge.fury.io/py/aiohttp.svg
+ :target: https://pypi.org/project/aiohttp
+ :alt: Latest PyPI package version
+
+.. image:: https://readthedocs.org/projects/aiohttp/badge/?version=latest
+ :target: https://docs.aiohttp.org/
+ :alt: Latest Read The Docs
+
+.. image:: https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group
+ :target: https://aio-libs.discourse.group
+ :alt: Discourse status
+
+.. image:: https://badges.gitter.im/Join%20Chat.svg
+ :target: https://gitter.im/aio-libs/Lobby
+ :alt: Chat on Gitter
+
+
+Key Features
+============
+
+- Supports both the client and server sides of the HTTP protocol.
+- Supports both client and server Web-Sockets out-of-the-box and avoids
+ Callback Hell.
+- Provides a web server with middlewares and pluggable routing (a short
+ middleware sketch follows this list).
+
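+To make the middleware hook concrete, here is a minimal sketch; the
+name ``logging_middleware`` and the printed format are illustrative,
+not part of the library:
+
+.. code-block:: python
+
+   from aiohttp import web
+
+   @web.middleware
+   async def logging_middleware(request, handler):
+       # Runs around every handler: delegate first, then inspect.
+       response = await handler(request)
+       print(request.method, request.path, "->", response.status)
+       return response
+
+   app = web.Application(middlewares=[logging_middleware])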
+
+Getting started
+===============
+
+Client
+------
+
+To get something from the web:
+
+.. code-block:: python
+
+ import aiohttp
+ import asyncio
+
+ async def main():
+
+ async with aiohttp.ClientSession() as session:
+ async with session.get('http://python.org') as response:
+
+ print("Status:", response.status)
+ print("Content-type:", response.headers['content-type'])
+
+ html = await response.text()
+ print("Body:", html[:15], "...")
+
+ loop = asyncio.get_event_loop()
+ loop.run_until_complete(main())
+
+This prints:
+
+.. code-block::
+
+ Status: 200
+ Content-type: text/html; charset=utf-8
+ Body: <!doctype html> ...
+
+Coming from `requests <https://requests.readthedocs.io/>`_ ? Read `why we need so many lines <https://aiohttp.readthedocs.io/en/latest/http_request_lifecycle.html>`_.
+
+Server
+------
+
+An example using a simple server:
+
+.. code-block:: python
+
+ # examples/server_simple.py
+ from aiohttp import web
+
+ async def handle(request):
+ name = request.match_info.get('name', "Anonymous")
+ text = "Hello, " + name
+ return web.Response(text=text)
+
+ async def wshandle(request):
+ ws = web.WebSocketResponse()
+ await ws.prepare(request)
+
+ async for msg in ws:
+ if msg.type == web.WSMsgType.text:
+ await ws.send_str("Hello, {}".format(msg.data))
+ elif msg.type == web.WSMsgType.binary:
+ await ws.send_bytes(msg.data)
+ elif msg.type == web.WSMsgType.close:
+ break
+
+ return ws
+
+
+ app = web.Application()
+ app.add_routes([web.get('/', handle),
+ web.get('/echo', wshandle),
+ web.get('/{name}', handle)])
+
+ if __name__ == '__main__':
+ web.run_app(app)
+
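+The ``/echo`` route above can be exercised from the client side with
+``ClientSession.ws_connect``; a minimal sketch, assuming the server is
+running locally on the default port 8080:
+
+.. code-block:: python
+
+   import asyncio
+
+   import aiohttp
+
+   async def main():
+       async with aiohttp.ClientSession() as session:
+           # ws_connect performs the WebSocket handshake for us.
+           async with session.ws_connect('http://localhost:8080/echo') as ws:
+               await ws.send_str('ping')
+               msg = await ws.receive()
+               print(msg.data)  # the handler answers "Hello, ping"
+
+   asyncio.run(main())
+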
+
+Documentation
+=============
+
+https://aiohttp.readthedocs.io/
+
+
+Demos
+=====
+
+https://github.com/aio-libs/aiohttp-demos
+
+
+External links
+==============
+
+* `Third party libraries
+ <http://aiohttp.readthedocs.io/en/latest/third_party.html>`_
+* `Built with aiohttp
+ <http://aiohttp.readthedocs.io/en/latest/built_with.html>`_
+* `Powered by aiohttp
+ <http://aiohttp.readthedocs.io/en/latest/powered_by.html>`_
+
+Feel free to make a Pull Request for adding your link to these pages!
+
+
+Communication channels
+======================
+
+*aio-libs discourse group*: https://aio-libs.discourse.group
+
+*gitter chat*: https://gitter.im/aio-libs/Lobby
+
+We support `Stack Overflow
+<https://stackoverflow.com/questions/tagged/aiohttp>`_.
+Please add the *aiohttp* tag to your question there.
+
+Requirements
+============
+
+- Python >= 3.6
+- async-timeout_
+- attrs_
+- charset-normalizer_
+- multidict_
+- yarl_
+
+Optionally you may install the cChardet_ and aiodns_ libraries (highly
+recommended for the sake of speed).
+
+.. _charset-normalizer: https://pypi.org/project/charset-normalizer
+.. _aiodns: https://pypi.python.org/pypi/aiodns
+.. _attrs: https://github.com/python-attrs/attrs
+.. _multidict: https://pypi.python.org/pypi/multidict
+.. _yarl: https://pypi.python.org/pypi/yarl
+.. _async-timeout: https://pypi.python.org/pypi/async_timeout
+.. _cChardet: https://pypi.python.org/pypi/cchardet
+
+License
+=======
+
+``aiohttp`` is offered under the Apache 2 license.
+
+
+Keepsafe
+========
+
+The aiohttp community would like to thank Keepsafe
+(https://www.getkeepsafe.com) for its support in the early days of
+the project.
+
+
+Source code
+===========
+
+The latest developer version is available in a GitHub repository:
+https://github.com/aio-libs/aiohttp
+
+Benchmarks
+==========
+
+If you are interested in efficiency, the AsyncIO community maintains a
+list of benchmarks on the official wiki:
+https://github.com/python/asyncio/wiki/Benchmarks
diff --git a/contrib/python/aiohttp/aiohttp/__init__.py b/contrib/python/aiohttp/aiohttp/__init__.py
new file mode 100644
index 0000000000..4bbcef2935
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/__init__.py
@@ -0,0 +1,216 @@
+__version__ = "3.8.1"
+
+from typing import Tuple
+
+from . import hdrs as hdrs
+from .client import (
+ BaseConnector as BaseConnector,
+ ClientConnectionError as ClientConnectionError,
+ ClientConnectorCertificateError as ClientConnectorCertificateError,
+ ClientConnectorError as ClientConnectorError,
+ ClientConnectorSSLError as ClientConnectorSSLError,
+ ClientError as ClientError,
+ ClientHttpProxyError as ClientHttpProxyError,
+ ClientOSError as ClientOSError,
+ ClientPayloadError as ClientPayloadError,
+ ClientProxyConnectionError as ClientProxyConnectionError,
+ ClientRequest as ClientRequest,
+ ClientResponse as ClientResponse,
+ ClientResponseError as ClientResponseError,
+ ClientSession as ClientSession,
+ ClientSSLError as ClientSSLError,
+ ClientTimeout as ClientTimeout,
+ ClientWebSocketResponse as ClientWebSocketResponse,
+ ContentTypeError as ContentTypeError,
+ Fingerprint as Fingerprint,
+ InvalidURL as InvalidURL,
+ NamedPipeConnector as NamedPipeConnector,
+ RequestInfo as RequestInfo,
+ ServerConnectionError as ServerConnectionError,
+ ServerDisconnectedError as ServerDisconnectedError,
+ ServerFingerprintMismatch as ServerFingerprintMismatch,
+ ServerTimeoutError as ServerTimeoutError,
+ TCPConnector as TCPConnector,
+ TooManyRedirects as TooManyRedirects,
+ UnixConnector as UnixConnector,
+ WSServerHandshakeError as WSServerHandshakeError,
+ request as request,
+)
+from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
+from .formdata import FormData as FormData
+from .helpers import BasicAuth, ChainMapProxy, ETag
+from .http import (
+ HttpVersion as HttpVersion,
+ HttpVersion10 as HttpVersion10,
+ HttpVersion11 as HttpVersion11,
+ WebSocketError as WebSocketError,
+ WSCloseCode as WSCloseCode,
+ WSMessage as WSMessage,
+ WSMsgType as WSMsgType,
+)
+from .multipart import (
+ BadContentDispositionHeader as BadContentDispositionHeader,
+ BadContentDispositionParam as BadContentDispositionParam,
+ BodyPartReader as BodyPartReader,
+ MultipartReader as MultipartReader,
+ MultipartWriter as MultipartWriter,
+ content_disposition_filename as content_disposition_filename,
+ parse_content_disposition as parse_content_disposition,
+)
+from .payload import (
+ PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
+ AsyncIterablePayload as AsyncIterablePayload,
+ BufferedReaderPayload as BufferedReaderPayload,
+ BytesIOPayload as BytesIOPayload,
+ BytesPayload as BytesPayload,
+ IOBasePayload as IOBasePayload,
+ JsonPayload as JsonPayload,
+ Payload as Payload,
+ StringIOPayload as StringIOPayload,
+ StringPayload as StringPayload,
+ TextIOPayload as TextIOPayload,
+ get_payload as get_payload,
+ payload_type as payload_type,
+)
+from .payload_streamer import streamer as streamer
+from .resolver import (
+ AsyncResolver as AsyncResolver,
+ DefaultResolver as DefaultResolver,
+ ThreadedResolver as ThreadedResolver,
+)
+from .streams import (
+ EMPTY_PAYLOAD as EMPTY_PAYLOAD,
+ DataQueue as DataQueue,
+ EofStream as EofStream,
+ FlowControlDataQueue as FlowControlDataQueue,
+ StreamReader as StreamReader,
+)
+from .tracing import (
+ TraceConfig as TraceConfig,
+ TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
+ TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
+ TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
+ TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
+ TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
+ TraceDnsCacheHitParams as TraceDnsCacheHitParams,
+ TraceDnsCacheMissParams as TraceDnsCacheMissParams,
+ TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
+ TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
+ TraceRequestChunkSentParams as TraceRequestChunkSentParams,
+ TraceRequestEndParams as TraceRequestEndParams,
+ TraceRequestExceptionParams as TraceRequestExceptionParams,
+ TraceRequestRedirectParams as TraceRequestRedirectParams,
+ TraceRequestStartParams as TraceRequestStartParams,
+ TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
+)
+
+__all__: Tuple[str, ...] = (
+ "hdrs",
+ # client
+ "BaseConnector",
+ "ClientConnectionError",
+ "ClientConnectorCertificateError",
+ "ClientConnectorError",
+ "ClientConnectorSSLError",
+ "ClientError",
+ "ClientHttpProxyError",
+ "ClientOSError",
+ "ClientPayloadError",
+ "ClientProxyConnectionError",
+ "ClientResponse",
+ "ClientRequest",
+ "ClientResponseError",
+ "ClientSSLError",
+ "ClientSession",
+ "ClientTimeout",
+ "ClientWebSocketResponse",
+ "ContentTypeError",
+ "Fingerprint",
+ "InvalidURL",
+ "RequestInfo",
+ "ServerConnectionError",
+ "ServerDisconnectedError",
+ "ServerFingerprintMismatch",
+ "ServerTimeoutError",
+ "TCPConnector",
+ "TooManyRedirects",
+ "UnixConnector",
+ "NamedPipeConnector",
+ "WSServerHandshakeError",
+ "request",
+ # cookiejar
+ "CookieJar",
+ "DummyCookieJar",
+ # formdata
+ "FormData",
+ # helpers
+ "BasicAuth",
+ "ChainMapProxy",
+ "ETag",
+ # http
+ "HttpVersion",
+ "HttpVersion10",
+ "HttpVersion11",
+ "WSMsgType",
+ "WSCloseCode",
+ "WSMessage",
+ "WebSocketError",
+ # multipart
+ "BadContentDispositionHeader",
+ "BadContentDispositionParam",
+ "BodyPartReader",
+ "MultipartReader",
+ "MultipartWriter",
+ "content_disposition_filename",
+ "parse_content_disposition",
+ # payload
+ "AsyncIterablePayload",
+ "BufferedReaderPayload",
+ "BytesIOPayload",
+ "BytesPayload",
+ "IOBasePayload",
+ "JsonPayload",
+ "PAYLOAD_REGISTRY",
+ "Payload",
+ "StringIOPayload",
+ "StringPayload",
+ "TextIOPayload",
+ "get_payload",
+ "payload_type",
+ # payload_streamer
+ "streamer",
+ # resolver
+ "AsyncResolver",
+ "DefaultResolver",
+ "ThreadedResolver",
+ # streams
+ "DataQueue",
+ "EMPTY_PAYLOAD",
+ "EofStream",
+ "FlowControlDataQueue",
+ "StreamReader",
+ # tracing
+ "TraceConfig",
+ "TraceConnectionCreateEndParams",
+ "TraceConnectionCreateStartParams",
+ "TraceConnectionQueuedEndParams",
+ "TraceConnectionQueuedStartParams",
+ "TraceConnectionReuseconnParams",
+ "TraceDnsCacheHitParams",
+ "TraceDnsCacheMissParams",
+ "TraceDnsResolveHostEndParams",
+ "TraceDnsResolveHostStartParams",
+ "TraceRequestChunkSentParams",
+ "TraceRequestEndParams",
+ "TraceRequestExceptionParams",
+ "TraceRequestRedirectParams",
+ "TraceRequestStartParams",
+ "TraceResponseChunkReceivedParams",
+)
+
+try:
+ from .worker import GunicornUVLoopWebWorker, GunicornWebWorker
+
+ __all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
+except ImportError: # pragma: no cover
+ pass
diff --git a/contrib/python/aiohttp/aiohttp/_cparser.pxd b/contrib/python/aiohttp/aiohttp/_cparser.pxd
new file mode 100644
index 0000000000..49055d6a56
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_cparser.pxd
@@ -0,0 +1,190 @@
+from libc.stdint cimport (
+ int8_t,
+ int16_t,
+ int32_t,
+ int64_t,
+ uint8_t,
+ uint16_t,
+ uint32_t,
+ uint64_t,
+)
+
+
+cdef extern from "llhttp.h":
+
+ struct llhttp__internal_s:
+ int32_t _index
+ void* _span_pos0
+ void* _span_cb0
+ int32_t error
+ const char* reason
+ const char* error_pos
+ void* data
+ void* _current
+ uint64_t content_length
+ uint8_t type
+ uint8_t method
+ uint8_t http_major
+ uint8_t http_minor
+ uint8_t header_state
+ uint8_t lenient_flags
+ uint8_t upgrade
+ uint8_t finish
+ uint16_t flags
+ uint16_t status_code
+ void* settings
+
+ ctypedef llhttp__internal_s llhttp__internal_t
+ ctypedef llhttp__internal_t llhttp_t
+
+ ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
+ ctypedef int (*llhttp_cb)(llhttp_t*) except -1
+
+ struct llhttp_settings_s:
+ llhttp_cb on_message_begin
+ llhttp_data_cb on_url
+ llhttp_data_cb on_status
+ llhttp_data_cb on_header_field
+ llhttp_data_cb on_header_value
+ llhttp_cb on_headers_complete
+ llhttp_data_cb on_body
+ llhttp_cb on_message_complete
+ llhttp_cb on_chunk_header
+ llhttp_cb on_chunk_complete
+
+ llhttp_cb on_url_complete
+ llhttp_cb on_status_complete
+ llhttp_cb on_header_field_complete
+ llhttp_cb on_header_value_complete
+
+ ctypedef llhttp_settings_s llhttp_settings_t
+
+ enum llhttp_errno:
+ HPE_OK,
+ HPE_INTERNAL,
+ HPE_STRICT,
+ HPE_LF_EXPECTED,
+ HPE_UNEXPECTED_CONTENT_LENGTH,
+ HPE_CLOSED_CONNECTION,
+ HPE_INVALID_METHOD,
+ HPE_INVALID_URL,
+ HPE_INVALID_CONSTANT,
+ HPE_INVALID_VERSION,
+ HPE_INVALID_HEADER_TOKEN,
+ HPE_INVALID_CONTENT_LENGTH,
+ HPE_INVALID_CHUNK_SIZE,
+ HPE_INVALID_STATUS,
+ HPE_INVALID_EOF_STATE,
+ HPE_INVALID_TRANSFER_ENCODING,
+ HPE_CB_MESSAGE_BEGIN,
+ HPE_CB_HEADERS_COMPLETE,
+ HPE_CB_MESSAGE_COMPLETE,
+ HPE_CB_CHUNK_HEADER,
+ HPE_CB_CHUNK_COMPLETE,
+ HPE_PAUSED,
+ HPE_PAUSED_UPGRADE,
+ HPE_USER
+
+ ctypedef llhttp_errno llhttp_errno_t
+
+ enum llhttp_flags:
+ F_CONNECTION_KEEP_ALIVE,
+ F_CONNECTION_CLOSE,
+ F_CONNECTION_UPGRADE,
+ F_CHUNKED,
+ F_UPGRADE,
+ F_CONTENT_LENGTH,
+ F_SKIPBODY,
+ F_TRAILING,
+ F_TRANSFER_ENCODING
+
+ enum llhttp_lenient_flags:
+ LENIENT_HEADERS,
+ LENIENT_CHUNKED_LENGTH
+
+ enum llhttp_type:
+ HTTP_REQUEST,
+ HTTP_RESPONSE,
+ HTTP_BOTH
+
+ enum llhttp_finish_t:
+ HTTP_FINISH_SAFE,
+ HTTP_FINISH_SAFE_WITH_CB,
+ HTTP_FINISH_UNSAFE
+
+ enum llhttp_method:
+ HTTP_DELETE,
+ HTTP_GET,
+ HTTP_HEAD,
+ HTTP_POST,
+ HTTP_PUT,
+ HTTP_CONNECT,
+ HTTP_OPTIONS,
+ HTTP_TRACE,
+ HTTP_COPY,
+ HTTP_LOCK,
+ HTTP_MKCOL,
+ HTTP_MOVE,
+ HTTP_PROPFIND,
+ HTTP_PROPPATCH,
+ HTTP_SEARCH,
+ HTTP_UNLOCK,
+ HTTP_BIND,
+ HTTP_REBIND,
+ HTTP_UNBIND,
+ HTTP_ACL,
+ HTTP_REPORT,
+ HTTP_MKACTIVITY,
+ HTTP_CHECKOUT,
+ HTTP_MERGE,
+ HTTP_MSEARCH,
+ HTTP_NOTIFY,
+ HTTP_SUBSCRIBE,
+ HTTP_UNSUBSCRIBE,
+ HTTP_PATCH,
+ HTTP_PURGE,
+ HTTP_MKCALENDAR,
+ HTTP_LINK,
+ HTTP_UNLINK,
+ HTTP_SOURCE,
+ HTTP_PRI,
+ HTTP_DESCRIBE,
+ HTTP_ANNOUNCE,
+ HTTP_SETUP,
+ HTTP_PLAY,
+ HTTP_PAUSE,
+ HTTP_TEARDOWN,
+ HTTP_GET_PARAMETER,
+ HTTP_SET_PARAMETER,
+ HTTP_REDIRECT,
+ HTTP_RECORD,
+ HTTP_FLUSH
+
+ ctypedef llhttp_method llhttp_method_t;
+
+ void llhttp_settings_init(llhttp_settings_t* settings)
+ void llhttp_init(llhttp_t* parser, llhttp_type type,
+ const llhttp_settings_t* settings)
+
+ llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
+ llhttp_errno_t llhttp_finish(llhttp_t* parser)
+
+ int llhttp_message_needs_eof(const llhttp_t* parser)
+
+ int llhttp_should_keep_alive(const llhttp_t* parser)
+
+ void llhttp_pause(llhttp_t* parser)
+ void llhttp_resume(llhttp_t* parser)
+
+ void llhttp_resume_after_upgrade(llhttp_t* parser)
+
+ llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
+ const char* llhttp_get_error_reason(const llhttp_t* parser)
+ void llhttp_set_error_reason(llhttp_t* parser, const char* reason)
+ const char* llhttp_get_error_pos(const llhttp_t* parser)
+ const char* llhttp_errno_name(llhttp_errno_t err)
+
+ const char* llhttp_method_name(llhttp_method_t method)
+
+ void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
+ void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled)
diff --git a/contrib/python/aiohttp/aiohttp/_find_header.c b/contrib/python/aiohttp/aiohttp/_find_header.c
new file mode 100644
index 0000000000..012cba33ac
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_find_header.c
@@ -0,0 +1,9870 @@
+/* The file is autogenerated from aiohttp/hdrs.py
+Run ./tools/gen.py to regenerate it whenever the origin changes. */
+
+#include "_find_header.h"
+
+#define NEXT_CHAR() \
+do { \
+ count++; \
+ if (count == size) { \
+ /* end of search */ \
+ return -1; \
+ } \
+ pchar++; \
+ ch = *pchar; \
+ last = (count == size - 1); \
+} while (0)
+
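+/*
+ * How the matcher below works: find_header() advances through the input
+ * one character at a time via an unrolled trie of goto labels
+ * (A -> AC -> ACC -> ...), with upper- and lower-case branches at every
+ * step, so matching is case-insensitive.  Hitting a terminal character
+ * on the last input byte returns that header's index in the generated
+ * table (Accept = 0, Accept-Charset = 1, ...); any mismatch, or running
+ * out of input mid-name, returns -1.
+ */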
+int
+find_header(const char *str, int size)
+{
+ const char *pchar = str;
+ int last;
+ char ch;
+ int count = -1;
+ pchar--;
+
+
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto A;
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto C;
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto D;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto D;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto E;
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto F;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto F;
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto H;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto H;
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto I;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto I;
+ case 'K':
+ if (last) {
+ return -1;
+ }
+ goto K;
+ case 'k':
+ if (last) {
+ return -1;
+ }
+ goto K;
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto L;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto L;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto M;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto O;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto O;
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto P;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto P;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto R;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto R;
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto S;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto S;
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto T;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto T;
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto U;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto U;
+ case 'V':
+ if (last) {
+ return -1;
+ }
+ goto V;
+ case 'v':
+ if (last) {
+ return -1;
+ }
+ goto V;
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto W;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto W;
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto X;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto X;
+ default:
+ return -1;
+ }
+
+A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto AC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto AC;
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto AG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto AG;
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto AL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto AL;
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto AU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto AU;
+ default:
+ return -1;
+ }
+
+AC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto ACC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto ACC;
+ default:
+ return -1;
+ }
+
+ACC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCE;
+ default:
+ return -1;
+ }
+
+ACCE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto ACCEP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto ACCEP;
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto ACCES;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto ACCES;
+ default:
+ return -1;
+ }
+
+ACCEP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 0;
+ }
+ goto ACCEPT;
+ case 't':
+ if (last) {
+ return 0;
+ }
+ goto ACCEPT;
+ default:
+ return -1;
+ }
+
+ACCEPT:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_;
+ default:
+ return -1;
+ }
+
+ACCEPT_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_C;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_E;
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_L;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_L;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_R;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_R;
+ default:
+ return -1;
+ }
+
+ACCEPT_C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CH;
+ default:
+ return -1;
+ }
+
+ACCEPT_CH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHA;
+ default:
+ return -1;
+ }
+
+ACCEPT_CHA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHAR;
+ default:
+ return -1;
+ }
+
+ACCEPT_CHAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHARS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHARS;
+ default:
+ return -1;
+ }
+
+ACCEPT_CHARS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHARSE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_CHARSE;
+ default:
+ return -1;
+ }
+
+ACCEPT_CHARSE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 1;
+ }
+ goto ACCEPT_CHARSET;
+ case 't':
+ if (last) {
+ return 1;
+ }
+ goto ACCEPT_CHARSET;
+ default:
+ return -1;
+ }
+
+ACCEPT_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_EN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_EN;
+ default:
+ return -1;
+ }
+
+ACCEPT_EN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENC;
+ default:
+ return -1;
+ }
+
+ACCEPT_ENC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCO;
+ default:
+ return -1;
+ }
+
+ACCEPT_ENCO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCOD;
+ default:
+ return -1;
+ }
+
+ACCEPT_ENCOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCODI;
+ default:
+ return -1;
+ }
+
+ACCEPT_ENCODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCODIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_ENCODIN;
+ default:
+ return -1;
+ }
+
+ACCEPT_ENCODIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 2;
+ }
+ goto ACCEPT_ENCODING;
+ case 'g':
+ if (last) {
+ return 2;
+ }
+ goto ACCEPT_ENCODING;
+ default:
+ return -1;
+ }
+
+ACCEPT_L:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LA;
+ default:
+ return -1;
+ }
+
+ACCEPT_LA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LAN;
+ default:
+ return -1;
+ }
+
+ACCEPT_LAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANG;
+ default:
+ return -1;
+ }
+
+ACCEPT_LANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGU;
+ default:
+ return -1;
+ }
+
+ACCEPT_LANGU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGUA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGUA;
+ default:
+ return -1;
+ }
+
+ACCEPT_LANGUA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGUAG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_LANGUAG;
+ default:
+ return -1;
+ }
+
+ACCEPT_LANGUAG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 3;
+ }
+ goto ACCEPT_LANGUAGE;
+ case 'e':
+ if (last) {
+ return 3;
+ }
+ goto ACCEPT_LANGUAGE;
+ default:
+ return -1;
+ }
+
+ACCEPT_R:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RA;
+ default:
+ return -1;
+ }
+
+ACCEPT_RA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RAN;
+ default:
+ return -1;
+ }
+
+ACCEPT_RAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RANG;
+ default:
+ return -1;
+ }
+
+ACCEPT_RANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RANGE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCEPT_RANGE;
+ default:
+ return -1;
+ }
+
+ACCEPT_RANGE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 4;
+ }
+ goto ACCEPT_RANGES;
+ case 's':
+ if (last) {
+ return 4;
+ }
+ goto ACCEPT_RANGES;
+ default:
+ return -1;
+ }
+
+ACCES:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS;
+ default:
+ return -1;
+ }
+
+ACCESS:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_;
+ default:
+ return -1;
+ }
+
+ACCESS_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_C;
+ default:
+ return -1;
+ }
+
+ACCESS_C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CO;
+ default:
+ return -1;
+ }
+
+ACCESS_CO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CON;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CON;
+ default:
+ return -1;
+ }
+
+ACCESS_CON:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONT;
+ default:
+ return -1;
+ }
+
+ACCESS_CONT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTR;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTRO;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_A;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_E;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_M;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_R;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_R;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_AL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_AL;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_AL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALL;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLO;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_C;
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_H;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_H;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_M;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_O;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_O;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CR;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CRE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CRE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CRE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CRED;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CRED;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CRED:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDEN;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENT;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDENT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTI;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDENTI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIA;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDENTIA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIAL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIAL;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_CREDENTIAL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 5;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIALS;
+ case 's':
+ if (last) {
+ return 5;
+ }
+ goto ACCESS_CONTROL_ALLOW_CREDENTIALS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_H:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_HE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEA;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_HEA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEAD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEAD;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_HEAD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_HEADE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADER;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_HEADER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 6;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADERS;
+ case 's':
+ if (last) {
+ return 6;
+ }
+ goto ACCESS_CONTROL_ALLOW_HEADERS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ME;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ME;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_ME:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_MET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_MET;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_MET:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METH;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_METH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHO;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_METHO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHOD;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_METHOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 7;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHODS;
+ case 's':
+ if (last) {
+ return 7;
+ }
+ goto ACCESS_CONTROL_ALLOW_METHODS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_O:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_OR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_OR;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_OR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORI;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_ORI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIG;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_ORIG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIGI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIGI;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_ALLOW_ORIGI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 8;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIGIN;
+ case 'n':
+ if (last) {
+ return 8;
+ }
+ goto ACCESS_CONTROL_ALLOW_ORIGIN;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EX;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EX:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXP;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPO;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_H;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_H;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_H:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_HE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEA;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_HEA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEAD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEAD;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_HEAD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_HEADE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADER;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_EXPOSE_HEADER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 9;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADERS;
+ case 's':
+ if (last) {
+ return 9;
+ }
+ goto ACCESS_CONTROL_EXPOSE_HEADERS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MA;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_MA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_MAX:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX_;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_MAX_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX_A;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_MAX_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX_AG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_MAX_AG;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_MAX_AG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 10;
+ }
+ goto ACCESS_CONTROL_MAX_AGE;
+ case 'e':
+ if (last) {
+ return 10;
+ }
+ goto ACCESS_CONTROL_MAX_AGE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_R:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_RE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_RE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_RE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Q':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQ;
+ case 'q':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQ;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQ:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQU;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUES;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUES;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUES:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_H;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_H;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_M;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_H:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_HE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEA;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_HEA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEAD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEAD;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_HEAD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADE;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_HEADE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADER;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_HEADER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 11;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADERS;
+ case 's':
+ if (last) {
+ return 11;
+ }
+ goto ACCESS_CONTROL_REQUEST_HEADERS;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_ME;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_ME;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_ME:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_MET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_MET;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_MET:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_METH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_METH;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_METH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_METHO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ACCESS_CONTROL_REQUEST_METHO;
+ default:
+ return -1;
+ }
+
+ACCESS_CONTROL_REQUEST_METHO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return 12;
+ }
+ goto ACCESS_CONTROL_REQUEST_METHOD;
+ case 'd':
+ if (last) {
+ return 12;
+ }
+ goto ACCESS_CONTROL_REQUEST_METHOD;
+ default:
+ return -1;
+ }
+
+AG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 13;
+ }
+ goto AGE;
+ case 'e':
+ if (last) {
+ return 13;
+ }
+ goto AGE;
+ default:
+ return -1;
+ }
+
+AL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto ALL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto ALL;
+ default:
+ return -1;
+ }
+
+ALL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto ALLO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto ALLO;
+ default:
+ return -1;
+ }
+
+ALLO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return 14;
+ }
+ goto ALLOW;
+ case 'w':
+ if (last) {
+ return 14;
+ }
+ goto ALLOW;
+ default:
+ return -1;
+ }
+
+AU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto AUT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto AUT;
+ default:
+ return -1;
+ }
+
+AUT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto AUTH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto AUTH;
+ default:
+ return -1;
+ }
+
+AUTH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto AUTHO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto AUTHO;
+ default:
+ return -1;
+ }
+
+AUTHO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto AUTHOR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto AUTHOR;
+ default:
+ return -1;
+ }
+
+AUTHOR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORI;
+ default:
+ return -1;
+ }
+
+AUTHORI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Z':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZ;
+ case 'z':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZ;
+ default:
+ return -1;
+ }
+
+AUTHORIZ:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZA;
+ default:
+ return -1;
+ }
+
+AUTHORIZA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZAT;
+ default:
+ return -1;
+ }
+
+AUTHORIZAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZATI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZATI;
+ default:
+ return -1;
+ }
+
+AUTHORIZATI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZATIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto AUTHORIZATIO;
+ default:
+ return -1;
+ }
+
+AUTHORIZATIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 15;
+ }
+ goto AUTHORIZATION;
+ case 'n':
+ if (last) {
+ return 15;
+ }
+ goto AUTHORIZATION;
+ default:
+ return -1;
+ }
+
+C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CA;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CO;
+ default:
+ return -1;
+ }
+
+CA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CAC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CAC;
+ default:
+ return -1;
+ }
+
+CAC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto CACH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto CACH;
+ default:
+ return -1;
+ }
+
+CACH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CACHE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CACHE;
+ default:
+ return -1;
+ }
+
+CACHE:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_;
+ default:
+ return -1;
+ }
+
+CACHE_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_C;
+ default:
+ return -1;
+ }
+
+CACHE_C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CO;
+ default:
+ return -1;
+ }
+
+CACHE_CO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CON;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CON;
+ default:
+ return -1;
+ }
+
+CACHE_CON:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONT;
+ default:
+ return -1;
+ }
+
+CACHE_CONT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONTR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONTR;
+ default:
+ return -1;
+ }
+
+CACHE_CONTR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONTRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CACHE_CONTRO;
+ default:
+ return -1;
+ }
+
+CACHE_CONTRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return 16;
+ }
+ goto CACHE_CONTROL;
+ case 'l':
+ if (last) {
+ return 16;
+ }
+ goto CACHE_CONTROL;
+ default:
+ return -1;
+ }
+
+CO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CON;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CON;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto COO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto COO;
+ default:
+ return -1;
+ }
+
+CON:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONN;
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONT;
+ default:
+ return -1;
+ }
+
+CONN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONNE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONNE;
+ default:
+ return -1;
+ }
+
+CONNE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CONNEC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CONNEC;
+ default:
+ return -1;
+ }
+
+CONNEC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONNECT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONNECT;
+ default:
+ return -1;
+ }
+
+CONNECT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONNECTI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONNECTI;
+ default:
+ return -1;
+ }
+
+CONNECTI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONNECTIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONNECTIO;
+ default:
+ return -1;
+ }
+
+CONNECTIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 17;
+ }
+ goto CONNECTION;
+ case 'n':
+ if (last) {
+ return 17;
+ }
+ goto CONNECTION;
+ default:
+ return -1;
+ }
+
+CONT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONTE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONTE;
+ default:
+ return -1;
+ }
+
+CONTE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTEN;
+ default:
+ return -1;
+ }
+
+CONTEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT;
+ default:
+ return -1;
+ }
+
+CONTENT:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_;
+ default:
+ return -1;
+ }
+
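+/* States with several known continuations fan out on the next character:
+ * CONTENT_ below branches on the first letter after the hyphen into
+ * Content-Disposition, Content-Encoding, Content-Language/-Length/-Location,
+ * Content-MD5, Content-Range, and Content-Transfer-Encoding/-Type. */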
+CONTENT_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_D;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_D;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_E;
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_L;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_L;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_M;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_R;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_R;
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_T;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_T;
+ default:
+ return -1;
+ }
+
+CONTENT_D:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DI;
+ default:
+ return -1;
+ }
+
+CONTENT_DI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DIS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DIS;
+ default:
+ return -1;
+ }
+
+CONTENT_DIS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISP;
+ default:
+ return -1;
+ }
+
+CONTENT_DISP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPO;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOS;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPOS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSI;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPOSI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSIT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSIT;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPOSIT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSITI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSITI;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPOSITI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSITIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_DISPOSITIO;
+ default:
+ return -1;
+ }
+
+CONTENT_DISPOSITIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 18;
+ }
+ goto CONTENT_DISPOSITION;
+ case 'n':
+ if (last) {
+ return 18;
+ }
+ goto CONTENT_DISPOSITION;
+ default:
+ return -1;
+ }
+
+CONTENT_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_EN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_EN;
+ default:
+ return -1;
+ }
+
+CONTENT_EN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENC;
+ default:
+ return -1;
+ }
+
+CONTENT_ENC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCO;
+ default:
+ return -1;
+ }
+
+CONTENT_ENCO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCOD;
+ default:
+ return -1;
+ }
+
+CONTENT_ENCOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCODI;
+ default:
+ return -1;
+ }
+
+CONTENT_ENCODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCODIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_ENCODIN;
+ default:
+ return -1;
+ }
+
+CONTENT_ENCODIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 19;
+ }
+ goto CONTENT_ENCODING;
+ case 'g':
+ if (last) {
+ return 19;
+ }
+ goto CONTENT_ENCODING;
+ default:
+ return -1;
+ }
+
+CONTENT_L:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LA;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LE;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LO;
+ default:
+ return -1;
+ }
+
+CONTENT_LA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LAN;
+ default:
+ return -1;
+ }
+
+CONTENT_LAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANG;
+ default:
+ return -1;
+ }
+
+CONTENT_LANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGU;
+ default:
+ return -1;
+ }
+
+CONTENT_LANGU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGUA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGUA;
+ default:
+ return -1;
+ }
+
+CONTENT_LANGUA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGUAG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LANGUAG;
+ default:
+ return -1;
+ }
+
+CONTENT_LANGUAG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 20;
+ }
+ goto CONTENT_LANGUAGE;
+ case 'e':
+ if (last) {
+ return 20;
+ }
+ goto CONTENT_LANGUAGE;
+ default:
+ return -1;
+ }
+
+CONTENT_LE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LEN;
+ default:
+ return -1;
+ }
+
+CONTENT_LEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LENG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LENG;
+ default:
+ return -1;
+ }
+
+CONTENT_LENG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LENGT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LENGT;
+ default:
+ return -1;
+ }
+
+CONTENT_LENGT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return 21;
+ }
+ goto CONTENT_LENGTH;
+ case 'h':
+ if (last) {
+ return 21;
+ }
+ goto CONTENT_LENGTH;
+ default:
+ return -1;
+ }
+
+CONTENT_LO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOC;
+ default:
+ return -1;
+ }
+
+CONTENT_LOC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCA;
+ default:
+ return -1;
+ }
+
+CONTENT_LOCA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCAT;
+ default:
+ return -1;
+ }
+
+CONTENT_LOCAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCATI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCATI;
+ default:
+ return -1;
+ }
+
+CONTENT_LOCATI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCATIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_LOCATIO;
+ default:
+ return -1;
+ }
+
+CONTENT_LOCATIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 22;
+ }
+ goto CONTENT_LOCATION;
+ case 'n':
+ if (last) {
+ return 22;
+ }
+ goto CONTENT_LOCATION;
+ default:
+ return -1;
+ }
+
+CONTENT_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_MD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_MD;
+ default:
+ return -1;
+ }
+
+CONTENT_MD:
+ NEXT_CHAR();
+ switch (ch) {
+ case '5':
+ if (last) {
+ return 23;
+ }
+ goto CONTENT_MD5;
+ default:
+ return -1;
+ }
+
+CONTENT_R:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RA;
+ default:
+ return -1;
+ }
+
+CONTENT_RA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RAN;
+ default:
+ return -1;
+ }
+
+CONTENT_RAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_RANG;
+ default:
+ return -1;
+ }
+
+CONTENT_RANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 24;
+ }
+ goto CONTENT_RANGE;
+ case 'e':
+ if (last) {
+ return 24;
+ }
+ goto CONTENT_RANGE;
+ default:
+ return -1;
+ }
+
+CONTENT_T:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TR;
+ case 'Y':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TY;
+ case 'y':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TY;
+ default:
+ return -1;
+ }
+
+CONTENT_TR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRA;
+ default:
+ return -1;
+ }
+
+CONTENT_TRA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRAN;
+ default:
+ return -1;
+ }
+
+CONTENT_TRAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANS;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSF;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFE;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_E;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_EN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_EN;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_EN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENC;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_ENC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCO;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_ENCO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCOD;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_ENCOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCODI;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_ENCODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCODIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TRANSFER_ENCODIN;
+ default:
+ return -1;
+ }
+
+CONTENT_TRANSFER_ENCODIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 25;
+ }
+ goto CONTENT_TRANSFER_ENCODING;
+ case 'g':
+ if (last) {
+ return 25;
+ }
+ goto CONTENT_TRANSFER_ENCODING;
+ default:
+ return -1;
+ }
+
+CONTENT_TY:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TYP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto CONTENT_TYP;
+ default:
+ return -1;
+ }
+
+CONTENT_TYP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 26;
+ }
+ goto CONTENT_TYPE;
+ case 'e':
+ if (last) {
+ return 26;
+ }
+ goto CONTENT_TYPE;
+ default:
+ return -1;
+ }
+
+COO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'K':
+ if (last) {
+ return -1;
+ }
+ goto COOK;
+ case 'k':
+ if (last) {
+ return -1;
+ }
+ goto COOK;
+ default:
+ return -1;
+ }
+
+COOK:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto COOKI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto COOKI;
+ default:
+ return -1;
+ }
+
+COOKI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 27;
+ }
+ goto COOKIE;
+ case 'e':
+ if (last) {
+ return 27;
+ }
+ goto COOKIE;
+ default:
+ return -1;
+ }
+
+D:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto DA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto DA;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto DE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto DE;
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto DI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto DI;
+ default:
+ return -1;
+ }
+
+DA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto DAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto DAT;
+ default:
+ return -1;
+ }
+
+DAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 28;
+ }
+ goto DATE;
+ case 'e':
+ if (last) {
+ return 28;
+ }
+ goto DATE;
+ default:
+ return -1;
+ }
+
+DE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto DES;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto DES;
+ default:
+ return -1;
+ }
+
+DES:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto DEST;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto DEST;
+ default:
+ return -1;
+ }
+
+DEST:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto DESTI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto DESTI;
+ default:
+ return -1;
+ }
+
+DESTI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto DESTIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto DESTIN;
+ default:
+ return -1;
+ }
+
+DESTIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto DESTINA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto DESTINA;
+ default:
+ return -1;
+ }
+
+DESTINA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto DESTINAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto DESTINAT;
+ default:
+ return -1;
+ }
+
+DESTINAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto DESTINATI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto DESTINATI;
+ default:
+ return -1;
+ }
+
+DESTINATI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto DESTINATIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto DESTINATIO;
+ default:
+ return -1;
+ }
+
+DESTINATIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 29;
+ }
+ goto DESTINATION;
+ case 'n':
+ if (last) {
+ return 29;
+ }
+ goto DESTINATION;
+ default:
+ return -1;
+ }
+
+DI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto DIG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto DIG;
+ default:
+ return -1;
+ }
+
+DIG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto DIGE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto DIGE;
+ default:
+ return -1;
+ }
+
+DIGE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto DIGES;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto DIGES;
+ default:
+ return -1;
+ }
+
+DIGES:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 30;
+ }
+ goto DIGEST;
+ case 't':
+ if (last) {
+ return 30;
+ }
+ goto DIGEST;
+ default:
+ return -1;
+ }
+
+E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto ET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto ET;
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto EX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto EX;
+ default:
+ return -1;
+ }
+
+ET:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto ETA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto ETA;
+ default:
+ return -1;
+ }
+
+ETA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 31;
+ }
+ goto ETAG;
+ case 'g':
+ if (last) {
+ return 31;
+ }
+ goto ETAG;
+ default:
+ return -1;
+ }
+
+EX:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto EXP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto EXP;
+ default:
+ return -1;
+ }
+
+EXP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto EXPE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto EXPE;
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto EXPI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto EXPI;
+ default:
+ return -1;
+ }
+
+EXPE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto EXPEC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto EXPEC;
+ default:
+ return -1;
+ }
+
+EXPEC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 32;
+ }
+ goto EXPECT;
+ case 't':
+ if (last) {
+ return 32;
+ }
+ goto EXPECT;
+ default:
+ return -1;
+ }
+
+EXPI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto EXPIR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto EXPIR;
+ default:
+ return -1;
+ }
+
+EXPIR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto EXPIRE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto EXPIRE;
+ default:
+ return -1;
+ }
+
+EXPIRE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 33;
+ }
+ goto EXPIRES;
+ case 's':
+ if (last) {
+ return 33;
+ }
+ goto EXPIRES;
+ default:
+ return -1;
+ }
+
+F:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto FO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto FO;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto FR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto FR;
+ default:
+ return -1;
+ }
+
+FO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto FOR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto FOR;
+ default:
+ return -1;
+ }
+
+FOR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto FORW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto FORW;
+ default:
+ return -1;
+ }
+
+FORW:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto FORWA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto FORWA;
+ default:
+ return -1;
+ }
+
+FORWA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto FORWAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto FORWAR;
+ default:
+ return -1;
+ }
+
+FORWAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto FORWARD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto FORWARD;
+ default:
+ return -1;
+ }
+
+FORWARD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto FORWARDE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto FORWARDE;
+ default:
+ return -1;
+ }
+
+FORWARDE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return 34;
+ }
+ goto FORWARDED;
+ case 'd':
+ if (last) {
+ return 34;
+ }
+ goto FORWARDED;
+ default:
+ return -1;
+ }
+
+FR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto FRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto FRO;
+ default:
+ return -1;
+ }
+
+FRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'M':
+ if (last) {
+ return 35;
+ }
+ goto FROM;
+ case 'm':
+ if (last) {
+ return 35;
+ }
+ goto FROM;
+ default:
+ return -1;
+ }
+
+H:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto HO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto HO;
+ default:
+ return -1;
+ }
+
+HO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto HOS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto HOS;
+ default:
+ return -1;
+ }
+
+HOS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 36;
+ }
+ goto HOST;
+ case 't':
+ if (last) {
+ return 36;
+ }
+ goto HOST;
+ default:
+ return -1;
+ }
+
+I:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto IF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto IF;
+ default:
+ return -1;
+ }
+
+IF:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto IF_;
+ default:
+ return -1;
+ }
+
+IF_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto IF_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto IF_M;
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_N;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_N;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto IF_R;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto IF_R;
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto IF_U;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto IF_U;
+ default:
+ return -1;
+ }
+
+IF_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto IF_MA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto IF_MA;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto IF_MO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto IF_MO;
+ default:
+ return -1;
+ }
+
+IF_MA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto IF_MAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto IF_MAT;
+ default:
+ return -1;
+ }
+
+IF_MAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto IF_MATC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto IF_MATC;
+ default:
+ return -1;
+ }
+
+IF_MATC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return 37;
+ }
+ goto IF_MATCH;
+ case 'h':
+ if (last) {
+ return 37;
+ }
+ goto IF_MATCH;
+ default:
+ return -1;
+ }
+
+IF_MO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto IF_MOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto IF_MOD;
+ default:
+ return -1;
+ }
+
+IF_MOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODI;
+ default:
+ return -1;
+ }
+
+IF_MODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIF;
+ default:
+ return -1;
+ }
+
+IF_MODIF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFI;
+ default:
+ return -1;
+ }
+
+IF_MODIFI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIE;
+ default:
+ return -1;
+ }
+
+IF_MODIFIE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_S;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_S;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED_S:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SI;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED_SI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SIN;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED_SIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SINC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto IF_MODIFIED_SINC;
+ default:
+ return -1;
+ }
+
+IF_MODIFIED_SINC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 38;
+ }
+ goto IF_MODIFIED_SINCE;
+ case 'e':
+ if (last) {
+ return 38;
+ }
+ goto IF_MODIFIED_SINCE;
+ default:
+ return -1;
+ }
+
+IF_N:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto IF_NO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto IF_NO;
+ default:
+ return -1;
+ }
+
+IF_NO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_NON;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_NON;
+ default:
+ return -1;
+ }
+
+IF_NON:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE;
+ default:
+ return -1;
+ }
+
+IF_NONE:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_;
+ default:
+ return -1;
+ }
+
+IF_NONE_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_M;
+ default:
+ return -1;
+ }
+
+IF_NONE_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MA;
+ default:
+ return -1;
+ }
+
+IF_NONE_MA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MAT;
+ default:
+ return -1;
+ }
+
+IF_NONE_MAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MATC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto IF_NONE_MATC;
+ default:
+ return -1;
+ }
+
+IF_NONE_MATC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return 39;
+ }
+ goto IF_NONE_MATCH;
+ case 'h':
+ if (last) {
+ return 39;
+ }
+ goto IF_NONE_MATCH;
+ default:
+ return -1;
+ }
+
+IF_R:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto IF_RA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto IF_RA;
+ default:
+ return -1;
+ }
+
+IF_RA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_RAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_RAN;
+ default:
+ return -1;
+ }
+
+IF_RAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto IF_RANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto IF_RANG;
+ default:
+ return -1;
+ }
+
+IF_RANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 40;
+ }
+ goto IF_RANGE;
+ case 'e':
+ if (last) {
+ return 40;
+ }
+ goto IF_RANGE;
+ default:
+ return -1;
+ }
+
+IF_U:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_UN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_UN;
+ default:
+ return -1;
+ }
+
+IF_UN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNM;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNM;
+ default:
+ return -1;
+ }
+
+IF_UNM:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMO;
+ default:
+ return -1;
+ }
+
+IF_UNMO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMOD;
+ default:
+ return -1;
+ }
+
+IF_UNMOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODI;
+ default:
+ return -1;
+ }
+
+IF_UNMODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIF;
+ default:
+ return -1;
+ }
+
+IF_UNMODIF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFI;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIE;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_S;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_S;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED_S:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SI;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED_SI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SIN;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED_SIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SINC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto IF_UNMODIFIED_SINC;
+ default:
+ return -1;
+ }
+
+IF_UNMODIFIED_SINC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 41;
+ }
+ goto IF_UNMODIFIED_SINCE;
+ case 'e':
+ if (last) {
+ return 41;
+ }
+ goto IF_UNMODIFIED_SINCE;
+ default:
+ return -1;
+ }
+
+K:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto KE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto KE;
+ default:
+ return -1;
+ }
+
+KE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto KEE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto KEE;
+ default:
+ return -1;
+ }
+
+KEE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto KEEP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto KEEP;
+ default:
+ return -1;
+ }
+
+KEEP:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_;
+ default:
+ return -1;
+ }
+
+KEEP_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_A;
+ default:
+ return -1;
+ }
+
+KEEP_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_AL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_AL;
+ default:
+ return -1;
+ }
+
+KEEP_AL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_ALI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_ALI;
+ default:
+ return -1;
+ }
+
+KEEP_ALI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'V':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_ALIV;
+ case 'v':
+ if (last) {
+ return -1;
+ }
+ goto KEEP_ALIV;
+ default:
+ return -1;
+ }
+
+KEEP_ALIV:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 42;
+ }
+ goto KEEP_ALIVE;
+ case 'e':
+ if (last) {
+ return 42;
+ }
+ goto KEEP_ALIVE;
+ default:
+ return -1;
+ }
+
+L:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto LA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto LA;
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto LI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto LI;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto LO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto LO;
+ default:
+ return -1;
+ }
+
+LA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto LAS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto LAS;
+ default:
+ return -1;
+ }
+
+LAS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto LAST;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto LAST;
+ default:
+ return -1;
+ }
+
+LAST:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto LAST_;
+ default:
+ return -1;
+ }
+
+LAST_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto LAST_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto LAST_E;
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto LAST_M;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto LAST_M;
+ default:
+ return -1;
+ }
+
+LAST_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'V':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EV;
+ case 'v':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EV;
+ default:
+ return -1;
+ }
+
+LAST_EV:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVE;
+ default:
+ return -1;
+ }
+
+LAST_EVE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVEN;
+ default:
+ return -1;
+ }
+
+LAST_EVEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVENT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVENT;
+ default:
+ return -1;
+ }
+
+LAST_EVENT:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVENT_;
+ default:
+ return -1;
+ }
+
+LAST_EVENT_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVENT_I;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto LAST_EVENT_I;
+ default:
+ return -1;
+ }
+
+LAST_EVENT_I:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return 43;
+ }
+ goto LAST_EVENT_ID;
+ case 'd':
+ if (last) {
+ return 43;
+ }
+ goto LAST_EVENT_ID;
+ default:
+ return -1;
+ }
+
+LAST_M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MO;
+ default:
+ return -1;
+ }
+
+LAST_MO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MOD;
+ default:
+ return -1;
+ }
+
+LAST_MOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODI;
+ default:
+ return -1;
+ }
+
+LAST_MODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIF;
+ default:
+ return -1;
+ }
+
+LAST_MODIF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIFI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIFI;
+ default:
+ return -1;
+ }
+
+LAST_MODIFI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIFIE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto LAST_MODIFIE;
+ default:
+ return -1;
+ }
+
+LAST_MODIFIE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return 44;
+ }
+ goto LAST_MODIFIED;
+ case 'd':
+ if (last) {
+ return 44;
+ }
+ goto LAST_MODIFIED;
+ default:
+ return -1;
+ }
+
+LI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto LIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto LIN;
+ default:
+ return -1;
+ }
+
+LIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'K':
+ if (last) {
+ return 45;
+ }
+ goto LINK;
+ case 'k':
+ if (last) {
+ return 45;
+ }
+ goto LINK;
+ default:
+ return -1;
+ }
+
+LO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto LOC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto LOC;
+ default:
+ return -1;
+ }
+
+LOC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto LOCA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto LOCA;
+ default:
+ return -1;
+ }
+
+LOCA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto LOCAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto LOCAT;
+ default:
+ return -1;
+ }
+
+LOCAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto LOCATI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto LOCATI;
+ default:
+ return -1;
+ }
+
+LOCATI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto LOCATIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto LOCATIO;
+ default:
+ return -1;
+ }
+
+LOCATIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 46;
+ }
+ goto LOCATION;
+ case 'n':
+ if (last) {
+ return 46;
+ }
+ goto LOCATION;
+ default:
+ return -1;
+ }
+
+M:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto MA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto MA;
+ default:
+ return -1;
+ }
+
+MA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto MAX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto MAX;
+ default:
+ return -1;
+ }
+
+MAX:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto MAX_;
+ default:
+ return -1;
+ }
+
+MAX_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto MAX_F;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto MAX_F;
+ default:
+ return -1;
+ }
+
+MAX_F:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FO;
+ default:
+ return -1;
+ }
+
+MAX_FO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FOR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FOR;
+ default:
+ return -1;
+ }
+
+MAX_FOR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORW;
+ default:
+ return -1;
+ }
+
+MAX_FORW:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWA;
+ default:
+ return -1;
+ }
+
+MAX_FORWA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWAR;
+ default:
+ return -1;
+ }
+
+MAX_FORWAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWARD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto MAX_FORWARD;
+ default:
+ return -1;
+ }
+
+MAX_FORWARD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 47;
+ }
+ goto MAX_FORWARDS;
+ case 's':
+ if (last) {
+ return 47;
+ }
+ goto MAX_FORWARDS;
+ default:
+ return -1;
+ }
+
+O:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto OR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto OR;
+ default:
+ return -1;
+ }
+
+OR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ORI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ORI;
+ default:
+ return -1;
+ }
+
+ORI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto ORIG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto ORIG;
+ default:
+ return -1;
+ }
+
+ORIG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto ORIGI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto ORIGI;
+ default:
+ return -1;
+ }
+
+ORIGI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 48;
+ }
+ goto ORIGIN;
+ case 'n':
+ if (last) {
+ return 48;
+ }
+ goto ORIGIN;
+ default:
+ return -1;
+ }
+
+P:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto PR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto PR;
+ default:
+ return -1;
+ }
+
+PR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto PRA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto PRA;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto PRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto PRO;
+ default:
+ return -1;
+ }
+
+PRA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto PRAG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto PRAG;
+ default:
+ return -1;
+ }
+
+PRAG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'M':
+ if (last) {
+ return -1;
+ }
+ goto PRAGM;
+ case 'm':
+ if (last) {
+ return -1;
+ }
+ goto PRAGM;
+ default:
+ return -1;
+ }
+
+PRAGM:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return 49;
+ }
+ goto PRAGMA;
+ case 'a':
+ if (last) {
+ return 49;
+ }
+ goto PRAGMA;
+ default:
+ return -1;
+ }
+
+PRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto PROX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto PROX;
+ default:
+ return -1;
+ }
+
+PROX:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Y':
+ if (last) {
+ return -1;
+ }
+ goto PROXY;
+ case 'y':
+ if (last) {
+ return -1;
+ }
+ goto PROXY;
+ default:
+ return -1;
+ }
+
+PROXY:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_;
+ default:
+ return -1;
+ }
+
+PROXY_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_A;
+ default:
+ return -1;
+ }
+
+PROXY_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AU;
+ default:
+ return -1;
+ }
+
+PROXY_AU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUT;
+ default:
+ return -1;
+ }
+
+PROXY_AUT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTH;
+ default:
+ return -1;
+ }
+
+PROXY_AUTH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHE;
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHO;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHEN;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENT;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHENT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTI;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHENTI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTIC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTIC;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHENTIC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTICA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTICA;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHENTICA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTICAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHENTICAT;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHENTICAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 50;
+ }
+ goto PROXY_AUTHENTICATE;
+ case 'e':
+ if (last) {
+ return 50;
+ }
+ goto PROXY_AUTHENTICATE;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHOR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHOR;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHOR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORI;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Z':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZ;
+ case 'z':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZ;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORIZ:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZA;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORIZA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZAT;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORIZAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZATI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZATI;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORIZATI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZATIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto PROXY_AUTHORIZATIO;
+ default:
+ return -1;
+ }
+
+PROXY_AUTHORIZATIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 51;
+ }
+ goto PROXY_AUTHORIZATION;
+ case 'n':
+ if (last) {
+ return 51;
+ }
+ goto PROXY_AUTHORIZATION;
+ default:
+ return -1;
+ }
+
+R:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto RA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto RA;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto RE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto RE;
+ default:
+ return -1;
+ }
+
+RA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto RAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto RAN;
+ default:
+ return -1;
+ }
+
+RAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto RANG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto RANG;
+ default:
+ return -1;
+ }
+
+RANG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 52;
+ }
+ goto RANGE;
+ case 'e':
+ if (last) {
+ return 52;
+ }
+ goto RANGE;
+ default:
+ return -1;
+ }
+
+RE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto REF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto REF;
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto RET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto RET;
+ default:
+ return -1;
+ }
+
+REF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto REFE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto REFE;
+ default:
+ return -1;
+ }
+
+REFE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto REFER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto REFER;
+ default:
+ return -1;
+ }
+
+REFER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto REFERE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto REFERE;
+ default:
+ return -1;
+ }
+
+REFERE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return 53;
+ }
+ goto REFERER;
+ case 'r':
+ if (last) {
+ return 53;
+ }
+ goto REFERER;
+ default:
+ return -1;
+ }
+
+RET:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto RETR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto RETR;
+ default:
+ return -1;
+ }
+
+RETR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Y':
+ if (last) {
+ return -1;
+ }
+ goto RETRY;
+ case 'y':
+ if (last) {
+ return -1;
+ }
+ goto RETRY;
+ default:
+ return -1;
+ }
+
+RETRY:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_;
+ default:
+ return -1;
+ }
+
+RETRY_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_A;
+ default:
+ return -1;
+ }
+
+RETRY_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AF;
+ default:
+ return -1;
+ }
+
+RETRY_AF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AFT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AFT;
+ default:
+ return -1;
+ }
+
+RETRY_AFT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AFTE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto RETRY_AFTE;
+ default:
+ return -1;
+ }
+
+RETRY_AFTE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return 54;
+ }
+ goto RETRY_AFTER;
+ case 'r':
+ if (last) {
+ return 54;
+ }
+ goto RETRY_AFTER;
+ default:
+ return -1;
+ }
+
+S:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SE;
+ default:
+ return -1;
+ }
+
+SE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SEC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SEC;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto SER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto SER;
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto SET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto SET;
+ default:
+ return -1;
+ }
+
+SEC:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto SEC_;
+ default:
+ return -1;
+ }
+
+SEC_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto SEC_W;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto SEC_W;
+ default:
+ return -1;
+ }
+
+SEC_W:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WE;
+ default:
+ return -1;
+ }
+
+SEC_WE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'B':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEB;
+ case 'b':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEB;
+ default:
+ return -1;
+ }
+
+SEC_WEB:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBS;
+ default:
+ return -1;
+ }
+
+SEC_WEBS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOC;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'K':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCK;
+ case 'k':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCK;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCK:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKE;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_A;
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_E;
+ case 'K':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_K;
+ case 'k':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_K;
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_P;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_P;
+ case 'V':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_V;
+ case 'v':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_V;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_AC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_AC;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_AC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACC;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_ACC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACCE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACCE;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_ACCE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACCEP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_ACCEP;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_ACCEP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 55;
+ }
+ goto SEC_WEBSOCKET_ACCEPT;
+ case 't':
+ if (last) {
+ return 55;
+ }
+ goto SEC_WEBSOCKET_ACCEPT;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'X':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EX;
+ case 'x':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EX;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EX:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXT;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTE;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTEN;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENS;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTENS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSI;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTENSI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSIO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTENSIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSION;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_EXTENSION;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_EXTENSION:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return 56;
+ }
+ goto SEC_WEBSOCKET_EXTENSIONS;
+ case 's':
+ if (last) {
+ return 56;
+ }
+ goto SEC_WEBSOCKET_EXTENSIONS;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_K:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_KE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_KE;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_KE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Y':
+ if (last) {
+ return 57;
+ }
+ goto SEC_WEBSOCKET_KEY;
+ case 'y':
+ if (last) {
+ return 57;
+ }
+ goto SEC_WEBSOCKET_KEY;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_KEY:
+ NEXT_CHAR();
+ switch (ch) {
+ case '1':
+ if (last) {
+ return 58;
+ }
+ goto SEC_WEBSOCKET_KEY1;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_P:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PR;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PRO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROT;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PROT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PROTO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTOC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTOC;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PROTOC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTOCO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_PROTOCO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_PROTOCO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return 59;
+ }
+ goto SEC_WEBSOCKET_PROTOCOL;
+ case 'l':
+ if (last) {
+ return 59;
+ }
+ goto SEC_WEBSOCKET_PROTOCOL;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_V:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VE;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_VE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VER;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_VER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERS;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_VERS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERSI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERSI;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_VERSI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERSIO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SEC_WEBSOCKET_VERSIO;
+ default:
+ return -1;
+ }
+
+SEC_WEBSOCKET_VERSIO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return 60;
+ }
+ goto SEC_WEBSOCKET_VERSION;
+ case 'n':
+ if (last) {
+ return 60;
+ }
+ goto SEC_WEBSOCKET_VERSION;
+ default:
+ return -1;
+ }
+
+SER:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'V':
+ if (last) {
+ return -1;
+ }
+ goto SERV;
+ case 'v':
+ if (last) {
+ return -1;
+ }
+ goto SERV;
+ default:
+ return -1;
+ }
+
+SERV:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto SERVE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto SERVE;
+ default:
+ return -1;
+ }
+
+SERVE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return 61;
+ }
+ goto SERVER;
+ case 'r':
+ if (last) {
+ return 61;
+ }
+ goto SERVER;
+ default:
+ return -1;
+ }
+
+SET:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto SET_;
+ default:
+ return -1;
+ }
+
+SET_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto SET_C;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto SET_C;
+ default:
+ return -1;
+ }
+
+SET_C:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SET_CO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SET_CO;
+ default:
+ return -1;
+ }
+
+SET_CO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto SET_COO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto SET_COO;
+ default:
+ return -1;
+ }
+
+SET_COO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'K':
+ if (last) {
+ return -1;
+ }
+ goto SET_COOK;
+ case 'k':
+ if (last) {
+ return -1;
+ }
+ goto SET_COOK;
+ default:
+ return -1;
+ }
+
+SET_COOK:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto SET_COOKI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto SET_COOKI;
+ default:
+ return -1;
+ }
+
+SET_COOKI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 62;
+ }
+ goto SET_COOKIE;
+ case 'e':
+ if (last) {
+ return 62;
+ }
+ goto SET_COOKIE;
+ default:
+ return -1;
+ }
+
+T:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 63;
+ }
+ goto TE;
+ case 'e':
+ if (last) {
+ return 63;
+ }
+ goto TE;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto TR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto TR;
+ default:
+ return -1;
+ }
+
+TR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto TRA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto TRA;
+ default:
+ return -1;
+ }
+
+TRA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto TRAI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto TRAI;
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto TRAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto TRAN;
+ default:
+ return -1;
+ }
+
+TRAI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'L':
+ if (last) {
+ return -1;
+ }
+ goto TRAIL;
+ case 'l':
+ if (last) {
+ return -1;
+ }
+ goto TRAIL;
+ default:
+ return -1;
+ }
+
+TRAIL:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto TRAILE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto TRAILE;
+ default:
+ return -1;
+ }
+
+TRAILE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return 64;
+ }
+ goto TRAILER;
+ case 'r':
+ if (last) {
+ return 64;
+ }
+ goto TRAILER;
+ default:
+ return -1;
+ }
+
+TRAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto TRANS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto TRANS;
+ default:
+ return -1;
+ }
+
+TRANS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto TRANSF;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto TRANSF;
+ default:
+ return -1;
+ }
+
+TRANSF:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFE;
+ default:
+ return -1;
+ }
+
+TRANSFE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER;
+ default:
+ return -1;
+ }
+
+TRANSFER:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_;
+ default:
+ return -1;
+ }
+
+TRANSFER_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_E;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_E;
+ default:
+ return -1;
+ }
+
+TRANSFER_E:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_EN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_EN;
+ default:
+ return -1;
+ }
+
+TRANSFER_EN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENC;
+ default:
+ return -1;
+ }
+
+TRANSFER_ENC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCO;
+ default:
+ return -1;
+ }
+
+TRANSFER_ENCO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCOD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCOD;
+ default:
+ return -1;
+ }
+
+TRANSFER_ENCOD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCODI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCODI;
+ default:
+ return -1;
+ }
+
+TRANSFER_ENCODI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCODIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto TRANSFER_ENCODIN;
+ default:
+ return -1;
+ }
+
+TRANSFER_ENCODIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 65;
+ }
+ goto TRANSFER_ENCODING;
+ case 'g':
+ if (last) {
+ return 65;
+ }
+ goto TRANSFER_ENCODING;
+ default:
+ return -1;
+ }
+
+U:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto UR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto UR;
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto UP;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto UP;
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto US;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto US;
+ default:
+ return -1;
+ }
+
+UR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return 66;
+ }
+ goto URI;
+ case 'i':
+ if (last) {
+ return 66;
+ }
+ goto URI;
+ default:
+ return -1;
+ }
+
+UP:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto UPG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto UPG;
+ default:
+ return -1;
+ }
+
+UPG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto UPGR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto UPGR;
+ default:
+ return -1;
+ }
+
+UPGR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto UPGRA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto UPGRA;
+ default:
+ return -1;
+ }
+
+UPGRA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto UPGRAD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto UPGRAD;
+ default:
+ return -1;
+ }
+
+UPGRAD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 67;
+ }
+ goto UPGRADE;
+ case 'e':
+ if (last) {
+ return 67;
+ }
+ goto UPGRADE;
+ default:
+ return -1;
+ }
+
+US:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto USE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto USE;
+ default:
+ return -1;
+ }
+
+USE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto USER;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto USER;
+ default:
+ return -1;
+ }
+
+USER:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto USER_;
+ default:
+ return -1;
+ }
+
+USER_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto USER_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto USER_A;
+ default:
+ return -1;
+ }
+
+USER_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto USER_AG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto USER_AG;
+ default:
+ return -1;
+ }
+
+USER_AG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto USER_AGE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto USER_AGE;
+ default:
+ return -1;
+ }
+
+USER_AGE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto USER_AGEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto USER_AGEN;
+ default:
+ return -1;
+ }
+
+USER_AGEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 68;
+ }
+ goto USER_AGENT;
+ case 't':
+ if (last) {
+ return 68;
+ }
+ goto USER_AGENT;
+ default:
+ return -1;
+ }
+
+V:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto VA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto VA;
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto VI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto VI;
+ default:
+ return -1;
+ }
+
+VA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto VAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto VAR;
+ default:
+ return -1;
+ }
+
+VAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'Y':
+ if (last) {
+ return 69;
+ }
+ goto VARY;
+ case 'y':
+ if (last) {
+ return 69;
+ }
+ goto VARY;
+ default:
+ return -1;
+ }
+
+VI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return 70;
+ }
+ goto VIA;
+ case 'a':
+ if (last) {
+ return 70;
+ }
+ goto VIA;
+ default:
+ return -1;
+ }
+
+W:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto WW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto WW;
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto WA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto WA;
+ default:
+ return -1;
+ }
+
+WW:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto WWW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto WWW;
+ default:
+ return -1;
+ }
+
+WWW:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto WWW_;
+ default:
+ return -1;
+ }
+
+WWW_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto WWW_A;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto WWW_A;
+ default:
+ return -1;
+ }
+
+WWW_A:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'U':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AU;
+ case 'u':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AU;
+ default:
+ return -1;
+ }
+
+WWW_AU:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUT;
+ default:
+ return -1;
+ }
+
+WWW_AUT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTH;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTH;
+ default:
+ return -1;
+ }
+
+WWW_AUTH:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHE;
+ default:
+ return -1;
+ }
+
+WWW_AUTHE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHEN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHEN;
+ default:
+ return -1;
+ }
+
+WWW_AUTHEN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENT;
+ default:
+ return -1;
+ }
+
+WWW_AUTHENT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTI;
+ default:
+ return -1;
+ }
+
+WWW_AUTHENTI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'C':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTIC;
+ case 'c':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTIC;
+ default:
+ return -1;
+ }
+
+WWW_AUTHENTIC:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTICA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTICA;
+ default:
+ return -1;
+ }
+
+WWW_AUTHENTICA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTICAT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto WWW_AUTHENTICAT;
+ default:
+ return -1;
+ }
+
+WWW_AUTHENTICAT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return 71;
+ }
+ goto WWW_AUTHENTICATE;
+ case 'e':
+ if (last) {
+ return 71;
+ }
+ goto WWW_AUTHENTICATE;
+ default:
+ return -1;
+ }
+
+WA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto WAN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto WAN;
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto WAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto WAR;
+ default:
+ return -1;
+ }
+
+WAN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto WANT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto WANT;
+ default:
+ return -1;
+ }
+
+WANT:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto WANT_;
+ default:
+ return -1;
+ }
+
+WANT_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto WANT_D;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto WANT_D;
+ default:
+ return -1;
+ }
+
+WANT_D:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DI;
+ default:
+ return -1;
+ }
+
+WANT_DI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIG;
+ case 'g':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIG;
+ default:
+ return -1;
+ }
+
+WANT_DIG:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIGE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIGE;
+ default:
+ return -1;
+ }
+
+WANT_DIGE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIGES;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto WANT_DIGES;
+ default:
+ return -1;
+ }
+
+WANT_DIGES:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 72;
+ }
+ goto WANT_DIGEST;
+ case 't':
+ if (last) {
+ return 72;
+ }
+ goto WANT_DIGEST;
+ default:
+ return -1;
+ }
+
+WAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto WARN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto WARN;
+ default:
+ return -1;
+ }
+
+WARN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'I':
+ if (last) {
+ return -1;
+ }
+ goto WARNI;
+ case 'i':
+ if (last) {
+ return -1;
+ }
+ goto WARNI;
+ default:
+ return -1;
+ }
+
+WARNI:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'N':
+ if (last) {
+ return -1;
+ }
+ goto WARNIN;
+ case 'n':
+ if (last) {
+ return -1;
+ }
+ goto WARNIN;
+ default:
+ return -1;
+ }
+
+WARNIN:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'G':
+ if (last) {
+ return 73;
+ }
+ goto WARNING;
+ case 'g':
+ if (last) {
+ return 73;
+ }
+ goto WARNING;
+ default:
+ return -1;
+ }
+
+X:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto X_;
+ default:
+ return -1;
+ }
+
+X_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto X_F;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto X_F;
+ default:
+ return -1;
+ }
+
+X_F:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto X_FO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto X_FO;
+ default:
+ return -1;
+ }
+
+X_FO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto X_FOR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto X_FOR;
+ default:
+ return -1;
+ }
+
+X_FOR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'W':
+ if (last) {
+ return -1;
+ }
+ goto X_FORW;
+ case 'w':
+ if (last) {
+ return -1;
+ }
+ goto X_FORW;
+ default:
+ return -1;
+ }
+
+X_FORW:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'A':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWA;
+ case 'a':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWA;
+ default:
+ return -1;
+ }
+
+X_FORWA:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWAR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWAR;
+ default:
+ return -1;
+ }
+
+X_FORWAR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARD;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARD;
+ default:
+ return -1;
+ }
+
+X_FORWARD:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'E':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDE;
+ case 'e':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDE;
+ default:
+ return -1;
+ }
+
+X_FORWARDE:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'D':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED;
+ case 'd':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED;
+ default:
+ return -1;
+ }
+
+X_FORWARDED:
+ NEXT_CHAR();
+ switch (ch) {
+ case '-':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'F':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_F;
+ case 'f':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_F;
+ case 'H':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_H;
+ case 'h':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_H;
+ case 'P':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_P;
+ case 'p':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_P;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_F:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_FO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_FO;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_FO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return 74;
+ }
+ goto X_FORWARDED_FOR;
+ case 'r':
+ if (last) {
+ return 74;
+ }
+ goto X_FORWARDED_FOR;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_H:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_HO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_HO;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_HO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'S':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_HOS;
+ case 's':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_HOS;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_HOS:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return 75;
+ }
+ goto X_FORWARDED_HOST;
+ case 't':
+ if (last) {
+ return 75;
+ }
+ goto X_FORWARDED_HOST;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_P:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'R':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PR;
+ case 'r':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PR;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_PR:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PRO;
+ case 'o':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PRO;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_PRO:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'T':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PROT;
+ case 't':
+ if (last) {
+ return -1;
+ }
+ goto X_FORWARDED_PROT;
+ default:
+ return -1;
+ }
+
+X_FORWARDED_PROT:
+ NEXT_CHAR();
+ switch (ch) {
+ case 'O':
+ if (last) {
+ return 76;
+ }
+ goto X_FORWARDED_PROTO;
+ case 'o':
+ if (last) {
+ return 76;
+ }
+ goto X_FORWARDED_PROTO;
+ default:
+ return -1;
+ }
+
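+/* Terminal states: each label below is reached only after a complete known
+ * header name has matched with input still remaining, so no header can
+ * match any more and control falls through to the shared failure exit. */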
+ACCEPT_CHARSET:
+ACCEPT_ENCODING:
+ACCEPT_LANGUAGE:
+ACCEPT_RANGES:
+ACCESS_CONTROL_ALLOW_CREDENTIALS:
+ACCESS_CONTROL_ALLOW_HEADERS:
+ACCESS_CONTROL_ALLOW_METHODS:
+ACCESS_CONTROL_ALLOW_ORIGIN:
+ACCESS_CONTROL_EXPOSE_HEADERS:
+ACCESS_CONTROL_MAX_AGE:
+ACCESS_CONTROL_REQUEST_HEADERS:
+ACCESS_CONTROL_REQUEST_METHOD:
+AGE:
+ALLOW:
+AUTHORIZATION:
+CACHE_CONTROL:
+CONNECTION:
+CONTENT_DISPOSITION:
+CONTENT_ENCODING:
+CONTENT_LANGUAGE:
+CONTENT_LENGTH:
+CONTENT_LOCATION:
+CONTENT_MD5:
+CONTENT_RANGE:
+CONTENT_TRANSFER_ENCODING:
+CONTENT_TYPE:
+COOKIE:
+DATE:
+DESTINATION:
+DIGEST:
+ETAG:
+EXPECT:
+EXPIRES:
+FORWARDED:
+FROM:
+HOST:
+IF_MATCH:
+IF_MODIFIED_SINCE:
+IF_NONE_MATCH:
+IF_RANGE:
+IF_UNMODIFIED_SINCE:
+KEEP_ALIVE:
+LAST_EVENT_ID:
+LAST_MODIFIED:
+LINK:
+LOCATION:
+MAX_FORWARDS:
+ORIGIN:
+PRAGMA:
+PROXY_AUTHENTICATE:
+PROXY_AUTHORIZATION:
+RANGE:
+REFERER:
+RETRY_AFTER:
+SEC_WEBSOCKET_ACCEPT:
+SEC_WEBSOCKET_EXTENSIONS:
+SEC_WEBSOCKET_KEY1:
+SEC_WEBSOCKET_PROTOCOL:
+SEC_WEBSOCKET_VERSION:
+SERVER:
+SET_COOKIE:
+TE:
+TRAILER:
+TRANSFER_ENCODING:
+UPGRADE:
+URI:
+USER_AGENT:
+VARY:
+VIA:
+WANT_DIGEST:
+WARNING:
+WWW_AUTHENTICATE:
+X_FORWARDED_FOR:
+X_FORWARDED_HOST:
+X_FORWARDED_PROTO:
+missing:
+ /* nothing found */
+ return -1;
+}
diff --git a/contrib/python/aiohttp/aiohttp/_find_header.h b/contrib/python/aiohttp/aiohttp/_find_header.h
new file mode 100644
index 0000000000..99b7b4f828
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_find_header.h
@@ -0,0 +1,14 @@
+#ifndef _FIND_HEADERS_H
+#define _FIND_HEADERS_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
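+/* Returns the index of a recognized well-known header name in the table
+ * generated alongside this file (see _headers.pxi), or -1 when the name is
+ * unknown; matching is ASCII case-insensitive. */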
+int find_header(const char *str, int size);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/contrib/python/aiohttp/aiohttp/_find_header.pxd b/contrib/python/aiohttp/aiohttp/_find_header.pxd
new file mode 100644
index 0000000000..37a6c37268
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_find_header.pxd
@@ -0,0 +1,2 @@
+cdef extern from "_find_header.h":
+ int find_header(char *, int)
diff --git a/contrib/python/aiohttp/aiohttp/_headers.pxi b/contrib/python/aiohttp/aiohttp/_headers.pxi
new file mode 100644
index 0000000000..3744721d47
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_headers.pxi
@@ -0,0 +1,83 @@
+# The file is autogenerated from aiohttp/hdrs.py
+# Run ./tools/gen.py to regenerate it whenever aiohttp/hdrs.py changes.
+
+from . import hdrs
+cdef tuple headers = (
+ hdrs.ACCEPT,
+ hdrs.ACCEPT_CHARSET,
+ hdrs.ACCEPT_ENCODING,
+ hdrs.ACCEPT_LANGUAGE,
+ hdrs.ACCEPT_RANGES,
+ hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
+ hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
+ hdrs.ACCESS_CONTROL_ALLOW_METHODS,
+ hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
+ hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
+ hdrs.ACCESS_CONTROL_MAX_AGE,
+ hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
+ hdrs.ACCESS_CONTROL_REQUEST_METHOD,
+ hdrs.AGE,
+ hdrs.ALLOW,
+ hdrs.AUTHORIZATION,
+ hdrs.CACHE_CONTROL,
+ hdrs.CONNECTION,
+ hdrs.CONTENT_DISPOSITION,
+ hdrs.CONTENT_ENCODING,
+ hdrs.CONTENT_LANGUAGE,
+ hdrs.CONTENT_LENGTH,
+ hdrs.CONTENT_LOCATION,
+ hdrs.CONTENT_MD5,
+ hdrs.CONTENT_RANGE,
+ hdrs.CONTENT_TRANSFER_ENCODING,
+ hdrs.CONTENT_TYPE,
+ hdrs.COOKIE,
+ hdrs.DATE,
+ hdrs.DESTINATION,
+ hdrs.DIGEST,
+ hdrs.ETAG,
+ hdrs.EXPECT,
+ hdrs.EXPIRES,
+ hdrs.FORWARDED,
+ hdrs.FROM,
+ hdrs.HOST,
+ hdrs.IF_MATCH,
+ hdrs.IF_MODIFIED_SINCE,
+ hdrs.IF_NONE_MATCH,
+ hdrs.IF_RANGE,
+ hdrs.IF_UNMODIFIED_SINCE,
+ hdrs.KEEP_ALIVE,
+ hdrs.LAST_EVENT_ID,
+ hdrs.LAST_MODIFIED,
+ hdrs.LINK,
+ hdrs.LOCATION,
+ hdrs.MAX_FORWARDS,
+ hdrs.ORIGIN,
+ hdrs.PRAGMA,
+ hdrs.PROXY_AUTHENTICATE,
+ hdrs.PROXY_AUTHORIZATION,
+ hdrs.RANGE,
+ hdrs.REFERER,
+ hdrs.RETRY_AFTER,
+ hdrs.SEC_WEBSOCKET_ACCEPT,
+ hdrs.SEC_WEBSOCKET_EXTENSIONS,
+ hdrs.SEC_WEBSOCKET_KEY,
+ hdrs.SEC_WEBSOCKET_KEY1,
+ hdrs.SEC_WEBSOCKET_PROTOCOL,
+ hdrs.SEC_WEBSOCKET_VERSION,
+ hdrs.SERVER,
+ hdrs.SET_COOKIE,
+ hdrs.TE,
+ hdrs.TRAILER,
+ hdrs.TRANSFER_ENCODING,
+ hdrs.URI,
+ hdrs.UPGRADE,
+ hdrs.USER_AGENT,
+ hdrs.VARY,
+ hdrs.VIA,
+ hdrs.WWW_AUTHENTICATE,
+ hdrs.WANT_DIGEST,
+ hdrs.WARNING,
+ hdrs.X_FORWARDED_FOR,
+ hdrs.X_FORWARDED_HOST,
+ hdrs.X_FORWARDED_PROTO,
+)
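+
+# The tuple order matches the return values of find_header() in
+# _find_header.c: a non-negative result indexes directly into this tuple.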
diff --git a/contrib/python/aiohttp/aiohttp/_helpers.pyx b/contrib/python/aiohttp/aiohttp/_helpers.pyx
new file mode 100644
index 0000000000..665f367c5d
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_helpers.pyx
@@ -0,0 +1,35 @@
+cdef class reify:
+ """Use as a class method decorator. It operates almost exactly like
+ the Python `@property` decorator, but it puts the result of the
+ method it decorates into the instance dict after the first call,
+ effectively replacing the function it decorates with an instance
+ variable. It is, in Python parlance, a data descriptor.
+
+ """
+
+ cdef object wrapped
+ cdef object name
+
+ def __init__(self, wrapped):
+ self.wrapped = wrapped
+ self.name = wrapped.__name__
+
+ @property
+ def __doc__(self):
+ return self.wrapped.__doc__
+
+ def __get__(self, inst, owner):
+ try:
+ try:
+ return inst._cache[self.name]
+ except KeyError:
+ val = self.wrapped(inst)
+ inst._cache[self.name] = val
+ return val
+ except AttributeError:
+ if inst is None:
+ return self
+ raise
+
+ def __set__(self, inst, value):
+ raise AttributeError("reified property is read-only")
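+
+# Usage sketch (illustrative only; the names below are hypothetical): the
+# host object must provide a `_cache` dict for reify to store results in.
+#
+#     class Request:
+#         def __init__(self, raw):
+#             self._cache = {}
+#             self._raw = raw
+#
+#         @reify
+#         def headers(self):
+#             return parse_headers(self._raw)  # computed once, then cached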
diff --git a/contrib/python/aiohttp/aiohttp/_http_parser.pyx b/contrib/python/aiohttp/aiohttp/_http_parser.pyx
new file mode 100644
index 0000000000..77bf0aa598
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_http_parser.pyx
@@ -0,0 +1,818 @@
+#cython: language_level=3
+#
+# Based on https://github.com/MagicStack/httptools
+#
+from __future__ import absolute_import, print_function
+
+from cpython cimport (
+ Py_buffer,
+ PyBUF_SIMPLE,
+ PyBuffer_Release,
+ PyBytes_AsString,
+ PyBytes_AsStringAndSize,
+ PyObject_GetBuffer,
+)
+from cpython.mem cimport PyMem_Free, PyMem_Malloc
+from libc.limits cimport ULLONG_MAX
+from libc.string cimport memcpy
+
+from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
+from yarl import URL as _URL
+
+from aiohttp import hdrs
+
+from .http_exceptions import (
+ BadHttpMessage,
+ BadStatusLine,
+ ContentLengthError,
+ InvalidHeader,
+ InvalidURLError,
+ LineTooLong,
+ PayloadEncodingError,
+ TransferEncodingError,
+)
+from .http_parser import DeflateBuffer as _DeflateBuffer
+from .http_writer import (
+ HttpVersion as _HttpVersion,
+ HttpVersion10 as _HttpVersion10,
+ HttpVersion11 as _HttpVersion11,
+)
+from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
+
+cimport cython
+
+from aiohttp cimport _cparser as cparser
+
+include "_headers.pxi"
+
+from aiohttp cimport _find_header
+
+DEF DEFAULT_FREELIST_SIZE = 250
+
+cdef extern from "Python.h":
+ int PyByteArray_Resize(object, Py_ssize_t) except -1
+ Py_ssize_t PyByteArray_Size(object) except -1
+ char* PyByteArray_AsString(object)
+
+__all__ = ('HttpRequestParser', 'HttpResponseParser',
+ 'RawRequestMessage', 'RawResponseMessage')
+
+cdef object URL = _URL
+cdef object URL_build = URL.build
+cdef object CIMultiDict = _CIMultiDict
+cdef object CIMultiDictProxy = _CIMultiDictProxy
+cdef object HttpVersion = _HttpVersion
+cdef object HttpVersion10 = _HttpVersion10
+cdef object HttpVersion11 = _HttpVersion11
+cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
+cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
+cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
+cdef object StreamReader = _StreamReader
+cdef object DeflateBuffer = _DeflateBuffer
+
+
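+# Append `length` bytes from a C pointer to a bytearray by resizing it in
+# place and memcpy-ing into the new tail; this avoids building a temporary
+# bytes object on the hot header/URL accumulation path.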
+cdef inline object extend(object buf, const char* at, size_t length):
+ cdef Py_ssize_t s
+ cdef char* ptr
+ s = PyByteArray_Size(buf)
+ PyByteArray_Resize(buf, s + length)
+ ptr = PyByteArray_AsString(buf)
+ memcpy(ptr + s, at, length)
+
+
+DEF METHODS_COUNT = 46
+
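+# Pre-render llhttp's method-name table once at import time so that the
+# per-message lookup below is a plain list index instead of a C call plus
+# an ASCII decode.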
+cdef list _http_method = []
+
+for i in range(METHODS_COUNT):
+ _http_method.append(
+ cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
+
+
+cdef inline str http_method_str(int i):
+ if i < METHODS_COUNT:
+ return <str>_http_method[i]
+ else:
+ return "<unknown>"
+
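+# Resolve a raw header name to the shared istr constant from _headers.pxi
+# via the generated C trie; unrecognized names fall back to decoding the
+# raw bytes with surrogateescape.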
+cdef inline object find_header(bytes raw_header):
+ cdef Py_ssize_t size
+ cdef char *buf
+ cdef int idx
+ PyBytes_AsStringAndSize(raw_header, &buf, &size)
+ idx = _find_header.find_header(buf, size)
+ if idx == -1:
+ return raw_header.decode('utf-8', 'surrogateescape')
+ return headers[idx]
+
+
+@cython.freelist(DEFAULT_FREELIST_SIZE)
+cdef class RawRequestMessage:
+ cdef readonly str method
+ cdef readonly str path
+ cdef readonly object version # HttpVersion
+ cdef readonly object headers # CIMultiDict
+ cdef readonly object raw_headers # tuple
+ cdef readonly object should_close
+ cdef readonly object compression
+ cdef readonly object upgrade
+ cdef readonly object chunked
+ cdef readonly object url # yarl.URL
+
+ def __init__(self, method, path, version, headers, raw_headers,
+ should_close, compression, upgrade, chunked, url):
+ self.method = method
+ self.path = path
+ self.version = version
+ self.headers = headers
+ self.raw_headers = raw_headers
+ self.should_close = should_close
+ self.compression = compression
+ self.upgrade = upgrade
+ self.chunked = chunked
+ self.url = url
+
+ def __repr__(self):
+ info = []
+ info.append(("method", self.method))
+ info.append(("path", self.path))
+ info.append(("version", self.version))
+ info.append(("headers", self.headers))
+ info.append(("raw_headers", self.raw_headers))
+ info.append(("should_close", self.should_close))
+ info.append(("compression", self.compression))
+ info.append(("upgrade", self.upgrade))
+ info.append(("chunked", self.chunked))
+ info.append(("url", self.url))
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
+ return '<RawRequestMessage(' + sinfo + ')>'
+
+ def _replace(self, **dct):
+ cdef RawRequestMessage ret
+ ret = _new_request_message(self.method,
+ self.path,
+ self.version,
+ self.headers,
+ self.raw_headers,
+ self.should_close,
+ self.compression,
+ self.upgrade,
+ self.chunked,
+ self.url)
+ if "method" in dct:
+ ret.method = dct["method"]
+ if "path" in dct:
+ ret.path = dct["path"]
+ if "version" in dct:
+ ret.version = dct["version"]
+ if "headers" in dct:
+ ret.headers = dct["headers"]
+ if "raw_headers" in dct:
+ ret.raw_headers = dct["raw_headers"]
+ if "should_close" in dct:
+ ret.should_close = dct["should_close"]
+ if "compression" in dct:
+ ret.compression = dct["compression"]
+ if "upgrade" in dct:
+ ret.upgrade = dct["upgrade"]
+ if "chunked" in dct:
+ ret.chunked = dct["chunked"]
+ if "url" in dct:
+ ret.url = dct["url"]
+ return ret
+
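+# Fast-path constructor: allocates through __new__ and assigns the fields
+# directly, skipping __init__ argument handling; combined with the
+# freelist above this keeps per-message allocation cheap.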
+cdef _new_request_message(str method,
+ str path,
+ object version,
+ object headers,
+ object raw_headers,
+ bint should_close,
+ object compression,
+ bint upgrade,
+ bint chunked,
+ object url):
+ cdef RawRequestMessage ret
+ ret = RawRequestMessage.__new__(RawRequestMessage)
+ ret.method = method
+ ret.path = path
+ ret.version = version
+ ret.headers = headers
+ ret.raw_headers = raw_headers
+ ret.should_close = should_close
+ ret.compression = compression
+ ret.upgrade = upgrade
+ ret.chunked = chunked
+ ret.url = url
+ return ret
+
+
+@cython.freelist(DEFAULT_FREELIST_SIZE)
+cdef class RawResponseMessage:
+ cdef readonly object version # HttpVersion
+ cdef readonly int code
+ cdef readonly str reason
+ cdef readonly object headers # CIMultiDict
+ cdef readonly object raw_headers # tuple
+ cdef readonly object should_close
+ cdef readonly object compression
+ cdef readonly object upgrade
+ cdef readonly object chunked
+
+ def __init__(self, version, code, reason, headers, raw_headers,
+ should_close, compression, upgrade, chunked):
+ self.version = version
+ self.code = code
+ self.reason = reason
+ self.headers = headers
+ self.raw_headers = raw_headers
+ self.should_close = should_close
+ self.compression = compression
+ self.upgrade = upgrade
+ self.chunked = chunked
+
+ def __repr__(self):
+ info = []
+ info.append(("version", self.version))
+ info.append(("code", self.code))
+ info.append(("reason", self.reason))
+ info.append(("headers", self.headers))
+ info.append(("raw_headers", self.raw_headers))
+ info.append(("should_close", self.should_close))
+ info.append(("compression", self.compression))
+ info.append(("upgrade", self.upgrade))
+ info.append(("chunked", self.chunked))
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
+ return '<RawResponseMessage(' + sinfo + ')>'
+
+
+cdef _new_response_message(object version,
+ int code,
+ str reason,
+ object headers,
+ object raw_headers,
+ bint should_close,
+ object compression,
+ bint upgrade,
+ bint chunked):
+ cdef RawResponseMessage ret
+ ret = RawResponseMessage.__new__(RawResponseMessage)
+ ret.version = version
+ ret.code = code
+ ret.reason = reason
+ ret.headers = headers
+ ret.raw_headers = raw_headers
+ ret.should_close = should_close
+ ret.compression = compression
+ ret.upgrade = upgrade
+ ret.chunked = chunked
+ return ret
+
+
+@cython.internal
+cdef class HttpParser:
+
+ cdef:
+ cparser.llhttp_t* _cparser
+ cparser.llhttp_settings_t* _csettings
+
+ bytearray _raw_name
+ bytearray _raw_value
+ bint _has_value
+
+ object _protocol
+ object _loop
+ object _timer
+
+ size_t _max_line_size
+ size_t _max_field_size
+ size_t _max_headers
+ bint _response_with_body
+ bint _read_until_eof
+
+ bint _started
+ object _url
+ bytearray _buf
+ str _path
+ str _reason
+ object _headers
+ list _raw_headers
+ bint _upgraded
+ list _messages
+ object _payload
+ bint _payload_error
+ object _payload_exception
+ object _last_error
+ bint _auto_decompress
+ int _limit
+
+ str _content_encoding
+
+ Py_buffer py_buf
+
+ def __cinit__(self):
+ self._cparser = <cparser.llhttp_t*> \
+ PyMem_Malloc(sizeof(cparser.llhttp_t))
+ if self._cparser is NULL:
+ raise MemoryError()
+
+ self._csettings = <cparser.llhttp_settings_t*> \
+ PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
+ if self._csettings is NULL:
+ raise MemoryError()
+
+ def __dealloc__(self):
+ PyMem_Free(self._cparser)
+ PyMem_Free(self._csettings)
+
+ cdef _init(
+ self, cparser.llhttp_type mode,
+ object protocol, object loop, int limit,
+ object timer=None,
+ size_t max_line_size=8190, size_t max_headers=32768,
+ size_t max_field_size=8190, payload_exception=None,
+ bint response_with_body=True, bint read_until_eof=False,
+ bint auto_decompress=True,
+ ):
+ cparser.llhttp_settings_init(self._csettings)
+ cparser.llhttp_init(self._cparser, mode, self._csettings)
+ self._cparser.data = <void*>self
+ self._cparser.content_length = 0
+
+ self._protocol = protocol
+ self._loop = loop
+ self._timer = timer
+
+ self._buf = bytearray()
+ self._payload = None
+ self._payload_error = 0
+ self._payload_exception = payload_exception
+ self._messages = []
+
+ self._raw_name = bytearray()
+ self._raw_value = bytearray()
+ self._has_value = False
+
+ self._max_line_size = max_line_size
+ self._max_headers = max_headers
+ self._max_field_size = max_field_size
+ self._response_with_body = response_with_body
+ self._read_until_eof = read_until_eof
+ self._upgraded = False
+ self._auto_decompress = auto_decompress
+ self._content_encoding = None
+
+ self._csettings.on_url = cb_on_url
+ self._csettings.on_status = cb_on_status
+ self._csettings.on_header_field = cb_on_header_field
+ self._csettings.on_header_value = cb_on_header_value
+ self._csettings.on_headers_complete = cb_on_headers_complete
+ self._csettings.on_body = cb_on_body
+ self._csettings.on_message_begin = cb_on_message_begin
+ self._csettings.on_message_complete = cb_on_message_complete
+ self._csettings.on_chunk_header = cb_on_chunk_header
+ self._csettings.on_chunk_complete = cb_on_chunk_complete
+
+ self._last_error = None
+ self._limit = limit
+
+ cdef _process_header(self):
+ if self._raw_name:
+ raw_name = bytes(self._raw_name)
+ raw_value = bytes(self._raw_value)
+
+ name = find_header(raw_name)
+ value = raw_value.decode('utf-8', 'surrogateescape')
+
+ self._headers.add(name, value)
+
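+ # The identity check below is intentional: find_header() returns the
+ # shared istr constant for recognized names, so `is` matches any
+ # casing of Content-Encoding seen on the wire.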
+ if name is CONTENT_ENCODING:
+ self._content_encoding = value
+
+ PyByteArray_Resize(self._raw_name, 0)
+ PyByteArray_Resize(self._raw_value, 0)
+ self._has_value = False
+ self._raw_headers.append((raw_name, raw_value))
+
+ cdef _on_header_field(self, const char* at, size_t length):
+ cdef Py_ssize_t size
+ cdef char *buf
+ if self._has_value:
+ self._process_header()
+
+ size = PyByteArray_Size(self._raw_name)
+ PyByteArray_Resize(self._raw_name, size + length)
+ buf = PyByteArray_AsString(self._raw_name)
+ memcpy(buf + size, at, length)
+
+ cdef _on_header_value(self, const char* at, size_t length):
+ cdef Py_ssize_t size
+ cdef char *buf
+
+ size = PyByteArray_Size(self._raw_value)
+ PyByteArray_Resize(self._raw_value, size + length)
+ buf = PyByteArray_AsString(self._raw_value)
+ memcpy(buf + size, at, length)
+ self._has_value = True
+
+ cdef _on_headers_complete(self):
+ self._process_header()
+
+ method = http_method_str(self._cparser.method)
+ should_close = not cparser.llhttp_should_keep_alive(self._cparser)
+ upgrade = self._cparser.upgrade
+ chunked = self._cparser.flags & cparser.F_CHUNKED
+
+ raw_headers = tuple(self._raw_headers)
+ headers = CIMultiDictProxy(self._headers)
+
+ if upgrade or self._cparser.method == 5: # cparser.CONNECT:
+ self._upgraded = True
+
+ # do not support old websocket spec
+ if SEC_WEBSOCKET_KEY1 in headers:
+ raise InvalidHeader(SEC_WEBSOCKET_KEY1)
+
+ encoding = None
+ enc = self._content_encoding
+ if enc is not None:
+ self._content_encoding = None
+ enc = enc.lower()
+ if enc in ('gzip', 'deflate', 'br'):
+ encoding = enc
+
+ if self._cparser.type == cparser.HTTP_REQUEST:
+ msg = _new_request_message(
+ method, self._path,
+ self.http_version(), headers, raw_headers,
+ should_close, encoding, upgrade, chunked, self._url)
+ else:
+ msg = _new_response_message(
+ self.http_version(), self._cparser.status_code, self._reason,
+ headers, raw_headers, should_close, encoding,
+ upgrade, chunked)
+
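+ # Attach a readable payload whenever a body may follow the headers:
+ # a positive declared content length, chunked framing, a CONNECT
+ # request, or a length-less message that is read until EOF.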
+ if (
+ ULLONG_MAX > self._cparser.content_length > 0 or chunked or
+ self._cparser.method == 5 or # CONNECT: 5
+ (self._cparser.status_code >= 199 and
+ self._cparser.content_length == 0 and
+ self._read_until_eof)
+ ):
+ payload = StreamReader(
+ self._protocol, timer=self._timer, loop=self._loop,
+ limit=self._limit)
+ else:
+ payload = EMPTY_PAYLOAD
+
+ self._payload = payload
+ if encoding is not None and self._auto_decompress:
+ self._payload = DeflateBuffer(payload, encoding)
+
+ if not self._response_with_body:
+ payload = EMPTY_PAYLOAD
+
+ self._messages.append((msg, payload))
+
+ cdef _on_message_complete(self):
+ self._payload.feed_eof()
+ self._payload = None
+
+ cdef _on_chunk_header(self):
+ self._payload.begin_http_chunk_receiving()
+
+ cdef _on_chunk_complete(self):
+ self._payload.end_http_chunk_receiving()
+
+ cdef object _on_status_complete(self):
+ pass
+
+ cdef inline http_version(self):
+ cdef cparser.llhttp_t* parser = self._cparser
+
+ if parser.http_major == 1:
+ if parser.http_minor == 0:
+ return HttpVersion10
+ elif parser.http_minor == 1:
+ return HttpVersion11
+
+ return HttpVersion(parser.http_major, parser.http_minor)
+
+ ### Public API ###
+
+ def feed_eof(self):
+ cdef bytes desc
+
+ if self._payload is not None:
+ if self._cparser.flags & cparser.F_CHUNKED:
+ raise TransferEncodingError(
+ "Not enough data for satisfy transfer length header.")
+ elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
+ raise ContentLengthError(
+ "Not enough data for satisfy content length header.")
+ elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
+ desc = cparser.llhttp_get_error_reason(self._cparser)
+ raise PayloadEncodingError(desc.decode('latin-1'))
+ else:
+ self._payload.feed_eof()
+ elif self._started:
+ self._on_headers_complete()
+ if self._messages:
+ return self._messages[-1][0]
+
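+ # Returns (messages, upgraded, tail): parsed (message, payload)
+ # pairs, whether the connection switched protocols, and any bytes
+ # received past the upgrade point that belong to the new protocol.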
+ def feed_data(self, data):
+ cdef:
+ size_t data_len
+ size_t nb
+ cdef cparser.llhttp_errno_t errno
+
+ PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
+ data_len = <size_t>self.py_buf.len
+
+ errno = cparser.llhttp_execute(
+ self._cparser,
+ <char*>self.py_buf.buf,
+ data_len)
+
+ if errno is cparser.HPE_PAUSED_UPGRADE:
+ cparser.llhttp_resume_after_upgrade(self._cparser)
+
+ nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf
+
+ PyBuffer_Release(&self.py_buf)
+
+ if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
+ if self._payload_error == 0:
+ if self._last_error is not None:
+ ex = self._last_error
+ self._last_error = None
+ else:
+ ex = parser_error_from_errno(self._cparser)
+ self._payload = None
+ raise ex
+
+ if self._messages:
+ messages = self._messages
+ self._messages = []
+ else:
+ messages = ()
+
+ if self._upgraded:
+ return messages, True, data[nb:]
+ else:
+ return messages, False, b''
+
+ def set_upgraded(self, val):
+ self._upgraded = val
+
+
+cdef class HttpRequestParser(HttpParser):
+
+ def __init__(
+ self, protocol, loop, int limit, timer=None,
+ size_t max_line_size=8190, size_t max_headers=32768,
+ size_t max_field_size=8190, payload_exception=None,
+ bint response_with_body=True, bint read_until_eof=False,
+ bint auto_decompress=True,
+ ):
+ self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
+ max_line_size, max_headers, max_field_size,
+ payload_exception, response_with_body, read_until_eof,
+ auto_decompress)
+
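+ # Split the accumulated request target into path, query and fragment
+ # by hand (cheaper than a general URL parse) and rebuild it as a
+ # pre-encoded yarl.URL.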
+ cdef object _on_status_complete(self):
+ cdef int idx1, idx2
+ if not self._buf:
+ return
+ self._path = self._buf.decode('utf-8', 'surrogateescape')
+ try:
+ idx1 = self._path.find("?")
+ if idx1 == -1:
+ query = ""
+ idx2 = self._path.find("#")
+ if idx2 == -1:
+ path = self._path
+ fragment = ""
+ else:
+ path = self._path[0: idx2]
+ fragment = self._path[idx2+1:]
+
+ else:
+ path = self._path[0:idx1]
+ idx1 += 1
+ idx2 = self._path.find("#", idx1+1)
+ if idx2 == -1:
+ query = self._path[idx1:]
+ fragment = ""
+ else:
+ query = self._path[idx1: idx2]
+ fragment = self._path[idx2+1:]
+
+ self._url = URL.build(
+ path=path,
+ query_string=query,
+ fragment=fragment,
+ encoded=True,
+ )
+ finally:
+ PyByteArray_Resize(self._buf, 0)
+
+
+cdef class HttpResponseParser(HttpParser):
+
+ def __init__(
+ self, protocol, loop, int limit, timer=None,
+ size_t max_line_size=8190, size_t max_headers=32768,
+ size_t max_field_size=8190, payload_exception=None,
+ bint response_with_body=True, bint read_until_eof=False,
+ bint auto_decompress=True
+ ):
+ self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
+ max_line_size, max_headers, max_field_size,
+ payload_exception, response_with_body, read_until_eof,
+ auto_decompress)
+
+ cdef object _on_status_complete(self):
+ if self._buf:
+ self._reason = self._buf.decode('utf-8', 'surrogateescape')
+ PyByteArray_Resize(self._buf, 0)
+ else:
+ self._reason = self._reason or ''
+
+cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+
+ pyparser._started = True
+ pyparser._headers = CIMultiDict()
+ pyparser._raw_headers = []
+ PyByteArray_Resize(pyparser._buf, 0)
+ pyparser._path = None
+ pyparser._reason = None
+ return 0
+
+
+cdef int cb_on_url(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ if length > pyparser._max_line_size:
+ raise LineTooLong(
+ 'Status line is too long', pyparser._max_line_size, length)
+ extend(pyparser._buf, at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_status(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ if length > pyparser._max_line_size:
+ raise LineTooLong(
+ 'Status line is too long', pyparser._max_line_size, length)
+ extend(pyparser._buf, at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_header_field(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef Py_ssize_t size
+ try:
+ pyparser._on_status_complete()
+ size = len(pyparser._raw_name) + length
+ if size > pyparser._max_field_size:
+ raise LineTooLong(
+ 'Header name is too long', pyparser._max_field_size, size)
+ pyparser._on_header_field(at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_header_value(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef Py_ssize_t size
+ try:
+ size = len(pyparser._raw_value) + length
+ if size > pyparser._max_field_size:
+ raise LineTooLong(
+ 'Header value is too long', pyparser._max_field_size, size)
+ pyparser._on_header_value(at, length)
+ except BaseException as ex:
+ pyparser._last_error = ex
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._on_status_complete()
+ pyparser._on_headers_complete()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
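+ # Returning 2 tells llhttp that the message has no body; parsing
+ # stops with HPE_PAUSED_UPGRADE, which feed_data() resumes from and
+ # then hands the leftover bytes back to the caller.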
+ if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT
+ return 2
+ else:
+ return 0
+
+
+cdef int cb_on_body(cparser.llhttp_t* parser,
+ const char *at, size_t length) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ cdef bytes body = at[:length]
+ try:
+ pyparser._payload.feed_data(body, length)
+ except BaseException as exc:
+ if pyparser._payload_exception is not None:
+ pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
+ else:
+ pyparser._payload.set_exception(exc)
+ pyparser._payload_error = 1
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._started = False
+ pyparser._on_message_complete()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._on_chunk_header()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ return 0
+
+
+cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
+ cdef HttpParser pyparser = <HttpParser>parser.data
+ try:
+ pyparser._on_chunk_complete()
+ except BaseException as exc:
+ pyparser._last_error = exc
+ return -1
+ else:
+ return 0
+
+
+cdef parser_error_from_errno(cparser.llhttp_t* parser):
+ cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
+ cdef bytes desc = cparser.llhttp_get_error_reason(parser)
+
+ if errno in (cparser.HPE_CB_MESSAGE_BEGIN,
+ cparser.HPE_CB_HEADERS_COMPLETE,
+ cparser.HPE_CB_MESSAGE_COMPLETE,
+ cparser.HPE_CB_CHUNK_HEADER,
+ cparser.HPE_CB_CHUNK_COMPLETE,
+ cparser.HPE_INVALID_CONSTANT,
+ cparser.HPE_INVALID_HEADER_TOKEN,
+ cparser.HPE_INVALID_CONTENT_LENGTH,
+ cparser.HPE_INVALID_CHUNK_SIZE,
+ cparser.HPE_INVALID_EOF_STATE,
+ cparser.HPE_INVALID_TRANSFER_ENCODING):
+ cls = BadHttpMessage
+
+ elif errno == cparser.HPE_INVALID_STATUS:
+ cls = BadStatusLine
+
+ elif errno == cparser.HPE_INVALID_METHOD:
+ cls = BadStatusLine
+
+ elif errno == cparser.HPE_INVALID_VERSION:
+ cls = BadStatusLine
+
+ elif errno == cparser.HPE_INVALID_URL:
+ cls = InvalidURLError
+
+ else:
+ cls = BadHttpMessage
+
+ return cls(desc.decode('latin-1'))
diff --git a/contrib/python/aiohttp/aiohttp/_http_writer.pyx b/contrib/python/aiohttp/aiohttp/_http_writer.pyx
new file mode 100644
index 0000000000..eff8521958
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_http_writer.pyx
@@ -0,0 +1,163 @@
+from cpython.bytes cimport PyBytes_FromStringAndSize
+from cpython.exc cimport PyErr_NoMemory
+from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
+from cpython.object cimport PyObject_Str
+from libc.stdint cimport uint8_t, uint64_t
+from libc.string cimport memcpy
+
+from multidict import istr
+
+DEF BUF_SIZE = 16 * 1024 # 16KiB
+cdef char BUFFER[BUF_SIZE]
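+# Module-level scratch buffer: header blocks are serialized into this static
+# 16KiB area first; the writer only falls back to heap allocation when a
+# message outgrows it.  Safe without locking, since the GIL is held for the
+# whole serialization call.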
+
+cdef object _istr = istr
+
+
+# ----------------- writer ---------------------------
+
+cdef struct Writer:
+ char *buf
+ Py_ssize_t size
+ Py_ssize_t pos
+
+
+cdef inline void _init_writer(Writer* writer):
+ writer.buf = &BUFFER[0]
+ writer.size = BUF_SIZE
+ writer.pos = 0
+
+
+cdef inline void _release_writer(Writer* writer):
+ if writer.buf != BUFFER:
+ PyMem_Free(writer.buf)
+
+
+cdef inline int _write_byte(Writer* writer, uint8_t ch):
+ cdef char * buf
+ cdef Py_ssize_t size
+
+ if writer.pos == writer.size:
+ # reallocate
+ size = writer.size + BUF_SIZE
+ if writer.buf == BUFFER:
+ buf = <char*>PyMem_Malloc(size)
+ if buf == NULL:
+ PyErr_NoMemory()
+ return -1
+ memcpy(buf, writer.buf, writer.size)
+ else:
+ buf = <char*>PyMem_Realloc(writer.buf, size)
+ if buf == NULL:
+ PyErr_NoMemory()
+ return -1
+ writer.buf = buf
+ writer.size = size
+ writer.buf[writer.pos] = <char>ch
+ writer.pos += 1
+ return 0
+
+
+cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
+ cdef uint64_t utf = <uint64_t> symbol
+
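+    # Hand-rolled UTF-8 encoder: emit 1-4 byte sequences depending on the
+    # code point; surrogates and out-of-range values are silently dropped.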
+ if utf < 0x80:
+ return _write_byte(writer, <uint8_t>utf)
+ elif utf < 0x800:
+ if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
+ return -1
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+ elif 0xD800 <= utf <= 0xDFFF:
+        # surrogate code point, ignored
+ return 0
+ elif utf < 0x10000:
+ if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
+ return -1
+ if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
+ return -1
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+ elif utf > 0x10FFFF:
+ # symbol is too large
+ return 0
+ else:
+ if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
+ return -1
+ if _write_byte(writer,
+ <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
+ return -1
+ if _write_byte(writer,
+ <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
+ return -1
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
+
+
+cdef inline int _write_str(Writer* writer, str s):
+ cdef Py_UCS4 ch
+ for ch in s:
+ if _write_utf8(writer, ch) < 0:
+ return -1
+
+
+# --------------- _serialize_headers ----------------------
+
+cdef str to_str(object s):
+ typ = type(s)
+ if typ is str:
+ return <str>s
+ elif typ is _istr:
+ return PyObject_Str(s)
+ elif not isinstance(s, str):
+ raise TypeError("Cannot serialize non-str key {!r}".format(s))
+ else:
+ return str(s)
+
+
+cdef void _safe_header(str string) except *:
+ if "\r" in string or "\n" in string:
+ raise ValueError(
+ "Newline or carriage return character detected in HTTP status message or "
+ "header. This is a potential security issue."
+ )
+
+
+def _serialize_headers(str status_line, headers):
+ cdef Writer writer
+ cdef object key
+ cdef object val
+ cdef bytes ret
+
+ _init_writer(&writer)
+
+ for key, val in headers.items():
+ _safe_header(to_str(key))
+ _safe_header(to_str(val))
+
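+    # A negative return from _write_byte()/_write_str() means PyErr_NoMemory()
+    # has already set a MemoryError; the bare ``raise`` statements below are
+    # intended to propagate that pending exception.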
+ try:
+ if _write_str(&writer, status_line) < 0:
+ raise
+ if _write_byte(&writer, b'\r') < 0:
+ raise
+ if _write_byte(&writer, b'\n') < 0:
+ raise
+
+ for key, val in headers.items():
+ if _write_str(&writer, to_str(key)) < 0:
+ raise
+ if _write_byte(&writer, b':') < 0:
+ raise
+ if _write_byte(&writer, b' ') < 0:
+ raise
+ if _write_str(&writer, to_str(val)) < 0:
+ raise
+ if _write_byte(&writer, b'\r') < 0:
+ raise
+ if _write_byte(&writer, b'\n') < 0:
+ raise
+
+ if _write_byte(&writer, b'\r') < 0:
+ raise
+ if _write_byte(&writer, b'\n') < 0:
+ raise
+
+ return PyBytes_FromStringAndSize(writer.buf, writer.pos)
+ finally:
+ _release_writer(&writer)
diff --git a/contrib/python/aiohttp/aiohttp/_websocket.pyx b/contrib/python/aiohttp/aiohttp/_websocket.pyx
new file mode 100644
index 0000000000..94318d2b1b
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/_websocket.pyx
@@ -0,0 +1,56 @@
+from cpython cimport PyBytes_AsString
+
+
+#from cpython cimport PyByteArray_AsString # cython still not exports that
+cdef extern from "Python.h":
+ char* PyByteArray_AsString(bytearray ba) except NULL
+
+from libc.stdint cimport uint32_t, uint64_t, uintmax_t
+
+
+def _websocket_mask_cython(object mask, object data):
+ """Note, this function mutates its `data` argument
+ """
+ cdef:
+ Py_ssize_t data_len, i
+ # bit operations on signed integers are implementation-specific
+ unsigned char * in_buf
+ const unsigned char * mask_buf
+ uint32_t uint32_msk
+ uint64_t uint64_msk
+
+ assert len(mask) == 4
+
+ if not isinstance(mask, bytes):
+ mask = bytes(mask)
+
+ if isinstance(data, bytearray):
+ data = <bytearray>data
+ else:
+ data = bytearray(data)
+
+ data_len = len(data)
+ in_buf = <unsigned char*>PyByteArray_AsString(data)
+ mask_buf = <const unsigned char*>PyBytes_AsString(mask)
+ uint32_msk = (<uint32_t*>mask_buf)[0]
+
+    # TODO: align the in_buf pointer to achieve even faster speeds
+    # is it needed in Python?! malloc() always aligns to sizeof(long) bytes
+
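+    # Widen the 32-bit mask to 64 bits and XOR 8 bytes per iteration on
+    # 64-bit platforms, then fall back to 4-byte and single-byte tails.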
+ if sizeof(size_t) >= 8:
+ uint64_msk = uint32_msk
+ uint64_msk = (uint64_msk << 32) | uint32_msk
+
+ while data_len >= 8:
+ (<uint64_t*>in_buf)[0] ^= uint64_msk
+ in_buf += 8
+ data_len -= 8
+
+
+ while data_len >= 4:
+ (<uint32_t*>in_buf)[0] ^= uint32_msk
+ in_buf += 4
+ data_len -= 4
+
+ for i in range(0, data_len):
+ in_buf[i] ^= mask_buf[i]
diff --git a/contrib/python/aiohttp/aiohttp/abc.py b/contrib/python/aiohttp/aiohttp/abc.py
new file mode 100644
index 0000000000..06fc831638
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/abc.py
@@ -0,0 +1,207 @@
+import asyncio
+import logging
+from abc import ABC, abstractmethod
+from collections.abc import Sized
+from http.cookies import BaseCookie, Morsel
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Dict,
+ Generator,
+ Iterable,
+ List,
+ Optional,
+ Tuple,
+)
+
+from multidict import CIMultiDict
+from yarl import URL
+
+from .helpers import get_running_loop
+from .typedefs import LooseCookies
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_app import Application
+ from .web_exceptions import HTTPException
+ from .web_request import BaseRequest, Request
+ from .web_response import StreamResponse
+else:
+ BaseRequest = Request = Application = StreamResponse = None
+ HTTPException = None
+
+
+class AbstractRouter(ABC):
+ def __init__(self) -> None:
+ self._frozen = False
+
+ def post_init(self, app: Application) -> None:
+ """Post init stage.
+
+        Not an abstract method for the sake of backward compatibility,
+ but if the router wants to be aware of the application
+ it can override this.
+ """
+
+ @property
+ def frozen(self) -> bool:
+ return self._frozen
+
+ def freeze(self) -> None:
+ """Freeze router."""
+ self._frozen = True
+
+ @abstractmethod
+ async def resolve(self, request: Request) -> "AbstractMatchInfo":
+ """Return MATCH_INFO for given request"""
+
+
+class AbstractMatchInfo(ABC):
+ @property # pragma: no branch
+ @abstractmethod
+ def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
+ """Execute matched request handler"""
+
+ @property
+ @abstractmethod
+ def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
+ """Expect handler for 100-continue processing"""
+
+ @property # pragma: no branch
+ @abstractmethod
+ def http_exception(self) -> Optional[HTTPException]:
+ """HTTPException instance raised on router's resolving, or None"""
+
+ @abstractmethod # pragma: no branch
+ def get_info(self) -> Dict[str, Any]:
+ """Return a dict with additional info useful for introspection"""
+
+ @property # pragma: no branch
+ @abstractmethod
+ def apps(self) -> Tuple[Application, ...]:
+ """Stack of nested applications.
+
+ Top level application is left-most element.
+
+ """
+
+ @abstractmethod
+ def add_app(self, app: Application) -> None:
+ """Add application to the nested apps stack."""
+
+ @abstractmethod
+ def freeze(self) -> None:
+ """Freeze the match info.
+
+ The method is called after route resolution.
+
+ After the call .add_app() is forbidden.
+
+ """
+
+
+class AbstractView(ABC):
+ """Abstract class based view."""
+
+ def __init__(self, request: Request) -> None:
+ self._request = request
+
+ @property
+ def request(self) -> Request:
+ """Request instance."""
+ return self._request
+
+ @abstractmethod
+ def __await__(self) -> Generator[Any, None, StreamResponse]:
+ """Execute the view handler."""
+
+
+class AbstractResolver(ABC):
+ """Abstract DNS resolver."""
+
+ @abstractmethod
+ async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
+ """Return IP address for given hostname"""
+
+ @abstractmethod
+ async def close(self) -> None:
+ """Release resolver"""
+
+
+if TYPE_CHECKING: # pragma: no cover
+ IterableBase = Iterable[Morsel[str]]
+else:
+ IterableBase = Iterable
+
+
+ClearCookiePredicate = Callable[["Morsel[str]"], bool]
+
+
+class AbstractCookieJar(Sized, IterableBase):
+ """Abstract Cookie Jar."""
+
+ def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
+ self._loop = get_running_loop(loop)
+
+ @abstractmethod
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
+ """Clear all cookies if no predicate is passed."""
+
+ @abstractmethod
+ def clear_domain(self, domain: str) -> None:
+ """Clear all cookies for domain and all subdomains."""
+
+ @abstractmethod
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
+ """Update cookies."""
+
+ @abstractmethod
+ def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
+ """Return the jar's cookies filtered by their attributes."""
+
+
+class AbstractStreamWriter(ABC):
+ """Abstract stream writer."""
+
+ buffer_size = 0
+ output_size = 0
+ length = 0 # type: Optional[int]
+
+ @abstractmethod
+ async def write(self, chunk: bytes) -> None:
+ """Write chunk into stream."""
+
+ @abstractmethod
+ async def write_eof(self, chunk: bytes = b"") -> None:
+ """Write last chunk."""
+
+ @abstractmethod
+ async def drain(self) -> None:
+ """Flush the write buffer."""
+
+ @abstractmethod
+ def enable_compression(self, encoding: str = "deflate") -> None:
+ """Enable HTTP body compression"""
+
+ @abstractmethod
+ def enable_chunking(self) -> None:
+ """Enable HTTP chunked mode"""
+
+ @abstractmethod
+ async def write_headers(
+ self, status_line: str, headers: "CIMultiDict[str]"
+ ) -> None:
+ """Write HTTP headers"""
+
+
+class AbstractAccessLogger(ABC):
+ """Abstract writer to access log."""
+
+ def __init__(self, logger: logging.Logger, log_format: str) -> None:
+ self.logger = logger
+ self.log_format = log_format
+
+ @abstractmethod
+ def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
+ """Emit log to logger."""
diff --git a/contrib/python/aiohttp/aiohttp/base_protocol.py b/contrib/python/aiohttp/aiohttp/base_protocol.py
new file mode 100644
index 0000000000..fff4610a1e
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/base_protocol.py
@@ -0,0 +1,87 @@
+import asyncio
+from typing import Optional, cast
+
+from .tcp_helpers import tcp_nodelay
+
+
+class BaseProtocol(asyncio.Protocol):
+ __slots__ = (
+ "_loop",
+ "_paused",
+ "_drain_waiter",
+ "_connection_lost",
+ "_reading_paused",
+ "transport",
+ )
+
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
+ self._loop = loop # type: asyncio.AbstractEventLoop
+ self._paused = False
+ self._drain_waiter = None # type: Optional[asyncio.Future[None]]
+ self._connection_lost = False
+ self._reading_paused = False
+
+ self.transport = None # type: Optional[asyncio.Transport]
+
+ def pause_writing(self) -> None:
+ assert not self._paused
+ self._paused = True
+
+ def resume_writing(self) -> None:
+ assert self._paused
+ self._paused = False
+
+ waiter = self._drain_waiter
+ if waiter is not None:
+ self._drain_waiter = None
+ if not waiter.done():
+ waiter.set_result(None)
+
+ def pause_reading(self) -> None:
+ if not self._reading_paused and self.transport is not None:
+ try:
+ self.transport.pause_reading()
+ except (AttributeError, NotImplementedError, RuntimeError):
+ pass
+ self._reading_paused = True
+
+ def resume_reading(self) -> None:
+ if self._reading_paused and self.transport is not None:
+ try:
+ self.transport.resume_reading()
+ except (AttributeError, NotImplementedError, RuntimeError):
+ pass
+ self._reading_paused = False
+
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
+ tr = cast(asyncio.Transport, transport)
+ tcp_nodelay(tr, True)
+ self.transport = tr
+
+ def connection_lost(self, exc: Optional[BaseException]) -> None:
+ self._connection_lost = True
+ # Wake up the writer if currently paused.
+ self.transport = None
+ if not self._paused:
+ return
+ waiter = self._drain_waiter
+ if waiter is None:
+ return
+ self._drain_waiter = None
+ if waiter.done():
+ return
+ if exc is None:
+ waiter.set_result(None)
+ else:
+ waiter.set_exception(exc)
+
+ async def _drain_helper(self) -> None:
+ if self._connection_lost:
+ raise ConnectionResetError("Connection lost")
+ if not self._paused:
+ return
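+        # Writing is paused by flow control: park on a future that
+        # resume_writing() or connection_lost() completes; shield() keeps a
+        # cancelled caller from cancelling the waiter itself.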
+ waiter = self._drain_waiter
+ if waiter is None:
+ waiter = self._loop.create_future()
+ self._drain_waiter = waiter
+ await asyncio.shield(waiter)
diff --git a/contrib/python/aiohttp/aiohttp/client.py b/contrib/python/aiohttp/aiohttp/client.py
new file mode 100644
index 0000000000..6ae9549db9
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/client.py
@@ -0,0 +1,1304 @@
+"""HTTP Client for asyncio."""
+
+import asyncio
+import base64
+import hashlib
+import json
+import os
+import sys
+import traceback
+import warnings
+from contextlib import suppress
+from types import SimpleNamespace, TracebackType
+from typing import (
+ Any,
+ Awaitable,
+ Callable,
+ Coroutine,
+ FrozenSet,
+ Generator,
+ Generic,
+ Iterable,
+ List,
+ Mapping,
+ Optional,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+)
+
+import attr
+from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
+from yarl import URL
+
+from . import hdrs, http, payload
+from .abc import AbstractCookieJar
+from .client_exceptions import (
+ ClientConnectionError as ClientConnectionError,
+ ClientConnectorCertificateError as ClientConnectorCertificateError,
+ ClientConnectorError as ClientConnectorError,
+ ClientConnectorSSLError as ClientConnectorSSLError,
+ ClientError as ClientError,
+ ClientHttpProxyError as ClientHttpProxyError,
+ ClientOSError as ClientOSError,
+ ClientPayloadError as ClientPayloadError,
+ ClientProxyConnectionError as ClientProxyConnectionError,
+ ClientResponseError as ClientResponseError,
+ ClientSSLError as ClientSSLError,
+ ContentTypeError as ContentTypeError,
+ InvalidURL as InvalidURL,
+ ServerConnectionError as ServerConnectionError,
+ ServerDisconnectedError as ServerDisconnectedError,
+ ServerFingerprintMismatch as ServerFingerprintMismatch,
+ ServerTimeoutError as ServerTimeoutError,
+ TooManyRedirects as TooManyRedirects,
+ WSServerHandshakeError as WSServerHandshakeError,
+)
+from .client_reqrep import (
+ ClientRequest as ClientRequest,
+ ClientResponse as ClientResponse,
+ Fingerprint as Fingerprint,
+ RequestInfo as RequestInfo,
+ _merge_ssl_params,
+)
+from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse
+from .connector import (
+ BaseConnector as BaseConnector,
+ NamedPipeConnector as NamedPipeConnector,
+ TCPConnector as TCPConnector,
+ UnixConnector as UnixConnector,
+)
+from .cookiejar import CookieJar
+from .helpers import (
+ DEBUG,
+ PY_36,
+ BasicAuth,
+ TimeoutHandle,
+ ceil_timeout,
+ get_env_proxy_for_url,
+ get_running_loop,
+ sentinel,
+ strip_auth_from_url,
+)
+from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
+from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse
+from .streams import FlowControlDataQueue
+from .tracing import Trace, TraceConfig
+from .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
+
+__all__ = (
+ # client_exceptions
+ "ClientConnectionError",
+ "ClientConnectorCertificateError",
+ "ClientConnectorError",
+ "ClientConnectorSSLError",
+ "ClientError",
+ "ClientHttpProxyError",
+ "ClientOSError",
+ "ClientPayloadError",
+ "ClientProxyConnectionError",
+ "ClientResponseError",
+ "ClientSSLError",
+ "ContentTypeError",
+ "InvalidURL",
+ "ServerConnectionError",
+ "ServerDisconnectedError",
+ "ServerFingerprintMismatch",
+ "ServerTimeoutError",
+ "TooManyRedirects",
+ "WSServerHandshakeError",
+ # client_reqrep
+ "ClientRequest",
+ "ClientResponse",
+ "Fingerprint",
+ "RequestInfo",
+ # connector
+ "BaseConnector",
+ "TCPConnector",
+ "UnixConnector",
+ "NamedPipeConnector",
+ # client_ws
+ "ClientWebSocketResponse",
+ # client
+ "ClientSession",
+ "ClientTimeout",
+ "request",
+)
+
+
+try:
+ from ssl import SSLContext
+except ImportError: # pragma: no cover
+ SSLContext = object # type: ignore[misc,assignment]
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ClientTimeout:
+ total: Optional[float] = None
+ connect: Optional[float] = None
+ sock_read: Optional[float] = None
+ sock_connect: Optional[float] = None
+
+ # pool_queue_timeout: Optional[float] = None
+ # dns_resolution_timeout: Optional[float] = None
+ # socket_connect_timeout: Optional[float] = None
+ # connection_acquiring_timeout: Optional[float] = None
+ # new_connection_timeout: Optional[float] = None
+ # http_header_timeout: Optional[float] = None
+ # response_body_timeout: Optional[float] = None
+
+ # to create a timeout specific for a single request, either
+ # - create a completely new one to overwrite the default
+ # - or use http://www.attrs.org/en/stable/api.html#attr.evolve
+ # to overwrite the defaults
+
+
+# 5 Minute default read timeout
+DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)
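+# Per-request override sketch: the class is frozen, so evolve the default
+# instead of mutating it, e.g. ``attr.evolve(DEFAULT_TIMEOUT, connect=10)``
+# passed via the ``timeout=`` argument of a request.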
+
+_RetType = TypeVar("_RetType")
+
+
+class ClientSession:
+ """First-class interface for making HTTP requests."""
+
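+    # Typical usage (sketch):
+    #
+    #     async with ClientSession() as session:
+    #         async with session.get("http://example.com") as resp:
+    #             body = await resp.text()
+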
+ ATTRS = frozenset(
+ [
+ "_base_url",
+ "_source_traceback",
+ "_connector",
+ "requote_redirect_url",
+ "_loop",
+ "_cookie_jar",
+ "_connector_owner",
+ "_default_auth",
+ "_version",
+ "_json_serialize",
+ "_requote_redirect_url",
+ "_timeout",
+ "_raise_for_status",
+ "_auto_decompress",
+ "_trust_env",
+ "_default_headers",
+ "_skip_auto_headers",
+ "_request_class",
+ "_response_class",
+ "_ws_response_class",
+ "_trace_configs",
+ "_read_bufsize",
+ ]
+ )
+
+ _source_traceback = None
+
+ def __init__(
+ self,
+ base_url: Optional[StrOrURL] = None,
+ *,
+ connector: Optional[BaseConnector] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ cookies: Optional[LooseCookies] = None,
+ headers: Optional[LooseHeaders] = None,
+ skip_auto_headers: Optional[Iterable[str]] = None,
+ auth: Optional[BasicAuth] = None,
+ json_serialize: JSONEncoder = json.dumps,
+ request_class: Type[ClientRequest] = ClientRequest,
+ response_class: Type[ClientResponse] = ClientResponse,
+ ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
+ version: HttpVersion = http.HttpVersion11,
+ cookie_jar: Optional[AbstractCookieJar] = None,
+ connector_owner: bool = True,
+ raise_for_status: bool = False,
+ read_timeout: Union[float, object] = sentinel,
+ conn_timeout: Optional[float] = None,
+ timeout: Union[object, ClientTimeout] = sentinel,
+ auto_decompress: bool = True,
+ trust_env: bool = False,
+ requote_redirect_url: bool = True,
+ trace_configs: Optional[List[TraceConfig]] = None,
+ read_bufsize: int = 2 ** 16,
+ ) -> None:
+ if loop is None:
+ if connector is not None:
+ loop = connector._loop
+
+ loop = get_running_loop(loop)
+
+ if base_url is None or isinstance(base_url, URL):
+ self._base_url: Optional[URL] = base_url
+ else:
+ self._base_url = URL(base_url)
+ assert (
+ self._base_url.origin() == self._base_url
+ ), "Only absolute URLs without path part are supported"
+
+ if connector is None:
+ connector = TCPConnector(loop=loop)
+
+ if connector._loop is not loop:
+ raise RuntimeError("Session and connector has to use same event loop")
+
+ self._loop = loop
+
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+ if cookie_jar is None:
+ cookie_jar = CookieJar(loop=loop)
+ self._cookie_jar = cookie_jar
+
+ if cookies is not None:
+ self._cookie_jar.update_cookies(cookies)
+
+ self._connector = connector # type: Optional[BaseConnector]
+ self._connector_owner = connector_owner
+ self._default_auth = auth
+ self._version = version
+ self._json_serialize = json_serialize
+ if timeout is sentinel:
+ self._timeout = DEFAULT_TIMEOUT
+ if read_timeout is not sentinel:
+ warnings.warn(
+ "read_timeout is deprecated, " "use timeout argument instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self._timeout = attr.evolve(self._timeout, total=read_timeout)
+ if conn_timeout is not None:
+ self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
+ warnings.warn(
+ "conn_timeout is deprecated, " "use timeout argument instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ else:
+ self._timeout = timeout # type: ignore[assignment]
+ if read_timeout is not sentinel:
+ raise ValueError(
+ "read_timeout and timeout parameters "
+ "conflict, please setup "
+ "timeout.read"
+ )
+ if conn_timeout is not None:
+ raise ValueError(
+ "conn_timeout and timeout parameters "
+ "conflict, please setup "
+ "timeout.connect"
+ )
+ self._raise_for_status = raise_for_status
+ self._auto_decompress = auto_decompress
+ self._trust_env = trust_env
+ self._requote_redirect_url = requote_redirect_url
+ self._read_bufsize = read_bufsize
+
+ # Convert to list of tuples
+ if headers:
+ real_headers = CIMultiDict(headers) # type: CIMultiDict[str]
+ else:
+ real_headers = CIMultiDict()
+ self._default_headers = real_headers # type: CIMultiDict[str]
+ if skip_auto_headers is not None:
+ self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
+ else:
+ self._skip_auto_headers = frozenset()
+
+ self._request_class = request_class
+ self._response_class = response_class
+ self._ws_response_class = ws_response_class
+
+ self._trace_configs = trace_configs or []
+ for trace_config in self._trace_configs:
+ trace_config.freeze()
+
+ def __init_subclass__(cls: Type["ClientSession"]) -> None:
+ warnings.warn(
+ "Inheritance class {} from ClientSession "
+ "is discouraged".format(cls.__name__),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ if DEBUG:
+
+ def __setattr__(self, name: str, val: Any) -> None:
+ if name not in self.ATTRS:
+ warnings.warn(
+ "Setting custom ClientSession.{} attribute "
+ "is discouraged".format(name),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ super().__setattr__(name, val)
+
+ def __del__(self, _warnings: Any = warnings) -> None:
+ if not self.closed:
+ if PY_36:
+ kwargs = {"source": self}
+ else:
+ kwargs = {}
+ _warnings.warn(
+ f"Unclosed client session {self!r}", ResourceWarning, **kwargs
+ )
+ context = {"client_session": self, "message": "Unclosed client session"}
+ if self._source_traceback is not None:
+ context["source_traceback"] = self._source_traceback
+ self._loop.call_exception_handler(context)
+
+ def request(
+ self, method: str, url: StrOrURL, **kwargs: Any
+ ) -> "_RequestContextManager":
+ """Perform HTTP request."""
+ return _RequestContextManager(self._request(method, url, **kwargs))
+
+ def _build_url(self, str_or_url: StrOrURL) -> URL:
+ url = URL(str_or_url)
+ if self._base_url is None:
+ return url
+ else:
+ assert not url.is_absolute() and url.path.startswith("/")
+ return self._base_url.join(url)
+
+ async def _request(
+ self,
+ method: str,
+ str_or_url: StrOrURL,
+ *,
+ params: Optional[Mapping[str, str]] = None,
+ data: Any = None,
+ json: Any = None,
+ cookies: Optional[LooseCookies] = None,
+ headers: Optional[LooseHeaders] = None,
+ skip_auto_headers: Optional[Iterable[str]] = None,
+ auth: Optional[BasicAuth] = None,
+ allow_redirects: bool = True,
+ max_redirects: int = 10,
+ compress: Optional[str] = None,
+ chunked: Optional[bool] = None,
+ expect100: bool = False,
+ raise_for_status: Optional[bool] = None,
+ read_until_eof: bool = True,
+ proxy: Optional[StrOrURL] = None,
+ proxy_auth: Optional[BasicAuth] = None,
+ timeout: Union[ClientTimeout, object] = sentinel,
+ verify_ssl: Optional[bool] = None,
+ fingerprint: Optional[bytes] = None,
+ ssl_context: Optional[SSLContext] = None,
+ ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None,
+ proxy_headers: Optional[LooseHeaders] = None,
+ trace_request_ctx: Optional[SimpleNamespace] = None,
+ read_bufsize: Optional[int] = None,
+ ) -> ClientResponse:
+
+ # NOTE: timeout clamps existing connect and read timeouts. We cannot
+ # set the default to None because we need to detect if the user wants
+ # to use the existing timeouts by setting timeout to None.
+
+ if self.closed:
+ raise RuntimeError("Session is closed")
+
+ ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
+
+ if data is not None and json is not None:
+ raise ValueError(
+ "data and json parameters can not be used at the same time"
+ )
+ elif json is not None:
+ data = payload.JsonPayload(json, dumps=self._json_serialize)
+
+ if not isinstance(chunked, bool) and chunked is not None:
+ warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
+
+ redirects = 0
+ history = []
+ version = self._version
+
+ # Merge with default headers and transform to CIMultiDict
+ headers = self._prepare_headers(headers)
+ proxy_headers = self._prepare_headers(proxy_headers)
+
+ try:
+ url = self._build_url(str_or_url)
+ except ValueError as e:
+ raise InvalidURL(str_or_url) from e
+
+ skip_headers = set(self._skip_auto_headers)
+ if skip_auto_headers is not None:
+ for i in skip_auto_headers:
+ skip_headers.add(istr(i))
+
+ if proxy is not None:
+ try:
+ proxy = URL(proxy)
+ except ValueError as e:
+ raise InvalidURL(proxy) from e
+
+ if timeout is sentinel:
+ real_timeout = self._timeout # type: ClientTimeout
+ else:
+ if not isinstance(timeout, ClientTimeout):
+ real_timeout = ClientTimeout(total=timeout) # type: ignore[arg-type]
+ else:
+ real_timeout = timeout
+ # timeout is cumulative for all request operations
+ # (request, redirects, responses, data consuming)
+ tm = TimeoutHandle(self._loop, real_timeout.total)
+ handle = tm.start()
+
+ if read_bufsize is None:
+ read_bufsize = self._read_bufsize
+
+ traces = [
+ Trace(
+ self,
+ trace_config,
+ trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
+ )
+ for trace_config in self._trace_configs
+ ]
+
+ for trace in traces:
+ await trace.send_request_start(method, url.update_query(params), headers)
+
+ timer = tm.timer()
+ try:
+ with timer:
+ while True:
+ url, auth_from_url = strip_auth_from_url(url)
+ if auth and auth_from_url:
+ raise ValueError(
+ "Cannot combine AUTH argument with "
+ "credentials encoded in URL"
+ )
+
+ if auth is None:
+ auth = auth_from_url
+ if auth is None:
+ auth = self._default_auth
+ # It would be confusing if we support explicit
+ # Authorization header with auth argument
+ if (
+ headers is not None
+ and auth is not None
+ and hdrs.AUTHORIZATION in headers
+ ):
+ raise ValueError(
+ "Cannot combine AUTHORIZATION header "
+ "with AUTH argument or credentials "
+ "encoded in URL"
+ )
+
+ all_cookies = self._cookie_jar.filter_cookies(url)
+
+ if cookies is not None:
+ tmp_cookie_jar = CookieJar()
+ tmp_cookie_jar.update_cookies(cookies)
+ req_cookies = tmp_cookie_jar.filter_cookies(url)
+ if req_cookies:
+ all_cookies.load(req_cookies)
+
+ if proxy is not None:
+ proxy = URL(proxy)
+ elif self._trust_env:
+ with suppress(LookupError):
+ proxy, proxy_auth = get_env_proxy_for_url(url)
+
+ req = self._request_class(
+ method,
+ url,
+ params=params,
+ headers=headers,
+ skip_auto_headers=skip_headers,
+ data=data,
+ cookies=all_cookies,
+ auth=auth,
+ version=version,
+ compress=compress,
+ chunked=chunked,
+ expect100=expect100,
+ loop=self._loop,
+ response_class=self._response_class,
+ proxy=proxy,
+ proxy_auth=proxy_auth,
+ timer=timer,
+ session=self,
+ ssl=ssl,
+ proxy_headers=proxy_headers,
+ traces=traces,
+ )
+
+ # connection timeout
+ try:
+ async with ceil_timeout(real_timeout.connect):
+ assert self._connector is not None
+ conn = await self._connector.connect(
+ req, traces=traces, timeout=real_timeout
+ )
+ except asyncio.TimeoutError as exc:
+ raise ServerTimeoutError(
+ "Connection timeout " "to host {}".format(url)
+ ) from exc
+
+ assert conn.transport is not None
+
+ assert conn.protocol is not None
+ conn.protocol.set_response_params(
+ timer=timer,
+ skip_payload=method.upper() == "HEAD",
+ read_until_eof=read_until_eof,
+ auto_decompress=self._auto_decompress,
+ read_timeout=real_timeout.sock_read,
+ read_bufsize=read_bufsize,
+ )
+
+ try:
+ try:
+ resp = await req.send(conn)
+ try:
+ await resp.start(conn)
+ except BaseException:
+ resp.close()
+ raise
+ except BaseException:
+ conn.close()
+ raise
+ except ClientError:
+ raise
+ except OSError as exc:
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+ raise
+ raise ClientOSError(*exc.args) from exc
+
+ self._cookie_jar.update_cookies(resp.cookies, resp.url)
+
+ # redirects
+ if resp.status in (301, 302, 303, 307, 308) and allow_redirects:
+
+ for trace in traces:
+ await trace.send_request_redirect(
+ method, url.update_query(params), headers, resp
+ )
+
+ redirects += 1
+ history.append(resp)
+ if max_redirects and redirects >= max_redirects:
+ resp.close()
+ raise TooManyRedirects(
+ history[0].request_info, tuple(history)
+ )
+
+ # For 301 and 302, mimic IE, now changed in RFC
+ # https://github.com/kennethreitz/requests/pull/269
+ if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
+ resp.status in (301, 302) and resp.method == hdrs.METH_POST
+ ):
+ method = hdrs.METH_GET
+ data = None
+ if headers.get(hdrs.CONTENT_LENGTH):
+ headers.pop(hdrs.CONTENT_LENGTH)
+
+ r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
+ hdrs.URI
+ )
+ if r_url is None:
+ # see github.com/aio-libs/aiohttp/issues/2022
+ break
+ else:
+ # reading from correct redirection
+ # response is forbidden
+ resp.release()
+
+ try:
+ parsed_url = URL(
+ r_url, encoded=not self._requote_redirect_url
+ )
+
+ except ValueError as e:
+ raise InvalidURL(r_url) from e
+
+ scheme = parsed_url.scheme
+ if scheme not in ("http", "https", ""):
+ resp.close()
+ raise ValueError("Can redirect only to http or https")
+ elif not scheme:
+ parsed_url = url.join(parsed_url)
+
+ if url.origin() != parsed_url.origin():
+ auth = None
+ headers.pop(hdrs.AUTHORIZATION, None)
+
+ url = parsed_url
+ params = None
+ resp.release()
+ continue
+
+ break
+
+ # check response status
+ if raise_for_status is None:
+ raise_for_status = self._raise_for_status
+ if raise_for_status:
+ resp.raise_for_status()
+
+ # register connection
+ if handle is not None:
+ if resp.connection is not None:
+ resp.connection.add_callback(handle.cancel)
+ else:
+ handle.cancel()
+
+ resp._history = tuple(history)
+
+ for trace in traces:
+ await trace.send_request_end(
+ method, url.update_query(params), headers, resp
+ )
+ return resp
+
+ except BaseException as e:
+ # cleanup timer
+ tm.close()
+ if handle:
+ handle.cancel()
+ handle = None
+
+ for trace in traces:
+ await trace.send_request_exception(
+ method, url.update_query(params), headers, e
+ )
+ raise
+
+ def ws_connect(
+ self,
+ url: StrOrURL,
+ *,
+ method: str = hdrs.METH_GET,
+ protocols: Iterable[str] = (),
+ timeout: float = 10.0,
+ receive_timeout: Optional[float] = None,
+ autoclose: bool = True,
+ autoping: bool = True,
+ heartbeat: Optional[float] = None,
+ auth: Optional[BasicAuth] = None,
+ origin: Optional[str] = None,
+ params: Optional[Mapping[str, str]] = None,
+ headers: Optional[LooseHeaders] = None,
+ proxy: Optional[StrOrURL] = None,
+ proxy_auth: Optional[BasicAuth] = None,
+ ssl: Union[SSLContext, bool, None, Fingerprint] = None,
+ verify_ssl: Optional[bool] = None,
+ fingerprint: Optional[bytes] = None,
+ ssl_context: Optional[SSLContext] = None,
+ proxy_headers: Optional[LooseHeaders] = None,
+ compress: int = 0,
+ max_msg_size: int = 4 * 1024 * 1024,
+ ) -> "_WSRequestContextManager":
+ """Initiate websocket connection."""
+ return _WSRequestContextManager(
+ self._ws_connect(
+ url,
+ method=method,
+ protocols=protocols,
+ timeout=timeout,
+ receive_timeout=receive_timeout,
+ autoclose=autoclose,
+ autoping=autoping,
+ heartbeat=heartbeat,
+ auth=auth,
+ origin=origin,
+ params=params,
+ headers=headers,
+ proxy=proxy,
+ proxy_auth=proxy_auth,
+ ssl=ssl,
+ verify_ssl=verify_ssl,
+ fingerprint=fingerprint,
+ ssl_context=ssl_context,
+ proxy_headers=proxy_headers,
+ compress=compress,
+ max_msg_size=max_msg_size,
+ )
+ )
+
+ async def _ws_connect(
+ self,
+ url: StrOrURL,
+ *,
+ method: str = hdrs.METH_GET,
+ protocols: Iterable[str] = (),
+ timeout: float = 10.0,
+ receive_timeout: Optional[float] = None,
+ autoclose: bool = True,
+ autoping: bool = True,
+ heartbeat: Optional[float] = None,
+ auth: Optional[BasicAuth] = None,
+ origin: Optional[str] = None,
+ params: Optional[Mapping[str, str]] = None,
+ headers: Optional[LooseHeaders] = None,
+ proxy: Optional[StrOrURL] = None,
+ proxy_auth: Optional[BasicAuth] = None,
+ ssl: Union[SSLContext, bool, None, Fingerprint] = None,
+ verify_ssl: Optional[bool] = None,
+ fingerprint: Optional[bytes] = None,
+ ssl_context: Optional[SSLContext] = None,
+ proxy_headers: Optional[LooseHeaders] = None,
+ compress: int = 0,
+ max_msg_size: int = 4 * 1024 * 1024,
+ ) -> ClientWebSocketResponse:
+
+ if headers is None:
+ real_headers = CIMultiDict() # type: CIMultiDict[str]
+ else:
+ real_headers = CIMultiDict(headers)
+
+ default_headers = {
+ hdrs.UPGRADE: "websocket",
+ hdrs.CONNECTION: "upgrade",
+ hdrs.SEC_WEBSOCKET_VERSION: "13",
+ }
+
+ for key, value in default_headers.items():
+ real_headers.setdefault(key, value)
+
+ sec_key = base64.b64encode(os.urandom(16))
+ real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
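+        # RFC 6455 handshake: the server must answer with
+        # base64(SHA-1(key + WS_KEY)) in Sec-WebSocket-Accept; this is
+        # verified against ``match`` below.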
+
+ if protocols:
+ real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
+ if origin is not None:
+ real_headers[hdrs.ORIGIN] = origin
+ if compress:
+ extstr = ws_ext_gen(compress=compress)
+ real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
+
+ ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
+
+ # send request
+ resp = await self.request(
+ method,
+ url,
+ params=params,
+ headers=real_headers,
+ read_until_eof=False,
+ auth=auth,
+ proxy=proxy,
+ proxy_auth=proxy_auth,
+ ssl=ssl,
+ proxy_headers=proxy_headers,
+ )
+
+ try:
+ # check handshake
+ if resp.status != 101:
+ raise WSServerHandshakeError(
+ resp.request_info,
+ resp.history,
+ message="Invalid response status",
+ status=resp.status,
+ headers=resp.headers,
+ )
+
+ if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
+ raise WSServerHandshakeError(
+ resp.request_info,
+ resp.history,
+ message="Invalid upgrade header",
+ status=resp.status,
+ headers=resp.headers,
+ )
+
+ if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
+ raise WSServerHandshakeError(
+ resp.request_info,
+ resp.history,
+ message="Invalid connection header",
+ status=resp.status,
+ headers=resp.headers,
+ )
+
+ # key calculation
+ r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
+ match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
+ if r_key != match:
+ raise WSServerHandshakeError(
+ resp.request_info,
+ resp.history,
+ message="Invalid challenge response",
+ status=resp.status,
+ headers=resp.headers,
+ )
+
+ # websocket protocol
+ protocol = None
+ if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
+ resp_protocols = [
+ proto.strip()
+ for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
+ ]
+
+ for proto in resp_protocols:
+ if proto in protocols:
+ protocol = proto
+ break
+
+ # websocket compress
+ notakeover = False
+ if compress:
+ compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
+ if compress_hdrs:
+ try:
+ compress, notakeover = ws_ext_parse(compress_hdrs)
+ except WSHandshakeError as exc:
+ raise WSServerHandshakeError(
+ resp.request_info,
+ resp.history,
+ message=exc.args[0],
+ status=resp.status,
+ headers=resp.headers,
+ ) from exc
+ else:
+ compress = 0
+ notakeover = False
+
+ conn = resp.connection
+ assert conn is not None
+ conn_proto = conn.protocol
+ assert conn_proto is not None
+ transport = conn.transport
+ assert transport is not None
+ reader = FlowControlDataQueue(
+ conn_proto, 2 ** 16, loop=self._loop
+ ) # type: FlowControlDataQueue[WSMessage]
+ conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
+ writer = WebSocketWriter(
+ conn_proto,
+ transport,
+ use_mask=True,
+ compress=compress,
+ notakeover=notakeover,
+ )
+ except BaseException:
+ resp.close()
+ raise
+ else:
+ return self._ws_response_class(
+ reader,
+ writer,
+ protocol,
+ resp,
+ timeout,
+ autoclose,
+ autoping,
+ self._loop,
+ receive_timeout=receive_timeout,
+ heartbeat=heartbeat,
+ compress=compress,
+ client_notakeover=notakeover,
+ )
+
+ def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
+ """Add default headers and transform it to CIMultiDict"""
+ # Convert headers to MultiDict
+ result = CIMultiDict(self._default_headers)
+ if headers:
+ if not isinstance(headers, (MultiDictProxy, MultiDict)):
+ headers = CIMultiDict(headers)
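+            # The first occurrence of a key overrides the session default;
+            # later occurrences of the same key are appended, so multi-value
+            # headers survive the merge.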
+ added_names = set() # type: Set[str]
+ for key, value in headers.items():
+ if key in added_names:
+ result.add(key, value)
+ else:
+ result[key] = value
+ added_names.add(key)
+ return result
+
+ def get(
+ self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
+ ) -> "_RequestContextManager":
+ """Perform HTTP GET request."""
+ return _RequestContextManager(
+ self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)
+ )
+
+ def options(
+ self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
+ ) -> "_RequestContextManager":
+ """Perform HTTP OPTIONS request."""
+ return _RequestContextManager(
+ self._request(
+ hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
+ )
+ )
+
+ def head(
+ self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
+ ) -> "_RequestContextManager":
+ """Perform HTTP HEAD request."""
+ return _RequestContextManager(
+ self._request(
+ hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
+ )
+ )
+
+ def post(
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
+ ) -> "_RequestContextManager":
+ """Perform HTTP POST request."""
+ return _RequestContextManager(
+ self._request(hdrs.METH_POST, url, data=data, **kwargs)
+ )
+
+ def put(
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
+ ) -> "_RequestContextManager":
+ """Perform HTTP PUT request."""
+ return _RequestContextManager(
+ self._request(hdrs.METH_PUT, url, data=data, **kwargs)
+ )
+
+ def patch(
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
+ ) -> "_RequestContextManager":
+ """Perform HTTP PATCH request."""
+ return _RequestContextManager(
+ self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
+ )
+
+ def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
+ """Perform HTTP DELETE request."""
+ return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))
+
+ async def close(self) -> None:
+ """Close underlying connector.
+
+ Release all acquired resources.
+ """
+ if not self.closed:
+ if self._connector is not None and self._connector_owner:
+ await self._connector.close()
+ self._connector = None
+
+ @property
+ def closed(self) -> bool:
+ """Is client session closed.
+
+ A readonly property.
+ """
+ return self._connector is None or self._connector.closed
+
+ @property
+ def connector(self) -> Optional[BaseConnector]:
+ """Connector instance used for the session."""
+ return self._connector
+
+ @property
+ def cookie_jar(self) -> AbstractCookieJar:
+ """The session cookies."""
+ return self._cookie_jar
+
+ @property
+ def version(self) -> Tuple[int, int]:
+ """The session HTTP protocol version."""
+ return self._version
+
+ @property
+ def requote_redirect_url(self) -> bool:
+ """Do URL requoting on redirection handling."""
+ return self._requote_redirect_url
+
+ @requote_redirect_url.setter
+ def requote_redirect_url(self, val: bool) -> None:
+ """Do URL requoting on redirection handling."""
+ warnings.warn(
+ "session.requote_redirect_url modification " "is deprecated #2778",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self._requote_redirect_url = val
+
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop:
+ """Session's loop."""
+ warnings.warn(
+ "client.loop property is deprecated", DeprecationWarning, stacklevel=2
+ )
+ return self._loop
+
+ @property
+ def timeout(self) -> Union[object, ClientTimeout]:
+ """Timeout for the session."""
+ return self._timeout
+
+ @property
+ def headers(self) -> "CIMultiDict[str]":
+ """The default headers of the client session."""
+ return self._default_headers
+
+ @property
+ def skip_auto_headers(self) -> FrozenSet[istr]:
+ """Headers for which autogeneration should be skipped"""
+ return self._skip_auto_headers
+
+ @property
+ def auth(self) -> Optional[BasicAuth]:
+ """An object that represents HTTP Basic Authorization"""
+ return self._default_auth
+
+ @property
+ def json_serialize(self) -> JSONEncoder:
+ """Json serializer callable"""
+ return self._json_serialize
+
+ @property
+ def connector_owner(self) -> bool:
+ """Should connector be closed on session closing"""
+ return self._connector_owner
+
+ @property
+ def raise_for_status(
+ self,
+ ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
+ """Should `ClientResponse.raise_for_status()` be called for each response."""
+ return self._raise_for_status
+
+ @property
+ def auto_decompress(self) -> bool:
+ """Should the body response be automatically decompressed."""
+ return self._auto_decompress
+
+ @property
+ def trust_env(self) -> bool:
+ """
+        Should proxy information from the environment or netrc be trusted.
+
+ Information is from HTTP_PROXY / HTTPS_PROXY environment variables
+ or ~/.netrc file if present.
+ """
+ return self._trust_env
+
+ @property
+ def trace_configs(self) -> List[TraceConfig]:
+ """A list of TraceConfig instances used for client tracing"""
+ return self._trace_configs
+
+ def detach(self) -> None:
+ """Detach connector from session without closing the former.
+
+ Session is switched to closed state anyway.
+ """
+ self._connector = None
+
+ def __enter__(self) -> None:
+ raise TypeError("Use async with instead")
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ # __exit__ should exist in pair with __enter__ but never executed
+ pass # pragma: no cover
+
+ async def __aenter__(self) -> "ClientSession":
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ await self.close()
+
+
+class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
+
+ __slots__ = ("_coro", "_resp")
+
+ def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
+ self._coro = coro
+
+ def send(self, arg: None) -> "asyncio.Future[Any]":
+ return self._coro.send(arg)
+
+ def throw(self, arg: BaseException) -> None: # type: ignore[arg-type,override]
+ self._coro.throw(arg)
+
+ def close(self) -> None:
+ return self._coro.close()
+
+ def __await__(self) -> Generator[Any, None, _RetType]:
+ ret = self._coro.__await__()
+ return ret
+
+ def __iter__(self) -> Generator[Any, None, _RetType]:
+ return self.__await__()
+
+ async def __aenter__(self) -> _RetType:
+ self._resp = await self._coro
+ return self._resp
+
+
+class _RequestContextManager(_BaseRequestContextManager[ClientResponse]):
+ __slots__ = ()
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc: Optional[BaseException],
+ tb: Optional[TracebackType],
+ ) -> None:
+ # We're basing behavior on the exception as it can be caused by
+ # user code unrelated to the status of the connection. If you
+ # would like to close a connection you must do that
+ # explicitly. Otherwise connection error handling should kick in
+ # and close/recycle the connection as required.
+ self._resp.release()
+
+
+class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):
+ __slots__ = ()
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc: Optional[BaseException],
+ tb: Optional[TracebackType],
+ ) -> None:
+ await self._resp.close()
+
+
+class _SessionRequestContextManager:
+
+ __slots__ = ("_coro", "_resp", "_session")
+
+ def __init__(
+ self,
+ coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
+ session: ClientSession,
+ ) -> None:
+ self._coro = coro
+ self._resp = None # type: Optional[ClientResponse]
+ self._session = session
+
+ async def __aenter__(self) -> ClientResponse:
+ try:
+ self._resp = await self._coro
+ except BaseException:
+ await self._session.close()
+ raise
+ else:
+ return self._resp
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc: Optional[BaseException],
+ tb: Optional[TracebackType],
+ ) -> None:
+ assert self._resp is not None
+ self._resp.close()
+ await self._session.close()
+
+
+def request(
+ method: str,
+ url: StrOrURL,
+ *,
+ params: Optional[Mapping[str, str]] = None,
+ data: Any = None,
+ json: Any = None,
+ headers: Optional[LooseHeaders] = None,
+ skip_auto_headers: Optional[Iterable[str]] = None,
+ auth: Optional[BasicAuth] = None,
+ allow_redirects: bool = True,
+ max_redirects: int = 10,
+ compress: Optional[str] = None,
+ chunked: Optional[bool] = None,
+ expect100: bool = False,
+ raise_for_status: Optional[bool] = None,
+ read_until_eof: bool = True,
+ proxy: Optional[StrOrURL] = None,
+ proxy_auth: Optional[BasicAuth] = None,
+ timeout: Union[ClientTimeout, object] = sentinel,
+ cookies: Optional[LooseCookies] = None,
+ version: HttpVersion = http.HttpVersion11,
+ connector: Optional[BaseConnector] = None,
+ read_bufsize: Optional[int] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+) -> _SessionRequestContextManager:
+ """Constructs and sends a request.
+
+ Returns response object.
+ method - HTTP method
+ url - request url
+ params - (optional) Dictionary or bytes to be sent in the query
+ string of the new request
+ data - (optional) Dictionary, bytes, or file-like object to
+ send in the body of the request
+ json - (optional) Any json compatible python object
+ headers - (optional) Dictionary of HTTP Headers to send with
+ the request
+ cookies - (optional) Dict object to send with the request
+    auth - (optional) aiohttp.helpers.BasicAuth named tuple representing HTTP Basic Auth
+ allow_redirects - (optional) If set to False, do not follow
+ redirects
+ version - Request HTTP version.
+ compress - Set to True if request has to be compressed
+ with deflate encoding.
+ chunked - Set to chunk size for chunked transfer encoding.
+ expect100 - Expect 100-continue response from server.
+ connector - BaseConnector sub-class instance to support
+ connection pooling.
+ read_until_eof - Read response until eof if response
+ does not have Content-Length header.
+ loop - Optional event loop.
+ timeout - Optional ClientTimeout settings structure, 5min
+ total timeout by default.
+ Usage::
+ >>> import aiohttp
+ >>> resp = await aiohttp.request('GET', 'http://python.org/')
+ >>> resp
+ <ClientResponse(python.org/) [200]>
+ >>> data = await resp.read()
+ """
+ connector_owner = False
+ if connector is None:
+ connector_owner = True
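+        # One-shot helper session: force_close=True drops each connection
+        # after use instead of pooling it.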
+ connector = TCPConnector(loop=loop, force_close=True)
+
+ session = ClientSession(
+ loop=loop,
+ cookies=cookies,
+ version=version,
+ timeout=timeout,
+ connector=connector,
+ connector_owner=connector_owner,
+ )
+
+ return _SessionRequestContextManager(
+ session._request(
+ method,
+ url,
+ params=params,
+ data=data,
+ json=json,
+ headers=headers,
+ skip_auto_headers=skip_auto_headers,
+ auth=auth,
+ allow_redirects=allow_redirects,
+ max_redirects=max_redirects,
+ compress=compress,
+ chunked=chunked,
+ expect100=expect100,
+ raise_for_status=raise_for_status,
+ read_until_eof=read_until_eof,
+ proxy=proxy,
+ proxy_auth=proxy_auth,
+ read_bufsize=read_bufsize,
+ ),
+ session,
+ )
diff --git a/contrib/python/aiohttp/aiohttp/client_exceptions.py b/contrib/python/aiohttp/aiohttp/client_exceptions.py
new file mode 100644
index 0000000000..dd55321054
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/client_exceptions.py
@@ -0,0 +1,342 @@
+"""HTTP related errors."""
+
+import asyncio
+import warnings
+from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
+
+from .http_parser import RawResponseMessage
+from .typedefs import LooseHeaders
+
+try:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+except ImportError: # pragma: no cover
+ ssl = SSLContext = None # type: ignore[assignment]
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
+else:
+ RequestInfo = ClientResponse = ConnectionKey = None
+
+__all__ = (
+ "ClientError",
+ "ClientConnectionError",
+ "ClientOSError",
+ "ClientConnectorError",
+ "ClientProxyConnectionError",
+ "ClientSSLError",
+ "ClientConnectorSSLError",
+ "ClientConnectorCertificateError",
+ "ServerConnectionError",
+ "ServerTimeoutError",
+ "ServerDisconnectedError",
+ "ServerFingerprintMismatch",
+ "ClientResponseError",
+ "ClientHttpProxyError",
+ "WSServerHandshakeError",
+ "ContentTypeError",
+ "ClientPayloadError",
+ "InvalidURL",
+)
+
+
+class ClientError(Exception):
+ """Base class for client connection errors."""
+
+
+class ClientResponseError(ClientError):
+ """Connection error during reading response.
+
+ request_info: instance of RequestInfo
+ """
+
+ def __init__(
+ self,
+ request_info: RequestInfo,
+ history: Tuple[ClientResponse, ...],
+ *,
+ code: Optional[int] = None,
+ status: Optional[int] = None,
+ message: str = "",
+ headers: Optional[LooseHeaders] = None,
+ ) -> None:
+ self.request_info = request_info
+ if code is not None:
+ if status is not None:
+ raise ValueError(
+ "Both code and status arguments are provided; "
+ "code is deprecated, use status instead"
+ )
+ warnings.warn(
+ "code argument is deprecated, use status instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ if status is not None:
+ self.status = status
+ elif code is not None:
+ self.status = code
+ else:
+ self.status = 0
+ self.message = message
+ self.headers = headers
+ self.history = history
+ self.args = (request_info, history)
+
+ def __str__(self) -> str:
+ return "{}, message={!r}, url={!r}".format(
+ self.status,
+ self.message,
+ self.request_info.real_url,
+ )
+
+ def __repr__(self) -> str:
+ args = f"{self.request_info!r}, {self.history!r}"
+ if self.status != 0:
+ args += f", status={self.status!r}"
+ if self.message != "":
+ args += f", message={self.message!r}"
+ if self.headers is not None:
+ args += f", headers={self.headers!r}"
+ return f"{type(self).__name__}({args})"
+
+ @property
+ def code(self) -> int:
+ warnings.warn(
+ "code property is deprecated, use status instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.status
+
+ @code.setter
+ def code(self, value: int) -> None:
+ warnings.warn(
+ "code property is deprecated, use status instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.status = value
+
+
+class ContentTypeError(ClientResponseError):
+ """ContentType found is not valid."""
+
+
+class WSServerHandshakeError(ClientResponseError):
+ """websocket server handshake error."""
+
+
+class ClientHttpProxyError(ClientResponseError):
+ """HTTP proxy error.
+
+ Raised in :class:`aiohttp.connector.TCPConnector` if
+ proxy responds with status other than ``200 OK``
+ on ``CONNECT`` request.
+ """
+
+
+class TooManyRedirects(ClientResponseError):
+ """Client was redirected too many times."""
+
+
+class ClientConnectionError(ClientError):
+ """Base class for client socket errors."""
+
+
+class ClientOSError(ClientConnectionError, OSError):
+ """OSError error."""
+
+
+class ClientConnectorError(ClientOSError):
+ """Client connector error.
+
+ Raised in :class:`aiohttp.connector.TCPConnector` if
+ connection to proxy can not be established.
+ """
+
+ def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
+ self._conn_key = connection_key
+ self._os_error = os_error
+ super().__init__(os_error.errno, os_error.strerror)
+ self.args = (connection_key, os_error)
+
+ @property
+ def os_error(self) -> OSError:
+ return self._os_error
+
+ @property
+ def host(self) -> str:
+ return self._conn_key.host
+
+ @property
+ def port(self) -> Optional[int]:
+ return self._conn_key.port
+
+ @property
+ def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
+ return self._conn_key.ssl
+
+ def __str__(self) -> str:
+ return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
+ self, self.ssl if self.ssl is not None else "default", self.strerror
+ )
+
+    # OSError.__reduce__ does too much black magic
+ __reduce__ = BaseException.__reduce__
+
+
+class ClientProxyConnectionError(ClientConnectorError):
+ """Proxy connection error.
+
+ Raised in :class:`aiohttp.connector.TCPConnector` if
+ connection to proxy can not be established.
+ """
+
+
+class UnixClientConnectorError(ClientConnectorError):
+ """Unix connector error.
+
+ Raised in :py:class:`aiohttp.connector.UnixConnector`
+ if connection to unix socket can not be established.
+ """
+
+ def __init__(
+ self, path: str, connection_key: ConnectionKey, os_error: OSError
+ ) -> None:
+ self._path = path
+ super().__init__(connection_key, os_error)
+
+ @property
+ def path(self) -> str:
+ return self._path
+
+ def __str__(self) -> str:
+ return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
+ self, self.ssl if self.ssl is not None else "default", self.strerror
+ )
+
+
+class ServerConnectionError(ClientConnectionError):
+ """Server connection errors."""
+
+
+class ServerDisconnectedError(ServerConnectionError):
+ """Server disconnected."""
+
+ def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
+ if message is None:
+ message = "Server disconnected"
+
+ self.args = (message,)
+ self.message = message
+
+
+class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
+ """Server timeout error."""
+
+
+class ServerFingerprintMismatch(ServerConnectionError):
+ """SSL certificate does not match expected fingerprint."""
+
+ def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
+ self.expected = expected
+ self.got = got
+ self.host = host
+ self.port = port
+ self.args = (expected, got, host, port)
+
+ def __repr__(self) -> str:
+ return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
+ self.__class__.__name__, self.expected, self.got, self.host, self.port
+ )
+
+
+class ClientPayloadError(ClientError):
+ """Response payload error."""
+
+
+class InvalidURL(ClientError, ValueError):
+ """Invalid URL.
+
+    URL used for fetching is malformed, e.g. it doesn't contain a
+    host part.
+ """
+
+ # Derive from ValueError for backward compatibility
+
+ def __init__(self, url: Any) -> None:
+ # The type of url is not yarl.URL because the exception can be raised
+ # on URL(url) call
+ super().__init__(url)
+
+ @property
+ def url(self) -> Any:
+ return self.args[0]
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__} {self.url}>"
+
+
+class ClientSSLError(ClientConnectorError):
+ """Base error for ssl.*Errors."""
+
+
+if ssl is not None:
+ cert_errors = (ssl.CertificateError,)
+ cert_errors_bases = (
+ ClientSSLError,
+ ssl.CertificateError,
+ )
+
+ ssl_errors = (ssl.SSLError,)
+ ssl_error_bases = (ClientSSLError, ssl.SSLError)
+else: # pragma: no cover
+ cert_errors = tuple()
+ cert_errors_bases = (
+ ClientSSLError,
+ ValueError,
+ )
+
+ ssl_errors = tuple()
+ ssl_error_bases = (ClientSSLError,)
+
+
+class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc]
+ """Response ssl error."""
+
+
+class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc]
+ """Response certificate error."""
+
+ def __init__(
+ self, connection_key: ConnectionKey, certificate_error: Exception
+ ) -> None:
+ self._conn_key = connection_key
+ self._certificate_error = certificate_error
+ self.args = (connection_key, certificate_error)
+
+ @property
+ def certificate_error(self) -> Exception:
+ return self._certificate_error
+
+ @property
+ def host(self) -> str:
+ return self._conn_key.host
+
+ @property
+ def port(self) -> Optional[int]:
+ return self._conn_key.port
+
+ @property
+ def ssl(self) -> bool:
+ return self._conn_key.is_ssl
+
+ def __str__(self) -> str:
+ return (
+ "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
+ "[{0.certificate_error.__class__.__name__}: "
+ "{0.certificate_error.args}]".format(self)
+ )
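+
+
+# Usage sketch (illustration, not part of the original file): both SSL
+# error classes defined above derive from ClientSSLError, so one except
+# clause handles either; the URL is hypothetical.
+async def _example_handle_ssl_errors(url: str) -> None:
+    import aiohttp  # local import: sketch only
+
+    try:
+        async with aiohttp.ClientSession() as session:
+            async with session.get(url) as resp:
+                await resp.read()
+    except aiohttp.ClientSSLError as exc:  # covers both subclasses
+        print("TLS failure:", type(exc).__name__, exc)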
diff --git a/contrib/python/aiohttp/aiohttp/client_proto.py b/contrib/python/aiohttp/aiohttp/client_proto.py
new file mode 100644
index 0000000000..f36863b836
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/client_proto.py
@@ -0,0 +1,251 @@
+import asyncio
+from contextlib import suppress
+from typing import Any, Optional, Tuple
+
+from .base_protocol import BaseProtocol
+from .client_exceptions import (
+ ClientOSError,
+ ClientPayloadError,
+ ServerDisconnectedError,
+ ServerTimeoutError,
+)
+from .helpers import BaseTimerContext
+from .http import HttpResponseParser, RawResponseMessage
+from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
+
+
+class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
+ """Helper class to adapt between Protocol and StreamReader."""
+
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
+ BaseProtocol.__init__(self, loop=loop)
+ DataQueue.__init__(self, loop)
+
+ self._should_close = False
+
+ self._payload: Optional[StreamReader] = None
+ self._skip_payload = False
+ self._payload_parser = None
+
+ self._timer = None
+
+ self._tail = b""
+ self._upgraded = False
+ self._parser = None # type: Optional[HttpResponseParser]
+
+ self._read_timeout = None # type: Optional[float]
+ self._read_timeout_handle = None # type: Optional[asyncio.TimerHandle]
+
+ @property
+ def upgraded(self) -> bool:
+ return self._upgraded
+
+ @property
+ def should_close(self) -> bool:
+        if (self._payload is not None and not self._payload.is_eof()) or self._upgraded:
+ return True
+
+ return (
+ self._should_close
+ or self._upgraded
+ or self.exception() is not None
+ or self._payload_parser is not None
+ or len(self) > 0
+ or bool(self._tail)
+ )
+
+ def force_close(self) -> None:
+ self._should_close = True
+
+ def close(self) -> None:
+ transport = self.transport
+ if transport is not None:
+ transport.close()
+ self.transport = None
+ self._payload = None
+ self._drop_timeout()
+
+ def is_connected(self) -> bool:
+ return self.transport is not None and not self.transport.is_closing()
+
+ def connection_lost(self, exc: Optional[BaseException]) -> None:
+ self._drop_timeout()
+
+ if self._payload_parser is not None:
+ with suppress(Exception):
+ self._payload_parser.feed_eof()
+
+ uncompleted = None
+ if self._parser is not None:
+ try:
+ uncompleted = self._parser.feed_eof()
+ except Exception:
+ if self._payload is not None:
+ self._payload.set_exception(
+ ClientPayloadError("Response payload is not completed")
+ )
+
+ if not self.is_eof():
+ if isinstance(exc, OSError):
+ exc = ClientOSError(*exc.args)
+ if exc is None:
+ exc = ServerDisconnectedError(uncompleted)
+                # set_exception() assigns self._should_close = True as a
+                # side effect; we set it anyway below
+ self.set_exception(exc)
+
+ self._should_close = True
+ self._parser = None
+ self._payload = None
+ self._payload_parser = None
+ self._reading_paused = False
+
+ super().connection_lost(exc)
+
+ def eof_received(self) -> None:
+ # should call parser.feed_eof() most likely
+ self._drop_timeout()
+
+ def pause_reading(self) -> None:
+ super().pause_reading()
+ self._drop_timeout()
+
+ def resume_reading(self) -> None:
+ super().resume_reading()
+ self._reschedule_timeout()
+
+ def set_exception(self, exc: BaseException) -> None:
+ self._should_close = True
+ self._drop_timeout()
+ super().set_exception(exc)
+
+ def set_parser(self, parser: Any, payload: Any) -> None:
+ # TODO: actual types are:
+ # parser: WebSocketReader
+ # payload: FlowControlDataQueue
+    # but they are not generic enough
+ # Need an ABC for both types
+ self._payload = payload
+ self._payload_parser = parser
+
+ self._drop_timeout()
+
+ if self._tail:
+ data, self._tail = self._tail, b""
+ self.data_received(data)
+
+ def set_response_params(
+ self,
+ *,
+ timer: Optional[BaseTimerContext] = None,
+ skip_payload: bool = False,
+ read_until_eof: bool = False,
+ auto_decompress: bool = True,
+ read_timeout: Optional[float] = None,
+ read_bufsize: int = 2 ** 16,
+ ) -> None:
+ self._skip_payload = skip_payload
+
+ self._read_timeout = read_timeout
+ self._reschedule_timeout()
+
+ self._parser = HttpResponseParser(
+ self,
+ self._loop,
+ read_bufsize,
+ timer=timer,
+ payload_exception=ClientPayloadError,
+ response_with_body=not skip_payload,
+ read_until_eof=read_until_eof,
+ auto_decompress=auto_decompress,
+ )
+
+ if self._tail:
+ data, self._tail = self._tail, b""
+ self.data_received(data)
+
+ def _drop_timeout(self) -> None:
+ if self._read_timeout_handle is not None:
+ self._read_timeout_handle.cancel()
+ self._read_timeout_handle = None
+
+ def _reschedule_timeout(self) -> None:
+ timeout = self._read_timeout
+ if self._read_timeout_handle is not None:
+ self._read_timeout_handle.cancel()
+
+ if timeout:
+ self._read_timeout_handle = self._loop.call_later(
+ timeout, self._on_read_timeout
+ )
+ else:
+ self._read_timeout_handle = None
+
+ def _on_read_timeout(self) -> None:
+ exc = ServerTimeoutError("Timeout on reading data from socket")
+ self.set_exception(exc)
+ if self._payload is not None:
+ self._payload.set_exception(exc)
+
+ def data_received(self, data: bytes) -> None:
+ self._reschedule_timeout()
+
+ if not data:
+ return
+
+ # custom payload parser
+ if self._payload_parser is not None:
+ eof, tail = self._payload_parser.feed_data(data)
+ if eof:
+ self._payload = None
+ self._payload_parser = None
+
+ if tail:
+ self.data_received(tail)
+ return
+ else:
+ if self._upgraded or self._parser is None:
+ # i.e. websocket connection, websocket parser is not set yet
+ self._tail += data
+ else:
+ # parse http messages
+ try:
+ messages, upgraded, tail = self._parser.feed_data(data)
+ except BaseException as exc:
+ if self.transport is not None:
+ # connection.release() could be called BEFORE
+ # data_received(), the transport is already
+ # closed in this case
+ self.transport.close()
+ # should_close is True after the call
+ self.set_exception(exc)
+ return
+
+ self._upgraded = upgraded
+
+ payload: Optional[StreamReader] = None
+ for message, payload in messages:
+ if message.should_close:
+ self._should_close = True
+
+ self._payload = payload
+
+ if self._skip_payload or message.code in (204, 304):
+ self.feed_data((message, EMPTY_PAYLOAD), 0)
+ else:
+ self.feed_data((message, payload), 0)
+ if payload is not None:
+                # new message(s) were processed
+ # register timeout handler unsubscribing
+ # either on end-of-stream or immediately for
+ # EMPTY_PAYLOAD
+ if payload is not EMPTY_PAYLOAD:
+ payload.on_eof(self._drop_timeout)
+ else:
+ self._drop_timeout()
+
+ if tail:
+ if upgraded:
+ self.data_received(tail)
+ else:
+ self._tail = tail
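+
+
+# Sketch of the idle-read timeout pattern used by ResponseHandler above
+# (illustration, not part of the original file): every data_received()
+# re-arms a call_later() handle, so the callback fires only after
+# `timeout` seconds without incoming data.
+class _IdleTimeoutDemo:
+    def __init__(self, loop: asyncio.AbstractEventLoop, timeout: float) -> None:
+        self._loop = loop
+        self._timeout = timeout
+        self._handle = None  # type: Optional[asyncio.TimerHandle]
+
+    def data_received(self, data: bytes) -> None:
+        # cancel the pending timer and start a fresh countdown
+        if self._handle is not None:
+            self._handle.cancel()
+        self._handle = self._loop.call_later(self._timeout, self._on_timeout)
+
+    def _on_timeout(self) -> None:
+        print("no data received for", self._timeout, "seconds")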
diff --git a/contrib/python/aiohttp/aiohttp/client_reqrep.py b/contrib/python/aiohttp/aiohttp/client_reqrep.py
new file mode 100644
index 0000000000..343002517b
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/client_reqrep.py
@@ -0,0 +1,1133 @@
+import asyncio
+import codecs
+import functools
+import io
+import re
+import sys
+import traceback
+import warnings
+from hashlib import md5, sha1, sha256
+from http.cookies import CookieError, Morsel, SimpleCookie
+from types import MappingProxyType, TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Iterable,
+ List,
+ Mapping,
+ Optional,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+import attr
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
+from yarl import URL
+
+from . import hdrs, helpers, http, multipart, payload
+from .abc import AbstractStreamWriter
+from .client_exceptions import (
+ ClientConnectionError,
+ ClientOSError,
+ ClientResponseError,
+ ContentTypeError,
+ InvalidURL,
+ ServerFingerprintMismatch,
+)
+from .formdata import FormData
+from .helpers import (
+ PY_36,
+ BaseTimerContext,
+ BasicAuth,
+ HeadersMixin,
+ TimerNoop,
+ noop,
+ reify,
+ set_result,
+)
+from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter
+from .log import client_logger
+from .streams import StreamReader
+from .typedefs import (
+ DEFAULT_JSON_DECODER,
+ JSONDecoder,
+ LooseCookies,
+ LooseHeaders,
+ RawHeaders,
+)
+
+try:
+ import ssl
+ from ssl import SSLContext
+except ImportError: # pragma: no cover
+ ssl = None # type: ignore[assignment]
+ SSLContext = object # type: ignore[misc,assignment]
+
+try:
+ import cchardet as chardet
+except ImportError: # pragma: no cover
+ import charset_normalizer as chardet # type: ignore[no-redef]
+
+
+__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .client import ClientSession
+ from .connector import Connection
+ from .tracing import Trace
+
+
+json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ContentDisposition:
+ type: Optional[str]
+ parameters: "MappingProxyType[str, str]"
+ filename: Optional[str]
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class RequestInfo:
+ url: URL
+ method: str
+ headers: "CIMultiDictProxy[str]"
+ real_url: URL = attr.ib()
+
+ @real_url.default
+ def real_url_default(self) -> URL:
+ return self.url
+
+
+class Fingerprint:
+ HASHFUNC_BY_DIGESTLEN = {
+ 16: md5,
+ 20: sha1,
+ 32: sha256,
+ }
+
+ def __init__(self, fingerprint: bytes) -> None:
+ digestlen = len(fingerprint)
+ hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen)
+ if not hashfunc:
+ raise ValueError("fingerprint has invalid length")
+ elif hashfunc is md5 or hashfunc is sha1:
+ raise ValueError(
+ "md5 and sha1 are insecure and " "not supported. Use sha256."
+ )
+ self._hashfunc = hashfunc
+ self._fingerprint = fingerprint
+
+ @property
+ def fingerprint(self) -> bytes:
+ return self._fingerprint
+
+ def check(self, transport: asyncio.Transport) -> None:
+ if not transport.get_extra_info("sslcontext"):
+ return
+ sslobj = transport.get_extra_info("ssl_object")
+ cert = sslobj.getpeercert(binary_form=True)
+ got = self._hashfunc(cert).digest()
+ if got != self._fingerprint:
+ host, port, *_ = transport.get_extra_info("peername")
+ raise ServerFingerprintMismatch(self._fingerprint, got, host, port)
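+
+
+# Usage sketch (illustration, not part of the original file): pin a server
+# certificate by its SHA-256 digest.  The digest below is a hypothetical
+# placeholder; with ssl=Fingerprint(digest), check() raises
+# ServerFingerprintMismatch for any other certificate.
+async def _example_pinned_request() -> None:
+    import aiohttp  # local import: sketch only
+
+    digest = bytes.fromhex("9a" * 32)  # 32 bytes selects sha256 above
+    async with aiohttp.ClientSession() as session:
+        async with session.get("https://example.com", ssl=Fingerprint(digest)) as resp:
+            await resp.read()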
+
+
+if ssl is not None:
+ SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
+else: # pragma: no cover
+ SSL_ALLOWED_TYPES = type(None)
+
+
+def _merge_ssl_params(
+ ssl: Union["SSLContext", bool, Fingerprint, None],
+ verify_ssl: Optional[bool],
+ ssl_context: Optional["SSLContext"],
+ fingerprint: Optional[bytes],
+) -> Union["SSLContext", bool, Fingerprint, None]:
+ if verify_ssl is not None and not verify_ssl:
+ warnings.warn(
+ "verify_ssl is deprecated, use ssl=False instead",
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ if ssl is not None:
+ raise ValueError(
+ "verify_ssl, ssl_context, fingerprint and ssl "
+ "parameters are mutually exclusive"
+ )
+ else:
+ ssl = False
+ if ssl_context is not None:
+ warnings.warn(
+ "ssl_context is deprecated, use ssl=context instead",
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ if ssl is not None:
+ raise ValueError(
+ "verify_ssl, ssl_context, fingerprint and ssl "
+ "parameters are mutually exclusive"
+ )
+ else:
+ ssl = ssl_context
+ if fingerprint is not None:
+ warnings.warn(
+ "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead",
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ if ssl is not None:
+ raise ValueError(
+ "verify_ssl, ssl_context, fingerprint and ssl "
+ "parameters are mutually exclusive"
+ )
+ else:
+ ssl = Fingerprint(fingerprint)
+ if not isinstance(ssl, SSL_ALLOWED_TYPES):
+ raise TypeError(
+ "ssl should be SSLContext, bool, Fingerprint or None, "
+ "got {!r} instead.".format(ssl)
+ )
+ return ssl
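+
+
+# Behavior sketch (illustration): each deprecated parameter is folded into
+# the single `ssl` argument, e.g.
+#   _merge_ssl_params(None, False, None, None)   -> False (verify_ssl=False)
+#   _merge_ssl_params(None, None, ctx, None)     -> ctx   (ssl_context=ctx)
+#   _merge_ssl_params(None, None, None, digest)  -> Fingerprint(digest)
+# and ValueError is raised if `ssl` was already given alongside any of them.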
+
+
+@attr.s(auto_attribs=True, slots=True, frozen=True)
+class ConnectionKey:
+    # the key should contain information about the proxy / TLS used
+ # to prevent reusing wrong connections from a pool
+ host: str
+ port: Optional[int]
+ is_ssl: bool
+ ssl: Union[SSLContext, None, bool, Fingerprint]
+ proxy: Optional[URL]
+ proxy_auth: Optional[BasicAuth]
+ proxy_headers_hash: Optional[int] # hash(CIMultiDict)
+
+
+def _is_expected_content_type(
+ response_content_type: str, expected_content_type: str
+) -> bool:
+ if expected_content_type == "application/json":
+ return json_re.match(response_content_type) is not None
+ return expected_content_type in response_content_type
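+
+
+# Examples (illustration): json_re also accepts structured-syntax suffixes,
+# so all of these hold:
+#   _is_expected_content_type("application/json", "application/json")         -> True
+#   _is_expected_content_type("application/vnd.api+json", "application/json") -> True
+#   _is_expected_content_type("text/plain", "application/json")               -> False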
+
+
+class ClientRequest:
+ GET_METHODS = {
+ hdrs.METH_GET,
+ hdrs.METH_HEAD,
+ hdrs.METH_OPTIONS,
+ hdrs.METH_TRACE,
+ }
+ POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
+ ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})
+
+ DEFAULT_HEADERS = {
+ hdrs.ACCEPT: "*/*",
+ hdrs.ACCEPT_ENCODING: "gzip, deflate",
+ }
+
+ body = b""
+ auth = None
+ response = None
+
+ _writer = None # async task for streaming data
+ _continue = None # waiter future for '100 Continue' response
+
+    # N.B.
+    # Adding a __del__ method that closes self._writer doesn't make sense:
+    # _writer is a task created from a bound method, so it keeps a
+    # reference to self, and the finalizer will not be called until the
+    # writer has finished.
+
+ def __init__(
+ self,
+ method: str,
+ url: URL,
+ *,
+ params: Optional[Mapping[str, str]] = None,
+ headers: Optional[LooseHeaders] = None,
+ skip_auto_headers: Iterable[str] = frozenset(),
+ data: Any = None,
+ cookies: Optional[LooseCookies] = None,
+ auth: Optional[BasicAuth] = None,
+ version: http.HttpVersion = http.HttpVersion11,
+ compress: Optional[str] = None,
+ chunked: Optional[bool] = None,
+ expect100: bool = False,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ response_class: Optional[Type["ClientResponse"]] = None,
+ proxy: Optional[URL] = None,
+ proxy_auth: Optional[BasicAuth] = None,
+ timer: Optional[BaseTimerContext] = None,
+ session: Optional["ClientSession"] = None,
+ ssl: Union[SSLContext, bool, Fingerprint, None] = None,
+ proxy_headers: Optional[LooseHeaders] = None,
+ traces: Optional[List["Trace"]] = None,
+ ):
+
+ if loop is None:
+ loop = asyncio.get_event_loop()
+
+ assert isinstance(url, URL), url
+ assert isinstance(proxy, (URL, type(None))), proxy
+ # FIXME: session is None in tests only, need to fix tests
+ # assert session is not None
+ self._session = cast("ClientSession", session)
+ if params:
+ q = MultiDict(url.query)
+ url2 = url.with_query(params)
+ q.extend(url2.query)
+ url = url.with_query(q)
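+        # For example (illustrative): URL("http://x/?a=1") with
+        # params={"b": "2"} yields http://x/?a=1&b=2 -- existing query
+        # items are kept and the new ones are appended.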
+ self.original_url = url
+ self.url = url.with_fragment(None)
+ self.method = method.upper()
+ self.chunked = chunked
+ self.compress = compress
+ self.loop = loop
+ self.length = None
+ if response_class is None:
+ real_response_class = ClientResponse
+ else:
+ real_response_class = response_class
+ self.response_class = real_response_class # type: Type[ClientResponse]
+ self._timer = timer if timer is not None else TimerNoop()
+ self._ssl = ssl
+
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+ self.update_version(version)
+ self.update_host(url)
+ self.update_headers(headers)
+ self.update_auto_headers(skip_auto_headers)
+ self.update_cookies(cookies)
+ self.update_content_encoding(data)
+ self.update_auth(auth)
+ self.update_proxy(proxy, proxy_auth, proxy_headers)
+
+ self.update_body_from_data(data)
+ if data is not None or self.method not in self.GET_METHODS:
+ self.update_transfer_encoding()
+ self.update_expect_continue(expect100)
+ if traces is None:
+ traces = []
+ self._traces = traces
+
+ def is_ssl(self) -> bool:
+ return self.url.scheme in ("https", "wss")
+
+ @property
+ def ssl(self) -> Union["SSLContext", None, bool, Fingerprint]:
+ return self._ssl
+
+ @property
+ def connection_key(self) -> ConnectionKey:
+ proxy_headers = self.proxy_headers
+ if proxy_headers:
+ h = hash(
+ tuple((k, v) for k, v in proxy_headers.items())
+ ) # type: Optional[int]
+ else:
+ h = None
+ return ConnectionKey(
+ self.host,
+ self.port,
+ self.is_ssl(),
+ self.ssl,
+ self.proxy,
+ self.proxy_auth,
+ h,
+ )
+
+ @property
+ def host(self) -> str:
+ ret = self.url.raw_host
+ assert ret is not None
+ return ret
+
+ @property
+ def port(self) -> Optional[int]:
+ return self.url.port
+
+ @property
+ def request_info(self) -> RequestInfo:
+ headers = CIMultiDictProxy(self.headers) # type: CIMultiDictProxy[str]
+ return RequestInfo(self.url, self.method, headers, self.original_url)
+
+ def update_host(self, url: URL) -> None:
+ """Update destination host, port and connection type (ssl)."""
+ # get host/port
+ if not url.raw_host:
+ raise InvalidURL(url)
+
+ # basic auth info
+ username, password = url.user, url.password
+ if username:
+ self.auth = helpers.BasicAuth(username, password or "")
+
+ def update_version(self, version: Union[http.HttpVersion, str]) -> None:
+ """Convert request version to two elements tuple.
+
+    parse HTTP version: '1.1' => (1, 1)
+ """
+ if isinstance(version, str):
+ v = [part.strip() for part in version.split(".", 1)]
+ try:
+ version = http.HttpVersion(int(v[0]), int(v[1]))
+ except ValueError:
+ raise ValueError(
+ f"Can not parse http version number: {version}"
+ ) from None
+ self.version = version
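+        # e.g. update_version("1.1") stores HttpVersion(major=1, minor=1),
+        # while update_version("1.x") raises ValueError (illustration).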
+
+ def update_headers(self, headers: Optional[LooseHeaders]) -> None:
+ """Update request headers."""
+ self.headers = CIMultiDict() # type: CIMultiDict[str]
+
+ # add host
+ netloc = cast(str, self.url.raw_host)
+ if helpers.is_ipv6_address(netloc):
+ netloc = f"[{netloc}]"
+ if self.url.port is not None and not self.url.is_default_port():
+ netloc += ":" + str(self.url.port)
+ self.headers[hdrs.HOST] = netloc
+
+ if headers:
+ if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
+ headers = headers.items() # type: ignore[assignment]
+
+ for key, value in headers: # type: ignore[misc]
+ # A special case for Host header
+ if key.lower() == "host":
+ self.headers[key] = value
+ else:
+ self.headers.add(key, value)
+
+ def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None:
+ self.skip_auto_headers = CIMultiDict(
+ (hdr, None) for hdr in sorted(skip_auto_headers)
+ )
+ used_headers = self.headers.copy()
+ used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type]
+
+ for hdr, val in self.DEFAULT_HEADERS.items():
+ if hdr not in used_headers:
+ self.headers.add(hdr, val)
+
+ if hdrs.USER_AGENT not in used_headers:
+ self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
+
+ def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
+ """Update request cookies header."""
+ if not cookies:
+ return
+
+ c = SimpleCookie() # type: SimpleCookie[str]
+ if hdrs.COOKIE in self.headers:
+ c.load(self.headers.get(hdrs.COOKIE, ""))
+ del self.headers[hdrs.COOKIE]
+
+ if isinstance(cookies, Mapping):
+ iter_cookies = cookies.items()
+ else:
+ iter_cookies = cookies # type: ignore[assignment]
+ for name, value in iter_cookies:
+ if isinstance(value, Morsel):
+ # Preserve coded_value
+ mrsl_val = value.get(value.key, Morsel())
+ mrsl_val.set(value.key, value.value, value.coded_value)
+ c[name] = mrsl_val
+ else:
+ c[name] = value # type: ignore[assignment]
+
+ self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
+
+ def update_content_encoding(self, data: Any) -> None:
+ """Set request content encoding."""
+ if data is None:
+ return
+
+ enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower()
+ if enc:
+ if self.compress:
+ raise ValueError(
+ "compress can not be set " "if Content-Encoding header is set"
+ )
+ elif self.compress:
+ if not isinstance(self.compress, str):
+ self.compress = "deflate"
+ self.headers[hdrs.CONTENT_ENCODING] = self.compress
+ self.chunked = True # enable chunked, no need to deal with length
+
+ def update_transfer_encoding(self) -> None:
+ """Analyze transfer-encoding header."""
+ te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()
+
+ if "chunked" in te:
+ if self.chunked:
+ raise ValueError(
+ "chunked can not be set "
+ 'if "Transfer-Encoding: chunked" header is set'
+ )
+
+ elif self.chunked:
+ if hdrs.CONTENT_LENGTH in self.headers:
+ raise ValueError(
+ "chunked can not be set " "if Content-Length header is set"
+ )
+
+ self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
+ else:
+ if hdrs.CONTENT_LENGTH not in self.headers:
+ self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
+
+ def update_auth(self, auth: Optional[BasicAuth]) -> None:
+ """Set basic auth."""
+ if auth is None:
+ auth = self.auth
+ if auth is None:
+ return
+
+ if not isinstance(auth, helpers.BasicAuth):
+ raise TypeError("BasicAuth() tuple is required instead")
+
+ self.headers[hdrs.AUTHORIZATION] = auth.encode()
+
+ def update_body_from_data(self, body: Any) -> None:
+ if body is None:
+ return
+
+ # FormData
+ if isinstance(body, FormData):
+ body = body()
+
+ try:
+ body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
+ except payload.LookupError:
+ body = FormData(body)()
+
+ self.body = body
+
+ # enable chunked encoding if needed
+ if not self.chunked:
+ if hdrs.CONTENT_LENGTH not in self.headers:
+ size = body.size
+ if size is None:
+ self.chunked = True
+                else:
+                    # Content-Length absence was already checked above
+                    self.headers[hdrs.CONTENT_LENGTH] = str(size)
+
+ # copy payload headers
+ assert body.headers
+ for (key, value) in body.headers.items():
+ if key in self.headers:
+ continue
+ if key in self.skip_auto_headers:
+ continue
+ self.headers[key] = value
+
+ def update_expect_continue(self, expect: bool = False) -> None:
+ if expect:
+ self.headers[hdrs.EXPECT] = "100-continue"
+ elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue":
+ expect = True
+
+ if expect:
+ self._continue = self.loop.create_future()
+
+ def update_proxy(
+ self,
+ proxy: Optional[URL],
+ proxy_auth: Optional[BasicAuth],
+ proxy_headers: Optional[LooseHeaders],
+ ) -> None:
+ if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
+ raise ValueError("proxy_auth must be None or BasicAuth() tuple")
+ self.proxy = proxy
+ self.proxy_auth = proxy_auth
+ self.proxy_headers = proxy_headers
+
+ def keep_alive(self) -> bool:
+ if self.version < HttpVersion10:
+ # keep alive not supported at all
+ return False
+ if self.version == HttpVersion10:
+ if self.headers.get(hdrs.CONNECTION) == "keep-alive":
+ return True
+            else:  # no headers means we close for HTTP/1.0
+ return False
+ elif self.headers.get(hdrs.CONNECTION) == "close":
+ return False
+
+ return True
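+        # Summary (illustration): HTTP/0.9 -> never keep alive;
+        # HTTP/1.0 -> only with "Connection: keep-alive";
+        # HTTP/1.1 -> keep alive unless "Connection: close".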
+
+ async def write_bytes(
+ self, writer: AbstractStreamWriter, conn: "Connection"
+ ) -> None:
+ """Support coroutines that yields bytes objects."""
+ # 100 response
+ if self._continue is not None:
+ await writer.drain()
+ await self._continue
+
+ protocol = conn.protocol
+ assert protocol is not None
+ try:
+ if isinstance(self.body, payload.Payload):
+ await self.body.write(writer)
+ else:
+ if isinstance(self.body, (bytes, bytearray)):
+ self.body = (self.body,) # type: ignore[assignment]
+
+ for chunk in self.body:
+ await writer.write(chunk) # type: ignore[arg-type]
+
+ await writer.write_eof()
+ except OSError as exc:
+ new_exc = ClientOSError(
+ exc.errno, "Can not write request body for %s" % self.url
+ )
+ new_exc.__context__ = exc
+ new_exc.__cause__ = exc
+ protocol.set_exception(new_exc)
+ except asyncio.CancelledError as exc:
+ if not conn.closed:
+ protocol.set_exception(exc)
+ except Exception as exc:
+ protocol.set_exception(exc)
+ finally:
+ self._writer = None
+
+ async def send(self, conn: "Connection") -> "ClientResponse":
+ # Specify request target:
+ # - CONNECT request must send authority form URI
+ # - not CONNECT proxy must send absolute form URI
+ # - most common is origin form URI
+ if self.method == hdrs.METH_CONNECT:
+ connect_host = self.url.raw_host
+ assert connect_host is not None
+ if helpers.is_ipv6_address(connect_host):
+ connect_host = f"[{connect_host}]"
+ path = f"{connect_host}:{self.url.port}"
+ elif self.proxy and not self.is_ssl():
+ path = str(self.url)
+ else:
+ path = self.url.raw_path
+ if self.url.raw_query_string:
+ path += "?" + self.url.raw_query_string
+
+ protocol = conn.protocol
+ assert protocol is not None
+ writer = StreamWriter(
+ protocol,
+ self.loop,
+ on_chunk_sent=functools.partial(
+ self._on_chunk_request_sent, self.method, self.url
+ ),
+ on_headers_sent=functools.partial(
+ self._on_headers_request_sent, self.method, self.url
+ ),
+ )
+
+ if self.compress:
+ writer.enable_compression(self.compress)
+
+ if self.chunked is not None:
+ writer.enable_chunking()
+
+ # set default content-type
+ if (
+ self.method in self.POST_METHODS
+ and hdrs.CONTENT_TYPE not in self.skip_auto_headers
+ and hdrs.CONTENT_TYPE not in self.headers
+ ):
+ self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"
+
+ # set the connection header
+ connection = self.headers.get(hdrs.CONNECTION)
+ if not connection:
+ if self.keep_alive():
+ if self.version == HttpVersion10:
+ connection = "keep-alive"
+ else:
+ if self.version == HttpVersion11:
+ connection = "close"
+
+ if connection is not None:
+ self.headers[hdrs.CONNECTION] = connection
+
+ # status + headers
+ status_line = "{0} {1} HTTP/{2[0]}.{2[1]}".format(
+ self.method, path, self.version
+ )
+ await writer.write_headers(status_line, self.headers)
+
+ self._writer = self.loop.create_task(self.write_bytes(writer, conn))
+
+ response_class = self.response_class
+ assert response_class is not None
+ self.response = response_class(
+ self.method,
+ self.original_url,
+ writer=self._writer,
+ continue100=self._continue,
+ timer=self._timer,
+ request_info=self.request_info,
+ traces=self._traces,
+ loop=self.loop,
+ session=self._session,
+ )
+ return self.response
+
+ async def close(self) -> None:
+ if self._writer is not None:
+ try:
+ await self._writer
+ finally:
+ self._writer = None
+
+ def terminate(self) -> None:
+ if self._writer is not None:
+ if not self.loop.is_closed():
+ self._writer.cancel()
+ self._writer = None
+
+ async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
+ for trace in self._traces:
+ await trace.send_request_chunk_sent(method, url, chunk)
+
+ async def _on_headers_request_sent(
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
+ ) -> None:
+ for trace in self._traces:
+ await trace.send_request_headers(method, url, headers)
+
+
+class ClientResponse(HeadersMixin):
+
+ # from the Status-Line of the response
+ version = None # HTTP-Version
+ status = None # type: int # Status-Code
+ reason = None # Reason-Phrase
+
+ content = None # type: StreamReader # Payload stream
+ _headers = None # type: CIMultiDictProxy[str] # Response headers
+ _raw_headers = None # type: RawHeaders # Response raw headers
+
+ _connection = None # current connection
+ _source_traceback = None
+    # set up by ClientRequest after ClientResponse object creation;
+    # the post-init stage allows keeping the ctor signature unchanged
+    _closed = True  # to allow __del__ for a not-properly-initialized response
+ _released = False
+
+ def __init__(
+ self,
+ method: str,
+ url: URL,
+ *,
+ writer: "asyncio.Task[None]",
+ continue100: Optional["asyncio.Future[bool]"],
+ timer: BaseTimerContext,
+ request_info: RequestInfo,
+ traces: List["Trace"],
+ loop: asyncio.AbstractEventLoop,
+ session: "ClientSession",
+ ) -> None:
+ assert isinstance(url, URL)
+
+ self.method = method
+ self.cookies = SimpleCookie() # type: SimpleCookie[str]
+
+ self._real_url = url
+ self._url = url.with_fragment(None)
+ self._body = None # type: Any
+ self._writer = writer # type: Optional[asyncio.Task[None]]
+ self._continue = continue100 # None by default
+ self._closed = True
+ self._history = () # type: Tuple[ClientResponse, ...]
+ self._request_info = request_info
+ self._timer = timer if timer is not None else TimerNoop()
+ self._cache = {} # type: Dict[str, Any]
+ self._traces = traces
+ self._loop = loop
+ # store a reference to session #1985
+ self._session = session # type: Optional[ClientSession]
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+ @reify
+ def url(self) -> URL:
+ return self._url
+
+ @reify
+ def url_obj(self) -> URL:
+ warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
+ return self._url
+
+ @reify
+ def real_url(self) -> URL:
+ return self._real_url
+
+ @reify
+ def host(self) -> str:
+ assert self._url.host is not None
+ return self._url.host
+
+ @reify
+ def headers(self) -> "CIMultiDictProxy[str]":
+ return self._headers
+
+ @reify
+ def raw_headers(self) -> RawHeaders:
+ return self._raw_headers
+
+ @reify
+ def request_info(self) -> RequestInfo:
+ return self._request_info
+
+ @reify
+ def content_disposition(self) -> Optional[ContentDisposition]:
+ raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
+ if raw is None:
+ return None
+ disposition_type, params_dct = multipart.parse_content_disposition(raw)
+ params = MappingProxyType(params_dct)
+ filename = multipart.content_disposition_filename(params)
+ return ContentDisposition(disposition_type, params, filename)
+
+ def __del__(self, _warnings: Any = warnings) -> None:
+ if self._closed:
+ return
+
+ if self._connection is not None:
+ self._connection.release()
+ self._cleanup_writer()
+
+ if self._loop.get_debug():
+ if PY_36:
+ kwargs = {"source": self}
+ else:
+ kwargs = {}
+ _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
+ context = {"client_response": self, "message": "Unclosed response"}
+ if self._source_traceback:
+ context["source_traceback"] = self._source_traceback
+ self._loop.call_exception_handler(context)
+
+ def __repr__(self) -> str:
+ out = io.StringIO()
+ ascii_encodable_url = str(self.url)
+ if self.reason:
+ ascii_encodable_reason = self.reason.encode(
+ "ascii", "backslashreplace"
+ ).decode("ascii")
+ else:
+ ascii_encodable_reason = self.reason
+ print(
+ "<ClientResponse({}) [{} {}]>".format(
+ ascii_encodable_url, self.status, ascii_encodable_reason
+ ),
+ file=out,
+ )
+ print(self.headers, file=out)
+ return out.getvalue()
+
+ @property
+ def connection(self) -> Optional["Connection"]:
+ return self._connection
+
+ @reify
+ def history(self) -> Tuple["ClientResponse", ...]:
+ """A sequence of of responses, if redirects occurred."""
+ return self._history
+
+ @reify
+ def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
+ links_str = ", ".join(self.headers.getall("link", []))
+
+ if not links_str:
+ return MultiDictProxy(MultiDict())
+
+ links = MultiDict() # type: MultiDict[MultiDictProxy[Union[str, URL]]]
+
+ for val in re.split(r",(?=\s*<)", links_str):
+ match = re.match(r"\s*<(.*)>(.*)", val)
+ if match is None: # pragma: no cover
+ # the check exists to suppress mypy error
+ continue
+ url, params_str = match.groups()
+ params = params_str.split(";")[1:]
+
+ link = MultiDict() # type: MultiDict[Union[str, URL]]
+
+ for param in params:
+ match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
+ if match is None: # pragma: no cover
+ # the check exists to suppress mypy error
+ continue
+ key, _, value, _ = match.groups()
+
+ link.add(key, value)
+
+ key = link.get("rel", url) # type: ignore[assignment]
+
+ link.add("url", self.url.join(URL(url)))
+
+ links.add(key, MultiDictProxy(link))
+
+ return MultiDictProxy(links)
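+        # Example (illustration): a response carrying
+        #   Link: <https://example.com/page/2>; rel="next"
+        # yields links["next"]["url"] == URL("https://example.com/page/2").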
+
+ async def start(self, connection: "Connection") -> "ClientResponse":
+ """Start response processing."""
+ self._closed = False
+ self._protocol = connection.protocol
+ self._connection = connection
+
+ with self._timer:
+ while True:
+ # read response
+ try:
+ protocol = self._protocol
+ message, payload = await protocol.read() # type: ignore[union-attr]
+ except http.HttpProcessingError as exc:
+ raise ClientResponseError(
+ self.request_info,
+ self.history,
+ status=exc.code,
+ message=exc.message,
+ headers=exc.headers,
+ ) from exc
+
+ if message.code < 100 or message.code > 199 or message.code == 101:
+ break
+
+ if self._continue is not None:
+ set_result(self._continue, True)
+ self._continue = None
+
+ # payload eof handler
+ payload.on_eof(self._response_eof)
+
+ # response status
+ self.version = message.version
+ self.status = message.code
+ self.reason = message.reason
+
+ # headers
+ self._headers = message.headers # type is CIMultiDictProxy
+ self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes]
+
+ # payload
+ self.content = payload
+
+ # cookies
+ for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
+ try:
+ self.cookies.load(hdr)
+ except CookieError as exc:
+ client_logger.warning("Can not load response cookies: %s", exc)
+ return self
+
+ def _response_eof(self) -> None:
+ if self._closed:
+ return
+
+ if self._connection is not None:
+ # websocket, protocol could be None because
+ # connection could be detached
+ if (
+ self._connection.protocol is not None
+ and self._connection.protocol.upgraded
+ ):
+ return
+
+ self._connection.release()
+ self._connection = None
+
+ self._closed = True
+ self._cleanup_writer()
+
+ @property
+ def closed(self) -> bool:
+ return self._closed
+
+ def close(self) -> None:
+ if not self._released:
+ self._notify_content()
+ if self._closed:
+ return
+
+ self._closed = True
+ if self._loop is None or self._loop.is_closed():
+ return
+
+ if self._connection is not None:
+ self._connection.close()
+ self._connection = None
+ self._cleanup_writer()
+
+ def release(self) -> Any:
+ if not self._released:
+ self._notify_content()
+ if self._closed:
+ return noop()
+
+ self._closed = True
+ if self._connection is not None:
+ self._connection.release()
+ self._connection = None
+
+ self._cleanup_writer()
+ return noop()
+
+ @property
+ def ok(self) -> bool:
+ """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.
+
+ This is **not** a check for ``200 OK`` but a check that the response
+ status is under 400.
+ """
+        return self.status < 400
+
+ def raise_for_status(self) -> None:
+ if not self.ok:
+ # reason should always be not None for a started response
+ assert self.reason is not None
+ self.release()
+ raise ClientResponseError(
+ self.request_info,
+ self.history,
+ status=self.status,
+ message=self.reason,
+ headers=self.headers,
+ )
+
+ def _cleanup_writer(self) -> None:
+ if self._writer is not None:
+ self._writer.cancel()
+ self._writer = None
+ self._session = None
+
+ def _notify_content(self) -> None:
+ content = self.content
+ if content and content.exception() is None:
+ content.set_exception(ClientConnectionError("Connection closed"))
+ self._released = True
+
+ async def wait_for_close(self) -> None:
+ if self._writer is not None:
+ try:
+ await self._writer
+ finally:
+ self._writer = None
+ self.release()
+
+ async def read(self) -> bytes:
+ """Read response payload."""
+ if self._body is None:
+ try:
+ self._body = await self.content.read()
+ for trace in self._traces:
+ await trace.send_response_chunk_received(
+ self.method, self.url, self._body
+ )
+ except BaseException:
+ self.close()
+ raise
+ elif self._released:
+ raise ClientConnectionError("Connection closed")
+
+ return self._body # type: ignore[no-any-return]
+
+ def get_encoding(self) -> str:
+ ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
+ mimetype = helpers.parse_mimetype(ctype)
+
+ encoding = mimetype.parameters.get("charset")
+ if encoding:
+ try:
+ codecs.lookup(encoding)
+ except LookupError:
+ encoding = None
+ if not encoding:
+ if mimetype.type == "application" and (
+ mimetype.subtype == "json" or mimetype.subtype == "rdap"
+ ):
+ # RFC 7159 states that the default encoding is UTF-8.
+ # RFC 7483 defines application/rdap+json
+ encoding = "utf-8"
+ elif self._body is None:
+ raise RuntimeError(
+ "Cannot guess the encoding of " "a not yet read body"
+ )
+ else:
+ encoding = chardet.detect(self._body)["encoding"]
+ if not encoding:
+ encoding = "utf-8"
+
+ return encoding
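+        # Illustration: "text/html; charset=iso-8859-1" -> "iso-8859-1";
+        # "application/json" without a charset -> "utf-8" (RFC 7159);
+        # otherwise the encoding is detected from the body already read.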
+
+ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
+ """Read response payload and decode."""
+ if self._body is None:
+ await self.read()
+
+ if encoding is None:
+ encoding = self.get_encoding()
+
+ return self._body.decode( # type: ignore[no-any-return,union-attr]
+ encoding, errors=errors
+ )
+
+ async def json(
+ self,
+ *,
+ encoding: Optional[str] = None,
+ loads: JSONDecoder = DEFAULT_JSON_DECODER,
+ content_type: Optional[str] = "application/json",
+ ) -> Any:
+ """Read and decodes JSON response."""
+ if self._body is None:
+ await self.read()
+
+ if content_type:
+ ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
+ if not _is_expected_content_type(ctype, content_type):
+ raise ContentTypeError(
+ self.request_info,
+ self.history,
+                    message=(
+                        "Attempt to decode JSON with unexpected mimetype: %s"
+                        % ctype
+                    ),
+ headers=self.headers,
+ )
+
+ stripped = self._body.strip() # type: ignore[union-attr]
+ if not stripped:
+ return None
+
+ if encoding is None:
+ encoding = self.get_encoding()
+
+ return loads(stripped.decode(encoding))
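+        # Usage sketch (illustration): await resp.json() decodes the body
+        # and raises ContentTypeError for, e.g., a text/html response;
+        # pass content_type=None to skip the mimetype check entirely.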
+
+ async def __aenter__(self) -> "ClientResponse":
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ # similar to _RequestContextManager, we do not need to check
+ # for exceptions, response object can close connection
+ # if state is broken
+ self.release()
diff --git a/contrib/python/aiohttp/aiohttp/client_ws.py b/contrib/python/aiohttp/aiohttp/client_ws.py
new file mode 100644
index 0000000000..7c8121f659
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/client_ws.py
@@ -0,0 +1,300 @@
+"""WebSocket client for asyncio."""
+
+import asyncio
+from typing import Any, Optional, cast
+
+import async_timeout
+
+from .client_exceptions import ClientError
+from .client_reqrep import ClientResponse
+from .helpers import call_later, set_result
+from .http import (
+ WS_CLOSED_MESSAGE,
+ WS_CLOSING_MESSAGE,
+ WebSocketError,
+ WSCloseCode,
+ WSMessage,
+ WSMsgType,
+)
+from .http_websocket import WebSocketWriter # WSMessage
+from .streams import EofStream, FlowControlDataQueue
+from .typedefs import (
+ DEFAULT_JSON_DECODER,
+ DEFAULT_JSON_ENCODER,
+ JSONDecoder,
+ JSONEncoder,
+)
+
+
+class ClientWebSocketResponse:
+ def __init__(
+ self,
+ reader: "FlowControlDataQueue[WSMessage]",
+ writer: WebSocketWriter,
+ protocol: Optional[str],
+ response: ClientResponse,
+ timeout: float,
+ autoclose: bool,
+ autoping: bool,
+ loop: asyncio.AbstractEventLoop,
+ *,
+ receive_timeout: Optional[float] = None,
+ heartbeat: Optional[float] = None,
+ compress: int = 0,
+ client_notakeover: bool = False,
+ ) -> None:
+ self._response = response
+ self._conn = response.connection
+
+ self._writer = writer
+ self._reader = reader
+ self._protocol = protocol
+ self._closed = False
+ self._closing = False
+ self._close_code = None # type: Optional[int]
+ self._timeout = timeout
+ self._receive_timeout = receive_timeout
+ self._autoclose = autoclose
+ self._autoping = autoping
+ self._heartbeat = heartbeat
+ self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
+ if heartbeat is not None:
+ self._pong_heartbeat = heartbeat / 2.0
+ self._pong_response_cb: Optional[asyncio.TimerHandle] = None
+ self._loop = loop
+ self._waiting = None # type: Optional[asyncio.Future[bool]]
+ self._exception = None # type: Optional[BaseException]
+ self._compress = compress
+ self._client_notakeover = client_notakeover
+
+ self._reset_heartbeat()
+
+ def _cancel_heartbeat(self) -> None:
+ if self._pong_response_cb is not None:
+ self._pong_response_cb.cancel()
+ self._pong_response_cb = None
+
+ if self._heartbeat_cb is not None:
+ self._heartbeat_cb.cancel()
+ self._heartbeat_cb = None
+
+ def _reset_heartbeat(self) -> None:
+ self._cancel_heartbeat()
+
+ if self._heartbeat is not None:
+ self._heartbeat_cb = call_later(
+ self._send_heartbeat, self._heartbeat, self._loop
+ )
+
+ def _send_heartbeat(self) -> None:
+ if self._heartbeat is not None and not self._closed:
+            # Firing and forgetting a task is not perfect but may be OK
+            # for sending a ping. Otherwise we would need a long-living
+            # heartbeat task in the class.
+ self._loop.create_task(self._writer.ping())
+
+ if self._pong_response_cb is not None:
+ self._pong_response_cb.cancel()
+ self._pong_response_cb = call_later(
+ self._pong_not_received, self._pong_heartbeat, self._loop
+ )
+
+ def _pong_not_received(self) -> None:
+ if not self._closed:
+ self._closed = True
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._exception = asyncio.TimeoutError()
+ self._response.close()
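+
+    # Heartbeat sketch (illustration): ws_connect(url, heartbeat=30.0)
+    # re-arms _send_heartbeat() every 30s; if nothing resets the timer
+    # within 15s (half the interval), _pong_not_received() marks the
+    # connection closed with ABNORMAL_CLOSURE and sets asyncio.TimeoutError.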
+
+ @property
+ def closed(self) -> bool:
+ return self._closed
+
+ @property
+ def close_code(self) -> Optional[int]:
+ return self._close_code
+
+ @property
+ def protocol(self) -> Optional[str]:
+ return self._protocol
+
+ @property
+ def compress(self) -> int:
+ return self._compress
+
+ @property
+ def client_notakeover(self) -> bool:
+ return self._client_notakeover
+
+ def get_extra_info(self, name: str, default: Any = None) -> Any:
+ """extra info from connection transport"""
+ conn = self._response.connection
+ if conn is None:
+ return default
+ transport = conn.transport
+ if transport is None:
+ return default
+ return transport.get_extra_info(name, default)
+
+ def exception(self) -> Optional[BaseException]:
+ return self._exception
+
+ async def ping(self, message: bytes = b"") -> None:
+ await self._writer.ping(message)
+
+ async def pong(self, message: bytes = b"") -> None:
+ await self._writer.pong(message)
+
+ async def send_str(self, data: str, compress: Optional[int] = None) -> None:
+ if not isinstance(data, str):
+ raise TypeError("data argument must be str (%r)" % type(data))
+ await self._writer.send(data, binary=False, compress=compress)
+
+ async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
+ if not isinstance(data, (bytes, bytearray, memoryview)):
+ raise TypeError("data argument must be byte-ish (%r)" % type(data))
+ await self._writer.send(data, binary=True, compress=compress)
+
+ async def send_json(
+ self,
+ data: Any,
+ compress: Optional[int] = None,
+ *,
+ dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
+ ) -> None:
+ await self.send_str(dumps(data), compress=compress)
+
+ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
+ # we need to break `receive()` cycle first,
+ # `close()` may be called from different task
+ if self._waiting is not None and not self._closed:
+ self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
+ await self._waiting
+
+ if not self._closed:
+ self._cancel_heartbeat()
+ self._closed = True
+ try:
+ await self._writer.close(code, message)
+ except asyncio.CancelledError:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._response.close()
+ raise
+ except Exception as exc:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._exception = exc
+ self._response.close()
+ return True
+
+ if self._closing:
+ self._response.close()
+ return True
+
+ while True:
+ try:
+ async with async_timeout.timeout(self._timeout):
+ msg = await self._reader.read()
+ except asyncio.CancelledError:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._response.close()
+ raise
+ except Exception as exc:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._exception = exc
+ self._response.close()
+ return True
+
+ if msg.type == WSMsgType.CLOSE:
+ self._close_code = msg.data
+ self._response.close()
+ return True
+ else:
+ return False
+
+ async def receive(self, timeout: Optional[float] = None) -> WSMessage:
+ while True:
+ if self._waiting is not None:
+ raise RuntimeError("Concurrent call to receive() is not allowed")
+
+ if self._closed:
+ return WS_CLOSED_MESSAGE
+ elif self._closing:
+ await self.close()
+ return WS_CLOSED_MESSAGE
+
+ try:
+ self._waiting = self._loop.create_future()
+ try:
+ async with async_timeout.timeout(timeout or self._receive_timeout):
+ msg = await self._reader.read()
+ self._reset_heartbeat()
+ finally:
+ waiter = self._waiting
+ self._waiting = None
+ set_result(waiter, True)
+ except (asyncio.CancelledError, asyncio.TimeoutError):
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ raise
+ except EofStream:
+ self._close_code = WSCloseCode.OK
+ await self.close()
+ return WSMessage(WSMsgType.CLOSED, None, None)
+ except ClientError:
+ self._closed = True
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ return WS_CLOSED_MESSAGE
+ except WebSocketError as exc:
+ self._close_code = exc.code
+ await self.close(code=exc.code)
+ return WSMessage(WSMsgType.ERROR, exc, None)
+ except Exception as exc:
+ self._exception = exc
+ self._closing = True
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ await self.close()
+ return WSMessage(WSMsgType.ERROR, exc, None)
+
+ if msg.type == WSMsgType.CLOSE:
+ self._closing = True
+ self._close_code = msg.data
+ if not self._closed and self._autoclose:
+ await self.close()
+ elif msg.type == WSMsgType.CLOSING:
+ self._closing = True
+ elif msg.type == WSMsgType.PING and self._autoping:
+ await self.pong(msg.data)
+ continue
+ elif msg.type == WSMsgType.PONG and self._autoping:
+ continue
+
+ return msg
+
+ async def receive_str(self, *, timeout: Optional[float] = None) -> str:
+ msg = await self.receive(timeout)
+ if msg.type != WSMsgType.TEXT:
+ raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
+ return cast(str, msg.data)
+
+ async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
+ msg = await self.receive(timeout)
+ if msg.type != WSMsgType.BINARY:
+ raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
+ return cast(bytes, msg.data)
+
+ async def receive_json(
+ self,
+ *,
+ loads: JSONDecoder = DEFAULT_JSON_DECODER,
+ timeout: Optional[float] = None,
+ ) -> Any:
+ data = await self.receive_str(timeout=timeout)
+ return loads(data)
+
+ def __aiter__(self) -> "ClientWebSocketResponse":
+ return self
+
+ async def __anext__(self) -> WSMessage:
+ msg = await self.receive()
+ if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
+ raise StopAsyncIteration
+ return msg
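+
+
+# Usage sketch (illustration, not part of the original file): the async
+# iteration protocol above makes the canonical receive loop read naturally;
+# the URL is hypothetical.
+async def _example_ws_echo(url: str) -> None:
+    import aiohttp  # local import: sketch only
+
+    async with aiohttp.ClientSession() as session:
+        async with session.ws_connect(url) as ws:
+            await ws.send_str("hello")
+            async for msg in ws:  # stops on CLOSE/CLOSING/CLOSED
+                if msg.type == aiohttp.WSMsgType.TEXT:
+                    print(msg.data)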
diff --git a/contrib/python/aiohttp/aiohttp/connector.py b/contrib/python/aiohttp/aiohttp/connector.py
new file mode 100644
index 0000000000..4c9a951d6e
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/connector.py
@@ -0,0 +1,1451 @@
+import asyncio
+import functools
+import random
+import sys
+import traceback
+import warnings
+from collections import defaultdict, deque
+from contextlib import suppress
+from http.cookies import SimpleCookie
+from itertools import cycle, islice
+from time import monotonic
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ DefaultDict,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+import attr
+
+from . import hdrs, helpers
+from .abc import AbstractResolver
+from .client_exceptions import (
+ ClientConnectionError,
+ ClientConnectorCertificateError,
+ ClientConnectorError,
+ ClientConnectorSSLError,
+ ClientHttpProxyError,
+ ClientProxyConnectionError,
+ ServerFingerprintMismatch,
+ UnixClientConnectorError,
+ cert_errors,
+ ssl_errors,
+)
+from .client_proto import ResponseHandler
+from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
+from .helpers import (
+ PY_36,
+ ceil_timeout,
+ get_running_loop,
+ is_ip_address,
+ noop,
+ sentinel,
+)
+from .http import RESPONSES
+from .locks import EventResultOrError
+from .resolver import DefaultResolver
+
+try:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+except ImportError: # pragma: no cover
+ ssl = None # type: ignore[assignment]
+ SSLContext = object # type: ignore[misc,assignment]
+
+
+__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .client import ClientTimeout
+ from .client_reqrep import ConnectionKey
+ from .tracing import Trace
+
+
+class _DeprecationWaiter:
+ __slots__ = ("_awaitable", "_awaited")
+
+ def __init__(self, awaitable: Awaitable[Any]) -> None:
+ self._awaitable = awaitable
+ self._awaited = False
+
+ def __await__(self) -> Any:
+ self._awaited = True
+ return self._awaitable.__await__()
+
+ def __del__(self) -> None:
+ if not self._awaited:
+ warnings.warn(
+ "Connector.close() is a coroutine, "
+ "please use await connector.close()",
+ DeprecationWarning,
+ )
+
+
+class Connection:
+
+ _source_traceback = None
+ _transport = None
+
+ def __init__(
+ self,
+ connector: "BaseConnector",
+ key: "ConnectionKey",
+ protocol: ResponseHandler,
+ loop: asyncio.AbstractEventLoop,
+ ) -> None:
+ self._key = key
+ self._connector = connector
+ self._loop = loop
+ self._protocol = protocol # type: Optional[ResponseHandler]
+ self._callbacks = [] # type: List[Callable[[], None]]
+
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+ def __repr__(self) -> str:
+ return f"Connection<{self._key}>"
+
+ def __del__(self, _warnings: Any = warnings) -> None:
+ if self._protocol is not None:
+ if PY_36:
+ kwargs = {"source": self}
+ else:
+ kwargs = {}
+ _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
+ if self._loop.is_closed():
+ return
+
+ self._connector._release(self._key, self._protocol, should_close=True)
+
+ context = {"client_connection": self, "message": "Unclosed connection"}
+ if self._source_traceback is not None:
+ context["source_traceback"] = self._source_traceback
+ self._loop.call_exception_handler(context)
+
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop:
+ warnings.warn(
+ "connector.loop property is deprecated", DeprecationWarning, stacklevel=2
+ )
+ return self._loop
+
+ @property
+ def transport(self) -> Optional[asyncio.Transport]:
+ if self._protocol is None:
+ return None
+ return self._protocol.transport
+
+ @property
+ def protocol(self) -> Optional[ResponseHandler]:
+ return self._protocol
+
+ def add_callback(self, callback: Callable[[], None]) -> None:
+ if callback is not None:
+ self._callbacks.append(callback)
+
+ def _notify_release(self) -> None:
+ callbacks, self._callbacks = self._callbacks[:], []
+
+ for cb in callbacks:
+ with suppress(Exception):
+ cb()
+
+ def close(self) -> None:
+ self._notify_release()
+
+ if self._protocol is not None:
+ self._connector._release(self._key, self._protocol, should_close=True)
+ self._protocol = None
+
+ def release(self) -> None:
+ self._notify_release()
+
+ if self._protocol is not None:
+ self._connector._release(
+ self._key, self._protocol, should_close=self._protocol.should_close
+ )
+ self._protocol = None
+
+ @property
+ def closed(self) -> bool:
+ return self._protocol is None or not self._protocol.is_connected()
+
+
+class _TransportPlaceholder:
+ """placeholder for BaseConnector.connect function"""
+
+ def close(self) -> None:
+ pass
+
+
+class BaseConnector:
+ """Base connector class.
+
+ keepalive_timeout - (optional) Keep-alive timeout.
+ force_close - Set to True to force close and do reconnect
+ after each request (and between redirects).
+ limit - The total number of simultaneous connections.
+ limit_per_host - Number of simultaneous connections to one host.
+    enable_cleanup_closed - Enables cleanup of closed SSL transports.
+                            Disabled by default.
+ loop - Optional event loop.
+ """
+
+ _closed = True # prevent AttributeError in __del__ if ctor was failed
+ _source_traceback = None
+
+ # abort transport after 2 seconds (cleanup broken connections)
+ _cleanup_closed_period = 2.0
+
+ def __init__(
+ self,
+ *,
+ keepalive_timeout: Union[object, None, float] = sentinel,
+ force_close: bool = False,
+ limit: int = 100,
+ limit_per_host: int = 0,
+ enable_cleanup_closed: bool = False,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ) -> None:
+
+ if force_close:
+ if keepalive_timeout is not None and keepalive_timeout is not sentinel:
+ raise ValueError(
+ "keepalive_timeout cannot " "be set if force_close is True"
+ )
+ else:
+ if keepalive_timeout is sentinel:
+ keepalive_timeout = 15.0
+
+ loop = get_running_loop(loop)
+
+ self._closed = False
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
+
+ self._conns = (
+ {}
+ ) # type: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]]
+ self._limit = limit
+ self._limit_per_host = limit_per_host
+ self._acquired = set() # type: Set[ResponseHandler]
+ self._acquired_per_host = defaultdict(
+ set
+ ) # type: DefaultDict[ConnectionKey, Set[ResponseHandler]]
+ self._keepalive_timeout = cast(float, keepalive_timeout)
+ self._force_close = force_close
+
+ # {host_key: FIFO list of waiters}
+ self._waiters = defaultdict(deque) # type: ignore[var-annotated]
+
+ self._loop = loop
+ self._factory = functools.partial(ResponseHandler, loop=loop)
+
+ self.cookies = SimpleCookie() # type: SimpleCookie[str]
+
+ # start keep-alive connection cleanup task
+ self._cleanup_handle: Optional[asyncio.TimerHandle] = None
+
+ # start cleanup closed transports task
+ self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
+ self._cleanup_closed_disabled = not enable_cleanup_closed
+ self._cleanup_closed_transports = [] # type: List[Optional[asyncio.Transport]]
+ self._cleanup_closed()
+
+ def __del__(self, _warnings: Any = warnings) -> None:
+ if self._closed:
+ return
+ if not self._conns:
+ return
+
+ conns = [repr(c) for c in self._conns.values()]
+
+ self._close()
+
+ if PY_36:
+ kwargs = {"source": self}
+ else:
+ kwargs = {}
+ _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
+ context = {
+ "connector": self,
+ "connections": conns,
+ "message": "Unclosed connector",
+ }
+ if self._source_traceback is not None:
+ context["source_traceback"] = self._source_traceback
+ self._loop.call_exception_handler(context)
+
+ def __enter__(self) -> "BaseConnector":
+ warnings.warn(
+ '"witn Connector():" is deprecated, '
+ 'use "async with Connector():" instead',
+ DeprecationWarning,
+ )
+ return self
+
+ def __exit__(self, *exc: Any) -> None:
+ self.close()
+
+ async def __aenter__(self) -> "BaseConnector":
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]] = None,
+ exc_value: Optional[BaseException] = None,
+ exc_traceback: Optional[TracebackType] = None,
+ ) -> None:
+ await self.close()
+
+ @property
+ def force_close(self) -> bool:
+ """Ultimately close connection on releasing if True."""
+ return self._force_close
+
+ @property
+ def limit(self) -> int:
+ """The total number for simultaneous connections.
+
+ If limit is 0 the connector has no limit.
+ The default limit size is 100.
+ """
+ return self._limit
+
+ @property
+ def limit_per_host(self) -> int:
+ """The limit for simultaneous connections to the same endpoint.
+
+        Endpoints are the same if they have an equal
+ (host, port, is_ssl) triple.
+ """
+ return self._limit_per_host
+
+ def _cleanup(self) -> None:
+ """Cleanup unused transports."""
+ if self._cleanup_handle:
+ self._cleanup_handle.cancel()
+ # _cleanup_handle should be unset, otherwise _release() will not
+ # recreate it ever!
+ self._cleanup_handle = None
+
+ now = self._loop.time()
+ timeout = self._keepalive_timeout
+
+ if self._conns:
+ connections = {}
+ deadline = now - timeout
+ for key, conns in self._conns.items():
+ alive = []
+ for proto, use_time in conns:
+ if proto.is_connected():
+ if use_time - deadline < 0:
+ transport = proto.transport
+ proto.close()
+ if key.is_ssl and not self._cleanup_closed_disabled:
+ self._cleanup_closed_transports.append(transport)
+ else:
+ alive.append((proto, use_time))
+ else:
+ transport = proto.transport
+ proto.close()
+ if key.is_ssl and not self._cleanup_closed_disabled:
+ self._cleanup_closed_transports.append(transport)
+
+ if alive:
+ connections[key] = alive
+
+ self._conns = connections
+
+ if self._conns:
+ self._cleanup_handle = helpers.weakref_handle(
+ self, "_cleanup", timeout, self._loop
+ )
+
+ def _drop_acquired_per_host(
+ self, key: "ConnectionKey", val: ResponseHandler
+ ) -> None:
+ acquired_per_host = self._acquired_per_host
+ if key not in acquired_per_host:
+ return
+ conns = acquired_per_host[key]
+ conns.remove(val)
+ if not conns:
+ del self._acquired_per_host[key]
+
+ def _cleanup_closed(self) -> None:
+ """Double confirmation for transport close.
+
+        Some broken SSL servers may leave the socket open without a proper close.
+ """
+ if self._cleanup_closed_handle:
+ self._cleanup_closed_handle.cancel()
+
+ for transport in self._cleanup_closed_transports:
+ if transport is not None:
+ transport.abort()
+
+ self._cleanup_closed_transports = []
+
+ if not self._cleanup_closed_disabled:
+ self._cleanup_closed_handle = helpers.weakref_handle(
+ self, "_cleanup_closed", self._cleanup_closed_period, self._loop
+ )
+
+ def close(self) -> Awaitable[None]:
+ """Close all opened transports."""
+ self._close()
+ return _DeprecationWaiter(noop())
+
+ def _close(self) -> None:
+ if self._closed:
+ return
+
+ self._closed = True
+
+ try:
+ if self._loop.is_closed():
+ return
+
+ # cancel cleanup task
+ if self._cleanup_handle:
+ self._cleanup_handle.cancel()
+
+ # cancel cleanup close task
+ if self._cleanup_closed_handle:
+ self._cleanup_closed_handle.cancel()
+
+ for data in self._conns.values():
+ for proto, t0 in data:
+ proto.close()
+
+ for proto in self._acquired:
+ proto.close()
+
+ for transport in self._cleanup_closed_transports:
+ if transport is not None:
+ transport.abort()
+
+ finally:
+ self._conns.clear()
+ self._acquired.clear()
+ self._waiters.clear()
+ self._cleanup_handle = None
+ self._cleanup_closed_transports.clear()
+ self._cleanup_closed_handle = None
+
+ @property
+ def closed(self) -> bool:
+        """Whether the connector is closed.
+
+        A read-only property.
+ """
+ return self._closed
+
+ def _available_connections(self, key: "ConnectionKey") -> int:
+ """
+ Return number of available connections.
+
+ The limit, limit_per_host and the connection key are taken into account.
+
+        A return value of less than 1 means that no connections are
+        available.
+ """
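+        # Worked example (editorial): with limit=100, limit_per_host=2,
+        # 10 connections acquired in total and 2 of them acquired for this
+        # key, the per-host branch computes 2 - 2 = 0, so the caller must
+        # wait even though the global limit still has headroom.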
+ if self._limit:
+            # compute total available connections
+ available = self._limit - len(self._acquired)
+
+ # check limit per host
+ if (
+ self._limit_per_host
+ and available > 0
+ and key in self._acquired_per_host
+ ):
+ acquired = self._acquired_per_host.get(key)
+ assert acquired is not None
+ available = self._limit_per_host - len(acquired)
+
+ elif self._limit_per_host and key in self._acquired_per_host:
+ # check limit per host
+ acquired = self._acquired_per_host.get(key)
+ assert acquired is not None
+ available = self._limit_per_host - len(acquired)
+ else:
+ available = 1
+
+ return available
+
+ async def connect(
+ self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> Connection:
+ """Get from pool or create new connection."""
+ key = req.connection_key
+ available = self._available_connections(key)
+
+ # Wait if there are no available connections or if there are/were
+        # waiters (i.e. don't steal a connection from a waiter about to wake up)
+ if available <= 0 or key in self._waiters:
+ fut = self._loop.create_future()
+
+ # This connection will now count towards the limit.
+ self._waiters[key].append(fut)
+
+ if traces:
+ for trace in traces:
+ await trace.send_connection_queued_start()
+
+ try:
+ await fut
+ except BaseException as e:
+ if key in self._waiters:
+ # remove a waiter even if it was cancelled, normally it's
+ # removed when it's notified
+ try:
+ self._waiters[key].remove(fut)
+ except ValueError: # fut may no longer be in list
+ pass
+
+ raise e
+ finally:
+ if key in self._waiters and not self._waiters[key]:
+ del self._waiters[key]
+
+ if traces:
+ for trace in traces:
+ await trace.send_connection_queued_end()
+
+ proto = self._get(key)
+ if proto is None:
+ placeholder = cast(ResponseHandler, _TransportPlaceholder())
+ self._acquired.add(placeholder)
+ self._acquired_per_host[key].add(placeholder)
+
+ if traces:
+ for trace in traces:
+ await trace.send_connection_create_start()
+
+ try:
+ proto = await self._create_connection(req, traces, timeout)
+ if self._closed:
+ proto.close()
+ raise ClientConnectionError("Connector is closed.")
+ except BaseException:
+ if not self._closed:
+ self._acquired.remove(placeholder)
+ self._drop_acquired_per_host(key, placeholder)
+ self._release_waiter()
+ raise
+ else:
+ if not self._closed:
+ self._acquired.remove(placeholder)
+ self._drop_acquired_per_host(key, placeholder)
+
+ if traces:
+ for trace in traces:
+ await trace.send_connection_create_end()
+ else:
+ if traces:
+ # Acquire the connection to prevent race conditions with limits
+ placeholder = cast(ResponseHandler, _TransportPlaceholder())
+ self._acquired.add(placeholder)
+ self._acquired_per_host[key].add(placeholder)
+ for trace in traces:
+ await trace.send_connection_reuseconn()
+ self._acquired.remove(placeholder)
+ self._drop_acquired_per_host(key, placeholder)
+
+ self._acquired.add(proto)
+ self._acquired_per_host[key].add(proto)
+ return Connection(self, key, proto, self._loop)
+
+ def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
+ try:
+ conns = self._conns[key]
+ except KeyError:
+ return None
+
+ t1 = self._loop.time()
+ while conns:
+ proto, t0 = conns.pop()
+ if proto.is_connected():
+ if t1 - t0 > self._keepalive_timeout:
+ transport = proto.transport
+ proto.close()
+ # only for SSL transports
+ if key.is_ssl and not self._cleanup_closed_disabled:
+ self._cleanup_closed_transports.append(transport)
+ else:
+ if not conns:
+ # The very last connection was reclaimed: drop the key
+ del self._conns[key]
+ return proto
+ else:
+ transport = proto.transport
+ proto.close()
+ if key.is_ssl and not self._cleanup_closed_disabled:
+ self._cleanup_closed_transports.append(transport)
+
+ # No more connections: drop the key
+ del self._conns[key]
+ return None
+
+ def _release_waiter(self) -> None:
+ """
+ Iterates over all waiters until one to be released is found.
+
+        The one to be released is not finished and
+ belongs to a host that has available connections.
+ """
+ if not self._waiters:
+ return
+
+        # Shuffle the dict keys so that we do not iterate
+        # over them in the same order on every call.
+ queues = list(self._waiters.keys())
+ random.shuffle(queues)
+
+ for key in queues:
+ if self._available_connections(key) < 1:
+ continue
+
+ waiters = self._waiters[key]
+ while waiters:
+ waiter = waiters.popleft()
+ if not waiter.done():
+ waiter.set_result(None)
+ return
+
+ def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
+ if self._closed:
+ # acquired connection is already released on connector closing
+ return
+
+ try:
+ self._acquired.remove(proto)
+ self._drop_acquired_per_host(key, proto)
+ except KeyError: # pragma: no cover
+            # this may be the result of a non-deterministic order of object
+            # finalization due to garbage collection.
+ pass
+ else:
+ self._release_waiter()
+
+ def _release(
+ self,
+ key: "ConnectionKey",
+ protocol: ResponseHandler,
+ *,
+ should_close: bool = False,
+ ) -> None:
+ if self._closed:
+ # acquired connection is already released on connector closing
+ return
+
+ self._release_acquired(key, protocol)
+
+ if self._force_close:
+ should_close = True
+
+ if should_close or protocol.should_close:
+ transport = protocol.transport
+ protocol.close()
+
+ if key.is_ssl and not self._cleanup_closed_disabled:
+ self._cleanup_closed_transports.append(transport)
+ else:
+ conns = self._conns.get(key)
+ if conns is None:
+ conns = self._conns[key] = []
+ conns.append((protocol, self._loop.time()))
+
+ if self._cleanup_handle is None:
+ self._cleanup_handle = helpers.weakref_handle(
+ self, "_cleanup", self._keepalive_timeout, self._loop
+ )
+
+ async def _create_connection(
+ self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> ResponseHandler:
+ raise NotImplementedError()
+
+
+class _DNSCacheTable:
+ def __init__(self, ttl: Optional[float] = None) -> None:
+ self._addrs_rr = (
+ {}
+ ) # type: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]]
+ self._timestamps = {} # type: Dict[Tuple[str, int], float]
+ self._ttl = ttl
+
+ def __contains__(self, host: object) -> bool:
+ return host in self._addrs_rr
+
+ def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None:
+ self._addrs_rr[key] = (cycle(addrs), len(addrs))
+
+ if self._ttl:
+ self._timestamps[key] = monotonic()
+
+ def remove(self, key: Tuple[str, int]) -> None:
+ self._addrs_rr.pop(key, None)
+
+ if self._ttl:
+ self._timestamps.pop(key, None)
+
+ def clear(self) -> None:
+ self._addrs_rr.clear()
+ self._timestamps.clear()
+
+ def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]:
+ loop, length = self._addrs_rr[key]
+ addrs = list(islice(loop, length))
+ # Consume one more element to shift internal state of `cycle`
+ next(loop)
+ return addrs
+
+ def expired(self, key: Tuple[str, int]) -> bool:
+ if self._ttl is None:
+ return False
+
+ return self._timestamps[key] + self._ttl < monotonic()
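+
+    # Round-robin sketch (editorial, illustrative values): after
+    # add(("example.com", 443), [a1, a2]), successive next_addrs() calls
+    # return [a1, a2], then [a2, a1], then [a1, a2] again, because each
+    # call consumes one extra element of the underlying cycle.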
+
+
+class TCPConnector(BaseConnector):
+ """TCP connector.
+
+    verify_ssl - Set to True to check SSL certificates.
+ fingerprint - Pass the binary sha256
+ digest of the expected certificate in DER format to verify
+ that the certificate the server presents matches. See also
+ https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning
+    resolver - Enable DNS lookups and use this resolver.
+ use_dns_cache - Use memory cache for DNS lookups.
+    ttl_dns_cache - Max seconds to cache a DNS entry; None means cache forever.
+ family - socket address family
+ local_addr - local tuple of (host, port) to bind socket to
+
+ keepalive_timeout - (optional) Keep-alive timeout.
+ force_close - Set to True to force close and do reconnect
+ after each request (and between redirects).
+ limit - The total number of simultaneous connections.
+ limit_per_host - Number of simultaneous connections to one host.
+    enable_cleanup_closed - Enables cleanup of closed SSL transports.
+ Disabled by default.
+ loop - Optional event loop.
+ """
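+
+    # Usage sketch (editorial, illustrative only; not part of the vendored
+    # code): cache DNS answers for five minutes and skip certificate
+    # verification, e.g. for a test host:
+    #
+    #     conn = aiohttp.TCPConnector(ttl_dns_cache=300, ssl=False)
+    #     async with aiohttp.ClientSession(connector=conn) as session:
+    #         ...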
+
+ def __init__(
+ self,
+ *,
+ verify_ssl: bool = True,
+ fingerprint: Optional[bytes] = None,
+ use_dns_cache: bool = True,
+ ttl_dns_cache: Optional[int] = 10,
+ family: int = 0,
+ ssl_context: Optional[SSLContext] = None,
+ ssl: Union[None, bool, Fingerprint, SSLContext] = None,
+ local_addr: Optional[Tuple[str, int]] = None,
+ resolver: Optional[AbstractResolver] = None,
+ keepalive_timeout: Union[None, float, object] = sentinel,
+ force_close: bool = False,
+ limit: int = 100,
+ limit_per_host: int = 0,
+ enable_cleanup_closed: bool = False,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ):
+ super().__init__(
+ keepalive_timeout=keepalive_timeout,
+ force_close=force_close,
+ limit=limit,
+ limit_per_host=limit_per_host,
+ enable_cleanup_closed=enable_cleanup_closed,
+ loop=loop,
+ )
+
+ self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
+ if resolver is None:
+ resolver = DefaultResolver(loop=self._loop)
+ self._resolver = resolver
+
+ self._use_dns_cache = use_dns_cache
+ self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
+ self._throttle_dns_events = (
+ {}
+ ) # type: Dict[Tuple[str, int], EventResultOrError]
+ self._family = family
+ self._local_addr = local_addr
+
+ def close(self) -> Awaitable[None]:
+ """Close all ongoing DNS calls."""
+ for ev in self._throttle_dns_events.values():
+ ev.cancel()
+
+ return super().close()
+
+ @property
+ def family(self) -> int:
+ """Socket family like AF_INET."""
+ return self._family
+
+ @property
+ def use_dns_cache(self) -> bool:
+ """True if local DNS caching is enabled."""
+ return self._use_dns_cache
+
+ def clear_dns_cache(
+ self, host: Optional[str] = None, port: Optional[int] = None
+ ) -> None:
+        """Remove the specified host/port entry or clear the whole local DNS cache."""
+ if host is not None and port is not None:
+ self._cached_hosts.remove((host, port))
+ elif host is not None or port is not None:
+            raise ValueError("either both host and port or none of them are allowed")
+ else:
+ self._cached_hosts.clear()
+
+ async def _resolve_host(
+ self, host: str, port: int, traces: Optional[List["Trace"]] = None
+ ) -> List[Dict[str, Any]]:
+ if is_ip_address(host):
+ return [
+ {
+ "hostname": host,
+ "host": host,
+ "port": port,
+ "family": self._family,
+ "proto": 0,
+ "flags": 0,
+ }
+ ]
+
+ if not self._use_dns_cache:
+
+ if traces:
+ for trace in traces:
+ await trace.send_dns_resolvehost_start(host)
+
+ res = await self._resolver.resolve(host, port, family=self._family)
+
+ if traces:
+ for trace in traces:
+ await trace.send_dns_resolvehost_end(host)
+
+ return res
+
+ key = (host, port)
+
+ if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)):
+ # get result early, before any await (#4014)
+ result = self._cached_hosts.next_addrs(key)
+
+ if traces:
+ for trace in traces:
+ await trace.send_dns_cache_hit(host)
+ return result
+
+ if key in self._throttle_dns_events:
+ # get event early, before any await (#4014)
+ event = self._throttle_dns_events[key]
+ if traces:
+ for trace in traces:
+ await trace.send_dns_cache_hit(host)
+ await event.wait()
+ else:
+ # update dict early, before any await (#4014)
+ self._throttle_dns_events[key] = EventResultOrError(self._loop)
+ if traces:
+ for trace in traces:
+ await trace.send_dns_cache_miss(host)
+ try:
+
+ if traces:
+ for trace in traces:
+ await trace.send_dns_resolvehost_start(host)
+
+ addrs = await self._resolver.resolve(host, port, family=self._family)
+ if traces:
+ for trace in traces:
+ await trace.send_dns_resolvehost_end(host)
+
+ self._cached_hosts.add(key, addrs)
+ self._throttle_dns_events[key].set()
+ except BaseException as e:
+                # any DNS exception is set for the waiters, so they all
+                # raise the same exception, independently of the resolver
+                # implementation
+ self._throttle_dns_events[key].set(exc=e)
+ raise
+ finally:
+ self._throttle_dns_events.pop(key)
+
+ return self._cached_hosts.next_addrs(key)
+
+ async def _create_connection(
+ self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> ResponseHandler:
+ """Create connection.
+
+ Has same keyword arguments as BaseEventLoop.create_connection.
+ """
+ if req.proxy:
+ _, proto = await self._create_proxy_connection(req, traces, timeout)
+ else:
+ _, proto = await self._create_direct_connection(req, traces, timeout)
+
+ return proto
+
+ @staticmethod
+ @functools.lru_cache(None)
+ def _make_ssl_context(verified: bool) -> SSLContext:
+ if verified:
+ return ssl.create_default_context()
+ else:
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ sslcontext.options |= ssl.OP_NO_SSLv2
+ sslcontext.options |= ssl.OP_NO_SSLv3
+ sslcontext.check_hostname = False
+ sslcontext.verify_mode = ssl.CERT_NONE
+ try:
+ sslcontext.options |= ssl.OP_NO_COMPRESSION
+ except AttributeError as attr_err:
+ warnings.warn(
+ "{!s}: The Python interpreter is compiled "
+ "against OpenSSL < 1.0.0. Ref: "
+ "https://docs.python.org/3/library/ssl.html"
+ "#ssl.OP_NO_COMPRESSION".format(attr_err),
+ )
+ sslcontext.set_default_verify_paths()
+ return sslcontext
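+
+    # Editorial note: thanks to the unbounded lru_cache above, at most two
+    # SSLContext instances (verified=True / verified=False) are ever built,
+    # and they are then shared by every connection in the process.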
+
+ def _get_ssl_context(self, req: "ClientRequest") -> Optional[SSLContext]:
+ """Logic to get the correct SSL context
+
+ 0. if req.ssl is false, return None
+
+ 1. if ssl_context is specified in req, use it
+ 2. if _ssl_context is specified in self, use it
+ 3. otherwise:
+ 1. if verify_ssl is not specified in req, use self.ssl_context
+ (will generate a default context according to self.verify_ssl)
+ 2. if verify_ssl is True in req, generate a default SSL context
+ 3. if verify_ssl is False in req, generate a SSL context that
+ won't verify
+ """
+ if req.is_ssl():
+ if ssl is None: # pragma: no cover
+ raise RuntimeError("SSL is not supported.")
+ sslcontext = req.ssl
+ if isinstance(sslcontext, ssl.SSLContext):
+ return sslcontext
+ if sslcontext is not None:
+ # not verified or fingerprinted
+ return self._make_ssl_context(False)
+ sslcontext = self._ssl
+ if isinstance(sslcontext, ssl.SSLContext):
+ return sslcontext
+ if sslcontext is not None:
+ # not verified or fingerprinted
+ return self._make_ssl_context(False)
+ return self._make_ssl_context(True)
+ else:
+ return None
+
+ def _get_fingerprint(self, req: "ClientRequest") -> Optional["Fingerprint"]:
+ ret = req.ssl
+ if isinstance(ret, Fingerprint):
+ return ret
+ ret = self._ssl
+ if isinstance(ret, Fingerprint):
+ return ret
+ return None
+
+ async def _wrap_create_connection(
+ self,
+ *args: Any,
+ req: "ClientRequest",
+ timeout: "ClientTimeout",
+ client_error: Type[Exception] = ClientConnectorError,
+ **kwargs: Any,
+ ) -> Tuple[asyncio.Transport, ResponseHandler]:
+ try:
+ async with ceil_timeout(timeout.sock_connect):
+ return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa
+ except cert_errors as exc:
+ raise ClientConnectorCertificateError(req.connection_key, exc) from exc
+ except ssl_errors as exc:
+ raise ClientConnectorSSLError(req.connection_key, exc) from exc
+ except OSError as exc:
+ raise client_error(req.connection_key, exc) from exc
+
+ def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
+ """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
+
+ One case is that :py:meth:`asyncio.loop.start_tls` is not yet
+ implemented under Python 3.6. It is necessary for TLS-in-TLS so
+ that it is possible to send HTTPS queries through HTTPS proxies.
+
+ This doesn't affect regular HTTP requests, though.
+ """
+ if not req.is_ssl():
+ return
+
+ proxy_url = req.proxy
+ assert proxy_url is not None
+ if proxy_url.scheme != "https":
+ return
+
+ self._check_loop_for_start_tls()
+
+ def _check_loop_for_start_tls(self) -> None:
+ try:
+ self._loop.start_tls
+ except AttributeError as attr_exc:
+ raise RuntimeError(
+ "An HTTPS request is being sent through an HTTPS proxy. "
+ "This needs support for TLS in TLS but it is not implemented "
+ "in your runtime for the stdlib asyncio.\n\n"
+ "Please upgrade to Python 3.7 or higher. For more details, "
+ "please see:\n"
+ "* https://bugs.python.org/issue37179\n"
+ "* https://github.com/python/cpython/pull/28073\n"
+ "* https://docs.aiohttp.org/en/stable/"
+ "client_advanced.html#proxy-support\n"
+ "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
+ ) from attr_exc
+
+ def _loop_supports_start_tls(self) -> bool:
+ try:
+ self._check_loop_for_start_tls()
+ except RuntimeError:
+ return False
+ else:
+ return True
+
+ def _warn_about_tls_in_tls(
+ self,
+ underlying_transport: asyncio.Transport,
+ req: "ClientRequest",
+ ) -> None:
+ """Issue a warning if the requested URL has HTTPS scheme."""
+ if req.request_info.url.scheme != "https":
+ return
+
+ asyncio_supports_tls_in_tls = getattr(
+ underlying_transport,
+ "_start_tls_compatible",
+ False,
+ )
+
+ if asyncio_supports_tls_in_tls:
+ return
+
+ warnings.warn(
+ "An HTTPS request is being sent through an HTTPS proxy. "
+ "This support for TLS in TLS is known to be disabled "
+ "in the stdlib asyncio. This is why you'll probably see "
+ "an error in the log below.\n\n"
+ "It is possible to enable it via monkeypatching under "
+ "Python 3.7 or higher. For more details, see:\n"
+ "* https://bugs.python.org/issue37179\n"
+ "* https://github.com/python/cpython/pull/28073\n\n"
+ "You can temporarily patch this as follows:\n"
+ "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
+ "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
+ RuntimeWarning,
+ source=self,
+            # Why `3`? At least 2 of the calls in the stack originate
+            # from the methods in this class.
+ stacklevel=3,
+ )
+
+ async def _start_tls_connection(
+ self,
+ underlying_transport: asyncio.Transport,
+ req: "ClientRequest",
+ timeout: "ClientTimeout",
+ client_error: Type[Exception] = ClientConnectorError,
+ ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
+ """Wrap the raw TCP transport with TLS."""
+ tls_proto = self._factory() # Create a brand new proto for TLS
+
+ # Safety of the `cast()` call here is based on the fact that
+ # internally `_get_ssl_context()` only returns `None` when
+        # `req.is_ssl()` evaluates to `False`, which never happens in
+        # this code path. Of course, it's rather fragile
+ # maintainability-wise but this is to be solved separately.
+ sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req))
+
+ try:
+ async with ceil_timeout(timeout.sock_connect):
+ try:
+ tls_transport = await self._loop.start_tls(
+ underlying_transport,
+ tls_proto,
+ sslcontext,
+ server_hostname=req.host,
+ ssl_handshake_timeout=timeout.total,
+ )
+ except BaseException:
+ # We need to close the underlying transport since
+ # `start_tls()` probably failed before it had a
+ # chance to do this:
+ underlying_transport.close()
+ raise
+ except cert_errors as exc:
+ raise ClientConnectorCertificateError(req.connection_key, exc) from exc
+ except ssl_errors as exc:
+ raise ClientConnectorSSLError(req.connection_key, exc) from exc
+ except OSError as exc:
+ raise client_error(req.connection_key, exc) from exc
+ except TypeError as type_err:
+ # Example cause looks like this:
+ # TypeError: transport <asyncio.sslproto._SSLProtocolTransport
+ # object at 0x7f760615e460> is not supported by start_tls()
+
+ raise ClientConnectionError(
+ "Cannot initialize a TLS-in-TLS connection to host "
+ f"{req.host!s}:{req.port:d} through an underlying connection "
+ f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
+ f"[{type_err!s}]"
+ ) from type_err
+ else:
+ tls_proto.connection_made(
+ tls_transport
+ ) # Kick the state machine of the new TLS protocol
+
+ return tls_transport, tls_proto
+
+ async def _create_direct_connection(
+ self,
+ req: "ClientRequest",
+ traces: List["Trace"],
+ timeout: "ClientTimeout",
+ *,
+ client_error: Type[Exception] = ClientConnectorError,
+ ) -> Tuple[asyncio.Transport, ResponseHandler]:
+ sslcontext = self._get_ssl_context(req)
+ fingerprint = self._get_fingerprint(req)
+
+ host = req.url.raw_host
+ assert host is not None
+ port = req.port
+ assert port is not None
+ host_resolved = asyncio.ensure_future(
+ self._resolve_host(host, port, traces=traces), loop=self._loop
+ )
+ try:
+ # Cancelling this lookup should not cancel the underlying lookup
+ # or else the cancel event will get broadcast to all the waiters
+ # across all connections.
+ hosts = await asyncio.shield(host_resolved)
+ except asyncio.CancelledError:
+
+ def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
+ with suppress(Exception, asyncio.CancelledError):
+ fut.result()
+
+ host_resolved.add_done_callback(drop_exception)
+ raise
+ except OSError as exc:
+            # in the case of a proxy, this is not a ClientProxyConnectionError:
+            # it is a problem with resolving the proxy IP itself
+ raise ClientConnectorError(req.connection_key, exc) from exc
+
+ last_exc = None # type: Optional[Exception]
+
+ for hinfo in hosts:
+ host = hinfo["host"]
+ port = hinfo["port"]
+
+ try:
+ transp, proto = await self._wrap_create_connection(
+ self._factory,
+ host,
+ port,
+ timeout=timeout,
+ ssl=sslcontext,
+ family=hinfo["family"],
+ proto=hinfo["proto"],
+ flags=hinfo["flags"],
+ server_hostname=hinfo["hostname"] if sslcontext else None,
+ local_addr=self._local_addr,
+ req=req,
+ client_error=client_error,
+ )
+ except ClientConnectorError as exc:
+ last_exc = exc
+ continue
+
+ if req.is_ssl() and fingerprint:
+ try:
+ fingerprint.check(transp)
+ except ServerFingerprintMismatch as exc:
+ transp.close()
+ if not self._cleanup_closed_disabled:
+ self._cleanup_closed_transports.append(transp)
+ last_exc = exc
+ continue
+
+ return transp, proto
+ else:
+ assert last_exc is not None
+ raise last_exc
+
+ async def _create_proxy_connection(
+ self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
+ self._fail_on_no_start_tls(req)
+ runtime_has_start_tls = self._loop_supports_start_tls()
+ if req.proxy.scheme != "https":
+ runtime_has_start_tls = False
+
+ headers = {} # type: Dict[str, str]
+ if req.proxy_headers is not None:
+ headers = req.proxy_headers # type: ignore[assignment]
+ headers[hdrs.HOST] = req.headers[hdrs.HOST]
+
+ url = req.proxy
+ assert url is not None
+ proxy_req = ClientRequest(
+ hdrs.METH_GET,
+ url,
+ headers=headers,
+ auth=req.proxy_auth,
+ loop=self._loop,
+ ssl=req.ssl,
+ )
+
+ # create connection to proxy server
+ transport, proto = await self._create_direct_connection(
+ proxy_req, [], timeout, client_error=ClientProxyConnectionError
+ )
+
+        # Many HTTP proxies have buggy keepalive support. Let's not
+        # reuse the connection but close it after processing every
+        # response.
+ proto.force_close()
+
+ auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
+ if auth is not None:
+ if not req.is_ssl():
+ req.headers[hdrs.PROXY_AUTHORIZATION] = auth
+ else:
+ proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth
+
+ if req.is_ssl():
+ if runtime_has_start_tls:
+ self._warn_about_tls_in_tls(transport, req)
+
+ # For HTTPS requests over HTTP proxy
+ # we must notify proxy to tunnel connection
+ # so we send CONNECT command:
+ # CONNECT www.python.org:443 HTTP/1.1
+ # Host: www.python.org
+ #
+            # next we must do the TLS handshake and so on; to do this we
+            # must wrap the raw socket into a secure one, which asyncio
+            # handles perfectly
+ proxy_req.method = hdrs.METH_CONNECT
+ proxy_req.url = req.url
+ key = attr.evolve(
+ req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None
+ )
+ conn = Connection(self, key, proto, self._loop)
+ proxy_resp = await proxy_req.send(conn)
+ try:
+ protocol = conn._protocol
+ assert protocol is not None
+
+ # read_until_eof=True will ensure the connection isn't closed
+ # once the response is received and processed allowing
+ # START_TLS to work on the connection below.
+ protocol.set_response_params(read_until_eof=runtime_has_start_tls)
+ resp = await proxy_resp.start(conn)
+ except BaseException:
+ proxy_resp.close()
+ conn.close()
+ raise
+ else:
+ conn._protocol = None
+ conn._transport = None
+ try:
+ if resp.status != 200:
+ message = resp.reason
+ if message is None:
+ message = RESPONSES[resp.status][0]
+ raise ClientHttpProxyError(
+ proxy_resp.request_info,
+ resp.history,
+ status=resp.status,
+ message=message,
+ headers=resp.headers,
+ )
+ if not runtime_has_start_tls:
+ rawsock = transport.get_extra_info("socket", default=None)
+ if rawsock is None:
+ raise RuntimeError(
+ "Transport does not expose socket instance"
+ )
+                        # Duplicate the socket, so we can close the proxy transport
+ rawsock = rawsock.dup()
+ except BaseException:
+ # It shouldn't be closed in `finally` because it's fed to
+ # `loop.start_tls()` and the docs say not to touch it after
+ # passing there.
+ transport.close()
+ raise
+ finally:
+ if not runtime_has_start_tls:
+ transport.close()
+
+ if not runtime_has_start_tls:
+ # HTTP proxy with support for upgrade to HTTPS
+ sslcontext = self._get_ssl_context(req)
+ return await self._wrap_create_connection(
+ self._factory,
+ timeout=timeout,
+ ssl=sslcontext,
+ sock=rawsock,
+ server_hostname=req.host,
+ req=req,
+ )
+
+ return await self._start_tls_connection(
+ # Access the old transport for the last time before it's
+ # closed and forgotten forever:
+ transport,
+ req=req,
+ timeout=timeout,
+ )
+ finally:
+ proxy_resp.close()
+
+ return transport, proto
+
+
+class UnixConnector(BaseConnector):
+ """Unix socket connector.
+
+ path - Unix socket path.
+ keepalive_timeout - (optional) Keep-alive timeout.
+ force_close - Set to True to force close and do reconnect
+ after each request (and between redirects).
+ limit - The total number of simultaneous connections.
+ limit_per_host - Number of simultaneous connections to one host.
+ loop - Optional event loop.
+ """
+
+ def __init__(
+ self,
+ path: str,
+ force_close: bool = False,
+ keepalive_timeout: Union[object, float, None] = sentinel,
+ limit: int = 100,
+ limit_per_host: int = 0,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ) -> None:
+ super().__init__(
+ force_close=force_close,
+ keepalive_timeout=keepalive_timeout,
+ limit=limit,
+ limit_per_host=limit_per_host,
+ loop=loop,
+ )
+ self._path = path
+
+ @property
+ def path(self) -> str:
+ """Path to unix socket."""
+ return self._path
+
+ async def _create_connection(
+ self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> ResponseHandler:
+ try:
+ async with ceil_timeout(timeout.sock_connect):
+ _, proto = await self._loop.create_unix_connection(
+ self._factory, self._path
+ )
+ except OSError as exc:
+ raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc
+
+ return cast(ResponseHandler, proto)
+
+
+class NamedPipeConnector(BaseConnector):
+ """Named pipe connector.
+
+ Only supported by the proactor event loop.
+ See also: https://docs.python.org/3.7/library/asyncio-eventloop.html
+
+ path - Windows named pipe path.
+ keepalive_timeout - (optional) Keep-alive timeout.
+ force_close - Set to True to force close and do reconnect
+ after each request (and between redirects).
+ limit - The total number of simultaneous connections.
+ limit_per_host - Number of simultaneous connections to one host.
+ loop - Optional event loop.
+ """
+
+ def __init__(
+ self,
+ path: str,
+ force_close: bool = False,
+ keepalive_timeout: Union[object, float, None] = sentinel,
+ limit: int = 100,
+ limit_per_host: int = 0,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ) -> None:
+ super().__init__(
+ force_close=force_close,
+ keepalive_timeout=keepalive_timeout,
+ limit=limit,
+ limit_per_host=limit_per_host,
+ loop=loop,
+ )
+ if not isinstance(
+ self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
+ ):
+ raise RuntimeError(
+                "Named Pipes are only available in the proactor loop under Windows"
+ )
+ self._path = path
+
+ @property
+ def path(self) -> str:
+ """Path to the named pipe."""
+ return self._path
+
+ async def _create_connection(
+ self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
+ ) -> ResponseHandler:
+ try:
+ async with ceil_timeout(timeout.sock_connect):
+ _, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] # noqa: E501
+ self._factory, self._path
+ )
+                # the drain is required so that connection_made() is called
+                # and the transport is set; otherwise it is not set before
+                # the `assert conn.transport is not None`
+                # in client.py's _request method
+ await asyncio.sleep(0)
+                # another option is to set the transport manually, like
+ # `proto.transport = trans`
+ except OSError as exc:
+ raise ClientConnectorError(req.connection_key, exc) from exc
+
+ return cast(ResponseHandler, proto)
diff --git a/contrib/python/aiohttp/aiohttp/cookiejar.py b/contrib/python/aiohttp/aiohttp/cookiejar.py
new file mode 100644
index 0000000000..0a2656634d
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/cookiejar.py
@@ -0,0 +1,413 @@
+import asyncio
+import contextlib
+import datetime
+import os # noqa
+import pathlib
+import pickle
+import re
+from collections import defaultdict
+from http.cookies import BaseCookie, Morsel, SimpleCookie
+from typing import ( # noqa
+ DefaultDict,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Set,
+ Tuple,
+ Union,
+ cast,
+)
+
+from yarl import URL
+
+from .abc import AbstractCookieJar, ClearCookiePredicate
+from .helpers import is_ip_address, next_whole_second
+from .typedefs import LooseCookies, PathLike, StrOrURL
+
+__all__ = ("CookieJar", "DummyCookieJar")
+
+
+CookieItem = Union[str, "Morsel[str]"]
+
+
+class CookieJar(AbstractCookieJar):
+ """Implements cookie storage adhering to RFC 6265."""
+
+ DATE_TOKENS_RE = re.compile(
+ r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
+ r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
+ )
+
+ DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
+
+ DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
+
+ DATE_MONTH_RE = re.compile(
+ "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
+ re.I,
+ )
+
+ DATE_YEAR_RE = re.compile(r"(\d{2,4})")
+
+ MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)
+
+ MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2 ** 31 - 1)
+
+ def __init__(
+ self,
+ *,
+ unsafe: bool = False,
+ quote_cookie: bool = True,
+ treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ) -> None:
+ super().__init__(loop=loop)
+ self._cookies = defaultdict(
+ SimpleCookie
+ ) # type: DefaultDict[str, SimpleCookie[str]]
+ self._host_only_cookies = set() # type: Set[Tuple[str, str]]
+ self._unsafe = unsafe
+ self._quote_cookie = quote_cookie
+ if treat_as_secure_origin is None:
+ treat_as_secure_origin = []
+ elif isinstance(treat_as_secure_origin, URL):
+ treat_as_secure_origin = [treat_as_secure_origin.origin()]
+ elif isinstance(treat_as_secure_origin, str):
+ treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
+ else:
+ treat_as_secure_origin = [
+ URL(url).origin() if isinstance(url, str) else url.origin()
+ for url in treat_as_secure_origin
+ ]
+ self._treat_as_secure_origin = treat_as_secure_origin
+ self._next_expiration = next_whole_second()
+ self._expirations = {} # type: Dict[Tuple[str, str], datetime.datetime]
+ # #4515: datetime.max may not be representable on 32-bit platforms
+ self._max_time = self.MAX_TIME
+ try:
+ self._max_time.timestamp()
+ except OverflowError:
+ self._max_time = self.MAX_32BIT_TIME
+
+ def save(self, file_path: PathLike) -> None:
+ file_path = pathlib.Path(file_path)
+ with file_path.open(mode="wb") as f:
+ pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
+
+ def load(self, file_path: PathLike) -> None:
+ file_path = pathlib.Path(file_path)
+ with file_path.open(mode="rb") as f:
+ self._cookies = pickle.load(f)
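+
+    # Usage sketch (editorial): jar.save("cookies.pickle") persists the jar
+    # with pickle and jar.load("cookies.pickle") restores it; only load
+    # files you trust, since unpickling can execute arbitrary code.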
+
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
+ if predicate is None:
+ self._next_expiration = next_whole_second()
+ self._cookies.clear()
+ self._host_only_cookies.clear()
+ self._expirations.clear()
+ return
+
+ to_del = []
+ now = datetime.datetime.now(datetime.timezone.utc)
+ for domain, cookie in self._cookies.items():
+ for name, morsel in cookie.items():
+ key = (domain, name)
+ if (
+ key in self._expirations and self._expirations[key] <= now
+ ) or predicate(morsel):
+ to_del.append(key)
+
+ for domain, name in to_del:
+ key = (domain, name)
+ self._host_only_cookies.discard(key)
+ if key in self._expirations:
+ del self._expirations[(domain, name)]
+ self._cookies[domain].pop(name, None)
+
+ next_expiration = min(self._expirations.values(), default=self._max_time)
+ try:
+ self._next_expiration = next_expiration.replace(
+ microsecond=0
+ ) + datetime.timedelta(seconds=1)
+ except OverflowError:
+ self._next_expiration = self._max_time
+
+ def clear_domain(self, domain: str) -> None:
+ self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
+
+ def __iter__(self) -> "Iterator[Morsel[str]]":
+ self._do_expiration()
+ for val in self._cookies.values():
+ yield from val.values()
+
+ def __len__(self) -> int:
+ return sum(1 for i in self)
+
+ def _do_expiration(self) -> None:
+ self.clear(lambda x: False)
+
+ def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
+ self._next_expiration = min(self._next_expiration, when)
+ self._expirations[(domain, name)] = when
+
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
+ """Update cookies."""
+ hostname = response_url.raw_host
+
+ if not self._unsafe and is_ip_address(hostname):
+ # Don't accept cookies from IPs
+ return
+
+ if isinstance(cookies, Mapping):
+ cookies = cookies.items()
+
+ for name, cookie in cookies:
+ if not isinstance(cookie, Morsel):
+ tmp = SimpleCookie() # type: SimpleCookie[str]
+ tmp[name] = cookie # type: ignore[assignment]
+ cookie = tmp[name]
+
+ domain = cookie["domain"]
+
+ # ignore domains with trailing dots
+ if domain.endswith("."):
+ domain = ""
+ del cookie["domain"]
+
+ if not domain and hostname is not None:
+ # Set the cookie's domain to the response hostname
+ # and set its host-only-flag
+ self._host_only_cookies.add((hostname, name))
+ domain = cookie["domain"] = hostname
+
+ if domain.startswith("."):
+ # Remove leading dot
+ domain = domain[1:]
+ cookie["domain"] = domain
+
+ if hostname and not self._is_domain_match(domain, hostname):
+ # Setting cookies for different domains is not allowed
+ continue
+
+ path = cookie["path"]
+ if not path or not path.startswith("/"):
+ # Set the cookie's path to the response path
+ path = response_url.path
+ if not path.startswith("/"):
+ path = "/"
+ else:
+ # Cut everything from the last slash to the end
+ path = "/" + path[1 : path.rfind("/")]
+ cookie["path"] = path
+
+ max_age = cookie["max-age"]
+ if max_age:
+ try:
+ delta_seconds = int(max_age)
+ try:
+ max_age_expiration = datetime.datetime.now(
+ datetime.timezone.utc
+ ) + datetime.timedelta(seconds=delta_seconds)
+ except OverflowError:
+ max_age_expiration = self._max_time
+ self._expire_cookie(max_age_expiration, domain, name)
+ except ValueError:
+ cookie["max-age"] = ""
+
+ else:
+ expires = cookie["expires"]
+ if expires:
+ expire_time = self._parse_date(expires)
+ if expire_time:
+ self._expire_cookie(expire_time, domain, name)
+ else:
+ cookie["expires"] = ""
+
+ self._cookies[domain][name] = cookie
+
+ self._do_expiration()
+
+ def filter_cookies(
+ self, request_url: URL = URL()
+ ) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
+ """Returns this jar's cookies filtered by their attributes."""
+ self._do_expiration()
+ request_url = URL(request_url)
+ filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
+ SimpleCookie() if self._quote_cookie else BaseCookie()
+ )
+ hostname = request_url.raw_host or ""
+ request_origin = URL()
+ with contextlib.suppress(ValueError):
+ request_origin = request_url.origin()
+
+ is_not_secure = (
+ request_url.scheme not in ("https", "wss")
+ and request_origin not in self._treat_as_secure_origin
+ )
+
+ for cookie in self:
+ name = cookie.key
+ domain = cookie["domain"]
+
+ # Send shared cookies
+ if not domain:
+ filtered[name] = cookie.value
+ continue
+
+ if not self._unsafe and is_ip_address(hostname):
+ continue
+
+ if (domain, name) in self._host_only_cookies:
+ if domain != hostname:
+ continue
+ elif not self._is_domain_match(domain, hostname):
+ continue
+
+ if not self._is_path_match(request_url.path, cookie["path"]):
+ continue
+
+ if is_not_secure and cookie["secure"]:
+ continue
+
+ # It's critical we use the Morsel so the coded_value
+ # (based on cookie version) is preserved
+ mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
+ mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
+ filtered[name] = mrsl_val
+
+ return filtered
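+
+    # Usage sketch (editorial): jar.filter_cookies(URL("https://example.com/x"))
+    # returns just the cookies that should be sent with that request; the
+    # client uses this result to build the Cookie header.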
+
+ @staticmethod
+ def _is_domain_match(domain: str, hostname: str) -> bool:
+ """Implements domain matching adhering to RFC 6265."""
+ if hostname == domain:
+ return True
+
+ if not hostname.endswith(domain):
+ return False
+
+ non_matching = hostname[: -len(domain)]
+
+ if not non_matching.endswith("."):
+ return False
+
+ return not is_ip_address(hostname)
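+
+    # Worked examples (editorial): _is_domain_match("example.com",
+    # "www.example.com") is True (suffix match preceded by a dot), while
+    # _is_domain_match("ample.com", "example.com") is False because the
+    # non-matching prefix "ex" does not end with a dot.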
+
+ @staticmethod
+ def _is_path_match(req_path: str, cookie_path: str) -> bool:
+ """Implements path matching adhering to RFC 6265."""
+ if not req_path.startswith("/"):
+ req_path = "/"
+
+ if req_path == cookie_path:
+ return True
+
+ if not req_path.startswith(cookie_path):
+ return False
+
+ if cookie_path.endswith("/"):
+ return True
+
+ non_matching = req_path[len(cookie_path) :]
+
+ return non_matching.startswith("/")
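+
+    # Worked examples (editorial): _is_path_match("/one/two", "/one") is True
+    # because the remainder "/two" starts with "/", while
+    # _is_path_match("/onetwo", "/one") is False because the remainder "two"
+    # does not.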
+
+ @classmethod
+ def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
+ """Implements date string parsing adhering to RFC 6265."""
+ if not date_str:
+ return None
+
+ found_time = False
+ found_day = False
+ found_month = False
+ found_year = False
+
+ hour = minute = second = 0
+ day = 0
+ month = 0
+ year = 0
+
+ for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
+
+ token = token_match.group("token")
+
+ if not found_time:
+ time_match = cls.DATE_HMS_TIME_RE.match(token)
+ if time_match:
+ found_time = True
+ hour, minute, second = (int(s) for s in time_match.groups())
+ continue
+
+ if not found_day:
+ day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
+ if day_match:
+ found_day = True
+ day = int(day_match.group())
+ continue
+
+ if not found_month:
+ month_match = cls.DATE_MONTH_RE.match(token)
+ if month_match:
+ found_month = True
+ assert month_match.lastindex is not None
+ month = month_match.lastindex
+ continue
+
+ if not found_year:
+ year_match = cls.DATE_YEAR_RE.match(token)
+ if year_match:
+ found_year = True
+ year = int(year_match.group())
+
+ if 70 <= year <= 99:
+ year += 1900
+ elif 0 <= year <= 69:
+ year += 2000
+
+        if not (found_day and found_month and found_year and found_time):
+ return None
+
+ if not 1 <= day <= 31:
+ return None
+
+ if year < 1601 or hour > 23 or minute > 59 or second > 59:
+ return None
+
+ return datetime.datetime(
+ year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
+ )
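+
+    # Worked example (editorial): _parse_date("Wed, 09 Jun 2021 10:18:14 GMT")
+    # yields datetime.datetime(2021, 6, 9, 10, 18, 14,
+    # tzinfo=datetime.timezone.utc); "Wed" and "GMT" are skipped because
+    # they match none of the day/month/year/time patterns.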
+
+
+class DummyCookieJar(AbstractCookieJar):
+ """Implements a dummy cookie storage.
+
+ It can be used with the ClientSession when no cookie processing is needed.
+    """
+
+ def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
+ super().__init__(loop=loop)
+
+ def __iter__(self) -> "Iterator[Morsel[str]]":
+ while False:
+ yield None
+
+ def __len__(self) -> int:
+ return 0
+
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
+ pass
+
+ def clear_domain(self, domain: str) -> None:
+ pass
+
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
+ pass
+
+ def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
+ return SimpleCookie()
diff --git a/contrib/python/aiohttp/aiohttp/formdata.py b/contrib/python/aiohttp/aiohttp/formdata.py
new file mode 100644
index 0000000000..4857c89856
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/formdata.py
@@ -0,0 +1,172 @@
+import io
+from typing import Any, Iterable, List, Optional
+from urllib.parse import urlencode
+
+from multidict import MultiDict, MultiDictProxy
+
+from . import hdrs, multipart, payload
+from .helpers import guess_filename
+from .payload import Payload
+
+__all__ = ("FormData",)
+
+
+class FormData:
+ """Helper class for form body generation.
+
+ Supports multipart/form-data and application/x-www-form-urlencoded.
+ """
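+
+    # Usage sketch (editorial, illustrative only; not part of the vendored
+    # code):
+    #
+    #     form = FormData()
+    #     form.add_field("name", "value")
+    #     form.add_field("file", open("report.pdf", "rb"),
+    #                    filename="report.pdf",
+    #                    content_type="application/pdf")
+    #
+    # Passing the file payload switches the body to multipart/form-data;
+    # the instance is then handed to session.post(..., data=form).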
+
+ def __init__(
+ self,
+ fields: Iterable[Any] = (),
+ quote_fields: bool = True,
+ charset: Optional[str] = None,
+ ) -> None:
+ self._writer = multipart.MultipartWriter("form-data")
+ self._fields = [] # type: List[Any]
+ self._is_multipart = False
+ self._is_processed = False
+ self._quote_fields = quote_fields
+ self._charset = charset
+
+ if isinstance(fields, dict):
+ fields = list(fields.items())
+ elif not isinstance(fields, (list, tuple)):
+ fields = (fields,)
+ self.add_fields(*fields)
+
+ @property
+ def is_multipart(self) -> bool:
+ return self._is_multipart
+
+ def add_field(
+ self,
+ name: str,
+ value: Any,
+ *,
+ content_type: Optional[str] = None,
+ filename: Optional[str] = None,
+ content_transfer_encoding: Optional[str] = None,
+ ) -> None:
+
+ if isinstance(value, io.IOBase):
+ self._is_multipart = True
+ elif isinstance(value, (bytes, bytearray, memoryview)):
+ if filename is None and content_transfer_encoding is None:
+ filename = name
+
+ type_options = MultiDict({"name": name}) # type: MultiDict[str]
+ if filename is not None and not isinstance(filename, str):
+ raise TypeError(
+                "filename must be an instance of str. Got: %s" % filename
+ )
+ if filename is None and isinstance(value, io.IOBase):
+ filename = guess_filename(value, name)
+ if filename is not None:
+ type_options["filename"] = filename
+ self._is_multipart = True
+
+ headers = {}
+ if content_type is not None:
+ if not isinstance(content_type, str):
+ raise TypeError(
+                    "content_type must be an instance of str. Got: %s" % content_type
+ )
+ headers[hdrs.CONTENT_TYPE] = content_type
+ self._is_multipart = True
+ if content_transfer_encoding is not None:
+ if not isinstance(content_transfer_encoding, str):
+ raise TypeError(
+                    "content_transfer_encoding must be an instance of str. "
+                    "Got: %s" % content_transfer_encoding
+ )
+ headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
+ self._is_multipart = True
+
+ self._fields.append((type_options, headers, value))
+
+ def add_fields(self, *fields: Any) -> None:
+ to_add = list(fields)
+
+ while to_add:
+ rec = to_add.pop(0)
+
+ if isinstance(rec, io.IOBase):
+ k = guess_filename(rec, "unknown")
+ self.add_field(k, rec) # type: ignore[arg-type]
+
+ elif isinstance(rec, (MultiDictProxy, MultiDict)):
+ to_add.extend(rec.items())
+
+ elif isinstance(rec, (list, tuple)) and len(rec) == 2:
+ k, fp = rec
+ self.add_field(k, fp) # type: ignore[arg-type]
+
+ else:
+ raise TypeError(
+                    "Only io.IOBase, multidict and (name, file) "
+                    "pairs are allowed; use .add_field() for passing "
+                    "more complex parameters, got {!r}".format(rec)
+ )
+
+ def _gen_form_urlencoded(self) -> payload.BytesPayload:
+ # form data (x-www-form-urlencoded)
+ data = []
+ for type_options, _, value in self._fields:
+ data.append((type_options["name"], value))
+
+ charset = self._charset if self._charset is not None else "utf-8"
+
+ if charset == "utf-8":
+ content_type = "application/x-www-form-urlencoded"
+ else:
+            content_type = "application/x-www-form-urlencoded; charset=%s" % charset
+
+ return payload.BytesPayload(
+ urlencode(data, doseq=True, encoding=charset).encode(),
+ content_type=content_type,
+ )
+
+ def _gen_form_data(self) -> multipart.MultipartWriter:
+ """Encode a list of fields using the multipart/form-data MIME format"""
+ if self._is_processed:
+ raise RuntimeError("Form data has been processed already")
+ for dispparams, headers, value in self._fields:
+ try:
+ if hdrs.CONTENT_TYPE in headers:
+ part = payload.get_payload(
+ value,
+ content_type=headers[hdrs.CONTENT_TYPE],
+ headers=headers,
+ encoding=self._charset,
+ )
+ else:
+ part = payload.get_payload(
+ value, headers=headers, encoding=self._charset
+ )
+ except Exception as exc:
+ raise TypeError(
+ "Can not serialize value type: %r\n "
+ "headers: %r\n value: %r" % (type(value), headers, value)
+ ) from exc
+
+ if dispparams:
+ part.set_content_disposition(
+ "form-data", quote_fields=self._quote_fields, **dispparams
+ )
+            # FIXME cgi.FieldStorage doesn't like body parts with
+            # Content-Length that were sent via chunked transfer encoding
+ assert part.headers is not None
+ part.headers.popall(hdrs.CONTENT_LENGTH, None)
+
+ self._writer.append_payload(part)
+
+ self._is_processed = True
+ return self._writer
+
+ def __call__(self) -> Payload:
+ if self._is_multipart:
+ return self._gen_form_data()
+ else:
+ return self._gen_form_urlencoded()
diff --git a/contrib/python/aiohttp/aiohttp/hdrs.py b/contrib/python/aiohttp/aiohttp/hdrs.py
new file mode 100644
index 0000000000..a619f2543e
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/hdrs.py
@@ -0,0 +1,114 @@
+"""HTTP Headers constants."""
+
+# After changing the file content call ./tools/gen.py
+# to regenerate the headers parser
+import sys
+from typing import Set
+
+from multidict import istr
+
+if sys.version_info >= (3, 8):
+ from typing import Final
+else:
+ from typing_extensions import Final
+
+METH_ANY: Final[str] = "*"
+METH_CONNECT: Final[str] = "CONNECT"
+METH_HEAD: Final[str] = "HEAD"
+METH_GET: Final[str] = "GET"
+METH_DELETE: Final[str] = "DELETE"
+METH_OPTIONS: Final[str] = "OPTIONS"
+METH_PATCH: Final[str] = "PATCH"
+METH_POST: Final[str] = "POST"
+METH_PUT: Final[str] = "PUT"
+METH_TRACE: Final[str] = "TRACE"
+
+METH_ALL: Final[Set[str]] = {
+ METH_CONNECT,
+ METH_HEAD,
+ METH_GET,
+ METH_DELETE,
+ METH_OPTIONS,
+ METH_PATCH,
+ METH_POST,
+ METH_PUT,
+ METH_TRACE,
+}
+
+ACCEPT: Final[istr] = istr("Accept")
+ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
+ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
+ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
+ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
+ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
+ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
+ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
+ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
+ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
+ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
+ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
+ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
+AGE: Final[istr] = istr("Age")
+ALLOW: Final[istr] = istr("Allow")
+AUTHORIZATION: Final[istr] = istr("Authorization")
+CACHE_CONTROL: Final[istr] = istr("Cache-Control")
+CONNECTION: Final[istr] = istr("Connection")
+CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
+CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
+CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
+CONTENT_LENGTH: Final[istr] = istr("Content-Length")
+CONTENT_LOCATION: Final[istr] = istr("Content-Location")
+CONTENT_MD5: Final[istr] = istr("Content-MD5")
+CONTENT_RANGE: Final[istr] = istr("Content-Range")
+CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
+CONTENT_TYPE: Final[istr] = istr("Content-Type")
+COOKIE: Final[istr] = istr("Cookie")
+DATE: Final[istr] = istr("Date")
+DESTINATION: Final[istr] = istr("Destination")
+DIGEST: Final[istr] = istr("Digest")
+ETAG: Final[istr] = istr("Etag")
+EXPECT: Final[istr] = istr("Expect")
+EXPIRES: Final[istr] = istr("Expires")
+FORWARDED: Final[istr] = istr("Forwarded")
+FROM: Final[istr] = istr("From")
+HOST: Final[istr] = istr("Host")
+IF_MATCH: Final[istr] = istr("If-Match")
+IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
+IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
+IF_RANGE: Final[istr] = istr("If-Range")
+IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
+KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
+LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
+LAST_MODIFIED: Final[istr] = istr("Last-Modified")
+LINK: Final[istr] = istr("Link")
+LOCATION: Final[istr] = istr("Location")
+MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
+ORIGIN: Final[istr] = istr("Origin")
+PRAGMA: Final[istr] = istr("Pragma")
+PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
+PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
+RANGE: Final[istr] = istr("Range")
+REFERER: Final[istr] = istr("Referer")
+RETRY_AFTER: Final[istr] = istr("Retry-After")
+SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
+SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
+SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
+SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
+SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
+SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
+SERVER: Final[istr] = istr("Server")
+SET_COOKIE: Final[istr] = istr("Set-Cookie")
+TE: Final[istr] = istr("TE")
+TRAILER: Final[istr] = istr("Trailer")
+TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
+UPGRADE: Final[istr] = istr("Upgrade")
+URI: Final[istr] = istr("URI")
+USER_AGENT: Final[istr] = istr("User-Agent")
+VARY: Final[istr] = istr("Vary")
+VIA: Final[istr] = istr("Via")
+WANT_DIGEST: Final[istr] = istr("Want-Digest")
+WARNING: Final[istr] = istr("Warning")
+WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
+X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
+X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
+X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
diff --git a/contrib/python/aiohttp/aiohttp/helpers.py b/contrib/python/aiohttp/aiohttp/helpers.py
new file mode 100644
index 0000000000..f30f76ba41
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/helpers.py
@@ -0,0 +1,878 @@
+"""Various helper functions"""
+
+import asyncio
+import base64
+import binascii
+import datetime
+import functools
+import inspect
+import netrc
+import os
+import platform
+import re
+import sys
+import time
+import warnings
+import weakref
+from collections import namedtuple
+from contextlib import suppress
+from email.parser import HeaderParser
+from email.utils import parsedate
+from math import ceil
+from pathlib import Path
+from types import TracebackType
+from typing import (
+ Any,
+ Callable,
+ ContextManager,
+ Dict,
+ Generator,
+ Generic,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+)
+from urllib.parse import quote
+from urllib.request import getproxies, proxy_bypass
+
+import async_timeout
+import attr
+from multidict import MultiDict, MultiDictProxy
+from yarl import URL
+
+from . import hdrs
+from .log import client_logger, internal_logger
+from .typedefs import PathLike, Protocol # noqa
+
+__all__ = ("BasicAuth", "ChainMapProxy", "ETag")
+
+IS_MACOS = platform.system() == "Darwin"
+IS_WINDOWS = platform.system() == "Windows"
+
+PY_36 = sys.version_info >= (3, 6)
+PY_37 = sys.version_info >= (3, 7)
+PY_38 = sys.version_info >= (3, 8)
+PY_310 = sys.version_info >= (3, 10)
+
+if sys.version_info < (3, 7):
+ import idna_ssl
+
+ idna_ssl.patch_match_hostname()
+
+ def all_tasks(
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ) -> Set["asyncio.Task[Any]"]:
+ tasks = list(asyncio.Task.all_tasks(loop))
+ return {t for t in tasks if not t.done()}
+
+
+else:
+ all_tasks = asyncio.all_tasks
+
+
+_T = TypeVar("_T")
+_S = TypeVar("_S")
+
+
+sentinel = object() # type: Any
+NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) # type: bool
+
+# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
+# for compatibility with older versions
+DEBUG = getattr(sys.flags, "dev_mode", False) or (
+ not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
+) # type: bool
+
+
+CHAR = {chr(i) for i in range(0, 128)}
+CTL = {chr(i) for i in range(0, 32)} | {
+ chr(127),
+}
+SEPARATORS = {
+ "(",
+ ")",
+ "<",
+ ">",
+ "@",
+ ",",
+ ";",
+ ":",
+ "\\",
+ '"',
+ "/",
+ "[",
+ "]",
+ "?",
+ "=",
+ "{",
+ "}",
+ " ",
+ chr(9),
+}
+TOKEN = CHAR ^ CTL ^ SEPARATORS
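+# Editorial note: `^` is the symmetric difference, so chr(9) (tab), which is
+# in both CTL and SEPARATORS, ends up back in TOKEN: it is removed together
+# with CTL and then toggled in again by SEPARATORS.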
+
+
+class noop:
+ def __await__(self) -> Generator[None, None, None]:
+ yield
+
+
+class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
+    """HTTP basic authentication helper."""
+
+ def __new__(
+ cls, login: str, password: str = "", encoding: str = "latin1"
+ ) -> "BasicAuth":
+ if login is None:
+ raise ValueError("None is not allowed as login value")
+
+ if password is None:
+ raise ValueError("None is not allowed as password value")
+
+ if ":" in login:
+ raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
+
+ return super().__new__(cls, login, password, encoding)
+
+ @classmethod
+ def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
+ """Create a BasicAuth object from an Authorization HTTP header."""
+ try:
+ auth_type, encoded_credentials = auth_header.split(" ", 1)
+ except ValueError:
+ raise ValueError("Could not parse authorization header.")
+
+ if auth_type.lower() != "basic":
+ raise ValueError("Unknown authorization method %s" % auth_type)
+
+ try:
+ decoded = base64.b64decode(
+ encoded_credentials.encode("ascii"), validate=True
+ ).decode(encoding)
+ except binascii.Error:
+ raise ValueError("Invalid base64 encoding.")
+
+ try:
+ # RFC 2617 HTTP Authentication
+ # https://www.ietf.org/rfc/rfc2617.txt
+ # the colon must be present, but the username and password may be
+ # otherwise blank.
+ username, password = decoded.split(":", 1)
+ except ValueError:
+ raise ValueError("Invalid credentials.")
+
+ return cls(username, password, encoding=encoding)
+
+ @classmethod
+ def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
+ """Create BasicAuth from url."""
+ if not isinstance(url, URL):
+ raise TypeError("url should be yarl.URL instance")
+ if url.user is None:
+ return None
+ return cls(url.user, url.password or "", encoding=encoding)
+
+ def encode(self) -> str:
+ """Encode credentials."""
+ creds = (f"{self.login}:{self.password}").encode(self.encoding)
+ return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
+
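+# Editorial example (illustrative, not part of the library): credentials
+# round-trip through the header form produced by this class:
+#
+#   auth = BasicAuth("user", "pass")
+#   header = auth.encode()              # 'Basic dXNlcjpwYXNz'
+#   assert BasicAuth.decode(header) == auth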
+
+def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
+ auth = BasicAuth.from_url(url)
+ if auth is None:
+ return url, None
+ else:
+ return url.with_user(None), auth
+
+
+def netrc_from_env() -> Optional[netrc.netrc]:
+ """Load netrc from file.
+
+ Attempt to load it from the path specified by the env-var
+ NETRC or in the default location in the user's home directory.
+
+ Returns None if it couldn't be found or fails to parse.
+ """
+ netrc_env = os.environ.get("NETRC")
+
+ if netrc_env is not None:
+ netrc_path = Path(netrc_env)
+ else:
+ try:
+ home_dir = Path.home()
+ except RuntimeError as e: # pragma: no cover
+ # if pathlib can't resolve home, it may raise a RuntimeError
+ client_logger.debug(
+ "Could not resolve home directory when "
+ "trying to look for .netrc file: %s",
+ e,
+ )
+ return None
+
+ netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")
+
+ try:
+ return netrc.netrc(str(netrc_path))
+ except netrc.NetrcParseError as e:
+ client_logger.warning("Could not parse .netrc file: %s", e)
+ except OSError as e:
+ # we couldn't read the file (doesn't exist, permissions, etc.)
+ if netrc_env or netrc_path.is_file():
+ # only warn if the environment wanted us to load it,
+ # or it appears like the default file does actually exist
+ client_logger.warning("Could not read .netrc file: %s", e)
+
+ return None
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ProxyInfo:
+ proxy: URL
+ proxy_auth: Optional[BasicAuth]
+
+
+def proxies_from_env() -> Dict[str, ProxyInfo]:
+ proxy_urls = {
+ k: URL(v)
+ for k, v in getproxies().items()
+ if k in ("http", "https", "ws", "wss")
+ }
+ netrc_obj = netrc_from_env()
+ stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
+ ret = {}
+ for proto, val in stripped.items():
+ proxy, auth = val
+ if proxy.scheme in ("https", "wss"):
+ client_logger.warning(
+ "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
+ )
+ continue
+ if netrc_obj and auth is None:
+ auth_from_netrc = None
+ if proxy.host is not None:
+ auth_from_netrc = netrc_obj.authenticators(proxy.host)
+ if auth_from_netrc is not None:
+ # auth_from_netrc is a (`user`, `account`, `password`) tuple,
+                # `user` and `account` can both hold the username;
+ # if `user` is None, use `account`
+ *logins, password = auth_from_netrc
+ login = logins[0] if logins[0] else logins[-1]
+ auth = BasicAuth(cast(str, login), cast(str, password))
+ ret[proto] = ProxyInfo(proxy, auth)
+ return ret
+
+
+def current_task(
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+) -> "Optional[asyncio.Task[Any]]":
+ if sys.version_info >= (3, 7):
+ return asyncio.current_task(loop=loop)
+ else:
+ return asyncio.Task.current_task(loop=loop)
+
+
+def get_running_loop(
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+) -> asyncio.AbstractEventLoop:
+ if loop is None:
+ loop = asyncio.get_event_loop()
+ if not loop.is_running():
+ warnings.warn(
+ "The object should be created within an async function",
+ DeprecationWarning,
+ stacklevel=3,
+ )
+ if loop.get_debug():
+ internal_logger.warning(
+ "The object should be created within an async function", stack_info=True
+ )
+ return loop
+
+
+def isasyncgenfunction(obj: Any) -> bool:
+ func = getattr(inspect, "isasyncgenfunction", None)
+ if func is not None:
+ return func(obj) # type: ignore[no-any-return]
+ else:
+ return False
+
+
+def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
+ """Get a permitted proxy for the given URL from the env."""
+ if url.host is not None and proxy_bypass(url.host):
+ raise LookupError(f"Proxying is disallowed for `{url.host!r}`")
+
+ proxies_in_env = proxies_from_env()
+ try:
+ proxy_info = proxies_in_env[url.scheme]
+ except KeyError:
+ raise LookupError(f"No proxies found for `{url!s}` in the env")
+ else:
+ return proxy_info.proxy, proxy_info.proxy_auth
+
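+# Editorial sketch (assumes an HTTP_PROXY variable is set in the environment):
+#
+#   proxy_url, proxy_auth = get_env_proxy_for_url(URL("http://example.com"))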
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class MimeType:
+ type: str
+ subtype: str
+ suffix: str
+ parameters: "MultiDictProxy[str]"
+
+
+@functools.lru_cache(maxsize=56)
+def parse_mimetype(mimetype: str) -> MimeType:
+ """Parses a MIME type into its components.
+
+ mimetype is a MIME type string.
+
+ Returns a MimeType object.
+
+ Example:
+
+ >>> parse_mimetype('text/html; charset=utf-8')
+ MimeType(type='text', subtype='html', suffix='',
+ parameters={'charset': 'utf-8'})
+
+ """
+ if not mimetype:
+ return MimeType(
+ type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
+ )
+
+ parts = mimetype.split(";")
+ params = MultiDict() # type: MultiDict[str]
+ for item in parts[1:]:
+ if not item:
+ continue
+ key, value = cast(
+ Tuple[str, str], item.split("=", 1) if "=" in item else (item, "")
+ )
+ params.add(key.lower().strip(), value.strip(' "'))
+
+ fulltype = parts[0].strip().lower()
+ if fulltype == "*":
+ fulltype = "*/*"
+
+ mtype, stype = (
+ cast(Tuple[str, str], fulltype.split("/", 1))
+ if "/" in fulltype
+ else (fulltype, "")
+ )
+ stype, suffix = (
+ cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "")
+ )
+
+ return MimeType(
+ type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
+ )
+
+
+def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
+ name = getattr(obj, "name", None)
+ if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
+ return Path(name).name
+ return default
+
+
+not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
+QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
+
+
+def quoted_string(content: str) -> str:
+ """Return 7-bit content as quoted-string.
+
+ Format content into a quoted-string as defined in RFC5322 for
+ Internet Message Format. Notice that this is not the 8-bit HTTP
+    format, but the 7-bit email format. Content must be US-ASCII;
+    otherwise a ValueError is raised.
+ """
+ if not (QCONTENT > set(content)):
+ raise ValueError(f"bad content for quoted-string {content!r}")
+ return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
+
+
+def content_disposition_header(
+ disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
+) -> str:
+ """Sets ``Content-Disposition`` header for MIME.
+
+ This is the MIME payload Content-Disposition header from RFC 2183
+    and RFC 7578 section 4.2, not the HTTP Content-Disposition from
+ RFC 6266.
+
+ disptype is a disposition type: inline, attachment, form-data.
+ Should be valid extension token (see RFC 2183)
+
+ quote_fields performs value quoting to 7-bit MIME headers
+    according to RFC 7578. Set quote_fields to False if the recipient
+ can take 8-bit file names and field values.
+
+ _charset specifies the charset to use when quote_fields is True.
+
+ params is a dict with disposition params.
+ """
+ if not disptype or not (TOKEN > set(disptype)):
+        raise ValueError("bad content disposition type {!r}".format(disptype))
+
+ value = disptype
+ if params:
+ lparams = []
+ for key, val in params.items():
+ if not key or not (TOKEN > set(key)):
+ raise ValueError(
+                    "bad content disposition parameter {!r}={!r}".format(key, val)
+ )
+ if quote_fields:
+ if key.lower() == "filename":
+ qval = quote(val, "", encoding=_charset)
+ lparams.append((key, '"%s"' % qval))
+ else:
+ try:
+ qval = quoted_string(val)
+ except ValueError:
+ qval = "".join(
+ (_charset, "''", quote(val, "", encoding=_charset))
+ )
+ lparams.append((key + "*", qval))
+ else:
+ lparams.append((key, '"%s"' % qval))
+ else:
+ qval = val.replace("\\", "\\\\").replace('"', '\\"')
+ lparams.append((key, '"%s"' % qval))
+ sparams = "; ".join("=".join(pair) for pair in lparams)
+ value = "; ".join((value, sparams))
+ return value
+
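+# Editorial example (illustrative, not part of the library):
+#
+#   content_disposition_header("attachment", filename="report.pdf")
+#   returns 'attachment; filename="report.pdf"'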
+
+class _TSelf(Protocol, Generic[_T]):
+ _cache: Dict[str, _T]
+
+
+class reify(Generic[_T]):
+    """Use as a decorator on a method of a class.
+
+ It operates almost exactly like
+ the Python `@property` decorator, but it puts the result of the
+ method it decorates into the instance dict after the first call,
+ effectively replacing the function it decorates with an instance
+ variable. It is, in Python parlance, a data descriptor.
+ """
+
+ def __init__(self, wrapped: Callable[..., _T]) -> None:
+ self.wrapped = wrapped
+ self.__doc__ = wrapped.__doc__
+ self.name = wrapped.__name__
+
+ def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
+ try:
+ try:
+ return inst._cache[self.name]
+ except KeyError:
+ val = self.wrapped(inst)
+ inst._cache[self.name] = val
+ return val
+ except AttributeError:
+ if inst is None:
+ return self
+ raise
+
+ def __set__(self, inst: _TSelf[_T], value: _T) -> None:
+ raise AttributeError("reified property is read-only")
+
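+# Editorial sketch (not part of the library): a class using @reify must
+# provide a ``_cache`` dict; ``parse_headers_slowly`` is hypothetical:
+#
+#   class Request:
+#       def __init__(self) -> None:
+#           self._cache: Dict[str, Any] = {}
+#
+#       @reify
+#       def headers(self) -> Dict[str, str]:
+#           return parse_headers_slowly(self)  # computed once, then cached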
+
+reify_py = reify
+
+try:
+ from ._helpers import reify as reify_c
+
+ if not NO_EXTENSIONS:
+ reify = reify_c # type: ignore[misc,assignment]
+except ImportError:
+ pass
+
+_ipv4_pattern = (
+ r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
+ r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
+)
+_ipv6_pattern = (
+ r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
+ r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
+ r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
+ r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
+ r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
+ r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
+ r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
+ r":|:(:[A-F0-9]{1,4}){7})$"
+)
+_ipv4_regex = re.compile(_ipv4_pattern)
+_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
+_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
+_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)
+
+
+def _is_ip_address(
+ regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
+) -> bool:
+ if host is None:
+ return False
+ if isinstance(host, str):
+ return bool(regex.match(host))
+ elif isinstance(host, (bytes, bytearray, memoryview)):
+ return bool(regexb.match(host))
+ else:
+ raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
+
+
+is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
+is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)
+
+
+def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
+ return is_ipv4_address(host) or is_ipv6_address(host)
+
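+# Editorial examples (illustrative): is_ip_address("127.0.0.1") and
+# is_ip_address(b"::1") are True; is_ip_address("localhost") is False.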
+
+def next_whole_second() -> datetime.datetime:
+    """Return the current time truncated to the whole second."""
+    return datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0)
+
+
+_cached_current_datetime = None # type: Optional[int]
+_cached_formatted_datetime = ""
+
+
+def rfc822_formatted_time() -> str:
+ global _cached_current_datetime
+ global _cached_formatted_datetime
+
+ now = int(time.time())
+ if now != _cached_current_datetime:
+ # Weekday and month names for HTTP date/time formatting;
+ # always English!
+        # Tuples are constants stored in the code object!
+ _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
+ _monthname = (
+ "", # Dummy so we can use 1-based month numbers
+ "Jan",
+ "Feb",
+ "Mar",
+ "Apr",
+ "May",
+ "Jun",
+ "Jul",
+ "Aug",
+ "Sep",
+ "Oct",
+ "Nov",
+ "Dec",
+ )
+
+ year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
+ _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
+ _weekdayname[wd],
+ day,
+ _monthname[month],
+ year,
+ hh,
+ mm,
+ ss,
+ )
+ _cached_current_datetime = now
+ return _cached_formatted_datetime
+
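+# Editorial example of the produced format (date is illustrative):
+# 'Mon, 30 Jan 2023 12:34:56 GMT'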
+
+def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
+ ref, name = info
+ ob = ref()
+ if ob is not None:
+ with suppress(Exception):
+ getattr(ob, name)()
+
+
+def weakref_handle(
+ ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop
+) -> Optional[asyncio.TimerHandle]:
+ if timeout is not None and timeout > 0:
+ when = loop.time() + timeout
+ if timeout >= 5:
+ when = ceil(when)
+
+ return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
+ return None
+
+
+def call_later(
+ cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop
+) -> Optional[asyncio.TimerHandle]:
+ if timeout is not None and timeout > 0:
+ when = loop.time() + timeout
+ if timeout > 5:
+ when = ceil(when)
+ return loop.call_at(when, cb)
+ return None
+
+
+class TimeoutHandle:
+ """Timeout handle"""
+
+ def __init__(
+ self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
+ ) -> None:
+ self._timeout = timeout
+ self._loop = loop
+ self._callbacks = (
+ []
+ ) # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]]
+
+ def register(
+ self, callback: Callable[..., None], *args: Any, **kwargs: Any
+ ) -> None:
+ self._callbacks.append((callback, args, kwargs))
+
+ def close(self) -> None:
+ self._callbacks.clear()
+
+ def start(self) -> Optional[asyncio.Handle]:
+ timeout = self._timeout
+ if timeout is not None and timeout > 0:
+ when = self._loop.time() + timeout
+ if timeout >= 5:
+ when = ceil(when)
+ return self._loop.call_at(when, self.__call__)
+ else:
+ return None
+
+ def timer(self) -> "BaseTimerContext":
+ if self._timeout is not None and self._timeout > 0:
+ timer = TimerContext(self._loop)
+ self.register(timer.timeout)
+ return timer
+ else:
+ return TimerNoop()
+
+ def __call__(self) -> None:
+ for cb, args, kwargs in self._callbacks:
+ with suppress(Exception):
+ cb(*args, **kwargs)
+
+ self._callbacks.clear()
+
+
+class BaseTimerContext(ContextManager["BaseTimerContext"]):
+ pass
+
+
+class TimerNoop(BaseTimerContext):
+ def __enter__(self) -> BaseTimerContext:
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ return
+
+
+class TimerContext(BaseTimerContext):
+ """Low resolution timeout context manager"""
+
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
+ self._loop = loop
+ self._tasks = [] # type: List[asyncio.Task[Any]]
+ self._cancelled = False
+
+ def __enter__(self) -> BaseTimerContext:
+ task = current_task(loop=self._loop)
+
+ if task is None:
+ raise RuntimeError(
+                "Timeout context manager should be used inside a task"
+ )
+
+ if self._cancelled:
+ raise asyncio.TimeoutError from None
+
+ self._tasks.append(task)
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> Optional[bool]:
+ if self._tasks:
+ self._tasks.pop()
+
+ if exc_type is asyncio.CancelledError and self._cancelled:
+ raise asyncio.TimeoutError from None
+ return None
+
+ def timeout(self) -> None:
+ if not self._cancelled:
+ for task in set(self._tasks):
+ task.cancel()
+
+ self._cancelled = True
+
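+# Editorial sketch of how these pieces combine (``do_request`` is
+# hypothetical; the code must run inside a task):
+#
+#   handle = TimeoutHandle(loop, timeout=5.0)
+#   handle.start()                  # schedules the timeout callback
+#   with handle.timer():            # raises asyncio.TimeoutError on expiry
+#       await do_request()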
+
+def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
+ if delay is None or delay <= 0:
+ return async_timeout.timeout(None)
+
+ loop = get_running_loop()
+ now = loop.time()
+ when = now + delay
+ if delay > 5:
+ when = ceil(when)
+ return async_timeout.timeout_at(when)
+
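+# Editorial sketch (``fetch`` is hypothetical): ceil_timeout is used as an
+# async context manager inside a coroutine:
+#
+#   async with ceil_timeout(10):
+#       await fetch()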
+
+class HeadersMixin:
+
+ ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])
+
+ _content_type = None # type: Optional[str]
+ _content_dict = None # type: Optional[Dict[str, str]]
+ _stored_content_type = sentinel
+
+ def _parse_content_type(self, raw: str) -> None:
+ self._stored_content_type = raw
+ if raw is None:
+ # default value according to RFC 2616
+ self._content_type = "application/octet-stream"
+ self._content_dict = {}
+ else:
+ msg = HeaderParser().parsestr("Content-Type: " + raw)
+ self._content_type = msg.get_content_type()
+ params = msg.get_params()
+ self._content_dict = dict(params[1:]) # First element is content type again
+
+ @property
+ def content_type(self) -> str:
+        """The media type portion of the Content-Type HTTP header."""
+ raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
+ if self._stored_content_type != raw:
+ self._parse_content_type(raw)
+ return self._content_type # type: ignore[return-value]
+
+ @property
+ def charset(self) -> Optional[str]:
+        """The charset parameter of the Content-Type HTTP header."""
+ raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
+ if self._stored_content_type != raw:
+ self._parse_content_type(raw)
+ return self._content_dict.get("charset") # type: ignore[union-attr]
+
+ @property
+ def content_length(self) -> Optional[int]:
+ """The value of Content-Length HTTP header."""
+ content_length = self._headers.get( # type: ignore[attr-defined]
+ hdrs.CONTENT_LENGTH
+ )
+
+ if content_length is not None:
+ return int(content_length)
+ else:
+ return None
+
+
+def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
+ if not fut.done():
+ fut.set_result(result)
+
+
+def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
+ if not fut.done():
+ fut.set_exception(exc)
+
+
+class ChainMapProxy(Mapping[str, Any]):
+ __slots__ = ("_maps",)
+
+ def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
+ self._maps = tuple(maps)
+
+ def __init_subclass__(cls) -> None:
+ raise TypeError(
+            "Inheriting class {} from ChainMapProxy "
+ "is forbidden".format(cls.__name__)
+ )
+
+ def __getitem__(self, key: str) -> Any:
+ for mapping in self._maps:
+ try:
+ return mapping[key]
+ except KeyError:
+ pass
+ raise KeyError(key)
+
+ def get(self, key: str, default: Any = None) -> Any:
+ return self[key] if key in self else default
+
+ def __len__(self) -> int:
+ # reuses stored hash values if possible
+ return len(set().union(*self._maps)) # type: ignore[arg-type]
+
+ def __iter__(self) -> Iterator[str]:
+ d = {} # type: Dict[str, Any]
+ for mapping in reversed(self._maps):
+ # reuses stored hash values if possible
+ d.update(mapping)
+ return iter(d)
+
+ def __contains__(self, key: object) -> bool:
+ return any(key in m for m in self._maps)
+
+ def __bool__(self) -> bool:
+ return any(self._maps)
+
+ def __repr__(self) -> str:
+ content = ", ".join(map(repr, self._maps))
+ return f"ChainMapProxy({content})"
+
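+# Editorial example (illustrative): lookups go left to right across the maps.
+#
+#   proxy = ChainMapProxy([{"a": 1}, {"a": 2, "b": 3}])
+#   proxy["a"] == 1 and proxy["b"] == 3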
+
+# https://tools.ietf.org/html/rfc7232#section-2.3
+_ETAGC = r"[!#-}\x80-\xff]+"
+_ETAGC_RE = re.compile(_ETAGC)
+_QUOTED_ETAG = fr'(W/)?"({_ETAGC})"'
+QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
+LIST_QUOTED_ETAG_RE = re.compile(fr"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
+
+ETAG_ANY = "*"
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class ETag:
+ value: str
+ is_weak: bool = False
+
+
+def validate_etag_value(value: str) -> None:
+ if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
+ raise ValueError(
+ f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
+ )
+
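+# Editorial example (illustrative): matching 'W/"abc"' against QUOTED_ETAG_RE
+# yields groups ('W/', 'abc'), i.e. a weak etag with value 'abc'.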
+
+def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
+ """Process a date string, return a datetime object"""
+ if date_str is not None:
+ timetuple = parsedate(date_str)
+ if timetuple is not None:
+ with suppress(ValueError):
+ return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
+ return None
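+
+
+# Editorial example (illustrative):
+#
+#   parse_http_date("Mon, 30 Jan 2023 12:34:56 GMT")
+#   -> datetime.datetime(2023, 1, 30, 12, 34, 56, tzinfo=datetime.timezone.utc)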
diff --git a/contrib/python/aiohttp/aiohttp/http.py b/contrib/python/aiohttp/aiohttp/http.py
new file mode 100644
index 0000000000..415ffbf563
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/http.py
@@ -0,0 +1,72 @@
+import http.server
+import sys
+from typing import Mapping, Tuple
+
+from . import __version__
+from .http_exceptions import HttpProcessingError as HttpProcessingError
+from .http_parser import (
+ HeadersParser as HeadersParser,
+ HttpParser as HttpParser,
+ HttpRequestParser as HttpRequestParser,
+ HttpResponseParser as HttpResponseParser,
+ RawRequestMessage as RawRequestMessage,
+ RawResponseMessage as RawResponseMessage,
+)
+from .http_websocket import (
+ WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
+ WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
+ WS_KEY as WS_KEY,
+ WebSocketError as WebSocketError,
+ WebSocketReader as WebSocketReader,
+ WebSocketWriter as WebSocketWriter,
+ WSCloseCode as WSCloseCode,
+ WSMessage as WSMessage,
+ WSMsgType as WSMsgType,
+ ws_ext_gen as ws_ext_gen,
+ ws_ext_parse as ws_ext_parse,
+)
+from .http_writer import (
+ HttpVersion as HttpVersion,
+ HttpVersion10 as HttpVersion10,
+ HttpVersion11 as HttpVersion11,
+ StreamWriter as StreamWriter,
+)
+
+__all__ = (
+ "HttpProcessingError",
+ "RESPONSES",
+ "SERVER_SOFTWARE",
+ # .http_writer
+ "StreamWriter",
+ "HttpVersion",
+ "HttpVersion10",
+ "HttpVersion11",
+ # .http_parser
+ "HeadersParser",
+ "HttpParser",
+ "HttpRequestParser",
+ "HttpResponseParser",
+ "RawRequestMessage",
+ "RawResponseMessage",
+ # .http_websocket
+ "WS_CLOSED_MESSAGE",
+ "WS_CLOSING_MESSAGE",
+ "WS_KEY",
+ "WebSocketReader",
+ "WebSocketWriter",
+ "ws_ext_gen",
+ "ws_ext_parse",
+ "WSMessage",
+ "WebSocketError",
+ "WSMsgType",
+ "WSCloseCode",
+)
+
+
+SERVER_SOFTWARE = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
+ sys.version_info, __version__
+) # type: str
+
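+# Editorial note: SERVER_SOFTWARE evaluates to e.g. "Python/3.8 aiohttp/3.8.1"
+# (the versions shown are illustrative).
+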
+RESPONSES = (
+ http.server.BaseHTTPRequestHandler.responses
+) # type: Mapping[int, Tuple[str, str]]
diff --git a/contrib/python/aiohttp/aiohttp/http_exceptions.py b/contrib/python/aiohttp/aiohttp/http_exceptions.py
new file mode 100644
index 0000000000..c885f80f32
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/http_exceptions.py
@@ -0,0 +1,105 @@
+"""Low-level http related exceptions."""
+
+
+from typing import Optional, Union
+
+from .typedefs import _CIMultiDict
+
+__all__ = ("HttpProcessingError",)
+
+
+class HttpProcessingError(Exception):
+ """HTTP error.
+
+ Shortcut for raising HTTP errors with custom code, message and headers.
+
+ code: HTTP Error code.
+ message: (optional) Error message.
+ headers: (optional) Headers to be sent in response, a list of pairs
+ """
+
+ code = 0
+ message = ""
+ headers = None
+
+ def __init__(
+ self,
+ *,
+ code: Optional[int] = None,
+ message: str = "",
+ headers: Optional[_CIMultiDict] = None,
+ ) -> None:
+ if code is not None:
+ self.code = code
+ self.headers = headers
+ self.message = message
+
+ def __str__(self) -> str:
+ return f"{self.code}, message={self.message!r}"
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__}: {self}>"
+
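+# Editorial example (illustrative):
+#
+#   raise HttpProcessingError(code=431, message="Request Header Fields Too Large")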
+
+class BadHttpMessage(HttpProcessingError):
+
+ code = 400
+ message = "Bad Request"
+
+ def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
+ super().__init__(message=message, headers=headers)
+ self.args = (message,)
+
+
+class HttpBadRequest(BadHttpMessage):
+
+ code = 400
+ message = "Bad Request"
+
+
+class PayloadEncodingError(BadHttpMessage):
+ """Base class for payload errors"""
+
+
+class ContentEncodingError(PayloadEncodingError):
+ """Content encoding error."""
+
+
+class TransferEncodingError(PayloadEncodingError):
+    """Transfer encoding error."""
+
+
+class ContentLengthError(PayloadEncodingError):
+    """Not enough data to satisfy the Content-Length header."""
+
+
+class LineTooLong(BadHttpMessage):
+ def __init__(
+ self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
+ ) -> None:
+ super().__init__(
+ f"Got more than {limit} bytes ({actual_size}) when reading {line}."
+ )
+ self.args = (line, limit, actual_size)
+
+
+class InvalidHeader(BadHttpMessage):
+ def __init__(self, hdr: Union[bytes, str]) -> None:
+ if isinstance(hdr, bytes):
+ hdr = hdr.decode("utf-8", "surrogateescape")
+ super().__init__(f"Invalid HTTP Header: {hdr}")
+ self.hdr = hdr
+ self.args = (hdr,)
+
+
+class BadStatusLine(BadHttpMessage):
+ def __init__(self, line: str = "") -> None:
+ if not isinstance(line, str):
+ line = repr(line)
+ super().__init__(f"Bad status line {line!r}")
+ self.args = (line,)
+ self.line = line
+
+
+class InvalidURLError(BadHttpMessage):
+ pass
diff --git a/contrib/python/aiohttp/aiohttp/http_parser.py b/contrib/python/aiohttp/aiohttp/http_parser.py
new file mode 100644
index 0000000000..2dc9482f4f
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/http_parser.py
@@ -0,0 +1,956 @@
+import abc
+import asyncio
+import collections
+import re
+import string
+import zlib
+from contextlib import suppress
+from enum import IntEnum
+from typing import (
+ Any,
+ Generic,
+ List,
+ NamedTuple,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+)
+
+from multidict import CIMultiDict, CIMultiDictProxy, istr
+from yarl import URL
+
+from . import hdrs
+from .base_protocol import BaseProtocol
+from .helpers import NO_EXTENSIONS, BaseTimerContext
+from .http_exceptions import (
+ BadHttpMessage,
+ BadStatusLine,
+ ContentEncodingError,
+ ContentLengthError,
+ InvalidHeader,
+ LineTooLong,
+ TransferEncodingError,
+)
+from .http_writer import HttpVersion, HttpVersion10
+from .log import internal_logger
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .typedefs import Final, RawHeaders
+
+try:
+ import brotli
+
+ HAS_BROTLI = True
+except ImportError: # pragma: no cover
+ HAS_BROTLI = False
+
+
+__all__ = (
+ "HeadersParser",
+ "HttpParser",
+ "HttpRequestParser",
+ "HttpResponseParser",
+ "RawRequestMessage",
+ "RawResponseMessage",
+)
+
+ASCIISET: Final[Set[str]] = set(string.printable)
+
+# See https://tools.ietf.org/html/rfc7230#section-3.1.1
+# and https://tools.ietf.org/html/rfc7230#appendix-B
+#
+# method = token
+# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
+# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
+# token = 1*tchar
+METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
+VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+)\.(\d+)")
+HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
+
+
+class RawRequestMessage(NamedTuple):
+ method: str
+ path: str
+ version: HttpVersion
+ headers: "CIMultiDictProxy[str]"
+ raw_headers: RawHeaders
+ should_close: bool
+ compression: Optional[str]
+ upgrade: bool
+ chunked: bool
+ url: URL
+
+
+RawResponseMessage = collections.namedtuple(
+ "RawResponseMessage",
+ [
+ "version",
+ "code",
+ "reason",
+ "headers",
+ "raw_headers",
+ "should_close",
+ "compression",
+ "upgrade",
+ "chunked",
+ ],
+)
+
+
+_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
+
+
+class ParseState(IntEnum):
+
+ PARSE_NONE = 0
+ PARSE_LENGTH = 1
+ PARSE_CHUNKED = 2
+ PARSE_UNTIL_EOF = 3
+
+
+class ChunkState(IntEnum):
+ PARSE_CHUNKED_SIZE = 0
+ PARSE_CHUNKED_CHUNK = 1
+ PARSE_CHUNKED_CHUNK_EOF = 2
+ PARSE_MAYBE_TRAILERS = 3
+ PARSE_TRAILERS = 4
+
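+# Editorial note: a chunked body looks like this on the wire (illustrative):
+#
+#   4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n
+#
+# i.e. a hex chunk size, CRLF, the chunk bytes, CRLF, terminated by a
+# zero-size chunk (optionally followed by trailers) and a final CRLF.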
+
+class HeadersParser:
+ def __init__(
+ self,
+ max_line_size: int = 8190,
+ max_headers: int = 32768,
+ max_field_size: int = 8190,
+ ) -> None:
+ self.max_line_size = max_line_size
+ self.max_headers = max_headers
+ self.max_field_size = max_field_size
+
+ def parse_headers(
+ self, lines: List[bytes]
+ ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
+ headers = CIMultiDict() # type: CIMultiDict[str]
+ raw_headers = []
+
+ lines_idx = 1
+ line = lines[1]
+ line_count = len(lines)
+
+ while line:
+ # Parse initial header name : value pair.
+ try:
+ bname, bvalue = line.split(b":", 1)
+ except ValueError:
+ raise InvalidHeader(line) from None
+
+ bname = bname.strip(b" \t")
+ bvalue = bvalue.lstrip()
+ if HDRRE.search(bname):
+ raise InvalidHeader(bname)
+ if len(bname) > self.max_field_size:
+ raise LineTooLong(
+ "request header name {}".format(
+ bname.decode("utf8", "xmlcharrefreplace")
+ ),
+ str(self.max_field_size),
+ str(len(bname)),
+ )
+
+ header_length = len(bvalue)
+
+ # next line
+ lines_idx += 1
+ line = lines[lines_idx]
+
+ # consume continuation lines
+ continuation = line and line[0] in (32, 9) # (' ', '\t')
+
+ if continuation:
+ bvalue_lst = [bvalue]
+ while continuation:
+ header_length += len(line)
+ if header_length > self.max_field_size:
+ raise LineTooLong(
+ "request header field {}".format(
+ bname.decode("utf8", "xmlcharrefreplace")
+ ),
+ str(self.max_field_size),
+ str(header_length),
+ )
+ bvalue_lst.append(line)
+
+ # next line
+ lines_idx += 1
+ if lines_idx < line_count:
+ line = lines[lines_idx]
+ if line:
+ continuation = line[0] in (32, 9) # (' ', '\t')
+ else:
+ line = b""
+ break
+ bvalue = b"".join(bvalue_lst)
+ else:
+ if header_length > self.max_field_size:
+ raise LineTooLong(
+ "request header field {}".format(
+ bname.decode("utf8", "xmlcharrefreplace")
+ ),
+ str(self.max_field_size),
+ str(header_length),
+ )
+
+ bvalue = bvalue.strip()
+ name = bname.decode("utf-8", "surrogateescape")
+ value = bvalue.decode("utf-8", "surrogateescape")
+
+ headers.add(name, value)
+ raw_headers.append((bname, bvalue))
+
+ return (CIMultiDictProxy(headers), tuple(raw_headers))
+
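+# Editorial example (a sketch): the parser receives the message already split
+# on CRLF; lines[0] is the start line and is skipped here:
+#
+#   lines = [b"GET / HTTP/1.1", b"Host: example.com", b""]
+#   headers, raw = HeadersParser().parse_headers(lines)
+#   headers["Host"] == "example.com"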
+
+class HttpParser(abc.ABC, Generic[_MsgT]):
+ def __init__(
+ self,
+ protocol: Optional[BaseProtocol] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ limit: int = 2 ** 16,
+ max_line_size: int = 8190,
+ max_headers: int = 32768,
+ max_field_size: int = 8190,
+ timer: Optional[BaseTimerContext] = None,
+ code: Optional[int] = None,
+ method: Optional[str] = None,
+ readall: bool = False,
+ payload_exception: Optional[Type[BaseException]] = None,
+ response_with_body: bool = True,
+ read_until_eof: bool = False,
+ auto_decompress: bool = True,
+ ) -> None:
+ self.protocol = protocol
+ self.loop = loop
+ self.max_line_size = max_line_size
+ self.max_headers = max_headers
+ self.max_field_size = max_field_size
+ self.timer = timer
+ self.code = code
+ self.method = method
+ self.readall = readall
+ self.payload_exception = payload_exception
+ self.response_with_body = response_with_body
+ self.read_until_eof = read_until_eof
+
+ self._lines = [] # type: List[bytes]
+ self._tail = b""
+ self._upgraded = False
+ self._payload = None
+ self._payload_parser = None # type: Optional[HttpPayloadParser]
+ self._auto_decompress = auto_decompress
+ self._limit = limit
+ self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)
+
+ @abc.abstractmethod
+ def parse_message(self, lines: List[bytes]) -> _MsgT:
+ pass
+
+ def feed_eof(self) -> Optional[_MsgT]:
+ if self._payload_parser is not None:
+ self._payload_parser.feed_eof()
+ self._payload_parser = None
+ else:
+ # try to extract partial message
+ if self._tail:
+ self._lines.append(self._tail)
+
+ if self._lines:
+ if self._lines[-1] != "\r\n":
+ self._lines.append(b"")
+ with suppress(Exception):
+ return self.parse_message(self._lines)
+ return None
+
+ def feed_data(
+ self,
+ data: bytes,
+ SEP: bytes = b"\r\n",
+ EMPTY: bytes = b"",
+ CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
+ METH_CONNECT: str = hdrs.METH_CONNECT,
+ SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
+ ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
+
+ messages = []
+
+ if self._tail:
+ data, self._tail = self._tail + data, b""
+
+ data_len = len(data)
+ start_pos = 0
+ loop = self.loop
+
+ while start_pos < data_len:
+
+ # read HTTP message (request/response line + headers), \r\n\r\n
+ # and split by lines
+ if self._payload_parser is None and not self._upgraded:
+ pos = data.find(SEP, start_pos)
+ # consume \r\n
+ if pos == start_pos and not self._lines:
+ start_pos = pos + 2
+ continue
+
+ if pos >= start_pos:
+ # line found
+ self._lines.append(data[start_pos:pos])
+ start_pos = pos + 2
+
+ # \r\n\r\n found
+ if self._lines[-1] == EMPTY:
+ try:
+ msg: _MsgT = self.parse_message(self._lines)
+ finally:
+ self._lines.clear()
+
+ def get_content_length() -> Optional[int]:
+ # payload length
+ length_hdr = msg.headers.get(CONTENT_LENGTH)
+ if length_hdr is None:
+ return None
+
+ try:
+ length = int(length_hdr)
+ except ValueError:
+ raise InvalidHeader(CONTENT_LENGTH)
+
+ if length < 0:
+ raise InvalidHeader(CONTENT_LENGTH)
+
+ return length
+
+ length = get_content_length()
+ # do not support old websocket spec
+ if SEC_WEBSOCKET_KEY1 in msg.headers:
+ raise InvalidHeader(SEC_WEBSOCKET_KEY1)
+
+ self._upgraded = msg.upgrade
+
+ method = getattr(msg, "method", self.method)
+
+ assert self.protocol is not None
+ # calculate payload
+ if (
+ (length is not None and length > 0)
+ or msg.chunked
+ and not msg.upgrade
+ ):
+ payload = StreamReader(
+ self.protocol,
+ timer=self.timer,
+ loop=loop,
+ limit=self._limit,
+ )
+ payload_parser = HttpPayloadParser(
+ payload,
+ length=length,
+ chunked=msg.chunked,
+ method=method,
+ compression=msg.compression,
+ code=self.code,
+ readall=self.readall,
+ response_with_body=self.response_with_body,
+ auto_decompress=self._auto_decompress,
+ )
+ if not payload_parser.done:
+ self._payload_parser = payload_parser
+ elif method == METH_CONNECT:
+ assert isinstance(msg, RawRequestMessage)
+ payload = StreamReader(
+ self.protocol,
+ timer=self.timer,
+ loop=loop,
+ limit=self._limit,
+ )
+ self._upgraded = True
+ self._payload_parser = HttpPayloadParser(
+ payload,
+ method=msg.method,
+ compression=msg.compression,
+ readall=True,
+ auto_decompress=self._auto_decompress,
+ )
+ else:
+ if (
+ getattr(msg, "code", 100) >= 199
+ and length is None
+ and self.read_until_eof
+ ):
+ payload = StreamReader(
+ self.protocol,
+ timer=self.timer,
+ loop=loop,
+ limit=self._limit,
+ )
+ payload_parser = HttpPayloadParser(
+ payload,
+ length=length,
+ chunked=msg.chunked,
+ method=method,
+ compression=msg.compression,
+ code=self.code,
+ readall=True,
+ response_with_body=self.response_with_body,
+ auto_decompress=self._auto_decompress,
+ )
+ if not payload_parser.done:
+ self._payload_parser = payload_parser
+ else:
+ payload = EMPTY_PAYLOAD
+
+ messages.append((msg, payload))
+ else:
+ self._tail = data[start_pos:]
+ data = EMPTY
+ break
+
+ # no parser, just store
+ elif self._payload_parser is None and self._upgraded:
+ assert not self._lines
+ break
+
+ # feed payload
+ elif data and start_pos < data_len:
+ assert not self._lines
+ assert self._payload_parser is not None
+ try:
+ eof, data = self._payload_parser.feed_data(data[start_pos:])
+ except BaseException as exc:
+ if self.payload_exception is not None:
+ self._payload_parser.payload.set_exception(
+ self.payload_exception(str(exc))
+ )
+ else:
+ self._payload_parser.payload.set_exception(exc)
+
+ eof = True
+ data = b""
+
+ if eof:
+ start_pos = 0
+ data_len = len(data)
+ self._payload_parser = None
+ continue
+ else:
+ break
+
+ if data and start_pos < data_len:
+ data = data[start_pos:]
+ else:
+ data = EMPTY
+
+ return messages, self._upgraded, data
+
+ def parse_headers(
+ self, lines: List[bytes]
+ ) -> Tuple[
+ "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
+ ]:
+ """Parses RFC 5322 headers from a stream.
+
+        Line continuations are supported. Returns the parsed headers
+        together with connection-close, encoding, upgrade and chunked flags.
+ """
+ headers, raw_headers = self._headers_parser.parse_headers(lines)
+ close_conn = None
+ encoding = None
+ upgrade = False
+ chunked = False
+
+ # keep-alive
+ conn = headers.get(hdrs.CONNECTION)
+ if conn:
+ v = conn.lower()
+ if v == "close":
+ close_conn = True
+ elif v == "keep-alive":
+ close_conn = False
+ elif v == "upgrade":
+ upgrade = True
+
+ # encoding
+ enc = headers.get(hdrs.CONTENT_ENCODING)
+ if enc:
+ enc = enc.lower()
+ if enc in ("gzip", "deflate", "br"):
+ encoding = enc
+
+ # chunking
+ te = headers.get(hdrs.TRANSFER_ENCODING)
+ if te is not None:
+ if "chunked" == te.lower():
+ chunked = True
+ else:
+ raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
+
+ if hdrs.CONTENT_LENGTH in headers:
+ raise BadHttpMessage(
+ "Content-Length can't be present with Transfer-Encoding",
+ )
+
+ return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
+
+ def set_upgraded(self, val: bool) -> None:
+ """Set connection upgraded (to websocket) mode.
+
+ :param bool val: new state.
+ """
+ self._upgraded = val
+
+
+class HttpRequestParser(HttpParser[RawRequestMessage]):
+ """Read request status line.
+
+ Exception .http_exceptions.BadStatusLine
+ could be raised in case of any errors in status line.
+ Returns RawRequestMessage.
+ """
+
+ def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
+ # request line
+ line = lines[0].decode("utf-8", "surrogateescape")
+ try:
+ method, path, version = line.split(None, 2)
+ except ValueError:
+ raise BadStatusLine(line) from None
+
+ if len(path) > self.max_line_size:
+ raise LineTooLong(
+ "Status line is too long", str(self.max_line_size), str(len(path))
+ )
+
+ path_part, _hash_separator, url_fragment = path.partition("#")
+ path_part, _question_mark_separator, qs_part = path_part.partition("?")
+
+ # method
+ if not METHRE.match(method):
+ raise BadStatusLine(method)
+
+ # version
+ try:
+ if version.startswith("HTTP/"):
+ n1, n2 = version[5:].split(".", 1)
+ version_o = HttpVersion(int(n1), int(n2))
+ else:
+ raise BadStatusLine(version)
+ except Exception:
+ raise BadStatusLine(version)
+
+ # read headers
+ (
+ headers,
+ raw_headers,
+ close,
+ compression,
+ upgrade,
+ chunked,
+ ) = self.parse_headers(lines)
+
+ if close is None: # then the headers weren't set in the request
+            if version_o <= HttpVersion10:  # HTTP/1.0 closes unless asked to keep alive
+                close = True
+            else:  # HTTP/1.1 keeps alive unless asked to close
+                close = False
+
+ return RawRequestMessage(
+ method,
+ path,
+ version_o,
+ headers,
+ raw_headers,
+ close,
+ compression,
+ upgrade,
+ chunked,
+ # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
+ # NOTE: parser does, otherwise it results into the same
+ # NOTE: HTTP Request-Line input producing different
+ # NOTE: `yarl.URL()` objects
+ URL.build(
+ path=path_part,
+ query_string=qs_part,
+ fragment=url_fragment,
+ encoded=True,
+ ),
+ )
+
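+# Editorial sketch (``protocol`` and ``loop`` are assumed to be a connected
+# BaseProtocol and a running event loop):
+#
+#   parser = HttpRequestParser(protocol, loop, 2 ** 16)
+#   msgs, upgraded, tail = parser.feed_data(b"GET / HTTP/1.1\r\nHost: h\r\n\r\n")
+#   msgs[0][0].method == "GET"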
+
+class HttpResponseParser(HttpParser[RawResponseMessage]):
+ """Read response status line and headers.
+
+ BadStatusLine could be raised in case of any errors in status line.
+ Returns RawResponseMessage.
+ """
+
+ def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
+ line = lines[0].decode("utf-8", "surrogateescape")
+ try:
+ version, status = line.split(None, 1)
+ except ValueError:
+ raise BadStatusLine(line) from None
+
+ try:
+ status, reason = status.split(None, 1)
+ except ValueError:
+ reason = ""
+
+ if len(reason) > self.max_line_size:
+ raise LineTooLong(
+ "Status line is too long", str(self.max_line_size), str(len(reason))
+ )
+
+ # version
+ match = VERSRE.match(version)
+ if match is None:
+ raise BadStatusLine(line)
+ version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
+
+ # The status code is a three-digit number
+ try:
+ status_i = int(status)
+ except ValueError:
+ raise BadStatusLine(line) from None
+
+ if status_i > 999:
+ raise BadStatusLine(line)
+
+ # read headers
+ (
+ headers,
+ raw_headers,
+ close,
+ compression,
+ upgrade,
+ chunked,
+ ) = self.parse_headers(lines)
+
+ if close is None:
+ close = version_o <= HttpVersion10
+
+ return RawResponseMessage(
+ version_o,
+ status_i,
+ reason.strip(),
+ headers,
+ raw_headers,
+ close,
+ compression,
+ upgrade,
+ chunked,
+ )
+
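+# Editorial example (illustrative): the status line b"HTTP/1.1 200 OK"
+# parses to version=HttpVersion(1, 1), code=200, reason="OK".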
+
+class HttpPayloadParser:
+ def __init__(
+ self,
+ payload: StreamReader,
+ length: Optional[int] = None,
+ chunked: bool = False,
+ compression: Optional[str] = None,
+ code: Optional[int] = None,
+ method: Optional[str] = None,
+ readall: bool = False,
+ response_with_body: bool = True,
+ auto_decompress: bool = True,
+ ) -> None:
+ self._length = 0
+ self._type = ParseState.PARSE_NONE
+ self._chunk = ChunkState.PARSE_CHUNKED_SIZE
+ self._chunk_size = 0
+ self._chunk_tail = b""
+ self._auto_decompress = auto_decompress
+ self.done = False
+
+ # payload decompression wrapper
+ if response_with_body and compression and self._auto_decompress:
+ real_payload = DeflateBuffer(
+ payload, compression
+ ) # type: Union[StreamReader, DeflateBuffer]
+ else:
+ real_payload = payload
+
+ # payload parser
+ if not response_with_body:
+ # don't parse payload if it's not expected to be received
+ self._type = ParseState.PARSE_NONE
+ real_payload.feed_eof()
+ self.done = True
+
+ elif chunked:
+ self._type = ParseState.PARSE_CHUNKED
+ elif length is not None:
+ self._type = ParseState.PARSE_LENGTH
+ self._length = length
+ if self._length == 0:
+ real_payload.feed_eof()
+ self.done = True
+ else:
+ if readall and code != 204:
+ self._type = ParseState.PARSE_UNTIL_EOF
+ elif method in ("PUT", "POST"):
+ internal_logger.warning( # pragma: no cover
+ "Content-Length or Transfer-Encoding header is required"
+ )
+ self._type = ParseState.PARSE_NONE
+ real_payload.feed_eof()
+ self.done = True
+
+ self.payload = real_payload
+
+ def feed_eof(self) -> None:
+ if self._type == ParseState.PARSE_UNTIL_EOF:
+ self.payload.feed_eof()
+ elif self._type == ParseState.PARSE_LENGTH:
+ raise ContentLengthError(
+                "Not enough data to satisfy the Content-Length header."
+ )
+ elif self._type == ParseState.PARSE_CHUNKED:
+ raise TransferEncodingError(
+                "Not enough data to satisfy the Transfer-Encoding header."
+ )
+
+ def feed_data(
+ self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
+ ) -> Tuple[bool, bytes]:
+ # Read specified amount of bytes
+ if self._type == ParseState.PARSE_LENGTH:
+ required = self._length
+ chunk_len = len(chunk)
+
+ if required >= chunk_len:
+ self._length = required - chunk_len
+ self.payload.feed_data(chunk, chunk_len)
+ if self._length == 0:
+ self.payload.feed_eof()
+ return True, b""
+ else:
+ self._length = 0
+ self.payload.feed_data(chunk[:required], required)
+ self.payload.feed_eof()
+ return True, chunk[required:]
+
+ # Chunked transfer encoding parser
+ elif self._type == ParseState.PARSE_CHUNKED:
+ if self._chunk_tail:
+ chunk = self._chunk_tail + chunk
+ self._chunk_tail = b""
+
+ while chunk:
+
+ # read next chunk size
+ if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
+ pos = chunk.find(SEP)
+ if pos >= 0:
+ i = chunk.find(CHUNK_EXT, 0, pos)
+ if i >= 0:
+ size_b = chunk[:i] # strip chunk-extensions
+ else:
+ size_b = chunk[:pos]
+
+ try:
+ size = int(bytes(size_b), 16)
+ except ValueError:
+ exc = TransferEncodingError(
+ chunk[:pos].decode("ascii", "surrogateescape")
+ )
+ self.payload.set_exception(exc)
+ raise exc from None
+
+ chunk = chunk[pos + 2 :]
+ if size == 0: # eof marker
+ self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
+ else:
+ self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
+ self._chunk_size = size
+ self.payload.begin_http_chunk_receiving()
+ else:
+ self._chunk_tail = chunk
+ return False, b""
+
+ # read chunk and feed buffer
+ if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
+ required = self._chunk_size
+ chunk_len = len(chunk)
+
+ if required > chunk_len:
+ self._chunk_size = required - chunk_len
+ self.payload.feed_data(chunk, chunk_len)
+ return False, b""
+ else:
+ self._chunk_size = 0
+ self.payload.feed_data(chunk[:required], required)
+ chunk = chunk[required:]
+ self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
+ self.payload.end_http_chunk_receiving()
+
+ # toss the CRLF at the end of the chunk
+ if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
+ if chunk[:2] == SEP:
+ chunk = chunk[2:]
+ self._chunk = ChunkState.PARSE_CHUNKED_SIZE
+ else:
+ self._chunk_tail = chunk
+ return False, b""
+
+                # if the stream does not contain trailers, after 0\r\n
+                # we should get another \r\n; otherwise
+                # trailers need to be skipped until \r\n\r\n
+ if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
+ head = chunk[:2]
+ if head == SEP:
+ # end of stream
+ self.payload.feed_eof()
+ return True, chunk[2:]
+ # Both CR and LF, or only LF may not be received yet. It is
+ # expected that CRLF or LF will be shown at the very first
+ # byte next time, otherwise trailers should come. The last
+ # CRLF which marks the end of response might not be
+ # contained in the same TCP segment which delivered the
+ # size indicator.
+ if not head:
+ return False, b""
+ if head == SEP[:1]:
+ self._chunk_tail = head
+ return False, b""
+ self._chunk = ChunkState.PARSE_TRAILERS
+
+ # read and discard trailer up to the CRLF terminator
+ if self._chunk == ChunkState.PARSE_TRAILERS:
+ pos = chunk.find(SEP)
+ if pos >= 0:
+ chunk = chunk[pos + 2 :]
+ self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
+ else:
+ self._chunk_tail = chunk
+ return False, b""
+
+ # Read all bytes until eof
+ elif self._type == ParseState.PARSE_UNTIL_EOF:
+ self.payload.feed_data(chunk, len(chunk))
+
+ return False, b""
+
+
+class DeflateBuffer:
+ """DeflateStream decompress stream and feed data into specified stream."""
+
+ decompressor: Any
+
+ def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
+ self.out = out
+ self.size = 0
+ self.encoding = encoding
+ self._started_decoding = False
+
+ if encoding == "br":
+ if not HAS_BROTLI: # pragma: no cover
+ raise ContentEncodingError(
+                    "Cannot decode content-encoding: brotli (br). "
+ "Please install `Brotli`"
+ )
+
+ class BrotliDecoder:
+ # Supports both 'brotlipy' and 'Brotli' packages
+ # since they share an import name. The top branches
+ # are for 'brotlipy' and bottom branches for 'Brotli'
+ def __init__(self) -> None:
+ self._obj = brotli.Decompressor()
+
+ def decompress(self, data: bytes) -> bytes:
+ if hasattr(self._obj, "decompress"):
+ return cast(bytes, self._obj.decompress(data))
+ return cast(bytes, self._obj.process(data))
+
+ def flush(self) -> bytes:
+ if hasattr(self._obj, "flush"):
+ return cast(bytes, self._obj.flush())
+ return b""
+
+ self.decompressor = BrotliDecoder()
+ else:
+ zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
+ self.decompressor = zlib.decompressobj(wbits=zlib_mode)
+
+ def set_exception(self, exc: BaseException) -> None:
+ self.out.set_exception(exc)
+
+ def feed_data(self, chunk: bytes, size: int) -> None:
+ if not size:
+ return
+
+ self.size += size
+
+ # RFC1950
+ # bits 0..3 = CM = 0b1000 = 8 = "deflate"
+        # bits 4..7 = CINFO = 1..7 = window size.
+ if (
+ not self._started_decoding
+ and self.encoding == "deflate"
+ and chunk[0] & 0xF != 8
+ ):
+ # Change the decoder to decompress incorrectly compressed data
+ # Actually we should issue a warning about non-RFC-compliant data.
+ self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
+
+ try:
+ chunk = self.decompressor.decompress(chunk)
+ except Exception:
+ raise ContentEncodingError(
+                "Cannot decode content-encoding: %s" % self.encoding
+ )
+
+ self._started_decoding = True
+
+ if chunk:
+ self.out.feed_data(chunk, len(chunk))
+
+ def feed_eof(self) -> None:
+ chunk = self.decompressor.flush()
+
+ if chunk or self.size > 0:
+ self.out.feed_data(chunk, len(chunk))
+ if self.encoding == "deflate" and not self.decompressor.eof:
+ raise ContentEncodingError("deflate")
+
+ self.out.feed_eof()
+
+ def begin_http_chunk_receiving(self) -> None:
+ self.out.begin_http_chunk_receiving()
+
+ def end_http_chunk_receiving(self) -> None:
+ self.out.end_http_chunk_receiving()
+
+
+HttpRequestParserPy = HttpRequestParser
+HttpResponseParserPy = HttpResponseParser
+RawRequestMessagePy = RawRequestMessage
+RawResponseMessagePy = RawResponseMessage
+
+try:
+ if not NO_EXTENSIONS:
+ from ._http_parser import ( # type: ignore[import,no-redef]
+ HttpRequestParser,
+ HttpResponseParser,
+ RawRequestMessage,
+ RawResponseMessage,
+ )
+
+ HttpRequestParserC = HttpRequestParser
+ HttpResponseParserC = HttpResponseParser
+ RawRequestMessageC = RawRequestMessage
+ RawResponseMessageC = RawResponseMessage
+except ImportError: # pragma: no cover
+ pass
diff --git a/contrib/python/aiohttp/aiohttp/http_websocket.py b/contrib/python/aiohttp/aiohttp/http_websocket.py
new file mode 100644
index 0000000000..991a149d09
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/http_websocket.py
@@ -0,0 +1,701 @@
+"""WebSocket protocol versions 13 and 8."""
+
+import asyncio
+import collections
+import json
+import random
+import re
+import sys
+import zlib
+from enum import IntEnum
+from struct import Struct
+from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast
+
+from .base_protocol import BaseProtocol
+from .helpers import NO_EXTENSIONS
+from .streams import DataQueue
+from .typedefs import Final
+
+__all__ = (
+ "WS_CLOSED_MESSAGE",
+ "WS_CLOSING_MESSAGE",
+ "WS_KEY",
+ "WebSocketReader",
+ "WebSocketWriter",
+ "WSMessage",
+ "WebSocketError",
+ "WSMsgType",
+ "WSCloseCode",
+)
+
+
+class WSCloseCode(IntEnum):
+ OK = 1000
+ GOING_AWAY = 1001
+ PROTOCOL_ERROR = 1002
+ UNSUPPORTED_DATA = 1003
+ ABNORMAL_CLOSURE = 1006
+ INVALID_TEXT = 1007
+ POLICY_VIOLATION = 1008
+ MESSAGE_TOO_BIG = 1009
+ MANDATORY_EXTENSION = 1010
+ INTERNAL_ERROR = 1011
+ SERVICE_RESTART = 1012
+ TRY_AGAIN_LATER = 1013
+ BAD_GATEWAY = 1014
+
+
+ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
+
+
+class WSMsgType(IntEnum):
+ # websocket spec types
+ CONTINUATION = 0x0
+ TEXT = 0x1
+ BINARY = 0x2
+ PING = 0x9
+ PONG = 0xA
+ CLOSE = 0x8
+
+ # aiohttp specific types
+ CLOSING = 0x100
+ CLOSED = 0x101
+ ERROR = 0x102
+
+ text = TEXT
+ binary = BINARY
+ ping = PING
+ pong = PONG
+ close = CLOSE
+ closing = CLOSING
+ closed = CLOSED
+ error = ERROR
+
+
+WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
+
+
+UNPACK_LEN2 = Struct("!H").unpack_from
+UNPACK_LEN3 = Struct("!Q").unpack_from
+UNPACK_CLOSE_CODE = Struct("!H").unpack
+PACK_LEN1 = Struct("!BB").pack
+PACK_LEN2 = Struct("!BBH").pack
+PACK_LEN3 = Struct("!BBQ").pack
+PACK_CLOSE_CODE = Struct("!H").pack
+MSG_SIZE: Final[int] = 2 ** 14
+DEFAULT_LIMIT: Final[int] = 2 ** 16
+
+
+_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
+
+
+class WSMessage(_WSMessageBase):
+ def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
+ """Return parsed JSON data.
+
+ .. versionadded:: 0.22
+ """
+ return loads(self.data)
+
+
+WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
+WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
+
+
+class WebSocketError(Exception):
+ """WebSocket protocol parser error."""
+
+ def __init__(self, code: int, message: str) -> None:
+ self.code = code
+ super().__init__(code, message)
+
+ def __str__(self) -> str:
+ return cast(str, self.args[1])
+
+
+class WSHandshakeError(Exception):
+ """WebSocket protocol handshake error."""
+
+
+native_byteorder: Final[str] = sys.byteorder
+
+
+# Used by _websocket_mask_python
+_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)]
+
+
+def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
+ """Websocket masking function.
+
+ `mask` is a `bytes` object of length 4; `data` is a `bytearray`
+ object of any length. The contents of `data` are masked with `mask`,
+ as specified in section 5.3 of RFC 6455.
+
+ Note that this function mutates the `data` argument.
+
+ This pure-python implementation may be replaced by an optimized
+ version when available.
+
+ """
+ assert isinstance(data, bytearray), data
+ assert len(mask) == 4, mask
+
+ if data:
+ a, b, c, d = (_XOR_TABLE[n] for n in mask)
+ data[::4] = data[::4].translate(a)
+ data[1::4] = data[1::4].translate(b)
+ data[2::4] = data[2::4].translate(c)
+ data[3::4] = data[3::4].translate(d)
+
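+# Editorial example (illustrative): masking is XOR, so applying the same
+# mask twice restores the original bytes:
+#
+#   buf = bytearray(b"hello")
+#   _websocket_mask_python(b"\x01\x02\x03\x04", buf)
+#   _websocket_mask_python(b"\x01\x02\x03\x04", buf)  # buf == b"hello" again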
+
+if NO_EXTENSIONS: # pragma: no cover
+ _websocket_mask = _websocket_mask_python
+else:
+ try:
+ from ._websocket import _websocket_mask_cython # type: ignore[import]
+
+ _websocket_mask = _websocket_mask_cython
+ except ImportError: # pragma: no cover
+ _websocket_mask = _websocket_mask_python
+
+_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
+
+
+_WS_EXT_RE: Final[Pattern[str]] = re.compile(
+ r"^(?:;\s*(?:"
+ r"(server_no_context_takeover)|"
+ r"(client_no_context_takeover)|"
+ r"(server_max_window_bits(?:=(\d+))?)|"
+ r"(client_max_window_bits(?:=(\d+))?)))*$"
+)
+
+_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
+
+
+def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
+ if not extstr:
+ return 0, False
+
+ compress = 0
+ notakeover = False
+ for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
+ defext = ext.group(1)
+        # Use compress = 15 when a bare `permessage-deflate` is received
+ if not defext:
+ compress = 15
+ break
+ match = _WS_EXT_RE.match(defext)
+ if match:
+ compress = 15
+ if isserver:
+            # The server never fails to detect the compress handshake;
+            # it does not need to send max window bits to the client.
+ if match.group(4):
+ compress = int(match.group(4))
+                    # Group 3 must match if group 4 matches.
+                    # zlib does not support wbits=8; if the window size
+                    # is unsupported, CONTINUE to the next extension.
+ if compress > 15 or compress < 9:
+ compress = 0
+ continue
+ if match.group(1):
+ notakeover = True
+ # Ignore regex group 5 & 6 for client_max_window_bits
+ break
+ else:
+ if match.group(6):
+ compress = int(match.group(6))
+                    # Group 5 must match if group 6 matches.
+                    # zlib does not support wbits=8; if the window size
+                    # is unsupported, FAIL parsing.
+ if compress > 15 or compress < 9:
+ raise WSHandshakeError("Invalid window size")
+ if match.group(2):
+ notakeover = True
+ # Ignore regex group 5 & 6 for client_max_window_bits
+ break
+        # Fail on the client side if the extension did not match
+        elif not isserver:
+            raise WSHandshakeError(
+                "Extension for deflate not supported: " + ext.group(1)
+            )
+
+ return compress, notakeover
+
+
+def ws_ext_gen(
+ compress: int = 15, isserver: bool = False, server_notakeover: bool = False
+) -> str:
+    # client_notakeover=False is not used for the server
+    # zlib does not support wbits=8
+    if compress < 9 or compress > 15:
+        raise ValueError(
+            "Compress wbits must be between 9 and 15; zlib does not support wbits=8"
+ )
+ enabledext = ["permessage-deflate"]
+ if not isserver:
+ enabledext.append("client_max_window_bits")
+
+ if compress < 15:
+ enabledext.append("server_max_window_bits=" + str(compress))
+ if server_notakeover:
+ enabledext.append("server_no_context_takeover")
+ # if client_notakeover:
+ # enabledext.append('client_no_context_takeover')
+ return "; ".join(enabledext)
+
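+# Editorial example (illustrative round trip):
+#
+#   hdr = ws_ext_gen(compress=15)  # 'permessage-deflate; client_max_window_bits'
+#   ws_ext_parse(hdr) == (15, False)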
+
+class WSParserState(IntEnum):
+ READ_HEADER = 1
+ READ_PAYLOAD_LENGTH = 2
+ READ_PAYLOAD_MASK = 3
+ READ_PAYLOAD = 4
+
+
+class WebSocketReader:
+ def __init__(
+ self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
+ ) -> None:
+ self.queue = queue
+ self._max_msg_size = max_msg_size
+
+ self._exc = None # type: Optional[BaseException]
+ self._partial = bytearray()
+ self._state = WSParserState.READ_HEADER
+
+ self._opcode = None # type: Optional[int]
+ self._frame_fin = False
+ self._frame_opcode = None # type: Optional[int]
+ self._frame_payload = bytearray()
+
+ self._tail = b""
+ self._has_mask = False
+ self._frame_mask = None # type: Optional[bytes]
+ self._payload_length = 0
+ self._payload_length_flag = 0
+ self._compressed = None # type: Optional[bool]
+ self._decompressobj = None # type: Any # zlib.decompressobj actually
+ self._compress = compress
+
+ def feed_eof(self) -> None:
+ self.queue.feed_eof()
+
+ def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
+ if self._exc:
+ return True, data
+
+ try:
+ return self._feed_data(data)
+ except Exception as exc:
+ self._exc = exc
+ self.queue.set_exception(exc)
+ return True, b""
+
+ def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
+ for fin, opcode, payload, compressed in self.parse_frame(data):
+ if compressed and not self._decompressobj:
+ self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
+ if opcode == WSMsgType.CLOSE:
+ if len(payload) >= 2:
+ close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
+ if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ f"Invalid close code: {close_code}",
+ )
+ try:
+ close_message = payload[2:].decode("utf-8")
+ except UnicodeDecodeError as exc:
+ raise WebSocketError(
+ WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
+ ) from exc
+ msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
+ elif payload:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ f"Invalid close frame: {fin} {opcode} {payload!r}",
+ )
+ else:
+ msg = WSMessage(WSMsgType.CLOSE, 0, "")
+
+ self.queue.feed_data(msg, 0)
+
+ elif opcode == WSMsgType.PING:
+ self.queue.feed_data(
+ WSMessage(WSMsgType.PING, payload, ""), len(payload)
+ )
+
+ elif opcode == WSMsgType.PONG:
+ self.queue.feed_data(
+ WSMessage(WSMsgType.PONG, payload, ""), len(payload)
+ )
+
+ elif (
+ opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
+ and self._opcode is None
+ ):
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
+ )
+ else:
+ # load text/binary
+ if not fin:
+ # got partial frame payload
+ if opcode != WSMsgType.CONTINUATION:
+ self._opcode = opcode
+ self._partial.extend(payload)
+ if self._max_msg_size and len(self._partial) >= self._max_msg_size:
+ raise WebSocketError(
+ WSCloseCode.MESSAGE_TOO_BIG,
+ "Message size {} exceeds limit {}".format(
+ len(self._partial), self._max_msg_size
+ ),
+ )
+ else:
+ # the previous frame was not finished,
+ # so we expect a continuation opcode
+ if self._partial:
+ if opcode != WSMsgType.CONTINUATION:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "The opcode in non-fin frame is expected "
+ "to be zero, got {!r}".format(opcode),
+ )
+
+ if opcode == WSMsgType.CONTINUATION:
+ assert self._opcode is not None
+ opcode = self._opcode
+ self._opcode = None
+
+ self._partial.extend(payload)
+ if self._max_msg_size and len(self._partial) >= self._max_msg_size:
+ raise WebSocketError(
+ WSCloseCode.MESSAGE_TOO_BIG,
+ "Message size {} exceeds limit {}".format(
+ len(self._partial), self._max_msg_size
+ ),
+ )
+
+ # Decompression must be done after all frames of the
+ # message have been received.
+ if compressed:
+ self._partial.extend(_WS_DEFLATE_TRAILING)
+ payload_merged = self._decompressobj.decompress(
+ self._partial, self._max_msg_size
+ )
+ if self._decompressobj.unconsumed_tail:
+ left = len(self._decompressobj.unconsumed_tail)
+ raise WebSocketError(
+ WSCloseCode.MESSAGE_TOO_BIG,
+ "Decompressed message size {} exceeds limit {}".format(
+ self._max_msg_size + left, self._max_msg_size
+ ),
+ )
+ else:
+ payload_merged = bytes(self._partial)
+
+ self._partial.clear()
+
+ if opcode == WSMsgType.TEXT:
+ try:
+ text = payload_merged.decode("utf-8")
+ self.queue.feed_data(
+ WSMessage(WSMsgType.TEXT, text, ""), len(text)
+ )
+ except UnicodeDecodeError as exc:
+ raise WebSocketError(
+ WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
+ ) from exc
+ else:
+ self.queue.feed_data(
+ WSMessage(WSMsgType.BINARY, payload_merged, ""),
+ len(payload_merged),
+ )
+
+ return False, b""
+
+ def parse_frame(
+ self, buf: bytes
+ ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
+ """Return the next frame from the socket."""
+ frames = []
+ if self._tail:
+ buf, self._tail = self._tail + buf, b""
+
+ start_pos = 0
+ buf_length = len(buf)
+
+ while True:
+ # read header
+ if self._state == WSParserState.READ_HEADER:
+ if buf_length - start_pos >= 2:
+ data = buf[start_pos : start_pos + 2]
+ start_pos += 2
+ first_byte, second_byte = data
+
+ fin = (first_byte >> 7) & 1
+ rsv1 = (first_byte >> 6) & 1
+ rsv2 = (first_byte >> 5) & 1
+ rsv3 = (first_byte >> 4) & 1
+ opcode = first_byte & 0xF
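+
+ # Worked example (editor's note): for the unmasked text frame
+ # b"\x81\x05hello", first_byte = 0x81 decodes to fin=1,
+ # rsv1=rsv2=rsv3=0 and opcode=0x1 (TEXT); second_byte = 0x05
+ # decodes to has_mask=0 and length=5 (parsed below).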
+
+ # frame-fin = %x0 ; more frames of this message follow
+ # / %x1 ; final frame of this message
+ # frame-rsv1 = %x0 ;
+ # 1 bit, MUST be 0 unless negotiated otherwise
+ # frame-rsv2 = %x0 ;
+ # 1 bit, MUST be 0 unless negotiated otherwise
+ # frame-rsv3 = %x0 ;
+ # 1 bit, MUST be 0 unless negotiated otherwise
+ #
+ # Remove rsv1 from this test for deflate development
+ if rsv2 or rsv3 or (rsv1 and not self._compress):
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Received frame with non-zero reserved bits",
+ )
+
+ if opcode > 0x7 and fin == 0:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Received fragmented control frame",
+ )
+
+ has_mask = (second_byte >> 7) & 1
+ length = second_byte & 0x7F
+
+ # Control frames MUST have a payload
+ # length of 125 bytes or less
+ if opcode > 0x7 and length > 125:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Control frame payload cannot be " "larger than 125 bytes",
+ )
+
+ # Set the compress status if the previous frame was final (FIN)
+ # or if this is the first fragment.
+ # Raise an error if a later fragment arrives with rsv1 = 0x1.
+ if self._frame_fin or self._compressed is None:
+ self._compressed = bool(rsv1)
+ elif rsv1:
+ raise WebSocketError(
+ WSCloseCode.PROTOCOL_ERROR,
+ "Received frame with non-zero reserved bits",
+ )
+
+ self._frame_fin = bool(fin)
+ self._frame_opcode = opcode
+ self._has_mask = bool(has_mask)
+ self._payload_length_flag = length
+ self._state = WSParserState.READ_PAYLOAD_LENGTH
+ else:
+ break
+
+ # read payload length
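+ # Length flag semantics (RFC 6455): 0-125 is the literal payload
+ # length, 126 means the real length follows in the next 2 bytes
+ # (big-endian) and 127 means it follows in the next 8 bytes; e.g. a
+ # 300-byte payload is encoded as 126 followed by b"\x01\x2c".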
+ if self._state == WSParserState.READ_PAYLOAD_LENGTH:
+ length = self._payload_length_flag
+ if length == 126:
+ if buf_length - start_pos >= 2:
+ data = buf[start_pos : start_pos + 2]
+ start_pos += 2
+ length = UNPACK_LEN2(data)[0]
+ self._payload_length = length
+ self._state = (
+ WSParserState.READ_PAYLOAD_MASK
+ if self._has_mask
+ else WSParserState.READ_PAYLOAD
+ )
+ else:
+ break
+ elif length > 126:
+ if buf_length - start_pos >= 8:
+ data = buf[start_pos : start_pos + 8]
+ start_pos += 8
+ length = UNPACK_LEN3(data)[0]
+ self._payload_length = length
+ self._state = (
+ WSParserState.READ_PAYLOAD_MASK
+ if self._has_mask
+ else WSParserState.READ_PAYLOAD
+ )
+ else:
+ break
+ else:
+ self._payload_length = length
+ self._state = (
+ WSParserState.READ_PAYLOAD_MASK
+ if self._has_mask
+ else WSParserState.READ_PAYLOAD
+ )
+
+ # read payload mask
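+ # A masked frame carries a 4-byte masking key; the payload is
+ # unmasked later by XOR-ing payload[i] with mask[i % 4] (RFC 6455).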
+ if self._state == WSParserState.READ_PAYLOAD_MASK:
+ if buf_length - start_pos >= 4:
+ self._frame_mask = buf[start_pos : start_pos + 4]
+ start_pos += 4
+ self._state = WSParserState.READ_PAYLOAD
+ else:
+ break
+
+ if self._state == WSParserState.READ_PAYLOAD:
+ length = self._payload_length
+ payload = self._frame_payload
+
+ chunk_len = buf_length - start_pos
+ if length >= chunk_len:
+ self._payload_length = length - chunk_len
+ payload.extend(buf[start_pos:])
+ start_pos = buf_length
+ else:
+ self._payload_length = 0
+ payload.extend(buf[start_pos : start_pos + length])
+ start_pos = start_pos + length
+
+ if self._payload_length == 0:
+ if self._has_mask:
+ assert self._frame_mask is not None
+ _websocket_mask(self._frame_mask, payload)
+
+ frames.append(
+ (self._frame_fin, self._frame_opcode, payload, self._compressed)
+ )
+
+ self._frame_payload = bytearray()
+ self._state = WSParserState.READ_HEADER
+ else:
+ break
+
+ self._tail = buf[start_pos:]
+
+ return frames
+
+
+class WebSocketWriter:
+ def __init__(
+ self,
+ protocol: BaseProtocol,
+ transport: asyncio.Transport,
+ *,
+ use_mask: bool = False,
+ limit: int = DEFAULT_LIMIT,
+ random: Any = random.Random(),
+ compress: int = 0,
+ notakeover: bool = False,
+ ) -> None:
+ self.protocol = protocol
+ self.transport = transport
+ self.use_mask = use_mask
+ self.randrange = random.randrange
+ self.compress = compress
+ self.notakeover = notakeover
+ self._closing = False
+ self._limit = limit
+ self._output_size = 0
+ self._compressobj = None # type: Any # actually compressobj
+
+ async def _send_frame(
+ self, message: bytes, opcode: int, compress: Optional[int] = None
+ ) -> None:
+ """Send a frame over the websocket with message as its payload."""
+ if self._closing and not (opcode & WSMsgType.CLOSE):
+ raise ConnectionResetError("Cannot write to closing transport")
+
+ rsv = 0
+
+ # Only compress larger packets (disabled).
+ # Do small packets need to be compressed?
+ # if self.compress and opcode < 8 and len(message) > 124:
+ if (compress or self.compress) and opcode < 8:
+ if compress:
+ # Do not set self._compressobj if compression applies to this frame only
+ compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
+ else: # self.compress
+ if not self._compressobj:
+ self._compressobj = zlib.compressobj(
+ level=zlib.Z_BEST_SPEED, wbits=-self.compress
+ )
+ compressobj = self._compressobj
+
+ message = compressobj.compress(message)
+ message = message + compressobj.flush(
+ zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
+ )
+ if message.endswith(_WS_DEFLATE_TRAILING):
+ message = message[:-4]
+ rsv = rsv | 0x40
+
+ msg_length = len(message)
+
+ use_mask = self.use_mask
+ if use_mask:
+ mask_bit = 0x80
+ else:
+ mask_bit = 0
+
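+ # Frame header layout (RFC 6455): first byte = FIN (0x80) | RSV bits |
+ # opcode, second byte = mask bit | length flag. E.g. an unmasked,
+ # uncompressed 5-byte text frame gets the 2-byte header b"\x81\x05".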
+ if msg_length < 126:
+ header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
+ elif msg_length < (1 << 16):
+ header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
+ else:
+ header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
+ if use_mask:
+ mask = self.randrange(0, 0xFFFFFFFF)
+ mask = mask.to_bytes(4, "big")
+ message = bytearray(message)
+ _websocket_mask(mask, message)
+ self._write(header + mask + message)
+ self._output_size += len(header) + len(mask) + len(message)
+ else:
+ if len(message) > MSG_SIZE:
+ self._write(header)
+ self._write(message)
+ else:
+ self._write(header + message)
+
+ self._output_size += len(header) + len(message)
+
+ if self._output_size > self._limit:
+ self._output_size = 0
+ await self.protocol._drain_helper()
+
+ def _write(self, data: bytes) -> None:
+ if self.transport is None or self.transport.is_closing():
+ raise ConnectionResetError("Cannot write to closing transport")
+ self.transport.write(data)
+
+ async def pong(self, message: bytes = b"") -> None:
+ """Send pong message."""
+ if isinstance(message, str):
+ message = message.encode("utf-8")
+ await self._send_frame(message, WSMsgType.PONG)
+
+ async def ping(self, message: bytes = b"") -> None:
+ """Send ping message."""
+ if isinstance(message, str):
+ message = message.encode("utf-8")
+ await self._send_frame(message, WSMsgType.PING)
+
+ async def send(
+ self,
+ message: Union[str, bytes],
+ binary: bool = False,
+ compress: Optional[int] = None,
+ ) -> None:
+ """Send a frame over the websocket with message as its payload."""
+ if isinstance(message, str):
+ message = message.encode("utf-8")
+ if binary:
+ await self._send_frame(message, WSMsgType.BINARY, compress)
+ else:
+ await self._send_frame(message, WSMsgType.TEXT, compress)
+
+ async def close(self, code: int = 1000, message: bytes = b"") -> None:
+ """Close the websocket, sending the specified code and message."""
+ if isinstance(message, str):
+ message = message.encode("utf-8")
+ try:
+ await self._send_frame(
+ PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
+ )
+ finally:
+ self._closing = True
diff --git a/contrib/python/aiohttp/aiohttp/http_writer.py b/contrib/python/aiohttp/aiohttp/http_writer.py
new file mode 100644
index 0000000000..e09144736c
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/http_writer.py
@@ -0,0 +1,200 @@
+"""Http related parsers and protocol."""
+
+import asyncio
+import zlib
+from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa
+
+from multidict import CIMultiDict
+
+from .abc import AbstractStreamWriter
+from .base_protocol import BaseProtocol
+from .helpers import NO_EXTENSIONS
+
+__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
+
+
+class HttpVersion(NamedTuple):
+ major: int
+ minor: int
+
+
+HttpVersion10 = HttpVersion(1, 0)
+HttpVersion11 = HttpVersion(1, 1)
+
+
+_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
+_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
+
+
+class StreamWriter(AbstractStreamWriter):
+ def __init__(
+ self,
+ protocol: BaseProtocol,
+ loop: asyncio.AbstractEventLoop,
+ on_chunk_sent: _T_OnChunkSent = None,
+ on_headers_sent: _T_OnHeadersSent = None,
+ ) -> None:
+ self._protocol = protocol
+ self._transport = protocol.transport
+
+ self.loop = loop
+ self.length = None
+ self.chunked = False
+ self.buffer_size = 0
+ self.output_size = 0
+
+ self._eof = False
+ self._compress = None # type: Any
+ self._drain_waiter = None
+
+ self._on_chunk_sent = on_chunk_sent # type: _T_OnChunkSent
+ self._on_headers_sent = on_headers_sent # type: _T_OnHeadersSent
+
+ @property
+ def transport(self) -> Optional[asyncio.Transport]:
+ return self._transport
+
+ @property
+ def protocol(self) -> BaseProtocol:
+ return self._protocol
+
+ def enable_chunking(self) -> None:
+ self.chunked = True
+
+ def enable_compression(
+ self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
+ ) -> None:
+ zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
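+ # wbits = 16 + MAX_WBITS selects a gzip container, while plain
+ # MAX_WBITS selects a zlib (RFC 1950) stream, which is what the
+ # "deflate" content coding expects.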
+ self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)
+
+ def _write(self, chunk: bytes) -> None:
+ size = len(chunk)
+ self.buffer_size += size
+ self.output_size += size
+
+ if self._transport is None or self._transport.is_closing():
+ raise ConnectionResetError("Cannot write to closing transport")
+ self._transport.write(chunk)
+
+ async def write(
+ self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
+ ) -> None:
+ """Writes chunk of data to a stream.
+
+ write_eof() indicates end of stream.
+ writer can't be used after write_eof() method being called.
+ write() return drain future.
+ """
+ if self._on_chunk_sent is not None:
+ await self._on_chunk_sent(chunk)
+
+ if isinstance(chunk, memoryview):
+ if chunk.nbytes != len(chunk):
+ # just reshape it
+ chunk = chunk.cast("c")
+
+ if self._compress is not None:
+ chunk = self._compress.compress(chunk)
+ if not chunk:
+ return
+
+ if self.length is not None:
+ chunk_len = len(chunk)
+ if self.length >= chunk_len:
+ self.length = self.length - chunk_len
+ else:
+ chunk = chunk[: self.length]
+ self.length = 0
+ if not chunk:
+ return
+
+ if chunk:
+ if self.chunked:
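+ # HTTP/1.1 chunked framing: b"<hex size>\r\n<data>\r\n"; e.g.
+ # b"hello" is sent as b"5\r\nhello\r\n", and write_eof() later
+ # terminates the stream with b"0\r\n\r\n".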
+ chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
+ chunk = chunk_len_pre + chunk + b"\r\n"
+
+ self._write(chunk)
+
+ if self.buffer_size > LIMIT and drain:
+ self.buffer_size = 0
+ await self.drain()
+
+ async def write_headers(
+ self, status_line: str, headers: "CIMultiDict[str]"
+ ) -> None:
+ """Write request/response status and headers."""
+ if self._on_headers_sent is not None:
+ await self._on_headers_sent(headers)
+
+ # status + headers
+ buf = _serialize_headers(status_line, headers)
+ self._write(buf)
+
+ async def write_eof(self, chunk: bytes = b"") -> None:
+ if self._eof:
+ return
+
+ if chunk and self._on_chunk_sent is not None:
+ await self._on_chunk_sent(chunk)
+
+ if self._compress:
+ if chunk:
+ chunk = self._compress.compress(chunk)
+
+ chunk = chunk + self._compress.flush()
+ if chunk and self.chunked:
+ chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
+ chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
+ else:
+ if self.chunked:
+ if chunk:
+ chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
+ chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
+ else:
+ chunk = b"0\r\n\r\n"
+
+ if chunk:
+ self._write(chunk)
+
+ await self.drain()
+
+ self._eof = True
+ self._transport = None
+
+ async def drain(self) -> None:
+ """Flush the write buffer.
+
+ The intended use is to write
+
+ await w.write(data)
+ await w.drain()
+ """
+ if self._protocol.transport is not None:
+ await self._protocol._drain_helper()
+
+
+def _safe_header(string: str) -> str:
+ if "\r" in string or "\n" in string:
+ raise ValueError(
+ "Newline or carriage return detected in headers. "
+ "Potential header injection attack."
+ )
+ return string
+
+
+def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
+ headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
+ line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
+ return line.encode("utf-8")
+
+
+_serialize_headers = _py_serialize_headers
+
+try:
+ import aiohttp._http_writer as _http_writer # type: ignore[import]
+
+ _c_serialize_headers = _http_writer._serialize_headers
+ if not NO_EXTENSIONS:
+ _serialize_headers = _c_serialize_headers
+except ImportError:
+ pass
diff --git a/contrib/python/aiohttp/aiohttp/locks.py b/contrib/python/aiohttp/aiohttp/locks.py
new file mode 100644
index 0000000000..df65e3e47d
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/locks.py
@@ -0,0 +1,41 @@
+import asyncio
+import collections
+from typing import Any, Deque, Optional
+
+
+class EventResultOrError:
+ """Event asyncio lock helper class.
+
+ Wraps the Event asyncio lock allowing either to awake the
+ locked Tasks without any error or raising an exception.
+
+ thanks to @vorpalsmith for the simple design.
+ """
+
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
+ self._loop = loop
+ self._exc = None # type: Optional[BaseException]
+ self._event = asyncio.Event()
+ self._waiters = collections.deque() # type: Deque[asyncio.Future[Any]]
+
+ def set(self, exc: Optional[BaseException] = None) -> None:
+ self._exc = exc
+ self._event.set()
+
+ async def wait(self) -> Any:
+ waiter = self._loop.create_task(self._event.wait())
+ self._waiters.append(waiter)
+ try:
+ val = await waiter
+ finally:
+ self._waiters.remove(waiter)
+
+ if self._exc is not None:
+ raise self._exc
+
+ return val
+
+ def cancel(self) -> None:
+ """Cancel all waiters"""
+ for waiter in self._waiters:
+ waiter.cancel()
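+
+
+# Illustrative sketch (editor's note): a task awaiting
+# EventResultOrError.wait() is released either by set() (wait() returns
+# normally) or by set(exc) (wait() raises exc); cancel() instead cancels
+# all pending waiters.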
diff --git a/contrib/python/aiohttp/aiohttp/log.py b/contrib/python/aiohttp/aiohttp/log.py
new file mode 100644
index 0000000000..3cecea2bac
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/log.py
@@ -0,0 +1,8 @@
+import logging
+
+access_logger = logging.getLogger("aiohttp.access")
+client_logger = logging.getLogger("aiohttp.client")
+internal_logger = logging.getLogger("aiohttp.internal")
+server_logger = logging.getLogger("aiohttp.server")
+web_logger = logging.getLogger("aiohttp.web")
+ws_logger = logging.getLogger("aiohttp.websocket")
diff --git a/contrib/python/aiohttp/aiohttp/multipart.py b/contrib/python/aiohttp/aiohttp/multipart.py
new file mode 100644
index 0000000000..c84e20044f
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/multipart.py
@@ -0,0 +1,963 @@
+import base64
+import binascii
+import json
+import re
+import uuid
+import warnings
+import zlib
+from collections import deque
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ AsyncIterator,
+ Deque,
+ Dict,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+from urllib.parse import parse_qsl, unquote, urlencode
+
+from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping
+
+from .hdrs import (
+ CONTENT_DISPOSITION,
+ CONTENT_ENCODING,
+ CONTENT_LENGTH,
+ CONTENT_TRANSFER_ENCODING,
+ CONTENT_TYPE,
+)
+from .helpers import CHAR, TOKEN, parse_mimetype, reify
+from .http import HeadersParser
+from .payload import (
+ JsonPayload,
+ LookupError,
+ Order,
+ Payload,
+ StringPayload,
+ get_payload,
+ payload_type,
+)
+from .streams import StreamReader
+
+__all__ = (
+ "MultipartReader",
+ "MultipartWriter",
+ "BodyPartReader",
+ "BadContentDispositionHeader",
+ "BadContentDispositionParam",
+ "parse_content_disposition",
+ "content_disposition_filename",
+)
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .client_reqrep import ClientResponse
+
+
+class BadContentDispositionHeader(RuntimeWarning):
+ pass
+
+
+class BadContentDispositionParam(RuntimeWarning):
+ pass
+
+
+def parse_content_disposition(
+ header: Optional[str],
+) -> Tuple[Optional[str], Dict[str, str]]:
+ def is_token(string: str) -> bool:
+ return bool(string) and TOKEN >= set(string)
+
+ def is_quoted(string: str) -> bool:
+ return string[0] == string[-1] == '"'
+
+ def is_rfc5987(string: str) -> bool:
+ return is_token(string) and string.count("'") == 2
+
+ def is_extended_param(string: str) -> bool:
+ return string.endswith("*")
+
+ def is_continuous_param(string: str) -> bool:
+ pos = string.find("*") + 1
+ if not pos:
+ return False
+ substring = string[pos:-1] if string.endswith("*") else string[pos:]
+ return substring.isdigit()
+
+ def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
+ return re.sub(f"\\\\([{chars}])", "\\1", text)
+
+ if not header:
+ return None, {}
+
+ disptype, *parts = header.split(";")
+ if not is_token(disptype):
+ warnings.warn(BadContentDispositionHeader(header))
+ return None, {}
+
+ params = {} # type: Dict[str, str]
+ while parts:
+ item = parts.pop(0)
+
+ if "=" not in item:
+ warnings.warn(BadContentDispositionHeader(header))
+ return None, {}
+
+ key, value = item.split("=", 1)
+ key = key.lower().strip()
+ value = value.lstrip()
+
+ if key in params:
+ warnings.warn(BadContentDispositionHeader(header))
+ return None, {}
+
+ if not is_token(key):
+ warnings.warn(BadContentDispositionParam(item))
+ continue
+
+ elif is_continuous_param(key):
+ if is_quoted(value):
+ value = unescape(value[1:-1])
+ elif not is_token(value):
+ warnings.warn(BadContentDispositionParam(item))
+ continue
+
+ elif is_extended_param(key):
+ if is_rfc5987(value):
+ encoding, _, value = value.split("'", 2)
+ encoding = encoding or "utf-8"
+ else:
+ warnings.warn(BadContentDispositionParam(item))
+ continue
+
+ try:
+ value = unquote(value, encoding, "strict")
+ except UnicodeDecodeError: # pragma: nocover
+ warnings.warn(BadContentDispositionParam(item))
+ continue
+
+ else:
+ failed = True
+ if is_quoted(value):
+ failed = False
+ value = unescape(value[1:-1].lstrip("\\/"))
+ elif is_token(value):
+ failed = False
+ elif parts:
+ # there may be a bare ';' inside the filename; this fixes only
+ # that one case, a proper fix requires redesigning the parser
+ _value = f"{value};{parts[0]}"
+ if is_quoted(_value):
+ parts.pop(0)
+ value = unescape(_value[1:-1].lstrip("\\/"))
+ failed = False
+
+ if failed:
+ warnings.warn(BadContentDispositionHeader(header))
+ return None, {}
+
+ params[key] = value
+
+ return disptype.lower(), params
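+
+
+# Illustrative example (editor's note): parse_content_disposition(
+# 'form-data; name="field"; filename="report.pdf"') returns
+# ('form-data', {'name': 'field', 'filename': 'report.pdf'}).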
+
+
+def content_disposition_filename(
+ params: Mapping[str, str], name: str = "filename"
+) -> Optional[str]:
+ name_suf = "%s*" % name
+ if not params:
+ return None
+ elif name_suf in params:
+ return params[name_suf]
+ elif name in params:
+ return params[name]
+ else:
+ parts = []
+ fnparams = sorted(
+ (key, value) for key, value in params.items() if key.startswith(name_suf)
+ )
+ for num, (key, value) in enumerate(fnparams):
+ _, tail = key.split("*", 1)
+ if tail.endswith("*"):
+ tail = tail[:-1]
+ if tail == str(num):
+ parts.append(value)
+ else:
+ break
+ if not parts:
+ return None
+ value = "".join(parts)
+ if "'" in value:
+ encoding, _, value = value.split("'", 2)
+ encoding = encoding or "utf-8"
+ return unquote(value, encoding, "strict")
+ return value
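+
+
+# Illustrative example (editor's note): continuation parameters are
+# reassembled in order, so {"filename*0": "report", "filename*1": ".pdf"}
+# yields "report.pdf".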
+
+
+class MultipartResponseWrapper:
+ """Wrapper around the MultipartReader.
+
+ It takes care of the underlying connection
+ and closes it when needed.
+ """
+
+ def __init__(
+ self,
+ resp: "ClientResponse",
+ stream: "MultipartReader",
+ ) -> None:
+ self.resp = resp
+ self.stream = stream
+
+ def __aiter__(self) -> "MultipartResponseWrapper":
+ return self
+
+ async def __anext__(
+ self,
+ ) -> Union["MultipartReader", "BodyPartReader"]:
+ part = await self.next()
+ if part is None:
+ raise StopAsyncIteration
+ return part
+
+ def at_eof(self) -> bool:
+ """Returns True when all response data had been read."""
+ return self.resp.content.at_eof()
+
+ async def next(
+ self,
+ ) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
+ """Emits next multipart reader object."""
+ item = await self.stream.next()
+ if self.stream.at_eof():
+ await self.release()
+ return item
+
+ async def release(self) -> None:
+ """Release the connection gracefully.
+
+ All remaining content is read to the void.
+ """
+ await self.resp.release()
+
+
+class BodyPartReader:
+ """Multipart reader for single body part."""
+
+ chunk_size = 8192
+
+ def __init__(
+ self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
+ ) -> None:
+ self.headers = headers
+ self._boundary = boundary
+ self._content = content
+ self._at_eof = False
+ length = self.headers.get(CONTENT_LENGTH, None)
+ self._length = int(length) if length is not None else None
+ self._read_bytes = 0
+ # TODO: typing.Deque is not supported by Python 3.5
+ self._unread: Deque[bytes] = deque()
+ self._prev_chunk = None # type: Optional[bytes]
+ self._content_eof = 0
+ self._cache = {} # type: Dict[str, Any]
+
+ def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
+ return self # type: ignore[return-value]
+
+ async def __anext__(self) -> bytes:
+ part = await self.next()
+ if part is None:
+ raise StopAsyncIteration
+ return part
+
+ async def next(self) -> Optional[bytes]:
+ item = await self.read()
+ if not item:
+ return None
+ return item
+
+ async def read(self, *, decode: bool = False) -> bytes:
+ """Reads body part data.
+
+ decode: decodes the data according to the encoding method
+ from the Content-Encoding header. If the header is missing,
+ the data remains untouched.
+ """
+ if self._at_eof:
+ return b""
+ data = bytearray()
+ while not self._at_eof:
+ data.extend(await self.read_chunk(self.chunk_size))
+ if decode:
+ return self.decode(data)
+ return data
+
+ async def read_chunk(self, size: int = chunk_size) -> bytes:
+ """Reads body part content chunk of the specified size.
+
+ size: chunk size
+ """
+ if self._at_eof:
+ return b""
+ if self._length:
+ chunk = await self._read_chunk_from_length(size)
+ else:
+ chunk = await self._read_chunk_from_stream(size)
+
+ self._read_bytes += len(chunk)
+ if self._read_bytes == self._length:
+ self._at_eof = True
+ if self._at_eof:
+ crlf = await self._content.readline()
+ assert (
+ b"\r\n" == crlf
+ ), "reader did not read all the data or it is malformed"
+ return chunk
+
+ async def _read_chunk_from_length(self, size: int) -> bytes:
+ # Reads body part content chunk of the specified size.
+ # The body part must have a Content-Length header with a proper value.
+ assert self._length is not None, "Content-Length required for chunked read"
+ chunk_size = min(size, self._length - self._read_bytes)
+ chunk = await self._content.read(chunk_size)
+ return chunk
+
+ async def _read_chunk_from_stream(self, size: int) -> bytes:
+ # Reads content chunk of body part with unknown length.
+ # A Content-Length header for the body part is not required.
+ assert (
+ size >= len(self._boundary) + 2
+ ), "Chunk size must be greater or equal than boundary length + 2"
+ first_chunk = self._prev_chunk is None
+ if first_chunk:
+ self._prev_chunk = await self._content.read(size)
+
+ chunk = await self._content.read(size)
+ self._content_eof += int(self._content.at_eof())
+ assert self._content_eof < 3, "Reading after EOF"
+ assert self._prev_chunk is not None
+ window = self._prev_chunk + chunk
+ sub = b"\r\n" + self._boundary
+ if first_chunk:
+ idx = window.find(sub)
+ else:
+ idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
+ if idx >= 0:
+ # pushing boundary back to content
+ with warnings.catch_warnings():
+ warnings.filterwarnings("ignore", category=DeprecationWarning)
+ self._content.unread_data(window[idx:])
+ if size > idx:
+ self._prev_chunk = self._prev_chunk[:idx]
+ chunk = window[len(self._prev_chunk) : idx]
+ if not chunk:
+ self._at_eof = True
+ result = self._prev_chunk
+ self._prev_chunk = chunk
+ return result
+
+ async def readline(self) -> bytes:
+ """Reads body part by line by line."""
+ if self._at_eof:
+ return b""
+
+ if self._unread:
+ line = self._unread.popleft()
+ else:
+ line = await self._content.readline()
+
+ if line.startswith(self._boundary):
+ # the very last boundary may not come with \r\n,
+ # so apply a single rule to all of them
+ sline = line.rstrip(b"\r\n")
+ boundary = self._boundary
+ last_boundary = self._boundary + b"--"
+ # ensure that we read exactly the boundary, not something similar
+ if sline == boundary or sline == last_boundary:
+ self._at_eof = True
+ self._unread.append(line)
+ return b""
+ else:
+ next_line = await self._content.readline()
+ if next_line.startswith(self._boundary):
+ line = line[:-2] # strip CRLF but only once
+ self._unread.append(next_line)
+
+ return line
+
+ async def release(self) -> None:
+ """Like read(), but reads all the data to the void."""
+ if self._at_eof:
+ return
+ while not self._at_eof:
+ await self.read_chunk(self.chunk_size)
+
+ async def text(self, *, encoding: Optional[str] = None) -> str:
+ """Like read(), but assumes that body part contains text data."""
+ data = await self.read(decode=True)
+ # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
+ # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
+ encoding = encoding or self.get_charset(default="utf-8")
+ return data.decode(encoding)
+
+ async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
+ """Like read(), but assumes that body parts contains JSON data."""
+ data = await self.read(decode=True)
+ if not data:
+ return None
+ encoding = encoding or self.get_charset(default="utf-8")
+ return cast(Dict[str, Any], json.loads(data.decode(encoding)))
+
+ async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
+ """Like read(), but assumes that body parts contain form urlencoded data."""
+ data = await self.read(decode=True)
+ if not data:
+ return []
+ if encoding is not None:
+ real_encoding = encoding
+ else:
+ real_encoding = self.get_charset(default="utf-8")
+ return parse_qsl(
+ data.rstrip().decode(real_encoding),
+ keep_blank_values=True,
+ encoding=real_encoding,
+ )
+
+ def at_eof(self) -> bool:
+ """Returns True if the boundary was reached or False otherwise."""
+ return self._at_eof
+
+ def decode(self, data: bytes) -> bytes:
+ """Decodes data.
+
+ Decoding is done according to the specified Content-Encoding
+ or Content-Transfer-Encoding header values.
+ """
+ if CONTENT_TRANSFER_ENCODING in self.headers:
+ data = self._decode_content_transfer(data)
+ if CONTENT_ENCODING in self.headers:
+ return self._decode_content(data)
+ return data
+
+ def _decode_content(self, data: bytes) -> bytes:
+ encoding = self.headers.get(CONTENT_ENCODING, "").lower()
+
+ if encoding == "deflate":
+ return zlib.decompress(data, -zlib.MAX_WBITS)
+ elif encoding == "gzip":
+ return zlib.decompress(data, 16 + zlib.MAX_WBITS)
+ elif encoding == "identity":
+ return data
+ else:
+ raise RuntimeError(f"unknown content encoding: {encoding}")
+
+ def _decode_content_transfer(self, data: bytes) -> bytes:
+ encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
+
+ if encoding == "base64":
+ return base64.b64decode(data)
+ elif encoding == "quoted-printable":
+ return binascii.a2b_qp(data)
+ elif encoding in ("binary", "8bit", "7bit"):
+ return data
+ else:
+ raise RuntimeError(f"unknown content transfer encoding: {encoding}")
+
+ def get_charset(self, default: str) -> str:
+ """Returns charset parameter from Content-Type header or default."""
+ ctype = self.headers.get(CONTENT_TYPE, "")
+ mimetype = parse_mimetype(ctype)
+ return mimetype.parameters.get("charset", default)
+
+ @reify
+ def name(self) -> Optional[str]:
+ """Returns name specified in Content-Disposition header.
+
+ If the header is missing or malformed, returns None.
+ """
+ _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
+ return content_disposition_filename(params, "name")
+
+ @reify
+ def filename(self) -> Optional[str]:
+ """Returns filename specified in Content-Disposition header.
+
+ Returns None if the header is missing or malformed.
+ """
+ _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
+ return content_disposition_filename(params, "filename")
+
+
+@payload_type(BodyPartReader, order=Order.try_first)
+class BodyPartReaderPayload(Payload):
+ def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
+ super().__init__(value, *args, **kwargs)
+
+ params = {} # type: Dict[str, str]
+ if value.name is not None:
+ params["name"] = value.name
+ if value.filename is not None:
+ params["filename"] = value.filename
+
+ if params:
+ self.set_content_disposition("attachment", True, **params)
+
+ async def write(self, writer: Any) -> None:
+ field = self._value
+ chunk = await field.read_chunk(size=2 ** 16)
+ while chunk:
+ await writer.write(field.decode(chunk))
+ chunk = await field.read_chunk(size=2 ** 16)
+
+
+class MultipartReader:
+ """Multipart body reader."""
+
+ #: Response wrapper, used when a multipart reader is constructed from a response.
+ response_wrapper_cls = MultipartResponseWrapper
+ #: Multipart reader class, used to handle multipart/* body parts.
+ #: None points to type(self)
+ multipart_reader_cls = None
+ #: Body part reader class for non multipart/* content types.
+ part_reader_cls = BodyPartReader
+
+ def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
+ self.headers = headers
+ self._boundary = ("--" + self._get_boundary()).encode()
+ self._content = content
+ self._last_part = (
+ None
+ ) # type: Optional[Union['MultipartReader', BodyPartReader]]
+ self._at_eof = False
+ self._at_bof = True
+ self._unread = [] # type: List[bytes]
+
+ def __aiter__(
+ self,
+ ) -> AsyncIterator["BodyPartReader"]:
+ return self # type: ignore[return-value]
+
+ async def __anext__(
+ self,
+ ) -> Optional[Union["MultipartReader", BodyPartReader]]:
+ part = await self.next()
+ if part is None:
+ raise StopAsyncIteration
+ return part
+
+ @classmethod
+ def from_response(
+ cls,
+ response: "ClientResponse",
+ ) -> MultipartResponseWrapper:
+ """Constructs reader instance from HTTP response.
+
+ :param response: :class:`~aiohttp.client.ClientResponse` instance
+ """
+ obj = cls.response_wrapper_cls(
+ response, cls(response.headers, response.content)
+ )
+ return obj
+
+ def at_eof(self) -> bool:
+ """Returns True if the final boundary was reached, false otherwise."""
+ return self._at_eof
+
+ async def next(
+ self,
+ ) -> Optional[Union["MultipartReader", BodyPartReader]]:
+ """Emits the next multipart body part."""
+ # So, if we're at BOF, we need to skip till the boundary.
+ if self._at_eof:
+ return None
+ await self._maybe_release_last_part()
+ if self._at_bof:
+ await self._read_until_first_boundary()
+ self._at_bof = False
+ else:
+ await self._read_boundary()
+ if self._at_eof: # we just read the last boundary, nothing to do there
+ return None
+ self._last_part = await self.fetch_next_part()
+ return self._last_part
+
+ async def release(self) -> None:
+ """Reads all the body parts to the void till the final boundary."""
+ while not self._at_eof:
+ item = await self.next()
+ if item is None:
+ break
+ await item.release()
+
+ async def fetch_next_part(
+ self,
+ ) -> Union["MultipartReader", BodyPartReader]:
+ """Returns the next body part reader."""
+ headers = await self._read_headers()
+ return self._get_part_reader(headers)
+
+ def _get_part_reader(
+ self,
+ headers: "CIMultiDictProxy[str]",
+ ) -> Union["MultipartReader", BodyPartReader]:
+ """Dispatches the response by the `Content-Type` header.
+
+ Returns a suitable reader instance.
+
+ :param dict headers: Response headers
+ """
+ ctype = headers.get(CONTENT_TYPE, "")
+ mimetype = parse_mimetype(ctype)
+
+ if mimetype.type == "multipart":
+ if self.multipart_reader_cls is None:
+ return type(self)(headers, self._content)
+ return self.multipart_reader_cls(headers, self._content)
+ else:
+ return self.part_reader_cls(self._boundary, headers, self._content)
+
+ def _get_boundary(self) -> str:
+ mimetype = parse_mimetype(self.headers[CONTENT_TYPE])
+
+ assert mimetype.type == "multipart", "multipart/* content type expected"
+
+ if "boundary" not in mimetype.parameters:
+ raise ValueError(
+ "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE]
+ )
+
+ boundary = mimetype.parameters["boundary"]
+ if len(boundary) > 70:
+ raise ValueError("boundary %r is too long (70 chars max)" % boundary)
+
+ return boundary
+
+ async def _readline(self) -> bytes:
+ if self._unread:
+ return self._unread.pop()
+ return await self._content.readline()
+
+ async def _read_until_first_boundary(self) -> None:
+ while True:
+ chunk = await self._readline()
+ if chunk == b"":
+ raise ValueError(
+ "Could not find starting boundary %r" % (self._boundary)
+ )
+ chunk = chunk.rstrip()
+ if chunk == self._boundary:
+ return
+ elif chunk == self._boundary + b"--":
+ self._at_eof = True
+ return
+
+ async def _read_boundary(self) -> None:
+ chunk = (await self._readline()).rstrip()
+ if chunk == self._boundary:
+ pass
+ elif chunk == self._boundary + b"--":
+ self._at_eof = True
+ epilogue = await self._readline()
+ next_line = await self._readline()
+
+ # the epilogue is expected and then either the end of input or the
+ # parent multipart boundary, if the parent boundary is found then
+ # it should be marked as unread and handed to the parent for
+ # processing
+ if next_line[:2] == b"--":
+ self._unread.append(next_line)
+ # otherwise the request is likely missing an epilogue and both
+ # lines should be passed to the parent for processing
+ # (this handles the old behavior gracefully)
+ else:
+ self._unread.extend([next_line, epilogue])
+ else:
+ raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
+
+ async def _read_headers(self) -> "CIMultiDictProxy[str]":
+ lines = [b""]
+ while True:
+ chunk = await self._content.readline()
+ chunk = chunk.strip()
+ lines.append(chunk)
+ if not chunk:
+ break
+ parser = HeadersParser()
+ headers, raw_headers = parser.parse_headers(lines)
+ return headers
+
+ async def _maybe_release_last_part(self) -> None:
+ """Ensures that the last read body part is read completely."""
+ if self._last_part is not None:
+ if not self._last_part.at_eof():
+ await self._last_part.release()
+ self._unread.extend(self._last_part._unread)
+ self._last_part = None
+
+
+_Part = Tuple[Payload, str, str]
+
+
+class MultipartWriter(Payload):
+ """Multipart body writer."""
+
+ def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
+ boundary = boundary if boundary is not None else uuid.uuid4().hex
+ # The underlying Payload API demands a str (utf-8), not bytes,
+ # so we need to ensure we don't lose anything during conversion.
+ # As a result, the boundary is required to be ASCII only,
+ # whether it was supplied or generated.
+
+ try:
+ self._boundary = boundary.encode("ascii")
+ except UnicodeEncodeError:
+ raise ValueError("boundary should contain ASCII only chars") from None
+ ctype = f"multipart/{subtype}; boundary={self._boundary_value}"
+
+ super().__init__(None, content_type=ctype)
+
+ self._parts = [] # type: List[_Part]
+
+ def __enter__(self) -> "MultipartWriter":
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ pass
+
+ def __iter__(self) -> Iterator[_Part]:
+ return iter(self._parts)
+
+ def __len__(self) -> int:
+ return len(self._parts)
+
+ def __bool__(self) -> bool:
+ return True
+
+ _valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z")
+ _invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]")
+
+ @property
+ def _boundary_value(self) -> str:
+ """Wrap boundary parameter value in quotes, if necessary.
+
+ Reads self.boundary and returns a unicode string.
+ """
+ # Refer to RFCs 7231, 7230, 5234.
+ #
+ # parameter = token "=" ( token / quoted-string )
+ # token = 1*tchar
+ # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
+ # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
+ # obs-text = %x80-FF
+ # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
+ # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
+ # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
+ # / DIGIT / ALPHA
+ # ; any VCHAR, except delimiters
+ # VCHAR = %x21-7E
+ value = self._boundary
+ if re.match(self._valid_tchar_regex, value):
+ return value.decode("ascii") # cannot fail
+
+ if re.search(self._invalid_qdtext_char_regex, value):
+ raise ValueError("boundary value contains invalid characters")
+
+ # escape %x5C and %x22
+ quoted_value_content = value.replace(b"\\", b"\\\\")
+ quoted_value_content = quoted_value_content.replace(b'"', b'\\"')
+
+ return '"' + quoted_value_content.decode("ascii") + '"'
+
+ @property
+ def boundary(self) -> str:
+ return self._boundary.decode("ascii")
+
+ def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload:
+ if headers is None:
+ headers = CIMultiDict()
+
+ if isinstance(obj, Payload):
+ obj.headers.update(headers)
+ return self.append_payload(obj)
+ else:
+ try:
+ payload = get_payload(obj, headers=headers)
+ except LookupError:
+ raise TypeError("Cannot create payload from %r" % obj)
+ else:
+ return self.append_payload(payload)
+
+ def append_payload(self, payload: Payload) -> Payload:
+ """Adds a new body part to multipart writer."""
+ # compression
+ encoding = payload.headers.get(
+ CONTENT_ENCODING,
+ "",
+ ).lower() # type: Optional[str]
+ if encoding and encoding not in ("deflate", "gzip", "identity"):
+ raise RuntimeError(f"unknown content encoding: {encoding}")
+ if encoding == "identity":
+ encoding = None
+
+ # te encoding
+ te_encoding = payload.headers.get(
+ CONTENT_TRANSFER_ENCODING,
+ "",
+ ).lower() # type: Optional[str]
+ if te_encoding not in ("", "base64", "quoted-printable", "binary"):
+ raise RuntimeError(f"unknown content transfer encoding: {te_encoding}")
+ if te_encoding == "binary":
+ te_encoding = None
+
+ # size
+ size = payload.size
+ if size is not None and not (encoding or te_encoding):
+ payload.headers[CONTENT_LENGTH] = str(size)
+
+ self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type]
+ return payload
+
+ def append_json(
+ self, obj: Any, headers: Optional[MultiMapping[str]] = None
+ ) -> Payload:
+ """Helper to append JSON part."""
+ if headers is None:
+ headers = CIMultiDict()
+
+ return self.append_payload(JsonPayload(obj, headers=headers))
+
+ def append_form(
+ self,
+ obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
+ headers: Optional[MultiMapping[str]] = None,
+ ) -> Payload:
+ """Helper to append form urlencoded part."""
+ assert isinstance(obj, (Sequence, Mapping))
+
+ if headers is None:
+ headers = CIMultiDict()
+
+ if isinstance(obj, Mapping):
+ obj = list(obj.items())
+ data = urlencode(obj, doseq=True)
+
+ return self.append_payload(
+ StringPayload(
+ data, headers=headers, content_type="application/x-www-form-urlencoded"
+ )
+ )
+
+ @property
+ def size(self) -> Optional[int]:
+ """Size of the payload."""
+ total = 0
+ for part, encoding, te_encoding in self._parts:
+ if encoding or te_encoding or part.size is None:
+ return None
+
+ total += int(
+ 2
+ + len(self._boundary)
+ + 2
+ + part.size # b'--'+self._boundary+b'\r\n'
+ + len(part._binary_headers)
+ + 2 # b'\r\n'
+ )
+
+ total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n'
+ return total
+
+ async def write(self, writer: Any, close_boundary: bool = True) -> None:
+ """Write body."""
+ for part, encoding, te_encoding in self._parts:
+ await writer.write(b"--" + self._boundary + b"\r\n")
+ await writer.write(part._binary_headers)
+
+ if encoding or te_encoding:
+ w = MultipartPayloadWriter(writer)
+ if encoding:
+ w.enable_compression(encoding)
+ if te_encoding:
+ w.enable_encoding(te_encoding)
+ await part.write(w) # type: ignore[arg-type]
+ await w.write_eof()
+ else:
+ await part.write(writer)
+
+ await writer.write(b"\r\n")
+
+ if close_boundary:
+ await writer.write(b"--" + self._boundary + b"--\r\n")
+
+
+class MultipartPayloadWriter:
+ def __init__(self, writer: Any) -> None:
+ self._writer = writer
+ self._encoding = None # type: Optional[str]
+ self._compress = None # type: Any
+ self._encoding_buffer = None # type: Optional[bytearray]
+
+ def enable_encoding(self, encoding: str) -> None:
+ if encoding == "base64":
+ self._encoding = encoding
+ self._encoding_buffer = bytearray()
+ elif encoding == "quoted-printable":
+ self._encoding = "quoted-printable"
+
+ def enable_compression(
+ self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
+ ) -> None:
+ zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
+ self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)
+
+ async def write_eof(self) -> None:
+ if self._compress is not None:
+ chunk = self._compress.flush()
+ if chunk:
+ self._compress = None
+ await self.write(chunk)
+
+ if self._encoding == "base64":
+ if self._encoding_buffer:
+ await self._writer.write(base64.b64encode(self._encoding_buffer))
+
+ async def write(self, chunk: bytes) -> None:
+ if self._compress is not None:
+ if chunk:
+ chunk = self._compress.compress(chunk)
+ if not chunk:
+ return
+
+ if self._encoding == "base64":
+ buf = self._encoding_buffer
+ assert buf is not None
+ buf.extend(chunk)
+
+ if buf:
+ div, mod = divmod(len(buf), 3)
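+ # base64 maps 3-byte groups to 4 output chars, so only a multiple
+ # of 3 bytes can be encoded now; the remainder (0-2 bytes) stays
+ # buffered until more data arrives or write_eof() flushes it.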
+ enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
+ if enc_chunk:
+ b64chunk = base64.b64encode(enc_chunk)
+ await self._writer.write(b64chunk)
+ elif self._encoding == "quoted-printable":
+ await self._writer.write(binascii.b2a_qp(chunk))
+ else:
+ await self._writer.write(chunk)
diff --git a/contrib/python/aiohttp/aiohttp/payload.py b/contrib/python/aiohttp/aiohttp/payload.py
new file mode 100644
index 0000000000..2ee90beea8
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/payload.py
@@ -0,0 +1,465 @@
+import asyncio
+import enum
+import io
+import json
+import mimetypes
+import os
+import warnings
+from abc import ABC, abstractmethod
+from itertools import chain
+from typing import (
+ IO,
+ TYPE_CHECKING,
+ Any,
+ ByteString,
+ Dict,
+ Iterable,
+ Optional,
+ TextIO,
+ Tuple,
+ Type,
+ Union,
+)
+
+from multidict import CIMultiDict
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import (
+ PY_36,
+ content_disposition_header,
+ guess_filename,
+ parse_mimetype,
+ sentinel,
+)
+from .streams import StreamReader
+from .typedefs import Final, JSONEncoder, _CIMultiDict
+
+__all__ = (
+ "PAYLOAD_REGISTRY",
+ "get_payload",
+ "payload_type",
+ "Payload",
+ "BytesPayload",
+ "StringPayload",
+ "IOBasePayload",
+ "BytesIOPayload",
+ "BufferedReaderPayload",
+ "TextIOPayload",
+ "StringIOPayload",
+ "JsonPayload",
+ "AsyncIterablePayload",
+)
+
+TOO_LARGE_BYTES_BODY: Final[int] = 2 ** 20 # 1 MB
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import List
+
+
+class LookupError(Exception):
+ pass
+
+
+class Order(str, enum.Enum):
+ normal = "normal"
+ try_first = "try_first"
+ try_last = "try_last"
+
+
+def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
+ return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
+
+
+def register_payload(
+ factory: Type["Payload"], type: Any, *, order: Order = Order.normal
+) -> None:
+ PAYLOAD_REGISTRY.register(factory, type, order=order)
+
+
+class payload_type:
+ def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
+ self.type = type
+ self.order = order
+
+ def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
+ register_payload(factory, self.type, order=self.order)
+ return factory
+
+
+PayloadType = Type["Payload"]
+_PayloadRegistryItem = Tuple[PayloadType, Any]
+
+
+class PayloadRegistry:
+ """Payload registry.
+
+ note: we need zope.interface for more efficient adapter search
+ """
+
+ def __init__(self) -> None:
+ self._first = [] # type: List[_PayloadRegistryItem]
+ self._normal = [] # type: List[_PayloadRegistryItem]
+ self._last = [] # type: List[_PayloadRegistryItem]
+
+ def get(
+ self,
+ data: Any,
+ *args: Any,
+ _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
+ **kwargs: Any,
+ ) -> "Payload":
+ if isinstance(data, Payload):
+ return data
+ for factory, type in _CHAIN(self._first, self._normal, self._last):
+ if isinstance(data, type):
+ return factory(data, *args, **kwargs)
+
+ raise LookupError()
+
+ def register(
+ self, factory: PayloadType, type: Any, *, order: Order = Order.normal
+ ) -> None:
+ if order is Order.try_first:
+ self._first.append((factory, type))
+ elif order is Order.normal:
+ self._normal.append((factory, type))
+ elif order is Order.try_last:
+ self._last.append((factory, type))
+ else:
+ raise ValueError(f"Unsupported order {order!r}")
+
+
+class Payload(ABC):
+
+ _default_content_type = "application/octet-stream" # type: str
+ _size = None # type: Optional[int]
+
+ def __init__(
+ self,
+ value: Any,
+ headers: Optional[
+ Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
+ ] = None,
+ content_type: Optional[str] = sentinel,
+ filename: Optional[str] = None,
+ encoding: Optional[str] = None,
+ **kwargs: Any,
+ ) -> None:
+ self._encoding = encoding
+ self._filename = filename
+ self._headers = CIMultiDict() # type: _CIMultiDict
+ self._value = value
+ if content_type is not sentinel and content_type is not None:
+ self._headers[hdrs.CONTENT_TYPE] = content_type
+ elif self._filename is not None:
+ content_type = mimetypes.guess_type(self._filename)[0]
+ if content_type is None:
+ content_type = self._default_content_type
+ self._headers[hdrs.CONTENT_TYPE] = content_type
+ else:
+ self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
+ self._headers.update(headers or {})
+
+ @property
+ def size(self) -> Optional[int]:
+ """Size of the payload."""
+ return self._size
+
+ @property
+ def filename(self) -> Optional[str]:
+ """Filename of the payload."""
+ return self._filename
+
+ @property
+ def headers(self) -> _CIMultiDict:
+ """Custom item headers"""
+ return self._headers
+
+ @property
+ def _binary_headers(self) -> bytes:
+ return (
+ "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
+ "utf-8"
+ )
+ + b"\r\n"
+ )
+
+ @property
+ def encoding(self) -> Optional[str]:
+ """Payload encoding"""
+ return self._encoding
+
+ @property
+ def content_type(self) -> str:
+ """Content type"""
+ return self._headers[hdrs.CONTENT_TYPE]
+
+ def set_content_disposition(
+ self,
+ disptype: str,
+ quote_fields: bool = True,
+ _charset: str = "utf-8",
+ **params: Any,
+ ) -> None:
+ """Sets ``Content-Disposition`` header."""
+ self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
+ disptype, quote_fields=quote_fields, _charset=_charset, **params
+ )
+
+ @abstractmethod
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ """Write payload.
+
+ writer is an AbstractStreamWriter instance.
+ """
+
+
+class BytesPayload(Payload):
+ def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
+ if not isinstance(value, (bytes, bytearray, memoryview)):
+ raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")
+
+ if "content_type" not in kwargs:
+ kwargs["content_type"] = "application/octet-stream"
+
+ super().__init__(value, *args, **kwargs)
+
+ if isinstance(value, memoryview):
+ self._size = value.nbytes
+ else:
+ self._size = len(value)
+
+ if self._size > TOO_LARGE_BYTES_BODY:
+ if PY_36:
+ kwargs = {"source": self}
+ else:
+ kwargs = {}
+ warnings.warn(
+ "Sending a large body directly with raw bytes might"
+ " lock the event loop. You should probably pass an "
+ "io.BytesIO object instead",
+ ResourceWarning,
+ **kwargs,
+ )
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ await writer.write(self._value)
+
+
+class StringPayload(BytesPayload):
+ def __init__(
+ self,
+ value: str,
+ *args: Any,
+ encoding: Optional[str] = None,
+ content_type: Optional[str] = None,
+ **kwargs: Any,
+ ) -> None:
+
+ if encoding is None:
+ if content_type is None:
+ real_encoding = "utf-8"
+ content_type = "text/plain; charset=utf-8"
+ else:
+ mimetype = parse_mimetype(content_type)
+ real_encoding = mimetype.parameters.get("charset", "utf-8")
+ else:
+ if content_type is None:
+ content_type = "text/plain; charset=%s" % encoding
+ real_encoding = encoding
+
+ super().__init__(
+ value.encode(real_encoding),
+ encoding=real_encoding,
+ content_type=content_type,
+ *args,
+ **kwargs,
+ )
+
+
+class StringIOPayload(StringPayload):
+ def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
+ super().__init__(value.read(), *args, **kwargs)
+
+
+class IOBasePayload(Payload):
+ _value: IO[Any]
+
+ def __init__(
+ self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
+ ) -> None:
+ if "filename" not in kwargs:
+ kwargs["filename"] = guess_filename(value)
+
+ super().__init__(value, *args, **kwargs)
+
+ if self._filename is not None and disposition is not None:
+ if hdrs.CONTENT_DISPOSITION not in self.headers:
+ self.set_content_disposition(disposition, filename=self._filename)
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ loop = asyncio.get_event_loop()
+ try:
+ chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+ while chunk:
+ await writer.write(chunk)
+ chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+ finally:
+ await loop.run_in_executor(None, self._value.close)
+
+
+class TextIOPayload(IOBasePayload):
+ _value: TextIO
+
+ def __init__(
+ self,
+ value: TextIO,
+ *args: Any,
+ encoding: Optional[str] = None,
+ content_type: Optional[str] = None,
+ **kwargs: Any,
+ ) -> None:
+
+ if encoding is None:
+ if content_type is None:
+ encoding = "utf-8"
+ content_type = "text/plain; charset=utf-8"
+ else:
+ mimetype = parse_mimetype(content_type)
+ encoding = mimetype.parameters.get("charset", "utf-8")
+ else:
+ if content_type is None:
+ content_type = "text/plain; charset=%s" % encoding
+
+ super().__init__(
+ value,
+ content_type=content_type,
+ encoding=encoding,
+ *args,
+ **kwargs,
+ )
+
+ @property
+ def size(self) -> Optional[int]:
+ try:
+ return os.fstat(self._value.fileno()).st_size - self._value.tell()
+ except OSError:
+ return None
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ loop = asyncio.get_event_loop()
+ try:
+ chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+ while chunk:
+ data = (
+ chunk.encode(encoding=self._encoding)
+ if self._encoding
+ else chunk.encode()
+ )
+ await writer.write(data)
+ chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
+ finally:
+ await loop.run_in_executor(None, self._value.close)
+
+
+class BytesIOPayload(IOBasePayload):
+ @property
+ def size(self) -> int:
+ position = self._value.tell()
+ end = self._value.seek(0, os.SEEK_END)
+ self._value.seek(position)
+ return end - position
+
+
+class BufferedReaderPayload(IOBasePayload):
+ @property
+ def size(self) -> Optional[int]:
+ try:
+ return os.fstat(self._value.fileno()).st_size - self._value.tell()
+ except OSError:
+ # data.fileno() is not supported, e.g.
+ # io.BufferedReader(io.BytesIO(b'data'))
+ return None
+
+
+class JsonPayload(BytesPayload):
+ def __init__(
+ self,
+ value: Any,
+ encoding: str = "utf-8",
+ content_type: str = "application/json",
+ dumps: JSONEncoder = json.dumps,
+ *args: Any,
+ **kwargs: Any,
+ ) -> None:
+
+ super().__init__(
+ dumps(value).encode(encoding),
+ content_type=content_type,
+ encoding=encoding,
+ *args,
+ **kwargs,
+ )
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import AsyncIterable, AsyncIterator
+
+ _AsyncIterator = AsyncIterator[bytes]
+ _AsyncIterable = AsyncIterable[bytes]
+else:
+ from collections.abc import AsyncIterable, AsyncIterator
+
+ _AsyncIterator = AsyncIterator
+ _AsyncIterable = AsyncIterable
+
+
+class AsyncIterablePayload(Payload):
+
+ _iter = None # type: Optional[_AsyncIterator]
+
+ def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
+ if not isinstance(value, AsyncIterable):
+ raise TypeError(
+ "value argument must support "
+ "collections.abc.AsyncIterablebe interface, "
+ "got {!r}".format(type(value))
+ )
+
+ if "content_type" not in kwargs:
+ kwargs["content_type"] = "application/octet-stream"
+
+ super().__init__(value, *args, **kwargs)
+
+ self._iter = value.__aiter__()
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ if self._iter:
+ try:
+ # the `iter is not None` check prevents rare cases
+ # when the same iterable is used twice
+ while True:
+ chunk = await self._iter.__anext__()
+ await writer.write(chunk)
+ except StopAsyncIteration:
+ self._iter = None
+
+
+class StreamReaderPayload(AsyncIterablePayload):
+ def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
+ super().__init__(value.iter_any(), *args, **kwargs)
+
+
+PAYLOAD_REGISTRY = PayloadRegistry()
+PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
+PAYLOAD_REGISTRY.register(StringPayload, str)
+PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
+PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
+PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
+PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
+PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
+PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
+# try_last gives more specialized async iterables, such as
+# BodyPartReaderPayload in aiohttp.multipart, a chance to override the default
+PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
diff --git a/contrib/python/aiohttp/aiohttp/payload_streamer.py b/contrib/python/aiohttp/aiohttp/payload_streamer.py
new file mode 100644
index 0000000000..9f8b8bc57c
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/payload_streamer.py
@@ -0,0 +1,75 @@
+"""
+Payload implementation for coroutines as data providers.
+
+As a simple case, you can upload data from file::
+
+ @aiohttp.streamer
+ async def file_sender(writer, file_name=None):
+ with open(file_name, 'rb') as f:
+ chunk = f.read(2**16)
+ while chunk:
+ await writer.write(chunk)
+
+ chunk = f.read(2**16)
+
+Then you can use `file_sender` like this::
+
+ async with session.post('http://httpbin.org/post',
+ data=file_sender(file_name='huge_file')) as resp:
+ print(await resp.text())
+
+.. note:: The coroutine must accept `writer` as its first argument.
+
+"""
+
+import types
+import warnings
+from typing import Any, Awaitable, Callable, Dict, Tuple
+
+from .abc import AbstractStreamWriter
+from .payload import Payload, payload_type
+
+__all__ = ("streamer",)
+
+
+class _stream_wrapper:
+ def __init__(
+ self,
+ coro: Callable[..., Awaitable[None]],
+ args: Tuple[Any, ...],
+ kwargs: Dict[str, Any],
+ ) -> None:
+ self.coro = types.coroutine(coro)
+ self.args = args
+ self.kwargs = kwargs
+
+ async def __call__(self, writer: AbstractStreamWriter) -> None:
+ await self.coro(writer, *self.args, **self.kwargs) # type: ignore[operator]
+
+
+class streamer:
+ def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
+ warnings.warn(
+ "@streamer is deprecated, use async generators instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.coro = coro
+
+ def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
+ return _stream_wrapper(self.coro, args, kwargs)
+
+
+@payload_type(_stream_wrapper)
+class StreamWrapperPayload(Payload):
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ await self._value(writer)
+
+
+@payload_type(streamer)
+class StreamPayload(StreamWrapperPayload):
+ def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
+ super().__init__(value(), *args, **kwargs)
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ await self._value(writer)
diff --git a/contrib/python/aiohttp/aiohttp/py.typed b/contrib/python/aiohttp/aiohttp/py.typed
new file mode 100644
index 0000000000..f5642f79f2
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/py.typed
@@ -0,0 +1 @@
+Marker
diff --git a/contrib/python/aiohttp/aiohttp/pytest_plugin.py b/contrib/python/aiohttp/aiohttp/pytest_plugin.py
new file mode 100644
index 0000000000..dd9a9f6179
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/pytest_plugin.py
@@ -0,0 +1,391 @@
+import asyncio
+import contextlib
+import warnings
+from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Union
+
+import pytest
+
+from aiohttp.helpers import PY_37, isasyncgenfunction
+from aiohttp.web import Application
+
+from .test_utils import (
+ BaseTestServer,
+ RawTestServer,
+ TestClient,
+ TestServer,
+ loop_context,
+ setup_test_loop,
+ teardown_test_loop,
+ unused_port as _unused_port,
+)
+
+try:
+ import uvloop
+except ImportError: # pragma: no cover
+ uvloop = None
+
+try:
+ import tokio
+except ImportError: # pragma: no cover
+ tokio = None
+
+AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]
+
+
+def pytest_addoption(parser): # type: ignore[no-untyped-def]
+ parser.addoption(
+ "--aiohttp-fast",
+ action="store_true",
+ default=False,
+ help="run tests faster by disabling extra checks",
+ )
+ parser.addoption(
+ "--aiohttp-loop",
+ action="store",
+ default="pyloop",
+ help="run tests with specific loop: pyloop, uvloop, tokio or all",
+ )
+ parser.addoption(
+ "--aiohttp-enable-loop-debug",
+ action="store_true",
+ default=False,
+ help="enable event loop debug mode",
+ )
+
+
+def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def]
+ """Set up pytest fixture.
+
+ Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
+ """
+ func = fixturedef.func
+
+ if isasyncgenfunction(func):
+ # async generator fixture
+ is_async_gen = True
+ elif asyncio.iscoroutinefunction(func):
+ # regular async fixture
+ is_async_gen = False
+ else:
+ # not an async fixture, nothing to do
+ return
+
+ strip_request = False
+ if "request" not in fixturedef.argnames:
+ fixturedef.argnames += ("request",)
+ strip_request = True
+
+ def wrapper(*args, **kwargs): # type: ignore[no-untyped-def]
+ request = kwargs["request"]
+ if strip_request:
+ del kwargs["request"]
+
+        # if neither the fixture nor the test uses the 'loop' fixture,
+ # 'getfixturevalue' will fail because the test is not parameterized
+ # (this can be removed someday if 'loop' is no longer parameterized)
+ if "loop" not in request.fixturenames:
+ raise Exception(
+ "Asynchronous fixtures must depend on the 'loop' fixture or "
+                    "be used in tests that depend on it."
+ )
+
+ _loop = request.getfixturevalue("loop")
+
+ if is_async_gen:
+ # for async generators, we need to advance the generator once,
+ # then advance it again in a finalizer
+ gen = func(*args, **kwargs)
+
+ def finalizer(): # type: ignore[no-untyped-def]
+ try:
+ return _loop.run_until_complete(gen.__anext__())
+ except StopAsyncIteration:
+ pass
+
+ request.addfinalizer(finalizer)
+ return _loop.run_until_complete(gen.__anext__())
+ else:
+ return _loop.run_until_complete(func(*args, **kwargs))
+
+ fixturedef.func = wrapper
+
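+# Illustrative sketch of what the hook above enables (the fixture body is a
+# hypothetical example): a plain async fixture such as
+#
+#     @pytest.fixture
+#     async def answer(loop):
+#         return await asyncio.sleep(0, result=42)
+#
+# is driven with loop.run_until_complete() instead of handing the test a
+# bare coroutine object.
+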
+
+@pytest.fixture
+def fast(request): # type: ignore[no-untyped-def]
+    """--aiohttp-fast config option"""
+ return request.config.getoption("--aiohttp-fast")
+
+
+@pytest.fixture
+def loop_debug(request): # type: ignore[no-untyped-def]
+    """--aiohttp-enable-loop-debug config option"""
+ return request.config.getoption("--aiohttp-enable-loop-debug")
+
+
+@contextlib.contextmanager
+def _runtime_warning_context(): # type: ignore[no-untyped-def]
+ """Context manager which checks for RuntimeWarnings.
+
+ This exists specifically to
+ avoid "coroutine 'X' was never awaited" warnings being missed.
+
+    If RuntimeWarnings occur in the context, a RuntimeError is raised.
+ """
+ with warnings.catch_warnings(record=True) as _warnings:
+ yield
+ rw = [
+ "{w.filename}:{w.lineno}:{w.message}".format(w=w)
+ for w in _warnings
+ if w.category == RuntimeWarning
+ ]
+ if rw:
+ raise RuntimeError(
+ "{} Runtime Warning{},\n{}".format(
+ len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
+ )
+ )
+
+
+@contextlib.contextmanager
+def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
+ """Passthrough loop context.
+
+    Sets up and tears down a loop unless one is passed in via the loop
+    argument, in which case it is passed straight through.
+ """
+ if loop:
+ # loop already exists, pass it straight through
+ yield loop
+ else:
+ # this shadows loop_context's standard behavior
+ loop = setup_test_loop()
+ yield loop
+ teardown_test_loop(loop, fast=fast)
+
+
+def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def]
+ """Fix pytest collecting for coroutines."""
+ if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
+ return list(collector._genfunctions(name, obj))
+
+
+def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def]
+ """Run coroutines in an event loop instead of a normal function call."""
+ fast = pyfuncitem.config.getoption("--aiohttp-fast")
+ if asyncio.iscoroutinefunction(pyfuncitem.function):
+ existing_loop = pyfuncitem.funcargs.get(
+ "proactor_loop"
+ ) or pyfuncitem.funcargs.get("loop", None)
+ with _runtime_warning_context():
+ with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
+ testargs = {
+ arg: pyfuncitem.funcargs[arg]
+ for arg in pyfuncitem._fixtureinfo.argnames
+ }
+ _loop.run_until_complete(pyfuncitem.obj(**testargs))
+
+ return True
+
+
+def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def]
+ if "loop_factory" not in metafunc.fixturenames:
+ return
+
+ loops = metafunc.config.option.aiohttp_loop
+ avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}
+
+ if uvloop is not None: # pragma: no cover
+ avail_factories["uvloop"] = uvloop.EventLoopPolicy
+
+ if tokio is not None: # pragma: no cover
+ avail_factories["tokio"] = tokio.EventLoopPolicy
+
+ if loops == "all":
+ loops = "pyloop,uvloop?,tokio?"
+
+ factories = {} # type: ignore[var-annotated]
+ for name in loops.split(","):
+ required = not name.endswith("?")
+ name = name.strip(" ?")
+ if name not in avail_factories: # pragma: no cover
+ if required:
+ raise ValueError(
+ "Unknown loop '%s', available loops: %s"
+ % (name, list(factories.keys()))
+ )
+ else:
+ continue
+ factories[name] = avail_factories[name]
+ metafunc.parametrize(
+ "loop_factory", list(factories.values()), ids=list(factories.keys())
+ )
+
+
+@pytest.fixture
+def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def]
+ """Return an instance of the event loop."""
+ policy = loop_factory()
+ asyncio.set_event_loop_policy(policy)
+ with loop_context(fast=fast) as _loop:
+ if loop_debug:
+ _loop.set_debug(True) # pragma: no cover
+ asyncio.set_event_loop(_loop)
+ yield _loop
+
+
+@pytest.fixture
+def proactor_loop(): # type: ignore[no-untyped-def]
+ if not PY_37:
+ policy = asyncio.get_event_loop_policy()
+ policy._loop_factory = asyncio.ProactorEventLoop # type: ignore[attr-defined]
+ else:
+ policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined]
+ asyncio.set_event_loop_policy(policy)
+
+ with loop_context(policy.new_event_loop) as _loop:
+ asyncio.set_event_loop(_loop)
+ yield _loop
+
+
+@pytest.fixture
+def unused_port(aiohttp_unused_port): # type: ignore[no-untyped-def] # pragma: no cover
+ warnings.warn(
+ "Deprecated, use aiohttp_unused_port fixture instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return aiohttp_unused_port
+
+
+@pytest.fixture
+def aiohttp_unused_port(): # type: ignore[no-untyped-def]
+ """Return a port that is unused on the current host."""
+ return _unused_port
+
+
+@pytest.fixture
+def aiohttp_server(loop): # type: ignore[no-untyped-def]
+ """Factory to create a TestServer instance, given an app.
+
+ aiohttp_server(app, **kwargs)
+ """
+ servers = []
+
+ async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def]
+ server = TestServer(app, port=port)
+ await server.start_server(loop=loop, **kwargs)
+ servers.append(server)
+ return server
+
+ yield go
+
+ async def finalize() -> None:
+ while servers:
+ await servers.pop().close()
+
+ loop.run_until_complete(finalize())
+
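+# Illustrative test sketch (names are hypothetical, assuming
+# `from aiohttp import web`):
+#
+#     async def test_ping(aiohttp_server):
+#         app = web.Application()
+#         server = await aiohttp_server(app)
+#         assert server.port is not None
+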
+
+@pytest.fixture
+def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover
+ warnings.warn(
+ "Deprecated, use aiohttp_server fixture instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return aiohttp_server
+
+
+@pytest.fixture
+def aiohttp_raw_server(loop): # type: ignore[no-untyped-def]
+ """Factory to create a RawTestServer instance, given a web handler.
+
+ aiohttp_raw_server(handler, **kwargs)
+ """
+ servers = []
+
+ async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def]
+ server = RawTestServer(handler, port=port)
+ await server.start_server(loop=loop, **kwargs)
+ servers.append(server)
+ return server
+
+ yield go
+
+ async def finalize() -> None:
+ while servers:
+ await servers.pop().close()
+
+ loop.run_until_complete(finalize())
+
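+# Illustrative test sketch (the handler is a hypothetical example): a raw
+# handler is a coroutine taking a web.Request and returning a response:
+#
+#     async def test_raw(aiohttp_raw_server):
+#         async def handler(request):
+#             return web.Response(text="OK")
+#
+#         server = await aiohttp_raw_server(handler)
+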
+
+@pytest.fixture
+def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover
+ aiohttp_raw_server,
+):
+ warnings.warn(
+ "Deprecated, use aiohttp_raw_server fixture instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return aiohttp_raw_server
+
+
+@pytest.fixture
+def aiohttp_client(
+ loop: asyncio.AbstractEventLoop,
+) -> Generator[AiohttpClient, None, None]:
+ """Factory to create a TestClient instance.
+
+ aiohttp_client(app, **kwargs)
+ aiohttp_client(server, **kwargs)
+ aiohttp_client(raw_server, **kwargs)
+ """
+ clients = []
+
+ async def go(
+ __param: Union[Application, BaseTestServer],
+ *args: Any,
+ server_kwargs: Optional[Dict[str, Any]] = None,
+ **kwargs: Any
+ ) -> TestClient:
+
+ if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type]
+ __param, (Application, BaseTestServer)
+ ):
+ __param = __param(loop, *args, **kwargs)
+ kwargs = {}
+ else:
+ assert not args, "args should be empty"
+
+ if isinstance(__param, Application):
+ server_kwargs = server_kwargs or {}
+ server = TestServer(__param, loop=loop, **server_kwargs)
+ client = TestClient(server, loop=loop, **kwargs)
+ elif isinstance(__param, BaseTestServer):
+ client = TestClient(__param, loop=loop, **kwargs)
+ else:
+ raise ValueError("Unknown argument type: %r" % type(__param))
+
+ await client.start_server()
+ clients.append(client)
+ return client
+
+ yield go
+
+ async def finalize() -> None:
+ while clients:
+ await clients.pop().close()
+
+ loop.run_until_complete(finalize())
+
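+# Illustrative test sketch (assuming `from aiohttp import web`): the factory
+# accepts an Application, a TestServer, or a RawTestServer:
+#
+#     async def test_index(aiohttp_client):
+#         client = await aiohttp_client(web.Application())
+#         resp = await client.get("/")
+#         assert resp.status == 404  # no routes registered
+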
+
+@pytest.fixture
+def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover
+ warnings.warn(
+ "Deprecated, use aiohttp_client fixture instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return aiohttp_client
diff --git a/contrib/python/aiohttp/aiohttp/resolver.py b/contrib/python/aiohttp/aiohttp/resolver.py
new file mode 100644
index 0000000000..6668fa80ec
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/resolver.py
@@ -0,0 +1,160 @@
+import asyncio
+import socket
+from typing import Any, Dict, List, Optional, Type, Union
+
+from .abc import AbstractResolver
+from .helpers import get_running_loop
+
+__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
+
+try:
+ import aiodns
+
+ # aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
+except ImportError: # pragma: no cover
+ aiodns = None
+
+aiodns_default = False
+
+
+class ThreadedResolver(AbstractResolver):
+ """Threaded resolver.
+
+ Uses an Executor for synchronous getaddrinfo() calls.
+ concurrent.futures.ThreadPoolExecutor is used by default.
+ """
+
+ def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
+ self._loop = get_running_loop(loop)
+
+ async def resolve(
+ self, hostname: str, port: int = 0, family: int = socket.AF_INET
+ ) -> List[Dict[str, Any]]:
+ infos = await self._loop.getaddrinfo(
+ hostname,
+ port,
+ type=socket.SOCK_STREAM,
+ family=family,
+ # flags=socket.AI_ADDRCONFIG,
+ )
+
+ hosts = []
+ for family, _, proto, _, address in infos:
+ if family == socket.AF_INET6:
+ if len(address) < 3:
+                    # IPv6 is not supported by this Python build,
+                    # or IPv6 is not enabled on the host
+ continue
+ if address[3]: # type: ignore[misc]
+ # This is essential for link-local IPv6 addresses.
+                    # Link-local IPv6 is a VERY rare case. Strictly speaking,
+                    # we should use getnameinfo() unconditionally, but that
+                    # would hurt performance in the common case.
+ host, _port = socket.getnameinfo(
+ address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
+ )
+ port = int(_port)
+ else:
+ host, port = address[:2]
+ else: # IPv4
+ assert family == socket.AF_INET
+ host, port = address # type: ignore[misc]
+ hosts.append(
+ {
+ "hostname": hostname,
+ "host": host,
+ "port": port,
+ "family": family,
+ "proto": proto,
+ "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
+ }
+ )
+
+ return hosts
+
+ async def close(self) -> None:
+ pass
+
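+# Illustrative result shape (the host value is a placeholder):
+#
+#     infos = await ThreadedResolver().resolve("example.com", 80)
+#     # -> [{"hostname": "example.com", "host": "93.184.216.34", "port": 80,
+#     #      "family": socket.AF_INET, "proto": 6, "flags": ...}, ...]
+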
+
+class AsyncResolver(AbstractResolver):
+ """Use the `aiodns` package to make asynchronous DNS lookups"""
+
+ def __init__(
+ self,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ *args: Any,
+ **kwargs: Any
+ ) -> None:
+ if aiodns is None:
+ raise RuntimeError("Resolver requires aiodns library")
+
+ self._loop = get_running_loop(loop)
+ self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)
+
+ if not hasattr(self._resolver, "gethostbyname"):
+ # aiodns 1.1 is not available, fallback to DNSResolver.query
+ self.resolve = self._resolve_with_query # type: ignore
+
+ async def resolve(
+ self, host: str, port: int = 0, family: int = socket.AF_INET
+ ) -> List[Dict[str, Any]]:
+ try:
+ resp = await self._resolver.gethostbyname(host, family)
+ except aiodns.error.DNSError as exc:
+            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
+ raise OSError(msg) from exc
+ hosts = []
+ for address in resp.addresses:
+ hosts.append(
+ {
+ "hostname": host,
+ "host": address,
+ "port": port,
+ "family": family,
+ "proto": 0,
+ "flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
+ }
+ )
+
+ if not hosts:
+ raise OSError("DNS lookup failed")
+
+ return hosts
+
+ async def _resolve_with_query(
+ self, host: str, port: int = 0, family: int = socket.AF_INET
+ ) -> List[Dict[str, Any]]:
+ if family == socket.AF_INET6:
+ qtype = "AAAA"
+ else:
+ qtype = "A"
+
+ try:
+ resp = await self._resolver.query(host, qtype)
+ except aiodns.error.DNSError as exc:
+            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
+ raise OSError(msg) from exc
+
+ hosts = []
+ for rr in resp:
+ hosts.append(
+ {
+ "hostname": host,
+ "host": rr.host,
+ "port": port,
+ "family": family,
+ "proto": 0,
+ "flags": socket.AI_NUMERICHOST,
+ }
+ )
+
+ if not hosts:
+ raise OSError("DNS lookup failed")
+
+ return hosts
+
+ async def close(self) -> None:
+ self._resolver.cancel()
+
+
+_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
+DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
diff --git a/contrib/python/aiohttp/aiohttp/streams.py b/contrib/python/aiohttp/aiohttp/streams.py
new file mode 100644
index 0000000000..055848877e
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/streams.py
@@ -0,0 +1,660 @@
+import asyncio
+import collections
+import warnings
+from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar
+
+from .base_protocol import BaseProtocol
+from .helpers import BaseTimerContext, set_exception, set_result
+from .log import internal_logger
+from .typedefs import Final
+
+__all__ = (
+ "EMPTY_PAYLOAD",
+ "EofStream",
+ "StreamReader",
+ "DataQueue",
+ "FlowControlDataQueue",
+)
+
+_T = TypeVar("_T")
+
+
+class EofStream(Exception):
+    """EOF stream indication."""
+
+
+class AsyncStreamIterator(Generic[_T]):
+ def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
+ self.read_func = read_func
+
+ def __aiter__(self) -> "AsyncStreamIterator[_T]":
+ return self
+
+ async def __anext__(self) -> _T:
+ try:
+ rv = await self.read_func()
+ except EofStream:
+ raise StopAsyncIteration
+ if rv == b"":
+ raise StopAsyncIteration
+ return rv
+
+
+class ChunkTupleAsyncStreamIterator:
+ def __init__(self, stream: "StreamReader") -> None:
+ self._stream = stream
+
+ def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
+ return self
+
+ async def __anext__(self) -> Tuple[bytes, bool]:
+ rv = await self._stream.readchunk()
+ if rv == (b"", False):
+ raise StopAsyncIteration
+ return rv
+
+
+class AsyncStreamReaderMixin:
+ def __aiter__(self) -> AsyncStreamIterator[bytes]:
+ return AsyncStreamIterator(self.readline) # type: ignore[attr-defined]
+
+ def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
+ """Returns an asynchronous iterator that yields chunks of size n.
+
+        Available for Python 3.5+ only.
+ """
+ return AsyncStreamIterator(
+ lambda: self.read(n) # type: ignore[attr-defined,no-any-return]
+ )
+
+ def iter_any(self) -> AsyncStreamIterator[bytes]:
+ """Yield all available data as soon as it is received.
+
+        Available for Python 3.5+ only.
+ """
+ return AsyncStreamIterator(self.readany) # type: ignore[attr-defined]
+
+ def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
+        """Yield chunks of data as they are received.
+
+ The yielded objects are tuples
+ of (bytes, bool) as returned by the StreamReader.readchunk method.
+
+        Available for Python 3.5+ only.
+ """
+ return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type]
+
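+# Illustrative sketch of the helpers above (`reader` stands for any
+# StreamReader, e.g. a response's .content):
+#
+#     async for chunk in reader.iter_chunked(1024):
+#         ...  # fixed-size chunks
+#     async for data, end_of_http_chunk in reader.iter_chunks():
+#         ...  # (bytes, bool) pairs per readchunk()
+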
+
+class StreamReader(AsyncStreamReaderMixin):
+ """An enhancement of asyncio.StreamReader.
+
+ Supports asynchronous iteration by line, chunk or as available::
+
+ async for line in reader:
+ ...
+ async for chunk in reader.iter_chunked(1024):
+ ...
+ async for slice in reader.iter_any():
+ ...
+
+ """
+
+ total_bytes = 0
+
+ def __init__(
+ self,
+ protocol: BaseProtocol,
+ limit: int,
+ *,
+ timer: Optional[BaseTimerContext] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ ) -> None:
+ self._protocol = protocol
+ self._low_water = limit
+ self._high_water = limit * 2
+ if loop is None:
+ loop = asyncio.get_event_loop()
+ self._loop = loop
+ self._size = 0
+ self._cursor = 0
+ self._http_chunk_splits = None # type: Optional[List[int]]
+ self._buffer = collections.deque() # type: Deque[bytes]
+ self._buffer_offset = 0
+ self._eof = False
+ self._waiter = None # type: Optional[asyncio.Future[None]]
+ self._eof_waiter = None # type: Optional[asyncio.Future[None]]
+ self._exception = None # type: Optional[BaseException]
+ self._timer = timer
+ self._eof_callbacks = [] # type: List[Callable[[], None]]
+
+ def __repr__(self) -> str:
+ info = [self.__class__.__name__]
+ if self._size:
+ info.append("%d bytes" % self._size)
+ if self._eof:
+ info.append("eof")
+ if self._low_water != 2 ** 16: # default limit
+ info.append("low=%d high=%d" % (self._low_water, self._high_water))
+ if self._waiter:
+ info.append("w=%r" % self._waiter)
+ if self._exception:
+ info.append("e=%r" % self._exception)
+ return "<%s>" % " ".join(info)
+
+ def get_read_buffer_limits(self) -> Tuple[int, int]:
+ return (self._low_water, self._high_water)
+
+ def exception(self) -> Optional[BaseException]:
+ return self._exception
+
+ def set_exception(self, exc: BaseException) -> None:
+ self._exception = exc
+ self._eof_callbacks.clear()
+
+ waiter = self._waiter
+ if waiter is not None:
+ self._waiter = None
+ set_exception(waiter, exc)
+
+ waiter = self._eof_waiter
+ if waiter is not None:
+ self._eof_waiter = None
+ set_exception(waiter, exc)
+
+ def on_eof(self, callback: Callable[[], None]) -> None:
+ if self._eof:
+ try:
+ callback()
+ except Exception:
+ internal_logger.exception("Exception in eof callback")
+ else:
+ self._eof_callbacks.append(callback)
+
+ def feed_eof(self) -> None:
+ self._eof = True
+
+ waiter = self._waiter
+ if waiter is not None:
+ self._waiter = None
+ set_result(waiter, None)
+
+ waiter = self._eof_waiter
+ if waiter is not None:
+ self._eof_waiter = None
+ set_result(waiter, None)
+
+ for cb in self._eof_callbacks:
+ try:
+ cb()
+ except Exception:
+ internal_logger.exception("Exception in eof callback")
+
+ self._eof_callbacks.clear()
+
+ def is_eof(self) -> bool:
+ """Return True if 'feed_eof' was called."""
+ return self._eof
+
+ def at_eof(self) -> bool:
+ """Return True if the buffer is empty and 'feed_eof' was called."""
+ return self._eof and not self._buffer
+
+ async def wait_eof(self) -> None:
+ if self._eof:
+ return
+
+ assert self._eof_waiter is None
+ self._eof_waiter = self._loop.create_future()
+ try:
+ await self._eof_waiter
+ finally:
+ self._eof_waiter = None
+
+ def unread_data(self, data: bytes) -> None:
+        """Roll back reading some data from the stream, inserting it at the buffer head."""
+ warnings.warn(
+ "unread_data() is deprecated "
+ "and will be removed in future releases (#3260)",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ if not data:
+ return
+
+ if self._buffer_offset:
+ self._buffer[0] = self._buffer[0][self._buffer_offset :]
+ self._buffer_offset = 0
+ self._size += len(data)
+ self._cursor -= len(data)
+ self._buffer.appendleft(data)
+ self._eof_counter = 0
+
+ # TODO: size is ignored, remove the param later
+ def feed_data(self, data: bytes, size: int = 0) -> None:
+ assert not self._eof, "feed_data after feed_eof"
+
+ if not data:
+ return
+
+ self._size += len(data)
+ self._buffer.append(data)
+ self.total_bytes += len(data)
+
+ waiter = self._waiter
+ if waiter is not None:
+ self._waiter = None
+ set_result(waiter, None)
+
+ if self._size > self._high_water and not self._protocol._reading_paused:
+ self._protocol.pause_reading()
+
+ def begin_http_chunk_receiving(self) -> None:
+ if self._http_chunk_splits is None:
+ if self.total_bytes:
+ raise RuntimeError(
+                    "Called begin_http_chunk_receiving when "
+                    "some data was already fed"
+ )
+ self._http_chunk_splits = []
+
+ def end_http_chunk_receiving(self) -> None:
+ if self._http_chunk_splits is None:
+ raise RuntimeError(
+                "Called end_http_chunk_receiving without calling "
+                "begin_http_chunk_receiving first"
+ )
+
+        # self._http_chunk_splits contains logical byte offsets from the start
+        # of the body transfer. Each offset marks the end of a chunk.
+        # "Logical" means bytes accessible to the user.
+        # If no chunks containing logical data were received, the current
+        # position is definitely zero.
+ pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0
+
+ if self.total_bytes == pos:
+ # We should not add empty chunks here. So we check for that.
+ # Note, when chunked + gzip is used, we can receive a chunk
+ # of compressed data, but that data may not be enough for gzip FSM
+ # to yield any uncompressed data. That's why current position may
+ # not change after receiving a chunk.
+ return
+
+ self._http_chunk_splits.append(self.total_bytes)
+
+ # wake up readchunk when end of http chunk received
+ waiter = self._waiter
+ if waiter is not None:
+ self._waiter = None
+ set_result(waiter, None)
+
+ async def _wait(self, func_name: str) -> None:
+ # StreamReader uses a future to link the protocol feed_data() method
+ # to a read coroutine. Running two read coroutines at the same time
+        # would have unexpected behaviour: it would not be possible to know
+        # which coroutine would get the next data.
+ if self._waiter is not None:
+ raise RuntimeError(
+ "%s() called while another coroutine is "
+ "already waiting for incoming data" % func_name
+ )
+
+ waiter = self._waiter = self._loop.create_future()
+ try:
+ if self._timer:
+ with self._timer:
+ await waiter
+ else:
+ await waiter
+ finally:
+ self._waiter = None
+
+ async def readline(self) -> bytes:
+ return await self.readuntil()
+
+ async def readuntil(self, separator: bytes = b"\n") -> bytes:
+ seplen = len(separator)
+ if seplen == 0:
+            raise ValueError("Separator should be at least a one-byte string")
+
+ if self._exception is not None:
+ raise self._exception
+
+ chunk = b""
+ chunk_size = 0
+ not_enough = True
+
+ while not_enough:
+ while self._buffer and not_enough:
+ offset = self._buffer_offset
+ ichar = self._buffer[0].find(separator, offset) + 1
+ # Read from current offset to found separator or to the end.
+ data = self._read_nowait_chunk(ichar - offset if ichar else -1)
+ chunk += data
+ chunk_size += len(data)
+ if ichar:
+ not_enough = False
+
+ if chunk_size > self._high_water:
+ raise ValueError("Chunk too big")
+
+ if self._eof:
+ break
+
+ if not_enough:
+ await self._wait("readuntil")
+
+ return chunk
+
+ async def read(self, n: int = -1) -> bytes:
+ if self._exception is not None:
+ raise self._exception
+
+        # Migration problem: with DataQueue you have to catch the EofStream
+        # exception, so the common pattern is to run payload.read() inside an
+        # infinite loop, which can turn into a real infinite loop with
+        # StreamReader. Let's keep this code for one major release.
+ if __debug__:
+ if self._eof and not self._buffer:
+ self._eof_counter = getattr(self, "_eof_counter", 0) + 1
+ if self._eof_counter > 5:
+ internal_logger.warning(
+ "Multiple access to StreamReader in eof state, "
+ "might be infinite loop.",
+ stack_info=True,
+ )
+
+ if not n:
+ return b""
+
+ if n < 0:
+ # This used to just loop creating a new waiter hoping to
+ # collect everything in self._buffer, but that would
+ # deadlock if the subprocess sends more than self.limit
+ # bytes. So just call self.readany() until EOF.
+ blocks = []
+ while True:
+ block = await self.readany()
+ if not block:
+ break
+ blocks.append(block)
+ return b"".join(blocks)
+
+ # TODO: should be `if` instead of `while`
+ # because waiter maybe triggered on chunk end,
+ # without feeding any data
+ while not self._buffer and not self._eof:
+ await self._wait("read")
+
+ return self._read_nowait(n)
+
+ async def readany(self) -> bytes:
+ if self._exception is not None:
+ raise self._exception
+
+ # TODO: should be `if` instead of `while`
+ # because waiter maybe triggered on chunk end,
+ # without feeding any data
+ while not self._buffer and not self._eof:
+ await self._wait("readany")
+
+ return self._read_nowait(-1)
+
+ async def readchunk(self) -> Tuple[bytes, bool]:
+ """Returns a tuple of (data, end_of_http_chunk).
+
+        When chunked transfer encoding is used, end_of_http_chunk is a
+        boolean indicating whether the end of the data corresponds to the end
+        of an HTTP chunk; otherwise it is always False.
+ """
+ while True:
+ if self._exception is not None:
+ raise self._exception
+
+ while self._http_chunk_splits:
+ pos = self._http_chunk_splits.pop(0)
+ if pos == self._cursor:
+ return (b"", True)
+ if pos > self._cursor:
+ return (self._read_nowait(pos - self._cursor), True)
+ internal_logger.warning(
+ "Skipping HTTP chunk end due to data "
+ "consumption beyond chunk boundary"
+ )
+
+ if self._buffer:
+ return (self._read_nowait_chunk(-1), False)
+ # return (self._read_nowait(-1), False)
+
+ if self._eof:
+ # Special case for signifying EOF.
+ # (b'', True) is not a final return value actually.
+ return (b"", False)
+
+ await self._wait("readchunk")
+
+ async def readexactly(self, n: int) -> bytes:
+ if self._exception is not None:
+ raise self._exception
+
+ blocks = [] # type: List[bytes]
+ while n > 0:
+ block = await self.read(n)
+ if not block:
+ partial = b"".join(blocks)
+ raise asyncio.IncompleteReadError(partial, len(partial) + n)
+ blocks.append(block)
+ n -= len(block)
+
+ return b"".join(blocks)
+
+ def read_nowait(self, n: int = -1) -> bytes:
+        # The default was changed to be consistent with .read(-1).
+        #
+        # Most users probably don't know about this method, so they
+        # are not affected by the change.
+ if self._exception is not None:
+ raise self._exception
+
+ if self._waiter and not self._waiter.done():
+ raise RuntimeError(
+ "Called while some coroutine is waiting for incoming data."
+ )
+
+ return self._read_nowait(n)
+
+ def _read_nowait_chunk(self, n: int) -> bytes:
+ first_buffer = self._buffer[0]
+ offset = self._buffer_offset
+ if n != -1 and len(first_buffer) - offset > n:
+ data = first_buffer[offset : offset + n]
+ self._buffer_offset += n
+
+ elif offset:
+ self._buffer.popleft()
+ data = first_buffer[offset:]
+ self._buffer_offset = 0
+
+ else:
+ data = self._buffer.popleft()
+
+ self._size -= len(data)
+ self._cursor += len(data)
+
+ chunk_splits = self._http_chunk_splits
+ # Prevent memory leak: drop useless chunk splits
+ while chunk_splits and chunk_splits[0] < self._cursor:
+ chunk_splits.pop(0)
+
+ if self._size < self._low_water and self._protocol._reading_paused:
+ self._protocol.resume_reading()
+ return data
+
+ def _read_nowait(self, n: int) -> bytes:
+        """Read no more than n bytes, or the whole buffer if n == -1."""
+ chunks = []
+
+ while self._buffer:
+ chunk = self._read_nowait_chunk(n)
+ chunks.append(chunk)
+ if n != -1:
+ n -= len(chunk)
+ if n == 0:
+ break
+
+ return b"".join(chunks) if chunks else b""
+
+
+class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init]
+ def __init__(self) -> None:
+ pass
+
+ def exception(self) -> Optional[BaseException]:
+ return None
+
+ def set_exception(self, exc: BaseException) -> None:
+ pass
+
+ def on_eof(self, callback: Callable[[], None]) -> None:
+ try:
+ callback()
+ except Exception:
+ internal_logger.exception("Exception in eof callback")
+
+ def feed_eof(self) -> None:
+ pass
+
+ def is_eof(self) -> bool:
+ return True
+
+ def at_eof(self) -> bool:
+ return True
+
+ async def wait_eof(self) -> None:
+ return
+
+ def feed_data(self, data: bytes, n: int = 0) -> None:
+ pass
+
+ async def readline(self) -> bytes:
+ return b""
+
+ async def read(self, n: int = -1) -> bytes:
+ return b""
+
+ # TODO add async def readuntil
+
+ async def readany(self) -> bytes:
+ return b""
+
+ async def readchunk(self) -> Tuple[bytes, bool]:
+ return (b"", True)
+
+ async def readexactly(self, n: int) -> bytes:
+ raise asyncio.IncompleteReadError(b"", n)
+
+ def read_nowait(self, n: int = -1) -> bytes:
+ return b""
+
+
+EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
+
+
+class DataQueue(Generic[_T]):
+ """DataQueue is a general-purpose blocking queue with one reader."""
+
+ def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
+ self._loop = loop
+ self._eof = False
+ self._waiter = None # type: Optional[asyncio.Future[None]]
+ self._exception = None # type: Optional[BaseException]
+ self._size = 0
+ self._buffer = collections.deque() # type: Deque[Tuple[_T, int]]
+
+ def __len__(self) -> int:
+ return len(self._buffer)
+
+ def is_eof(self) -> bool:
+ return self._eof
+
+ def at_eof(self) -> bool:
+ return self._eof and not self._buffer
+
+ def exception(self) -> Optional[BaseException]:
+ return self._exception
+
+ def set_exception(self, exc: BaseException) -> None:
+ self._eof = True
+ self._exception = exc
+
+ waiter = self._waiter
+ if waiter is not None:
+ self._waiter = None
+ set_exception(waiter, exc)
+
+ def feed_data(self, data: _T, size: int = 0) -> None:
+ self._size += size
+ self._buffer.append((data, size))
+
+ waiter = self._waiter
+ if waiter is not None:
+ self._waiter = None
+ set_result(waiter, None)
+
+ def feed_eof(self) -> None:
+ self._eof = True
+
+ waiter = self._waiter
+ if waiter is not None:
+ self._waiter = None
+ set_result(waiter, None)
+
+ async def read(self) -> _T:
+ if not self._buffer and not self._eof:
+ assert not self._waiter
+ self._waiter = self._loop.create_future()
+ try:
+ await self._waiter
+ except (asyncio.CancelledError, asyncio.TimeoutError):
+ self._waiter = None
+ raise
+
+ if self._buffer:
+ data, size = self._buffer.popleft()
+ self._size -= size
+ return data
+ else:
+ if self._exception is not None:
+ raise self._exception
+ else:
+ raise EofStream
+
+ def __aiter__(self) -> AsyncStreamIterator[_T]:
+ return AsyncStreamIterator(self.read)
+
+
+class FlowControlDataQueue(DataQueue[_T]):
+ """FlowControlDataQueue resumes and pauses an underlying stream.
+
+ It is a destination for parsed data.
+ """
+
+ def __init__(
+ self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
+ ) -> None:
+ super().__init__(loop=loop)
+
+ self._protocol = protocol
+ self._limit = limit * 2
+
+ def feed_data(self, data: _T, size: int = 0) -> None:
+ super().feed_data(data, size)
+
+ if self._size > self._limit and not self._protocol._reading_paused:
+ self._protocol.pause_reading()
+
+ async def read(self) -> _T:
+ try:
+ return await super().read()
+ finally:
+ if self._size < self._limit and self._protocol._reading_paused:
+ self._protocol.resume_reading()
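+
+
+# Illustrative sketch of the flow control above (`protocol` and `loop` are
+# placeholders): with limit=2**16 the queue pauses the protocol once more
+# than 2 * limit bytes are buffered and resumes it as read() drains the
+# buffer back below that threshold:
+#
+#     queue: FlowControlDataQueue[bytes] = FlowControlDataQueue(
+#         protocol, 2 ** 16, loop=loop
+#     )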
diff --git a/contrib/python/aiohttp/aiohttp/tcp_helpers.py b/contrib/python/aiohttp/aiohttp/tcp_helpers.py
new file mode 100644
index 0000000000..0e1dbf1655
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/tcp_helpers.py
@@ -0,0 +1,38 @@
+"""Helper methods to tune a TCP connection"""
+
+import asyncio
+import socket
+from contextlib import suppress
+from typing import Optional # noqa
+
+__all__ = ("tcp_keepalive", "tcp_nodelay")
+
+
+if hasattr(socket, "SO_KEEPALIVE"):
+
+ def tcp_keepalive(transport: asyncio.Transport) -> None:
+ sock = transport.get_extra_info("socket")
+ if sock is not None:
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+
+
+else:
+
+ def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover
+ pass
+
+
+def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
+ sock = transport.get_extra_info("socket")
+
+ if sock is None:
+ return
+
+ if sock.family not in (socket.AF_INET, socket.AF_INET6):
+ return
+
+ value = bool(value)
+
+ # socket may be closed already, on windows OSError get raised
+ with suppress(OSError):
+ sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
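+
+
+# Illustrative usage (`transport` is a placeholder for a connected
+# asyncio.Transport, e.g. obtained from loop.create_connection()):
+#
+#     tcp_keepalive(transport)
+#     tcp_nodelay(transport, True)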
diff --git a/contrib/python/aiohttp/aiohttp/test_utils.py b/contrib/python/aiohttp/aiohttp/test_utils.py
new file mode 100644
index 0000000000..361dae486c
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/test_utils.py
@@ -0,0 +1,698 @@
+"""Utilities shared by tests."""
+
+import asyncio
+import contextlib
+import gc
+import inspect
+import ipaddress
+import os
+import socket
+import sys
+import warnings
+from abc import ABC, abstractmethod
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Iterator,
+ List,
+ Optional,
+ Type,
+ Union,
+ cast,
+)
+from unittest import mock
+
+from aiosignal import Signal
+from multidict import CIMultiDict, CIMultiDictProxy
+from yarl import URL
+
+import aiohttp
+from aiohttp.client import _RequestContextManager, _WSRequestContextManager
+
+from . import ClientSession, hdrs
+from .abc import AbstractCookieJar
+from .client_reqrep import ClientResponse
+from .client_ws import ClientWebSocketResponse
+from .helpers import PY_38, sentinel
+from .http import HttpVersion, RawRequestMessage
+from .web import (
+ Application,
+ AppRunner,
+ BaseRunner,
+ Request,
+ Server,
+ ServerRunner,
+ SockSite,
+ UrlMappingMatchInfo,
+)
+from .web_protocol import _RequestHandler
+
+if TYPE_CHECKING: # pragma: no cover
+ from ssl import SSLContext
+else:
+ SSLContext = None
+
+if PY_38:
+ from unittest import IsolatedAsyncioTestCase as TestCase
+else:
+ from asynctest import TestCase # type: ignore[no-redef]
+
+REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
+
+
+def get_unused_port_socket(
+ host: str, family: socket.AddressFamily = socket.AF_INET
+) -> socket.socket:
+ return get_port_socket(host, 0, family)
+
+
+def get_port_socket(
+ host: str, port: int, family: socket.AddressFamily
+) -> socket.socket:
+ s = socket.socket(family, socket.SOCK_STREAM)
+ if REUSE_ADDRESS:
+ # Windows has different semantics for SO_REUSEADDR,
+ # so don't set it. Ref:
+ # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
+ s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ s.bind((host, port))
+ return s
+
+
+def unused_port() -> int:
+ """Return a port that is unused on the current host."""
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.bind(("127.0.0.1", 0))
+ return cast(int, s.getsockname()[1])
+
+
+class BaseTestServer(ABC):
+ __test__ = False
+
+ def __init__(
+ self,
+ *,
+ scheme: Union[str, object] = sentinel,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ host: str = "127.0.0.1",
+ port: Optional[int] = None,
+ skip_url_asserts: bool = False,
+ socket_factory: Callable[
+ [str, int, socket.AddressFamily], socket.socket
+ ] = get_port_socket,
+ **kwargs: Any,
+ ) -> None:
+ self._loop = loop
+ self.runner = None # type: Optional[BaseRunner]
+ self._root = None # type: Optional[URL]
+ self.host = host
+ self.port = port
+ self._closed = False
+ self.scheme = scheme
+ self.skip_url_asserts = skip_url_asserts
+ self.socket_factory = socket_factory
+
+ async def start_server(
+ self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
+ ) -> None:
+ if self.runner:
+ return
+ self._loop = loop
+ self._ssl = kwargs.pop("ssl", None)
+ self.runner = await self._make_runner(**kwargs)
+ await self.runner.setup()
+ if not self.port:
+ self.port = 0
+ try:
+ version = ipaddress.ip_address(self.host).version
+ except ValueError:
+ version = 4
+ family = socket.AF_INET6 if version == 6 else socket.AF_INET
+ _sock = self.socket_factory(self.host, self.port, family)
+ self.host, self.port = _sock.getsockname()[:2]
+ site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
+ await site.start()
+ server = site._server
+ assert server is not None
+ sockets = server.sockets
+ assert sockets is not None
+ self.port = sockets[0].getsockname()[1]
+ if self.scheme is sentinel:
+ if self._ssl:
+ scheme = "https"
+ else:
+ scheme = "http"
+ self.scheme = scheme
+ self._root = URL(f"{self.scheme}://{self.host}:{self.port}")
+
+ @abstractmethod # pragma: no cover
+ async def _make_runner(self, **kwargs: Any) -> BaseRunner:
+ pass
+
+ def make_url(self, path: str) -> URL:
+ assert self._root is not None
+ url = URL(path)
+ if not self.skip_url_asserts:
+ assert not url.is_absolute()
+ return self._root.join(url)
+ else:
+ return URL(str(self._root) + path)
+
+ @property
+ def started(self) -> bool:
+ return self.runner is not None
+
+ @property
+ def closed(self) -> bool:
+ return self._closed
+
+ @property
+ def handler(self) -> Server:
+ # for backward compatibility
+ # web.Server instance
+ runner = self.runner
+ assert runner is not None
+ assert runner.server is not None
+ return runner.server
+
+ async def close(self) -> None:
+        """Close all fixtures created by the test server.
+
+        After that point, the server is no longer usable.
+
+ This is an idempotent function: running close multiple times
+ will not have any additional effects.
+
+ close is also run when the object is garbage collected, and on
+ exit when used as a context manager.
+
+ """
+ if self.started and not self.closed:
+ assert self.runner is not None
+ await self.runner.cleanup()
+ self._root = None
+ self.port = None
+ self._closed = True
+
+ def __enter__(self) -> None:
+ raise TypeError("Use async with instead")
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
+ # __exit__ should exist in pair with __enter__ but never executed
+ pass # pragma: no cover
+
+ async def __aenter__(self) -> "BaseTestServer":
+ await self.start_server(loop=self._loop)
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_value: Optional[BaseException],
+ traceback: Optional[TracebackType],
+ ) -> None:
+ await self.close()
+
+
+class TestServer(BaseTestServer):
+ def __init__(
+ self,
+ app: Application,
+ *,
+ scheme: Union[str, object] = sentinel,
+ host: str = "127.0.0.1",
+ port: Optional[int] = None,
+ **kwargs: Any,
+ ):
+ self.app = app
+ super().__init__(scheme=scheme, host=host, port=port, **kwargs)
+
+ async def _make_runner(self, **kwargs: Any) -> BaseRunner:
+ return AppRunner(self.app, **kwargs)
+
+
+class RawTestServer(BaseTestServer):
+ def __init__(
+ self,
+ handler: _RequestHandler,
+ *,
+ scheme: Union[str, object] = sentinel,
+ host: str = "127.0.0.1",
+ port: Optional[int] = None,
+ **kwargs: Any,
+ ) -> None:
+ self._handler = handler
+ super().__init__(scheme=scheme, host=host, port=port, **kwargs)
+
+ async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
+ srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
+ return ServerRunner(srv, debug=debug, **kwargs)
+
+
+class TestClient:
+ """
+ A test client implementation.
+
+    Intended for writing functional tests of aiohttp-based servers.
+
+ """
+
+ __test__ = False
+
+ def __init__(
+ self,
+ server: BaseTestServer,
+ *,
+ cookie_jar: Optional[AbstractCookieJar] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ **kwargs: Any,
+ ) -> None:
+ if not isinstance(server, BaseTestServer):
+ raise TypeError(
+                "server must be a BaseTestServer instance, found type: %r"
+                % type(server)
+ )
+ self._server = server
+ self._loop = loop
+ if cookie_jar is None:
+ cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
+ self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
+ self._closed = False
+ self._responses = [] # type: List[ClientResponse]
+ self._websockets = [] # type: List[ClientWebSocketResponse]
+
+ async def start_server(self) -> None:
+ await self._server.start_server(loop=self._loop)
+
+ @property
+ def host(self) -> str:
+ return self._server.host
+
+ @property
+ def port(self) -> Optional[int]:
+ return self._server.port
+
+ @property
+ def server(self) -> BaseTestServer:
+ return self._server
+
+ @property
+ def app(self) -> Optional[Application]:
+ return cast(Optional[Application], getattr(self._server, "app", None))
+
+ @property
+ def session(self) -> ClientSession:
+ """An internal aiohttp.ClientSession.
+
+ Unlike the methods on the TestClient, client session requests
+ do not automatically include the host in the url queried, and
+ will require an absolute path to the resource.
+
+ """
+ return self._session
+
+ def make_url(self, path: str) -> URL:
+ return self._server.make_url(path)
+
+ async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse:
+ resp = await self._session.request(method, self.make_url(path), **kwargs)
+ # save it to close later
+ self._responses.append(resp)
+ return resp
+
+ def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager:
+        """Routes a request to the tested HTTP server.
+
+ The interface is identical to aiohttp.ClientSession.request,
+ except the loop kwarg is overridden by the instance used by the
+ test server.
+
+ """
+ return _RequestContextManager(self._request(method, path, **kwargs))
+
+ def get(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP GET request."""
+ return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
+
+ def post(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP POST request."""
+ return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
+
+ def options(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP OPTIONS request."""
+ return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))
+
+ def head(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP HEAD request."""
+ return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
+
+ def put(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP PUT request."""
+ return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
+
+ def patch(self, path: str, **kwargs: Any) -> _RequestContextManager:
+ """Perform an HTTP PATCH request."""
+ return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))
+
+ def delete(self, path: str, **kwargs: Any) -> _RequestContextManager:
+        """Perform an HTTP DELETE request."""
+ return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))
+
+ def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager:
+ """Initiate websocket connection.
+
+        The API corresponds to aiohttp.ClientSession.ws_connect.
+
+ """
+ return _WSRequestContextManager(self._ws_connect(path, **kwargs))
+
+ async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse:
+ ws = await self._session.ws_connect(self.make_url(path), **kwargs)
+ self._websockets.append(ws)
+ return ws
+
+ async def close(self) -> None:
+ """Close all fixtures created by the test client.
+
+ After that point, the TestClient is no longer usable.
+
+ This is an idempotent function: running close multiple times
+ will not have any additional effects.
+
+ close is also run on exit when used as a(n) (asynchronous)
+ context manager.
+
+ """
+ if not self._closed:
+ for resp in self._responses:
+ resp.close()
+ for ws in self._websockets:
+ await ws.close()
+ await self._session.close()
+ await self._server.close()
+ self._closed = True
+
+ def __enter__(self) -> None:
+ raise TypeError("Use async with instead")
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc: Optional[BaseException],
+ tb: Optional[TracebackType],
+ ) -> None:
+ # __exit__ should exist in pair with __enter__ but never executed
+ pass # pragma: no cover
+
+ async def __aenter__(self) -> "TestClient":
+ await self.start_server()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc: Optional[BaseException],
+ tb: Optional[TracebackType],
+ ) -> None:
+ await self.close()
+
+
+class AioHTTPTestCase(TestCase):
+ """A base class to allow for unittest web applications using aiohttp.
+
+ Provides the following:
+
+ * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
+ * self.loop (asyncio.BaseEventLoop): the event loop in which the
+ application and server are running.
+ * self.app (aiohttp.web.Application): the application returned by
+ self.get_application()
+
+ Note that the TestClient's methods are asynchronous: you have to
+    execute functions on the test client using asynchronous methods.
+ """
+
+ async def get_application(self) -> Application:
+ """Get application.
+
+ This method should be overridden
+ to return the aiohttp.web.Application
+ object to test.
+ """
+ return self.get_app()
+
+ def get_app(self) -> Application:
+        """Obsolete method used to construct the web application.
+
+ Use .get_application() coroutine instead.
+ """
+ raise RuntimeError("Did you forget to define get_application()?")
+
+ def setUp(self) -> None:
+ try:
+ self.loop = asyncio.get_running_loop()
+ except (AttributeError, RuntimeError): # AttributeError->py36
+ self.loop = asyncio.get_event_loop_policy().get_event_loop()
+
+ self.loop.run_until_complete(self.setUpAsync())
+
+ async def setUpAsync(self) -> None:
+ self.app = await self.get_application()
+ self.server = await self.get_server(self.app)
+ self.client = await self.get_client(self.server)
+
+ await self.client.start_server()
+
+ def tearDown(self) -> None:
+ self.loop.run_until_complete(self.tearDownAsync())
+
+ async def tearDownAsync(self) -> None:
+ await self.client.close()
+
+ async def get_server(self, app: Application) -> TestServer:
+ """Return a TestServer instance."""
+ return TestServer(app, loop=self.loop)
+
+ async def get_client(self, server: TestServer) -> TestClient:
+ """Return a TestClient instance."""
+ return TestClient(server, loop=self.loop)
+
+
+def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
+ """
+    A decorator for use with asynchronous AioHTTPTestCase test methods.
+
+ In 3.8+, this does nothing.
+ """
+ warnings.warn(
+ "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return func
+
+
+_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
+
+
+@contextlib.contextmanager
+def loop_context(
+ loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
+) -> Iterator[asyncio.AbstractEventLoop]:
+    """A context manager that creates an event loop for test purposes.
+
+ Handles the creation and cleanup of a test loop.
+ """
+ loop = setup_test_loop(loop_factory)
+ yield loop
+ teardown_test_loop(loop, fast=fast)
+
+
+def setup_test_loop(
+ loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
+) -> asyncio.AbstractEventLoop:
+ """Create and return an asyncio.BaseEventLoop instance.
+
+ The caller should also call teardown_test_loop,
+ once they are done with the loop.
+ """
+ loop = loop_factory()
+ try:
+ module = loop.__class__.__module__
+ skip_watcher = "uvloop" in module
+ except AttributeError: # pragma: no cover
+ # Just in case
+ skip_watcher = True
+ asyncio.set_event_loop(loop)
+ if sys.platform != "win32" and not skip_watcher:
+ policy = asyncio.get_event_loop_policy()
+ watcher: asyncio.AbstractChildWatcher
+ try: # Python >= 3.8
+ # Refs:
+ # * https://github.com/pytest-dev/pytest-xdist/issues/620
+ # * https://stackoverflow.com/a/58614689/595220
+ # * https://bugs.python.org/issue35621
+ # * https://github.com/python/cpython/pull/14344
+ watcher = asyncio.ThreadedChildWatcher()
+ except AttributeError: # Python < 3.8
+ watcher = asyncio.SafeChildWatcher()
+ watcher.attach_loop(loop)
+ with contextlib.suppress(NotImplementedError):
+ policy.set_child_watcher(watcher)
+ return loop
+
+
+def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
+ """Teardown and cleanup an event_loop created by setup_test_loop."""
+ closed = loop.is_closed()
+ if not closed:
+ loop.call_soon(loop.stop)
+ loop.run_forever()
+ loop.close()
+
+ if not fast:
+ gc.collect()
+
+ asyncio.set_event_loop(None)
+
+
+def _create_app_mock() -> mock.MagicMock:
+ def get_dict(app: Any, key: str) -> Any:
+ return app.__app_dict[key]
+
+ def set_dict(app: Any, key: str, value: Any) -> None:
+ app.__app_dict[key] = value
+
+ app = mock.MagicMock()
+ app.__app_dict = {}
+ app.__getitem__ = get_dict
+ app.__setitem__ = set_dict
+
+ app._debug = False
+ app.on_response_prepare = Signal(app)
+ app.on_response_prepare.freeze()
+ return app
+
+
+def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
+ transport = mock.Mock()
+
+ def get_extra_info(key: str) -> Optional[SSLContext]:
+ if key == "sslcontext":
+ return sslcontext
+ else:
+ return None
+
+ transport.get_extra_info.side_effect = get_extra_info
+ return transport
+
+
+def make_mocked_request(
+ method: str,
+ path: str,
+ headers: Any = None,
+ *,
+ match_info: Any = sentinel,
+ version: HttpVersion = HttpVersion(1, 1),
+ closing: bool = False,
+ app: Any = None,
+ writer: Any = sentinel,
+ protocol: Any = sentinel,
+ transport: Any = sentinel,
+ payload: Any = sentinel,
+ sslcontext: Optional[SSLContext] = None,
+ client_max_size: int = 1024 ** 2,
+ loop: Any = ...,
+) -> Request:
+    """Creates a mocked web.Request for testing purposes.
+
+    Useful in unit tests, when spinning up a full web server is overkill or
+ specific conditions and errors are hard to trigger.
+ """
+ task = mock.Mock()
+ if loop is ...:
+ loop = mock.Mock()
+ loop.create_future.return_value = ()
+
+ if version < HttpVersion(1, 1):
+ closing = True
+
+ if headers:
+ headers = CIMultiDictProxy(CIMultiDict(headers))
+ raw_hdrs = tuple(
+ (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
+ )
+ else:
+ headers = CIMultiDictProxy(CIMultiDict())
+ raw_hdrs = ()
+
+ chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()
+
+ message = RawRequestMessage(
+ method,
+ path,
+ version,
+ headers,
+ raw_hdrs,
+ closing,
+ None,
+ False,
+ chunked,
+ URL(path),
+ )
+ if app is None:
+ app = _create_app_mock()
+
+ if transport is sentinel:
+ transport = _create_transport(sslcontext)
+
+ if protocol is sentinel:
+ protocol = mock.Mock()
+ protocol.transport = transport
+
+ if writer is sentinel:
+ writer = mock.Mock()
+ writer.write_headers = make_mocked_coro(None)
+ writer.write = make_mocked_coro(None)
+ writer.write_eof = make_mocked_coro(None)
+ writer.drain = make_mocked_coro(None)
+ writer.transport = transport
+
+ protocol.transport = transport
+ protocol.writer = writer
+
+ if payload is sentinel:
+ payload = mock.Mock()
+
+ req = Request(
+ message, payload, protocol, writer, task, loop, client_max_size=client_max_size
+ )
+
+ match_info = UrlMappingMatchInfo(
+ {} if match_info is sentinel else match_info, mock.Mock()
+ )
+ match_info.add_app(app)
+ req._match_info = match_info
+
+ return req
+
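+# Illustrative sketch (`handler` is a hypothetical coroutine under test):
+#
+#     req = make_mocked_request("GET", "/", headers={"token": "x"})
+#     resp = await handler(req)
+#     assert resp.status == 200
+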
+
+def make_mocked_coro(
+ return_value: Any = sentinel, raise_exception: Any = sentinel
+) -> Any:
+ """Creates a coroutine mock."""
+
+ async def mock_coro(*args: Any, **kwargs: Any) -> Any:
+ if raise_exception is not sentinel:
+ raise raise_exception
+ if not inspect.isawaitable(return_value):
+ return return_value
+ await return_value
+
+ return mock.Mock(wraps=mock_coro)
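+
+
+# Illustrative usage (`writer` is a placeholder mock): the result records
+# calls like any Mock, and awaiting it returns return_value (or raises
+# raise_exception when given):
+#
+#     writer.write_eof = make_mocked_coro(None)
+#     boom = make_mocked_coro(raise_exception=RuntimeError("x"))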
diff --git a/contrib/python/aiohttp/aiohttp/tracing.py b/contrib/python/aiohttp/aiohttp/tracing.py
new file mode 100644
index 0000000000..0e118a3997
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/tracing.py
@@ -0,0 +1,472 @@
+from types import SimpleNamespace
+from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar
+
+import attr
+from aiosignal import Signal
+from multidict import CIMultiDict
+from yarl import URL
+
+from .client_reqrep import ClientResponse
+
+if TYPE_CHECKING: # pragma: no cover
+ from .client import ClientSession
+ from .typedefs import Protocol
+
+ _ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
+
+ class _SignalCallback(Protocol[_ParamT_contra]):
+ def __call__(
+ self,
+ __client_session: ClientSession,
+ __trace_config_ctx: SimpleNamespace,
+ __params: _ParamT_contra,
+ ) -> Awaitable[None]:
+ ...
+
+
+__all__ = (
+ "TraceConfig",
+ "TraceRequestStartParams",
+ "TraceRequestEndParams",
+ "TraceRequestExceptionParams",
+ "TraceConnectionQueuedStartParams",
+ "TraceConnectionQueuedEndParams",
+ "TraceConnectionCreateStartParams",
+ "TraceConnectionCreateEndParams",
+ "TraceConnectionReuseconnParams",
+ "TraceDnsResolveHostStartParams",
+ "TraceDnsResolveHostEndParams",
+ "TraceDnsCacheHitParams",
+ "TraceDnsCacheMissParams",
+ "TraceRequestRedirectParams",
+ "TraceRequestChunkSentParams",
+ "TraceResponseChunkReceivedParams",
+ "TraceRequestHeadersSentParams",
+)
+
+
+class TraceConfig:
+    """First-class object used to trace requests launched via ClientSession objects."""
+
+ def __init__(
+ self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
+ ) -> None:
+ self._on_request_start = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestStartParams]]
+ self._on_request_chunk_sent = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestChunkSentParams]]
+ self._on_response_chunk_received = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceResponseChunkReceivedParams]]
+ self._on_request_end = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestEndParams]]
+ self._on_request_exception = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestExceptionParams]]
+ self._on_request_redirect = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestRedirectParams]]
+ self._on_connection_queued_start = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceConnectionQueuedStartParams]]
+ self._on_connection_queued_end = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceConnectionQueuedEndParams]]
+ self._on_connection_create_start = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceConnectionCreateStartParams]]
+ self._on_connection_create_end = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceConnectionCreateEndParams]]
+ self._on_connection_reuseconn = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceConnectionReuseconnParams]]
+ self._on_dns_resolvehost_start = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceDnsResolveHostStartParams]]
+ self._on_dns_resolvehost_end = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceDnsResolveHostEndParams]]
+ self._on_dns_cache_hit = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceDnsCacheHitParams]]
+ self._on_dns_cache_miss = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceDnsCacheMissParams]]
+ self._on_request_headers_sent = Signal(
+ self
+ ) # type: Signal[_SignalCallback[TraceRequestHeadersSentParams]]
+
+ self._trace_config_ctx_factory = trace_config_ctx_factory
+
+ def trace_config_ctx(
+ self, trace_request_ctx: Optional[SimpleNamespace] = None
+ ) -> SimpleNamespace:
+ """Return a new trace_config_ctx instance"""
+ return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)
+
+ def freeze(self) -> None:
+ self._on_request_start.freeze()
+ self._on_request_chunk_sent.freeze()
+ self._on_response_chunk_received.freeze()
+ self._on_request_end.freeze()
+ self._on_request_exception.freeze()
+ self._on_request_redirect.freeze()
+ self._on_connection_queued_start.freeze()
+ self._on_connection_queued_end.freeze()
+ self._on_connection_create_start.freeze()
+ self._on_connection_create_end.freeze()
+ self._on_connection_reuseconn.freeze()
+ self._on_dns_resolvehost_start.freeze()
+ self._on_dns_resolvehost_end.freeze()
+ self._on_dns_cache_hit.freeze()
+ self._on_dns_cache_miss.freeze()
+ self._on_request_headers_sent.freeze()
+
+ @property
+ def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
+ return self._on_request_start
+
+ @property
+ def on_request_chunk_sent(
+ self,
+ ) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
+ return self._on_request_chunk_sent
+
+ @property
+ def on_response_chunk_received(
+ self,
+ ) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
+ return self._on_response_chunk_received
+
+ @property
+ def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
+ return self._on_request_end
+
+ @property
+ def on_request_exception(
+ self,
+ ) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
+ return self._on_request_exception
+
+ @property
+ def on_request_redirect(
+ self,
+ ) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
+ return self._on_request_redirect
+
+ @property
+ def on_connection_queued_start(
+ self,
+ ) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
+ return self._on_connection_queued_start
+
+ @property
+ def on_connection_queued_end(
+ self,
+ ) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
+ return self._on_connection_queued_end
+
+ @property
+ def on_connection_create_start(
+ self,
+ ) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
+ return self._on_connection_create_start
+
+ @property
+ def on_connection_create_end(
+ self,
+ ) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
+ return self._on_connection_create_end
+
+ @property
+ def on_connection_reuseconn(
+ self,
+ ) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
+ return self._on_connection_reuseconn
+
+ @property
+ def on_dns_resolvehost_start(
+ self,
+ ) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
+ return self._on_dns_resolvehost_start
+
+ @property
+ def on_dns_resolvehost_end(
+ self,
+ ) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
+ return self._on_dns_resolvehost_end
+
+ @property
+ def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
+ return self._on_dns_cache_hit
+
+ @property
+ def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
+ return self._on_dns_cache_miss
+
+ @property
+ def on_request_headers_sent(
+ self,
+ ) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
+ return self._on_request_headers_sent
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestStartParams:
+ """Parameters sent by the `on_request_start` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestChunkSentParams:
+ """Parameters sent by the `on_request_chunk_sent` signal"""
+
+ method: str
+ url: URL
+ chunk: bytes
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceResponseChunkReceivedParams:
+ """Parameters sent by the `on_response_chunk_received` signal"""
+
+ method: str
+ url: URL
+ chunk: bytes
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestEndParams:
+ """Parameters sent by the `on_request_end` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+ response: ClientResponse
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestExceptionParams:
+ """Parameters sent by the `on_request_exception` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+ exception: BaseException
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestRedirectParams:
+ """Parameters sent by the `on_request_redirect` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+ response: ClientResponse
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionQueuedStartParams:
+ """Parameters sent by the `on_connection_queued_start` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionQueuedEndParams:
+ """Parameters sent by the `on_connection_queued_end` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionCreateStartParams:
+ """Parameters sent by the `on_connection_create_start` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionCreateEndParams:
+ """Parameters sent by the `on_connection_create_end` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceConnectionReuseconnParams:
+ """Parameters sent by the `on_connection_reuseconn` signal"""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsResolveHostStartParams:
+ """Parameters sent by the `on_dns_resolvehost_start` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsResolveHostEndParams:
+ """Parameters sent by the `on_dns_resolvehost_end` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsCacheHitParams:
+ """Parameters sent by the `on_dns_cache_hit` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceDnsCacheMissParams:
+ """Parameters sent by the `on_dns_cache_miss` signal"""
+
+ host: str
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class TraceRequestHeadersSentParams:
+ """Parameters sent by the `on_request_headers_sent` signal"""
+
+ method: str
+ url: URL
+ headers: "CIMultiDict[str]"
+
+
+class Trace:
+ """Internal dependency holder class.
+
+ Used to keep together the main dependencies used
+    at the moment of sending a signal.
+ """
+
+ def __init__(
+ self,
+ session: "ClientSession",
+ trace_config: TraceConfig,
+ trace_config_ctx: SimpleNamespace,
+ ) -> None:
+ self._trace_config = trace_config
+ self._trace_config_ctx = trace_config_ctx
+ self._session = session
+
+ async def send_request_start(
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
+ ) -> None:
+ return await self._trace_config.on_request_start.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestStartParams(method, url, headers),
+ )
+
+ async def send_request_chunk_sent(
+ self, method: str, url: URL, chunk: bytes
+ ) -> None:
+ return await self._trace_config.on_request_chunk_sent.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestChunkSentParams(method, url, chunk),
+ )
+
+ async def send_response_chunk_received(
+ self, method: str, url: URL, chunk: bytes
+ ) -> None:
+ return await self._trace_config.on_response_chunk_received.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceResponseChunkReceivedParams(method, url, chunk),
+ )
+
+ async def send_request_end(
+ self,
+ method: str,
+ url: URL,
+ headers: "CIMultiDict[str]",
+ response: ClientResponse,
+ ) -> None:
+ return await self._trace_config.on_request_end.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestEndParams(method, url, headers, response),
+ )
+
+ async def send_request_exception(
+ self,
+ method: str,
+ url: URL,
+ headers: "CIMultiDict[str]",
+ exception: BaseException,
+ ) -> None:
+ return await self._trace_config.on_request_exception.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestExceptionParams(method, url, headers, exception),
+ )
+
+ async def send_request_redirect(
+ self,
+ method: str,
+ url: URL,
+ headers: "CIMultiDict[str]",
+ response: ClientResponse,
+ ) -> None:
+        return await self._trace_config.on_request_redirect.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestRedirectParams(method, url, headers, response),
+ )
+
+ async def send_connection_queued_start(self) -> None:
+ return await self._trace_config.on_connection_queued_start.send(
+ self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
+ )
+
+ async def send_connection_queued_end(self) -> None:
+ return await self._trace_config.on_connection_queued_end.send(
+ self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
+ )
+
+ async def send_connection_create_start(self) -> None:
+ return await self._trace_config.on_connection_create_start.send(
+ self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
+ )
+
+ async def send_connection_create_end(self) -> None:
+ return await self._trace_config.on_connection_create_end.send(
+ self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
+ )
+
+ async def send_connection_reuseconn(self) -> None:
+ return await self._trace_config.on_connection_reuseconn.send(
+ self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
+ )
+
+ async def send_dns_resolvehost_start(self, host: str) -> None:
+ return await self._trace_config.on_dns_resolvehost_start.send(
+ self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
+ )
+
+ async def send_dns_resolvehost_end(self, host: str) -> None:
+ return await self._trace_config.on_dns_resolvehost_end.send(
+ self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
+ )
+
+ async def send_dns_cache_hit(self, host: str) -> None:
+ return await self._trace_config.on_dns_cache_hit.send(
+ self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
+ )
+
+ async def send_dns_cache_miss(self, host: str) -> None:
+ return await self._trace_config.on_dns_cache_miss.send(
+ self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
+ )
+
+ async def send_request_headers(
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
+ ) -> None:
+        return await self._trace_config.on_request_headers_sent.send(
+ self._session,
+ self._trace_config_ctx,
+ TraceRequestHeadersSentParams(method, url, headers),
+ )
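+
+
+# Illustrative wiring (comment only, not part of this module): a TraceConfig is
+# handed to the client session, and every signal handler receives
+# (session, trace_config_ctx, params):
+#
+#     async def on_start(session, ctx, params: TraceRequestStartParams) -> None:
+#         print("starting", params.method, params.url)
+#
+#     trace_config = TraceConfig()
+#     trace_config.on_request_start.append(on_start)
+#     # then: aiohttp.ClientSession(trace_configs=[trace_config])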
diff --git a/contrib/python/aiohttp/aiohttp/typedefs.py b/contrib/python/aiohttp/aiohttp/typedefs.py
new file mode 100644
index 0000000000..84283d9a46
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/typedefs.py
@@ -0,0 +1,64 @@
+import json
+import os
+import sys
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Iterable,
+ Mapping,
+ Tuple,
+ Union,
+)
+
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
+from yarl import URL
+
+# These are for other modules to use (to avoid repeating the conditional import).
+if sys.version_info >= (3, 8):
+ from typing import Final as Final, Protocol as Protocol, TypedDict as TypedDict
+else:
+ from typing_extensions import ( # noqa: F401
+ Final,
+ Protocol as Protocol,
+ TypedDict as TypedDict,
+ )
+
+DEFAULT_JSON_ENCODER = json.dumps
+DEFAULT_JSON_DECODER = json.loads
+
+if TYPE_CHECKING: # pragma: no cover
+ _CIMultiDict = CIMultiDict[str]
+ _CIMultiDictProxy = CIMultiDictProxy[str]
+ _MultiDict = MultiDict[str]
+ _MultiDictProxy = MultiDictProxy[str]
+ from http.cookies import BaseCookie, Morsel
+
+ from .web import Request, StreamResponse
+else:
+ _CIMultiDict = CIMultiDict
+ _CIMultiDictProxy = CIMultiDictProxy
+ _MultiDict = MultiDict
+ _MultiDictProxy = MultiDictProxy
+
+Byteish = Union[bytes, bytearray, memoryview]
+JSONEncoder = Callable[[Any], str]
+JSONDecoder = Callable[[str], Any]
+LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy]
+RawHeaders = Tuple[Tuple[bytes, bytes], ...]
+StrOrURL = Union[str, URL]
+
+LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
+LooseCookiesIterables = Iterable[
+ Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
+]
+LooseCookies = Union[
+ LooseCookiesMappings,
+ LooseCookiesIterables,
+ "BaseCookie[str]",
+]
+
+Handler = Callable[["Request"], Awaitable["StreamResponse"]]
+
+PathLike = Union[str, "os.PathLike[str]"]
diff --git a/contrib/python/aiohttp/aiohttp/web.py b/contrib/python/aiohttp/aiohttp/web.py
new file mode 100644
index 0000000000..864428b49b
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web.py
@@ -0,0 +1,586 @@
+import asyncio
+import logging
+import socket
+import sys
+from argparse import ArgumentParser
+from collections.abc import Iterable
+from importlib import import_module
+from typing import (
+ Any,
+ Awaitable,
+ Callable,
+ Iterable as TypingIterable,
+ List,
+ Optional,
+ Set,
+ Type,
+ Union,
+ cast,
+)
+
+from .abc import AbstractAccessLogger
+from .helpers import all_tasks
+from .log import access_logger
+from .web_app import Application as Application, CleanupError as CleanupError
+from .web_exceptions import (
+ HTTPAccepted as HTTPAccepted,
+ HTTPBadGateway as HTTPBadGateway,
+ HTTPBadRequest as HTTPBadRequest,
+ HTTPClientError as HTTPClientError,
+ HTTPConflict as HTTPConflict,
+ HTTPCreated as HTTPCreated,
+ HTTPError as HTTPError,
+ HTTPException as HTTPException,
+ HTTPExpectationFailed as HTTPExpectationFailed,
+ HTTPFailedDependency as HTTPFailedDependency,
+ HTTPForbidden as HTTPForbidden,
+ HTTPFound as HTTPFound,
+ HTTPGatewayTimeout as HTTPGatewayTimeout,
+ HTTPGone as HTTPGone,
+ HTTPInsufficientStorage as HTTPInsufficientStorage,
+ HTTPInternalServerError as HTTPInternalServerError,
+ HTTPLengthRequired as HTTPLengthRequired,
+ HTTPMethodNotAllowed as HTTPMethodNotAllowed,
+ HTTPMisdirectedRequest as HTTPMisdirectedRequest,
+ HTTPMovedPermanently as HTTPMovedPermanently,
+ HTTPMultipleChoices as HTTPMultipleChoices,
+ HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
+ HTTPNoContent as HTTPNoContent,
+ HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
+ HTTPNotAcceptable as HTTPNotAcceptable,
+ HTTPNotExtended as HTTPNotExtended,
+ HTTPNotFound as HTTPNotFound,
+ HTTPNotImplemented as HTTPNotImplemented,
+ HTTPNotModified as HTTPNotModified,
+ HTTPOk as HTTPOk,
+ HTTPPartialContent as HTTPPartialContent,
+ HTTPPaymentRequired as HTTPPaymentRequired,
+ HTTPPermanentRedirect as HTTPPermanentRedirect,
+ HTTPPreconditionFailed as HTTPPreconditionFailed,
+ HTTPPreconditionRequired as HTTPPreconditionRequired,
+ HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
+ HTTPRedirection as HTTPRedirection,
+ HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
+ HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
+ HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
+ HTTPRequestTimeout as HTTPRequestTimeout,
+ HTTPRequestURITooLong as HTTPRequestURITooLong,
+ HTTPResetContent as HTTPResetContent,
+ HTTPSeeOther as HTTPSeeOther,
+ HTTPServerError as HTTPServerError,
+ HTTPServiceUnavailable as HTTPServiceUnavailable,
+ HTTPSuccessful as HTTPSuccessful,
+ HTTPTemporaryRedirect as HTTPTemporaryRedirect,
+ HTTPTooManyRequests as HTTPTooManyRequests,
+ HTTPUnauthorized as HTTPUnauthorized,
+ HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
+ HTTPUnprocessableEntity as HTTPUnprocessableEntity,
+ HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
+ HTTPUpgradeRequired as HTTPUpgradeRequired,
+ HTTPUseProxy as HTTPUseProxy,
+ HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
+ HTTPVersionNotSupported as HTTPVersionNotSupported,
+)
+from .web_fileresponse import FileResponse as FileResponse
+from .web_log import AccessLogger
+from .web_middlewares import (
+ middleware as middleware,
+ normalize_path_middleware as normalize_path_middleware,
+)
+from .web_protocol import (
+ PayloadAccessError as PayloadAccessError,
+ RequestHandler as RequestHandler,
+ RequestPayloadError as RequestPayloadError,
+)
+from .web_request import (
+ BaseRequest as BaseRequest,
+ FileField as FileField,
+ Request as Request,
+)
+from .web_response import (
+ ContentCoding as ContentCoding,
+ Response as Response,
+ StreamResponse as StreamResponse,
+ json_response as json_response,
+)
+from .web_routedef import (
+ AbstractRouteDef as AbstractRouteDef,
+ RouteDef as RouteDef,
+ RouteTableDef as RouteTableDef,
+ StaticDef as StaticDef,
+ delete as delete,
+ get as get,
+ head as head,
+ options as options,
+ patch as patch,
+ post as post,
+ put as put,
+ route as route,
+ static as static,
+ view as view,
+)
+from .web_runner import (
+ AppRunner as AppRunner,
+ BaseRunner as BaseRunner,
+ BaseSite as BaseSite,
+ GracefulExit as GracefulExit,
+ NamedPipeSite as NamedPipeSite,
+ ServerRunner as ServerRunner,
+ SockSite as SockSite,
+ TCPSite as TCPSite,
+ UnixSite as UnixSite,
+)
+from .web_server import Server as Server
+from .web_urldispatcher import (
+ AbstractResource as AbstractResource,
+ AbstractRoute as AbstractRoute,
+ DynamicResource as DynamicResource,
+ PlainResource as PlainResource,
+ Resource as Resource,
+ ResourceRoute as ResourceRoute,
+ StaticResource as StaticResource,
+ UrlDispatcher as UrlDispatcher,
+ UrlMappingMatchInfo as UrlMappingMatchInfo,
+ View as View,
+)
+from .web_ws import (
+ WebSocketReady as WebSocketReady,
+ WebSocketResponse as WebSocketResponse,
+ WSMsgType as WSMsgType,
+)
+
+__all__ = (
+ # web_app
+ "Application",
+ "CleanupError",
+ # web_exceptions
+ "HTTPAccepted",
+ "HTTPBadGateway",
+ "HTTPBadRequest",
+ "HTTPClientError",
+ "HTTPConflict",
+ "HTTPCreated",
+ "HTTPError",
+ "HTTPException",
+ "HTTPExpectationFailed",
+ "HTTPFailedDependency",
+ "HTTPForbidden",
+ "HTTPFound",
+ "HTTPGatewayTimeout",
+ "HTTPGone",
+ "HTTPInsufficientStorage",
+ "HTTPInternalServerError",
+ "HTTPLengthRequired",
+ "HTTPMethodNotAllowed",
+ "HTTPMisdirectedRequest",
+ "HTTPMovedPermanently",
+ "HTTPMultipleChoices",
+ "HTTPNetworkAuthenticationRequired",
+ "HTTPNoContent",
+ "HTTPNonAuthoritativeInformation",
+ "HTTPNotAcceptable",
+ "HTTPNotExtended",
+ "HTTPNotFound",
+ "HTTPNotImplemented",
+ "HTTPNotModified",
+ "HTTPOk",
+ "HTTPPartialContent",
+ "HTTPPaymentRequired",
+ "HTTPPermanentRedirect",
+ "HTTPPreconditionFailed",
+ "HTTPPreconditionRequired",
+ "HTTPProxyAuthenticationRequired",
+ "HTTPRedirection",
+ "HTTPRequestEntityTooLarge",
+ "HTTPRequestHeaderFieldsTooLarge",
+ "HTTPRequestRangeNotSatisfiable",
+ "HTTPRequestTimeout",
+ "HTTPRequestURITooLong",
+ "HTTPResetContent",
+ "HTTPSeeOther",
+ "HTTPServerError",
+ "HTTPServiceUnavailable",
+ "HTTPSuccessful",
+ "HTTPTemporaryRedirect",
+ "HTTPTooManyRequests",
+ "HTTPUnauthorized",
+ "HTTPUnavailableForLegalReasons",
+ "HTTPUnprocessableEntity",
+ "HTTPUnsupportedMediaType",
+ "HTTPUpgradeRequired",
+ "HTTPUseProxy",
+ "HTTPVariantAlsoNegotiates",
+ "HTTPVersionNotSupported",
+ # web_fileresponse
+ "FileResponse",
+ # web_middlewares
+ "middleware",
+ "normalize_path_middleware",
+ # web_protocol
+ "PayloadAccessError",
+ "RequestHandler",
+ "RequestPayloadError",
+ # web_request
+ "BaseRequest",
+ "FileField",
+ "Request",
+ # web_response
+ "ContentCoding",
+ "Response",
+ "StreamResponse",
+ "json_response",
+ # web_routedef
+ "AbstractRouteDef",
+ "RouteDef",
+ "RouteTableDef",
+ "StaticDef",
+ "delete",
+ "get",
+ "head",
+ "options",
+ "patch",
+ "post",
+ "put",
+ "route",
+ "static",
+ "view",
+ # web_runner
+ "AppRunner",
+ "BaseRunner",
+ "BaseSite",
+ "GracefulExit",
+ "ServerRunner",
+ "SockSite",
+ "TCPSite",
+ "UnixSite",
+ "NamedPipeSite",
+ # web_server
+ "Server",
+ # web_urldispatcher
+ "AbstractResource",
+ "AbstractRoute",
+ "DynamicResource",
+ "PlainResource",
+ "Resource",
+ "ResourceRoute",
+ "StaticResource",
+ "UrlDispatcher",
+ "UrlMappingMatchInfo",
+ "View",
+ # web_ws
+ "WebSocketReady",
+ "WebSocketResponse",
+ "WSMsgType",
+ # web
+ "run_app",
+)
+
+
+try:
+ from ssl import SSLContext
+except ImportError: # pragma: no cover
+ SSLContext = Any # type: ignore[misc,assignment]
+
+HostSequence = TypingIterable[str]
+
+
+async def _run_app(
+ app: Union[Application, Awaitable[Application]],
+ *,
+ host: Optional[Union[str, HostSequence]] = None,
+ port: Optional[int] = None,
+ path: Optional[str] = None,
+ sock: Optional[socket.socket] = None,
+ shutdown_timeout: float = 60.0,
+ keepalive_timeout: float = 75.0,
+ ssl_context: Optional[SSLContext] = None,
+ print: Callable[..., None] = print,
+ backlog: int = 128,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ access_log_format: str = AccessLogger.LOG_FORMAT,
+ access_log: Optional[logging.Logger] = access_logger,
+ handle_signals: bool = True,
+ reuse_address: Optional[bool] = None,
+ reuse_port: Optional[bool] = None,
+) -> None:
+    # An internal function that does all the dirty work of running an application
+ if asyncio.iscoroutine(app):
+ app = await app # type: ignore[misc]
+
+ app = cast(Application, app)
+
+ runner = AppRunner(
+ app,
+ handle_signals=handle_signals,
+ access_log_class=access_log_class,
+ access_log_format=access_log_format,
+ access_log=access_log,
+ keepalive_timeout=keepalive_timeout,
+ )
+
+ await runner.setup()
+
+ sites = [] # type: List[BaseSite]
+
+ try:
+ if host is not None:
+ if isinstance(host, (str, bytes, bytearray, memoryview)):
+ sites.append(
+ TCPSite(
+ runner,
+ host,
+ port,
+ shutdown_timeout=shutdown_timeout,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ reuse_address=reuse_address,
+ reuse_port=reuse_port,
+ )
+ )
+ else:
+ for h in host:
+ sites.append(
+ TCPSite(
+ runner,
+ h,
+ port,
+ shutdown_timeout=shutdown_timeout,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ reuse_address=reuse_address,
+ reuse_port=reuse_port,
+ )
+ )
+        elif (path is None and sock is None) or port is not None:
+ sites.append(
+ TCPSite(
+ runner,
+ port=port,
+ shutdown_timeout=shutdown_timeout,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ reuse_address=reuse_address,
+ reuse_port=reuse_port,
+ )
+ )
+
+ if path is not None:
+ if isinstance(path, (str, bytes, bytearray, memoryview)):
+ sites.append(
+ UnixSite(
+ runner,
+ path,
+ shutdown_timeout=shutdown_timeout,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ )
+ else:
+ for p in path:
+ sites.append(
+ UnixSite(
+ runner,
+ p,
+ shutdown_timeout=shutdown_timeout,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ )
+
+ if sock is not None:
+ if not isinstance(sock, Iterable):
+ sites.append(
+ SockSite(
+ runner,
+ sock,
+ shutdown_timeout=shutdown_timeout,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ )
+ else:
+ for s in sock:
+ sites.append(
+ SockSite(
+ runner,
+ s,
+ shutdown_timeout=shutdown_timeout,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ )
+ for site in sites:
+ await site.start()
+
+ if print: # pragma: no branch
+ names = sorted(str(s.name) for s in runner.sites)
+ print(
+ "======== Running on {} ========\n"
+ "(Press CTRL+C to quit)".format(", ".join(names))
+ )
+
+        # sleep forever in 1 hour intervals;
+        # on Windows before Python 3.8, wake up every 1 second to handle
+        # Ctrl+C smoothly
+ if sys.platform == "win32" and sys.version_info < (3, 8):
+ delay = 1
+ else:
+ delay = 3600
+
+ while True:
+ await asyncio.sleep(delay)
+ finally:
+ await runner.cleanup()
+
+
+def _cancel_tasks(
+ to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
+) -> None:
+ if not to_cancel:
+ return
+
+ for task in to_cancel:
+ task.cancel()
+
+ loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
+
+ for task in to_cancel:
+ if task.cancelled():
+ continue
+ if task.exception() is not None:
+ loop.call_exception_handler(
+ {
+ "message": "unhandled exception during asyncio.run() shutdown",
+ "exception": task.exception(),
+ "task": task,
+ }
+ )
+
+
+def run_app(
+ app: Union[Application, Awaitable[Application]],
+ *,
+ host: Optional[Union[str, HostSequence]] = None,
+ port: Optional[int] = None,
+ path: Optional[str] = None,
+ sock: Optional[socket.socket] = None,
+ shutdown_timeout: float = 60.0,
+ keepalive_timeout: float = 75.0,
+ ssl_context: Optional[SSLContext] = None,
+ print: Callable[..., None] = print,
+ backlog: int = 128,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ access_log_format: str = AccessLogger.LOG_FORMAT,
+ access_log: Optional[logging.Logger] = access_logger,
+ handle_signals: bool = True,
+ reuse_address: Optional[bool] = None,
+ reuse_port: Optional[bool] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+) -> None:
+ """Run an app locally"""
+ if loop is None:
+ loop = asyncio.new_event_loop()
+
+ # Configure if and only if in debugging mode and using the default logger
+ if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
+ if access_log.level == logging.NOTSET:
+ access_log.setLevel(logging.DEBUG)
+ if not access_log.hasHandlers():
+ access_log.addHandler(logging.StreamHandler())
+
+ main_task = loop.create_task(
+ _run_app(
+ app,
+ host=host,
+ port=port,
+ path=path,
+ sock=sock,
+ shutdown_timeout=shutdown_timeout,
+ keepalive_timeout=keepalive_timeout,
+ ssl_context=ssl_context,
+ print=print,
+ backlog=backlog,
+ access_log_class=access_log_class,
+ access_log_format=access_log_format,
+ access_log=access_log,
+ handle_signals=handle_signals,
+ reuse_address=reuse_address,
+ reuse_port=reuse_port,
+ )
+ )
+
+ try:
+ asyncio.set_event_loop(loop)
+ loop.run_until_complete(main_task)
+ except (GracefulExit, KeyboardInterrupt): # pragma: no cover
+ pass
+ finally:
+ _cancel_tasks({main_task}, loop)
+ _cancel_tasks(all_tasks(loop), loop)
+ loop.run_until_complete(loop.shutdown_asyncgens())
+ loop.close()
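+
+
+# Minimal illustrative usage (comment only; the handler and route are made up):
+#
+#     from aiohttp import web
+#
+#     async def hello(request: web.Request) -> web.Response:
+#         return web.Response(text="Hello")
+#
+#     app = web.Application()
+#     app.router.add_get("/", hello)
+#     web.run_app(app, port=8080)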
+
+
+def main(argv: List[str]) -> None:
+ arg_parser = ArgumentParser(
+ description="aiohttp.web Application server", prog="aiohttp.web"
+ )
+ arg_parser.add_argument(
+ "entry_func",
+ help=(
+ "Callable returning the `aiohttp.web.Application` instance to "
+ "run. Should be specified in the 'module:function' syntax."
+ ),
+ metavar="entry-func",
+ )
+ arg_parser.add_argument(
+ "-H",
+ "--hostname",
+ help="TCP/IP hostname to serve on (default: %(default)r)",
+ default="localhost",
+ )
+ arg_parser.add_argument(
+ "-P",
+ "--port",
+ help="TCP/IP port to serve on (default: %(default)r)",
+ type=int,
+ default="8080",
+ )
+ arg_parser.add_argument(
+ "-U",
+ "--path",
+ help="Unix file system path to serve on. Specifying a path will cause "
+ "hostname and port arguments to be ignored.",
+ )
+ args, extra_argv = arg_parser.parse_known_args(argv)
+
+ # Import logic
+ mod_str, _, func_str = args.entry_func.partition(":")
+ if not func_str or not mod_str:
+ arg_parser.error("'entry-func' not in 'module:function' syntax")
+ if mod_str.startswith("."):
+ arg_parser.error("relative module names not supported")
+ try:
+ module = import_module(mod_str)
+ except ImportError as ex:
+ arg_parser.error(f"unable to import {mod_str}: {ex}")
+ try:
+ func = getattr(module, func_str)
+ except AttributeError:
+ arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")
+
+ # Compatibility logic
+ if args.path is not None and not hasattr(socket, "AF_UNIX"):
+ arg_parser.error(
+ "file system paths not supported by your operating" " environment"
+ )
+
+ logging.basicConfig(level=logging.DEBUG)
+
+ app = func(extra_argv)
+ run_app(app, host=args.hostname, port=args.port, path=args.path)
+ arg_parser.exit(message="Stopped\n")
+
+
+if __name__ == "__main__": # pragma: no branch
+ main(sys.argv[1:]) # pragma: no cover
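+
+# Illustrative CLI invocation (comment only; the module path is hypothetical):
+#
+#     python -m aiohttp.web -H 0.0.0.0 -P 8080 mypackage.main:init_func
+#
+# where init_func(argv) returns the Application instance to serve.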
diff --git a/contrib/python/aiohttp/aiohttp/web_app.py b/contrib/python/aiohttp/aiohttp/web_app.py
new file mode 100644
index 0000000000..d5dc90ed42
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_app.py
@@ -0,0 +1,557 @@
+import asyncio
+import logging
+import warnings
+from functools import partial, update_wrapper
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ AsyncIterator,
+ Awaitable,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ MutableMapping,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+from aiosignal import Signal
+from frozenlist import FrozenList
+
+from . import hdrs
+from .abc import (
+ AbstractAccessLogger,
+ AbstractMatchInfo,
+ AbstractRouter,
+ AbstractStreamWriter,
+)
+from .helpers import DEBUG
+from .http_parser import RawRequestMessage
+from .log import web_logger
+from .streams import StreamReader
+from .web_log import AccessLogger
+from .web_middlewares import _fix_request_current_app
+from .web_protocol import RequestHandler
+from .web_request import Request
+from .web_response import StreamResponse
+from .web_routedef import AbstractRouteDef
+from .web_server import Server
+from .web_urldispatcher import (
+ AbstractResource,
+ AbstractRoute,
+ Domain,
+ MaskDomain,
+ MatchedSubAppResource,
+ PrefixedSubAppResource,
+ UrlDispatcher,
+)
+
+__all__ = ("Application", "CleanupError")
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .typedefs import Handler
+
+ _AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
+ _RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
+ _Middleware = Union[
+ Callable[[Request, Handler], Awaitable[StreamResponse]],
+ Callable[["Application", Handler], Awaitable[Handler]], # old-style
+ ]
+ _Middlewares = FrozenList[_Middleware]
+ _MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]]
+ _Subapps = List["Application"]
+else:
+ # No type checker mode, skip types
+ _AppSignal = Signal
+ _RespPrepareSignal = Signal
+ _Middleware = Callable
+ _Middlewares = FrozenList
+ _MiddlewaresHandlers = Optional[Sequence]
+ _Subapps = List
+
+
+class Application(MutableMapping[str, Any]):
+ ATTRS = frozenset(
+ [
+ "logger",
+ "_debug",
+ "_router",
+ "_loop",
+ "_handler_args",
+ "_middlewares",
+ "_middlewares_handlers",
+ "_run_middlewares",
+ "_state",
+ "_frozen",
+ "_pre_frozen",
+ "_subapps",
+ "_on_response_prepare",
+ "_on_startup",
+ "_on_shutdown",
+ "_on_cleanup",
+ "_client_max_size",
+ "_cleanup_ctx",
+ ]
+ )
+
+ def __init__(
+ self,
+ *,
+ logger: logging.Logger = web_logger,
+ router: Optional[UrlDispatcher] = None,
+ middlewares: Iterable[_Middleware] = (),
+ handler_args: Optional[Mapping[str, Any]] = None,
+ client_max_size: int = 1024 ** 2,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ debug: Any = ..., # mypy doesn't support ellipsis
+ ) -> None:
+ if router is None:
+ router = UrlDispatcher()
+ else:
+ warnings.warn(
+ "router argument is deprecated", DeprecationWarning, stacklevel=2
+ )
+ assert isinstance(router, AbstractRouter), router
+
+ if loop is not None:
+ warnings.warn(
+ "loop argument is deprecated", DeprecationWarning, stacklevel=2
+ )
+
+ if debug is not ...:
+ warnings.warn(
+ "debug argument is deprecated", DeprecationWarning, stacklevel=2
+ )
+ self._debug = debug
+ self._router = router # type: UrlDispatcher
+ self._loop = loop
+ self._handler_args = handler_args
+ self.logger = logger
+
+ self._middlewares = FrozenList(middlewares) # type: _Middlewares
+
+ # initialized on freezing
+ self._middlewares_handlers = None # type: _MiddlewaresHandlers
+ # initialized on freezing
+ self._run_middlewares = None # type: Optional[bool]
+
+ self._state = {} # type: Dict[str, Any]
+ self._frozen = False
+ self._pre_frozen = False
+ self._subapps = [] # type: _Subapps
+
+ self._on_response_prepare = Signal(self) # type: _RespPrepareSignal
+ self._on_startup = Signal(self) # type: _AppSignal
+ self._on_shutdown = Signal(self) # type: _AppSignal
+ self._on_cleanup = Signal(self) # type: _AppSignal
+ self._cleanup_ctx = CleanupContext()
+ self._on_startup.append(self._cleanup_ctx._on_startup)
+ self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
+ self._client_max_size = client_max_size
+
+ def __init_subclass__(cls: Type["Application"]) -> None:
+ warnings.warn(
+ "Inheritance class {} from web.Application "
+ "is discouraged".format(cls.__name__),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ if DEBUG: # pragma: no cover
+
+ def __setattr__(self, name: str, val: Any) -> None:
+ if name not in self.ATTRS:
+ warnings.warn(
+ "Setting custom web.Application.{} attribute "
+ "is discouraged".format(name),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ super().__setattr__(name, val)
+
+ # MutableMapping API
+
+ def __eq__(self, other: object) -> bool:
+ return self is other
+
+ def __getitem__(self, key: str) -> Any:
+ return self._state[key]
+
+ def _check_frozen(self) -> None:
+ if self._frozen:
+ warnings.warn(
+ "Changing state of started or joined " "application is deprecated",
+ DeprecationWarning,
+ stacklevel=3,
+ )
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self._check_frozen()
+ self._state[key] = value
+
+ def __delitem__(self, key: str) -> None:
+ self._check_frozen()
+ del self._state[key]
+
+ def __len__(self) -> int:
+ return len(self._state)
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self._state)
+
+ ########
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop:
+        # Technically the loop can be None,
+        # but we mask it with an explicit type cast
+        # to provide a more convenient type annotation
+ warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
+ return cast(asyncio.AbstractEventLoop, self._loop)
+
+ def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
+ if loop is None:
+ loop = asyncio.get_event_loop()
+ if self._loop is not None and self._loop is not loop:
+ raise RuntimeError(
+ "web.Application instance initialized with different loop"
+ )
+
+ self._loop = loop
+
+ # set loop debug
+ if self._debug is ...:
+ self._debug = loop.get_debug()
+
+ # set loop to sub applications
+ for subapp in self._subapps:
+ subapp._set_loop(loop)
+
+ @property
+ def pre_frozen(self) -> bool:
+ return self._pre_frozen
+
+ def pre_freeze(self) -> None:
+ if self._pre_frozen:
+ return
+
+ self._pre_frozen = True
+ self._middlewares.freeze()
+ self._router.freeze()
+ self._on_response_prepare.freeze()
+ self._cleanup_ctx.freeze()
+ self._on_startup.freeze()
+ self._on_shutdown.freeze()
+ self._on_cleanup.freeze()
+ self._middlewares_handlers = tuple(self._prepare_middleware())
+
+        # If neither the current app nor any of its subapps has middlewares,
+        # skip the middleware machinery entirely, including the per-app
+        # middleware that sets up the current_app attribute. With no
+        # middlewares configured, the handler receives the proper current_app
+        # without any of this code.
+ self._run_middlewares = True if self.middlewares else False
+
+ for subapp in self._subapps:
+ subapp.pre_freeze()
+ self._run_middlewares = self._run_middlewares or subapp._run_middlewares
+
+ @property
+ def frozen(self) -> bool:
+ return self._frozen
+
+ def freeze(self) -> None:
+ if self._frozen:
+ return
+
+ self.pre_freeze()
+ self._frozen = True
+ for subapp in self._subapps:
+ subapp.freeze()
+
+ @property
+ def debug(self) -> bool:
+ warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
+ return self._debug # type: ignore[no-any-return]
+
+ def _reg_subapp_signals(self, subapp: "Application") -> None:
+ def reg_handler(signame: str) -> None:
+ subsig = getattr(subapp, signame)
+
+ async def handler(app: "Application") -> None:
+ await subsig.send(subapp)
+
+ appsig = getattr(self, signame)
+ appsig.append(handler)
+
+ reg_handler("on_startup")
+ reg_handler("on_shutdown")
+ reg_handler("on_cleanup")
+
+ def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource:
+ if not isinstance(prefix, str):
+ raise TypeError("Prefix must be str")
+ prefix = prefix.rstrip("/")
+ if not prefix:
+ raise ValueError("Prefix cannot be empty")
+ factory = partial(PrefixedSubAppResource, prefix, subapp)
+ return self._add_subapp(factory, subapp)
+
+ def _add_subapp(
+ self, resource_factory: Callable[[], AbstractResource], subapp: "Application"
+ ) -> AbstractResource:
+ if self.frozen:
+ raise RuntimeError("Cannot add sub application to frozen application")
+ if subapp.frozen:
+ raise RuntimeError("Cannot add frozen application")
+ resource = resource_factory()
+ self.router.register_resource(resource)
+ self._reg_subapp_signals(subapp)
+ self._subapps.append(subapp)
+ subapp.pre_freeze()
+ if self._loop is not None:
+ subapp._set_loop(self._loop)
+ return resource
+
+ def add_domain(self, domain: str, subapp: "Application") -> AbstractResource:
+ if not isinstance(domain, str):
+ raise TypeError("Domain must be str")
+ elif "*" in domain:
+ rule = MaskDomain(domain) # type: Domain
+ else:
+ rule = Domain(domain)
+ factory = partial(MatchedSubAppResource, rule, subapp)
+ return self._add_subapp(factory, subapp)
+
+ def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
+ return self.router.add_routes(routes)
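+
+    # Illustrative add_routes usage (comment only; the handler is made up):
+    #
+    #     routes = web.RouteTableDef()
+    #
+    #     @routes.get("/")
+    #     async def index(request):
+    #         return web.Response(text="index")
+    #
+    #     app.add_routes(routes)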
+
+ @property
+ def on_response_prepare(self) -> _RespPrepareSignal:
+ return self._on_response_prepare
+
+ @property
+ def on_startup(self) -> _AppSignal:
+ return self._on_startup
+
+ @property
+ def on_shutdown(self) -> _AppSignal:
+ return self._on_shutdown
+
+ @property
+ def on_cleanup(self) -> _AppSignal:
+ return self._on_cleanup
+
+ @property
+ def cleanup_ctx(self) -> "CleanupContext":
+ return self._cleanup_ctx
+
+ @property
+ def router(self) -> UrlDispatcher:
+ return self._router
+
+ @property
+ def middlewares(self) -> _Middlewares:
+ return self._middlewares
+
+ def _make_handler(
+ self,
+ *,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ **kwargs: Any,
+ ) -> Server:
+
+ if not issubclass(access_log_class, AbstractAccessLogger):
+ raise TypeError(
+ "access_log_class must be subclass of "
+ "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class)
+ )
+
+ self._set_loop(loop)
+ self.freeze()
+
+ kwargs["debug"] = self._debug
+ kwargs["access_log_class"] = access_log_class
+ if self._handler_args:
+ for k, v in self._handler_args.items():
+ kwargs[k] = v
+
+ return Server(
+ self._handle, # type: ignore[arg-type]
+ request_factory=self._make_request,
+ loop=self._loop,
+ **kwargs,
+ )
+
+ def make_handler(
+ self,
+ *,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ **kwargs: Any,
+ ) -> Server:
+
+ warnings.warn(
+ "Application.make_handler(...) is deprecated, " "use AppRunner API instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ return self._make_handler(
+ loop=loop, access_log_class=access_log_class, **kwargs
+ )
+
+ async def startup(self) -> None:
+ """Causes on_startup signal
+
+ Should be called in the event loop along with the request handler.
+ """
+ await self.on_startup.send(self)
+
+ async def shutdown(self) -> None:
+ """Causes on_shutdown signal
+
+ Should be called before cleanup()
+ """
+ await self.on_shutdown.send(self)
+
+ async def cleanup(self) -> None:
+ """Causes on_cleanup signal
+
+ Should be called after shutdown()
+ """
+ if self.on_cleanup.frozen:
+ await self.on_cleanup.send(self)
+ else:
+ # If an exception occurs in startup, ensure cleanup contexts are completed.
+ await self._cleanup_ctx._on_cleanup(self)
+
+ def _make_request(
+ self,
+ message: RawRequestMessage,
+ payload: StreamReader,
+ protocol: RequestHandler,
+ writer: AbstractStreamWriter,
+ task: "asyncio.Task[None]",
+ _cls: Type[Request] = Request,
+ ) -> Request:
+ return _cls(
+ message,
+ payload,
+ protocol,
+ writer,
+ task,
+ self._loop,
+ client_max_size=self._client_max_size,
+ )
+
+ def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]:
+ for m in reversed(self._middlewares):
+ if getattr(m, "__middleware_version__", None) == 1:
+ yield m, True
+ else:
+ warnings.warn(
+ 'old-style middleware "{!r}" deprecated, ' "see #2252".format(m),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ yield m, False
+
+ yield _fix_request_current_app(self), True
+
+ async def _handle(self, request: Request) -> StreamResponse:
+ loop = asyncio.get_event_loop()
+ debug = loop.get_debug()
+ match_info = await self._router.resolve(request)
+ if debug: # pragma: no cover
+ if not isinstance(match_info, AbstractMatchInfo):
+ raise TypeError(
+ "match_info should be AbstractMatchInfo "
+ "instance, not {!r}".format(match_info)
+ )
+ match_info.add_app(self)
+
+ match_info.freeze()
+
+ resp = None
+ request._match_info = match_info
+ expect = request.headers.get(hdrs.EXPECT)
+ if expect:
+ resp = await match_info.expect_handler(request)
+ await request.writer.drain()
+
+ if resp is None:
+ handler = match_info.handler
+
+ if self._run_middlewares:
+ for app in match_info.apps[::-1]:
+ for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] # noqa
+ if new_style:
+ handler = update_wrapper(
+ partial(m, handler=handler), handler
+ )
+ else:
+ handler = await m(app, handler) # type: ignore[arg-type]
+
+ resp = await handler(request)
+
+ return resp
+
+ def __call__(self) -> "Application":
+ """gunicorn compatibility"""
+ return self
+
+ def __repr__(self) -> str:
+ return f"<Application 0x{id(self):x}>"
+
+ def __bool__(self) -> bool:
+ return True
+
+
+class CleanupError(RuntimeError):
+ @property
+ def exceptions(self) -> List[BaseException]:
+ return cast(List[BaseException], self.args[1])
+
+
+if TYPE_CHECKING: # pragma: no cover
+ _CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
+else:
+ _CleanupContextBase = FrozenList
+
+
+class CleanupContext(_CleanupContextBase):
+ def __init__(self) -> None:
+ super().__init__()
+ self._exits = [] # type: List[AsyncIterator[None]]
+
+ async def _on_startup(self, app: Application) -> None:
+ for cb in self:
+ it = cb(app).__aiter__()
+ await it.__anext__()
+ self._exits.append(it)
+
+ async def _on_cleanup(self, app: Application) -> None:
+ errors = []
+ for it in reversed(self._exits):
+ try:
+ await it.__anext__()
+ except StopAsyncIteration:
+ pass
+ except Exception as exc:
+ errors.append(exc)
+ else:
+ errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
+ if errors:
+ if len(errors) == 1:
+ raise errors[0]
+ else:
+ raise CleanupError("Multiple errors on cleanup stage", errors)
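+
+
+# Illustrative cleanup-context usage (comment only; connect() is hypothetical):
+# each async generator appended to app.cleanup_ctx runs its pre-yield part on
+# startup and its post-yield part on cleanup, in reverse registration order:
+#
+#     async def database_ctx(app: Application) -> AsyncIterator[None]:
+#         app["db"] = await connect()
+#         yield
+#         await app["db"].close()
+#
+#     app.cleanup_ctx.append(database_ctx)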
diff --git a/contrib/python/aiohttp/aiohttp/web_exceptions.py b/contrib/python/aiohttp/aiohttp/web_exceptions.py
new file mode 100644
index 0000000000..2eadca0386
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_exceptions.py
@@ -0,0 +1,441 @@
+import warnings
+from typing import Any, Dict, Iterable, List, Optional, Set # noqa
+
+from yarl import URL
+
+from .typedefs import LooseHeaders, StrOrURL
+from .web_response import Response
+
+__all__ = (
+ "HTTPException",
+ "HTTPError",
+ "HTTPRedirection",
+ "HTTPSuccessful",
+ "HTTPOk",
+ "HTTPCreated",
+ "HTTPAccepted",
+ "HTTPNonAuthoritativeInformation",
+ "HTTPNoContent",
+ "HTTPResetContent",
+ "HTTPPartialContent",
+ "HTTPMultipleChoices",
+ "HTTPMovedPermanently",
+ "HTTPFound",
+ "HTTPSeeOther",
+ "HTTPNotModified",
+ "HTTPUseProxy",
+ "HTTPTemporaryRedirect",
+ "HTTPPermanentRedirect",
+ "HTTPClientError",
+ "HTTPBadRequest",
+ "HTTPUnauthorized",
+ "HTTPPaymentRequired",
+ "HTTPForbidden",
+ "HTTPNotFound",
+ "HTTPMethodNotAllowed",
+ "HTTPNotAcceptable",
+ "HTTPProxyAuthenticationRequired",
+ "HTTPRequestTimeout",
+ "HTTPConflict",
+ "HTTPGone",
+ "HTTPLengthRequired",
+ "HTTPPreconditionFailed",
+ "HTTPRequestEntityTooLarge",
+ "HTTPRequestURITooLong",
+ "HTTPUnsupportedMediaType",
+ "HTTPRequestRangeNotSatisfiable",
+ "HTTPExpectationFailed",
+ "HTTPMisdirectedRequest",
+ "HTTPUnprocessableEntity",
+ "HTTPFailedDependency",
+ "HTTPUpgradeRequired",
+ "HTTPPreconditionRequired",
+ "HTTPTooManyRequests",
+ "HTTPRequestHeaderFieldsTooLarge",
+ "HTTPUnavailableForLegalReasons",
+ "HTTPServerError",
+ "HTTPInternalServerError",
+ "HTTPNotImplemented",
+ "HTTPBadGateway",
+ "HTTPServiceUnavailable",
+ "HTTPGatewayTimeout",
+ "HTTPVersionNotSupported",
+ "HTTPVariantAlsoNegotiates",
+ "HTTPInsufficientStorage",
+ "HTTPNotExtended",
+ "HTTPNetworkAuthenticationRequired",
+)
+
+
+############################################################
+# HTTP Exceptions
+############################################################
+
+
+class HTTPException(Response, Exception):
+
+    # Subclasses should set:
+    #     status_code = 200
+
+ status_code = -1
+ empty_body = False
+
+ __http_exception__ = True
+
+ def __init__(
+ self,
+ *,
+ headers: Optional[LooseHeaders] = None,
+ reason: Optional[str] = None,
+ body: Any = None,
+ text: Optional[str] = None,
+ content_type: Optional[str] = None,
+ ) -> None:
+ if body is not None:
+ warnings.warn(
+ "body argument is deprecated for http web exceptions",
+ DeprecationWarning,
+ )
+ Response.__init__(
+ self,
+ status=self.status_code,
+ headers=headers,
+ reason=reason,
+ body=body,
+ text=text,
+ content_type=content_type,
+ )
+ Exception.__init__(self, self.reason)
+ if self.body is None and not self.empty_body:
+ self.text = f"{self.status}: {self.reason}"
+
+ def __bool__(self) -> bool:
+ return True
+
+
+class HTTPError(HTTPException):
+ """Base class for exceptions with status codes in the 400s and 500s."""
+
+
+class HTTPRedirection(HTTPException):
+ """Base class for exceptions with status codes in the 300s."""
+
+
+class HTTPSuccessful(HTTPException):
+ """Base class for exceptions with status codes in the 200s."""
+
+
+class HTTPOk(HTTPSuccessful):
+ status_code = 200
+
+
+class HTTPCreated(HTTPSuccessful):
+ status_code = 201
+
+
+class HTTPAccepted(HTTPSuccessful):
+ status_code = 202
+
+
+class HTTPNonAuthoritativeInformation(HTTPSuccessful):
+ status_code = 203
+
+
+class HTTPNoContent(HTTPSuccessful):
+ status_code = 204
+ empty_body = True
+
+
+class HTTPResetContent(HTTPSuccessful):
+ status_code = 205
+ empty_body = True
+
+
+class HTTPPartialContent(HTTPSuccessful):
+ status_code = 206
+
+
+############################################################
+# 3xx redirection
+############################################################
+
+
+class _HTTPMove(HTTPRedirection):
+ def __init__(
+ self,
+ location: StrOrURL,
+ *,
+ headers: Optional[LooseHeaders] = None,
+ reason: Optional[str] = None,
+ body: Any = None,
+ text: Optional[str] = None,
+ content_type: Optional[str] = None,
+ ) -> None:
+ if not location:
+ raise ValueError("HTTP redirects need a location to redirect to.")
+ super().__init__(
+ headers=headers,
+ reason=reason,
+ body=body,
+ text=text,
+ content_type=content_type,
+ )
+ self.headers["Location"] = str(URL(location))
+ self.location = location
+
+
+class HTTPMultipleChoices(_HTTPMove):
+ status_code = 300
+
+
+class HTTPMovedPermanently(_HTTPMove):
+ status_code = 301
+
+
+class HTTPFound(_HTTPMove):
+ status_code = 302
+
+
+# This one is safe after a POST (the redirected location will be
+# retrieved with GET):
+class HTTPSeeOther(_HTTPMove):
+ status_code = 303
+
+
+class HTTPNotModified(HTTPRedirection):
+ # FIXME: this should include a date or etag header
+ status_code = 304
+ empty_body = True
+
+
+class HTTPUseProxy(_HTTPMove):
+ # Not a move, but looks a little like one
+ status_code = 305
+
+
+class HTTPTemporaryRedirect(_HTTPMove):
+ status_code = 307
+
+
+class HTTPPermanentRedirect(_HTTPMove):
+ status_code = 308
+
+
+############################################################
+# 4xx client error
+############################################################
+
+
+class HTTPClientError(HTTPError):
+ pass
+
+
+class HTTPBadRequest(HTTPClientError):
+ status_code = 400
+
+
+class HTTPUnauthorized(HTTPClientError):
+ status_code = 401
+
+
+class HTTPPaymentRequired(HTTPClientError):
+ status_code = 402
+
+
+class HTTPForbidden(HTTPClientError):
+ status_code = 403
+
+
+class HTTPNotFound(HTTPClientError):
+ status_code = 404
+
+
+class HTTPMethodNotAllowed(HTTPClientError):
+ status_code = 405
+
+ def __init__(
+ self,
+ method: str,
+ allowed_methods: Iterable[str],
+ *,
+ headers: Optional[LooseHeaders] = None,
+ reason: Optional[str] = None,
+ body: Any = None,
+ text: Optional[str] = None,
+ content_type: Optional[str] = None,
+ ) -> None:
+ allow = ",".join(sorted(allowed_methods))
+ super().__init__(
+ headers=headers,
+ reason=reason,
+ body=body,
+ text=text,
+ content_type=content_type,
+ )
+ self.headers["Allow"] = allow
+ self.allowed_methods = set(allowed_methods) # type: Set[str]
+ self.method = method.upper()
+
+
+class HTTPNotAcceptable(HTTPClientError):
+ status_code = 406
+
+
+class HTTPProxyAuthenticationRequired(HTTPClientError):
+ status_code = 407
+
+
+class HTTPRequestTimeout(HTTPClientError):
+ status_code = 408
+
+
+class HTTPConflict(HTTPClientError):
+ status_code = 409
+
+
+class HTTPGone(HTTPClientError):
+ status_code = 410
+
+
+class HTTPLengthRequired(HTTPClientError):
+ status_code = 411
+
+
+class HTTPPreconditionFailed(HTTPClientError):
+ status_code = 412
+
+
+class HTTPRequestEntityTooLarge(HTTPClientError):
+ status_code = 413
+
+ def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None:
+ kwargs.setdefault(
+ "text",
+ "Maximum request body size {} exceeded, "
+ "actual body size {}".format(max_size, actual_size),
+ )
+ super().__init__(**kwargs)
+
+
+class HTTPRequestURITooLong(HTTPClientError):
+ status_code = 414
+
+
+class HTTPUnsupportedMediaType(HTTPClientError):
+ status_code = 415
+
+
+class HTTPRequestRangeNotSatisfiable(HTTPClientError):
+ status_code = 416
+
+
+class HTTPExpectationFailed(HTTPClientError):
+ status_code = 417
+
+
+class HTTPMisdirectedRequest(HTTPClientError):
+ status_code = 421
+
+
+class HTTPUnprocessableEntity(HTTPClientError):
+ status_code = 422
+
+
+class HTTPFailedDependency(HTTPClientError):
+ status_code = 424
+
+
+class HTTPUpgradeRequired(HTTPClientError):
+ status_code = 426
+
+
+class HTTPPreconditionRequired(HTTPClientError):
+ status_code = 428
+
+
+class HTTPTooManyRequests(HTTPClientError):
+ status_code = 429
+
+
+class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
+ status_code = 431
+
+
+class HTTPUnavailableForLegalReasons(HTTPClientError):
+ status_code = 451
+
+ def __init__(
+ self,
+ link: str,
+ *,
+ headers: Optional[LooseHeaders] = None,
+ reason: Optional[str] = None,
+ body: Any = None,
+ text: Optional[str] = None,
+ content_type: Optional[str] = None,
+ ) -> None:
+ super().__init__(
+ headers=headers,
+ reason=reason,
+ body=body,
+ text=text,
+ content_type=content_type,
+ )
+ self.headers["Link"] = '<%s>; rel="blocked-by"' % link
+ self.link = link
+
+
+############################################################
+# 5xx Server Error
+############################################################
+# Response status codes beginning with the digit "5" indicate cases in
+# which the server is aware that it has erred or is incapable of
+# performing the request. Except when responding to a HEAD request, the
+# server SHOULD include an entity containing an explanation of the error
+# situation, and whether it is a temporary or permanent condition. User
+# agents SHOULD display any included entity to the user. These response
+# codes are applicable to any request method.
+
+
+class HTTPServerError(HTTPError):
+ pass
+
+
+class HTTPInternalServerError(HTTPServerError):
+ status_code = 500
+
+
+class HTTPNotImplemented(HTTPServerError):
+ status_code = 501
+
+
+class HTTPBadGateway(HTTPServerError):
+ status_code = 502
+
+
+class HTTPServiceUnavailable(HTTPServerError):
+ status_code = 503
+
+
+class HTTPGatewayTimeout(HTTPServerError):
+ status_code = 504
+
+
+class HTTPVersionNotSupported(HTTPServerError):
+ status_code = 505
+
+
+class HTTPVariantAlsoNegotiates(HTTPServerError):
+ status_code = 506
+
+
+class HTTPInsufficientStorage(HTTPServerError):
+ status_code = 507
+
+
+class HTTPNotExtended(HTTPServerError):
+ status_code = 510
+
+
+class HTTPNetworkAuthenticationRequired(HTTPServerError):
+ status_code = 511
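+
+
+# Illustrative usage (comment only; the handler is made up): these exceptions
+# are also Response objects, so a handler may either raise or return them:
+#
+#     async def handler(request):
+#         if "id" not in request.query:
+#             raise HTTPBadRequest(text="missing id")
+#         return HTTPOk(text="found")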
diff --git a/contrib/python/aiohttp/aiohttp/web_fileresponse.py b/contrib/python/aiohttp/aiohttp/web_fileresponse.py
new file mode 100644
index 0000000000..f41ed3fd0a
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_fileresponse.py
@@ -0,0 +1,288 @@
+import asyncio
+import mimetypes
+import os
+import pathlib
+import sys
+from typing import ( # noqa
+ IO,
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Iterator,
+ List,
+ Optional,
+ Tuple,
+ Union,
+ cast,
+)
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import ETAG_ANY, ETag
+from .typedefs import Final, LooseHeaders
+from .web_exceptions import (
+ HTTPNotModified,
+ HTTPPartialContent,
+ HTTPPreconditionFailed,
+ HTTPRequestRangeNotSatisfiable,
+)
+from .web_response import StreamResponse
+
+__all__ = ("FileResponse",)
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_request import BaseRequest
+
+
+_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
+
+
+NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
+
+
+class FileResponse(StreamResponse):
+ """A response object can be used to send files."""
+
+ def __init__(
+ self,
+ path: Union[str, pathlib.Path],
+ chunk_size: int = 256 * 1024,
+ status: int = 200,
+ reason: Optional[str] = None,
+ headers: Optional[LooseHeaders] = None,
+ ) -> None:
+ super().__init__(status=status, reason=reason, headers=headers)
+
+ if isinstance(path, str):
+ path = pathlib.Path(path)
+
+ self._path = path
+ self._chunk_size = chunk_size
+
+ async def _sendfile_fallback(
+ self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
+ ) -> AbstractStreamWriter:
+        # To keep memory usage low, fobj is transferred in chunks
+ # controlled by the constructor's chunk_size argument.
+
+ chunk_size = self._chunk_size
+ loop = asyncio.get_event_loop()
+
+ await loop.run_in_executor(None, fobj.seek, offset)
+
+ chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
+ while chunk:
+ await writer.write(chunk)
+ count = count - chunk_size
+ if count <= 0:
+ break
+ chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))
+
+ await writer.drain()
+ return writer
+
+ async def _sendfile(
+ self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
+ ) -> AbstractStreamWriter:
+ writer = await super().prepare(request)
+ assert writer is not None
+
+ if NOSENDFILE or sys.version_info < (3, 7) or self.compression:
+ return await self._sendfile_fallback(writer, fobj, offset, count)
+
+ loop = request._loop
+ transport = request.transport
+ assert transport is not None
+
+ try:
+ await loop.sendfile(transport, fobj, offset, count)
+ except NotImplementedError:
+ return await self._sendfile_fallback(writer, fobj, offset, count)
+
+ await super().write_eof()
+ return writer
+
+ @staticmethod
+ def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool:
+ if len(etags) == 1 and etags[0].value == ETAG_ANY:
+ return True
+ return any(etag.value == etag_value for etag in etags if not etag.is_weak)
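+
+    # Illustrative semantics (comment only, assuming helpers.ETag(value, is_weak)):
+    #
+    #     etags = (ETag("abc", False), ETag("xyz", True))
+    #     _strong_etag_match("abc", etags)  # True
+    #     _strong_etag_match("xyz", etags)  # False: weak entries never match
+    #     # under RFC 7232 strong comparison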
+
+ async def _not_modified(
+ self, request: "BaseRequest", etag_value: str, last_modified: float
+ ) -> Optional[AbstractStreamWriter]:
+ self.set_status(HTTPNotModified.status_code)
+ self._length_check = False
+ self.etag = etag_value # type: ignore[assignment]
+ self.last_modified = last_modified # type: ignore[assignment]
+        # Delete any Content-Length headers provided by the user. An HTTP
+        # 304 response should always have an empty body.
+ return await super().prepare(request)
+
+ async def _precondition_failed(
+ self, request: "BaseRequest"
+ ) -> Optional[AbstractStreamWriter]:
+ self.set_status(HTTPPreconditionFailed.status_code)
+ self.content_length = 0
+ return await super().prepare(request)
+
+ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
+ filepath = self._path
+
+ gzip = False
+ if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""):
+ gzip_path = filepath.with_name(filepath.name + ".gz")
+
+ if gzip_path.is_file():
+ filepath = gzip_path
+ gzip = True
+
+ loop = asyncio.get_event_loop()
+ st: os.stat_result = await loop.run_in_executor(None, filepath.stat)
+
+ etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
+ last_modified = st.st_mtime
+
+ # https://tools.ietf.org/html/rfc7232#section-6
+ ifmatch = request.if_match
+ if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch):
+ return await self._precondition_failed(request)
+
+ unmodsince = request.if_unmodified_since
+ if (
+ unmodsince is not None
+ and ifmatch is None
+ and st.st_mtime > unmodsince.timestamp()
+ ):
+ return await self._precondition_failed(request)
+
+ ifnonematch = request.if_none_match
+ if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch):
+ return await self._not_modified(request, etag_value, last_modified)
+
+ modsince = request.if_modified_since
+ if (
+ modsince is not None
+ and ifnonematch is None
+ and st.st_mtime <= modsince.timestamp()
+ ):
+ return await self._not_modified(request, etag_value, last_modified)
+
+ if hdrs.CONTENT_TYPE not in self.headers:
+ ct, encoding = mimetypes.guess_type(str(filepath))
+ if not ct:
+ ct = "application/octet-stream"
+ should_set_ct = True
+ else:
+ encoding = "gzip" if gzip else None
+ should_set_ct = False
+
+ status = self._status
+ file_size = st.st_size
+ count = file_size
+
+ start = None
+
+ ifrange = request.if_range
+ if ifrange is None or st.st_mtime <= ifrange.timestamp():
+            # If-Range header check:
+            # condition = cached date >= last modification date
+            # return 206 if True else 200.
+            # if False:
+            #   the Range header is not processed and 200 is returned
+            # if True but the Range header is missing:
+            #   200 is returned
+ try:
+ rng = request.http_range
+ start = rng.start
+ end = rng.stop
+ except ValueError:
+ # https://tools.ietf.org/html/rfc7233:
+ # A server generating a 416 (Range Not Satisfiable) response to
+ # a byte-range request SHOULD send a Content-Range header field
+ # with an unsatisfied-range value.
+ # The complete-length in a 416 response indicates the current
+ # length of the selected representation.
+ #
+ # Will do the same below. Many servers ignore this and do not
+ # send a Content-Range header with HTTP 416
+ self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
+ self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
+ return await super().prepare(request)
+
+ # If a range request has been made, convert start, end slice
+ # notation into file pointer offset and count
+ if start is not None or end is not None:
+ if start < 0 and end is None: # return tail of file
+ start += file_size
+ if start < 0:
+                        # e.g. Range: bytes=-1000 for a file of only 200
+                        # bytes would otherwise leave start negative
+ start = 0
+ count = file_size - start
+ else:
+                    # rfc7233: If the last-byte-pos value is
+ # absent, or if the value is greater than or equal to
+ # the current length of the representation data,
+ # the byte range is interpreted as the remainder
+ # of the representation (i.e., the server replaces the
+ # value of last-byte-pos with a value that is one less than
+ # the current length of the selected representation).
+ count = (
+ min(end if end is not None else file_size, file_size) - start
+ )
+
+ if start >= file_size:
+ # HTTP 416 should be returned in this case.
+ #
+ # According to https://tools.ietf.org/html/rfc7233:
+ # If a valid byte-range-set includes at least one
+ # byte-range-spec with a first-byte-pos that is less than
+ # the current length of the representation, or at least one
+ # suffix-byte-range-spec with a non-zero suffix-length,
+ # then the byte-range-set is satisfiable. Otherwise, the
+ # byte-range-set is unsatisfiable.
+ self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
+ self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
+ return await super().prepare(request)
+
+ status = HTTPPartialContent.status_code
+                # Even though you are sending the whole file, you should still
+                # return an HTTP 206 for a Range request.
+ self.set_status(status)
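+            # Worked example (illustrative): for a 1000-byte file and
+            # 'Range: bytes=200-499', http_range yields start=200 and end=500
+            # (slice-exclusive), so count = min(500, 1000) - 200 = 300 and a
+            # 206 is sent with 'Content-Range: bytes 200-499/1000'.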
+
+ if should_set_ct:
+ self.content_type = ct # type: ignore[assignment]
+ if encoding:
+ self.headers[hdrs.CONTENT_ENCODING] = encoding
+ if gzip:
+ self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
+
+ self.etag = etag_value # type: ignore[assignment]
+ self.last_modified = st.st_mtime # type: ignore[assignment]
+ self.content_length = count
+
+ self.headers[hdrs.ACCEPT_RANGES] = "bytes"
+
+ real_start = cast(int, start)
+
+ if status == HTTPPartialContent.status_code:
+ self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
+ real_start, real_start + count - 1, file_size
+ )
+
+ # If we are sending 0 bytes calling sendfile() will throw a ValueError
+ if count == 0 or request.method == hdrs.METH_HEAD or self.status in [204, 304]:
+ return await super().prepare(request)
+
+ fobj = await loop.run_in_executor(None, filepath.open, "rb")
+        if start:  # start may be None or 0 here; both mean offset 0
+ offset = start
+ else:
+ offset = 0
+
+ try:
+ return await self._sendfile(request, fobj, offset, count)
+ finally:
+ await loop.run_in_executor(None, fobj.close)
diff --git a/contrib/python/aiohttp/aiohttp/web_log.py b/contrib/python/aiohttp/aiohttp/web_log.py
new file mode 100644
index 0000000000..a977c1ba5c
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_log.py
@@ -0,0 +1,208 @@
+import datetime
+import functools
+import logging
+import os
+import re
+from collections import namedtuple
+from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa
+
+from .abc import AbstractAccessLogger
+from .web_request import BaseRequest
+from .web_response import StreamResponse
+
+KeyMethod = namedtuple("KeyMethod", "key method")
+
+
+class AccessLogger(AbstractAccessLogger):
+ """Helper object to log access.
+
+ Usage:
+ log = logging.getLogger("spam")
+ log_format = "%a %{User-Agent}i"
+ access_logger = AccessLogger(log, log_format)
+ access_logger.log(request, response, time)
+
+ Format:
+ %% The percent sign
+ %a Remote IP-address (IP-address of proxy if using reverse proxy)
+ %t Time when the request was started to process
+ %P The process ID of the child that serviced the request
+ %r First line of request
+ %s Response status code
+ %b Size of response in bytes, including HTTP headers
+ %T Time taken to serve the request, in seconds
+ %Tf Time taken to serve the request, in seconds with floating fraction
+ in .06f format
+ %D Time taken to serve the request, in microseconds
+ %{FOO}i request.headers['FOO']
+ %{FOO}o response.headers['FOO']
+ %{FOO}e os.environ['FOO']
+
+ """
+
+ LOG_FORMAT_MAP = {
+ "a": "remote_address",
+ "t": "request_start_time",
+ "P": "process_id",
+ "r": "first_request_line",
+ "s": "response_status",
+ "b": "response_size",
+ "T": "request_time",
+ "Tf": "request_time_frac",
+ "D": "request_time_micro",
+ "i": "request_header",
+ "o": "response_header",
+ }
+
+ LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
+ FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
+ CLEANUP_RE = re.compile(r"(%[^s])")
+ _FORMAT_CACHE = {} # type: Dict[str, Tuple[str, List[KeyMethod]]]
+
+ def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
+ """Initialise the logger.
+
+ logger is a logger object to be used for logging.
+        log_format is a string with an Apache-compatible log format description.
+
+ """
+ super().__init__(logger, log_format=log_format)
+
+ _compiled_format = AccessLogger._FORMAT_CACHE.get(log_format)
+ if not _compiled_format:
+ _compiled_format = self.compile_format(log_format)
+ AccessLogger._FORMAT_CACHE[log_format] = _compiled_format
+
+ self._log_format, self._methods = _compiled_format
+
+ def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]:
+ """Translate log_format into form usable by modulo formatting
+
+ All known atoms will be replaced with %s
+ Also methods for formatting of those atoms will be added to
+ _methods in appropriate order
+
+ For example we have log_format = "%a %t"
+ This format will be translated to "%s %s"
+ Also contents of _methods will be
+ [self._format_a, self._format_t]
+ These method will be called and results will be passed
+ to translated string format.
+
+ Each _format_* method receive 'args' which is list of arguments
+ given to self.log
+
+ Exceptions are _format_e, _format_i and _format_o methods which
+ also receive key name (by functools.partial)
+
+ """
+ # list of (key, method) tuples, we don't use an OrderedDict as users
+ # can repeat the same key more than once
+ methods = list()
+
+ for atom in self.FORMAT_RE.findall(log_format):
+ if atom[1] == "":
+ format_key1 = self.LOG_FORMAT_MAP[atom[0]]
+ m = getattr(AccessLogger, "_format_%s" % atom[0])
+ key_method = KeyMethod(format_key1, m)
+ else:
+ format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1])
+ m = getattr(AccessLogger, "_format_%s" % atom[2])
+ key_method = KeyMethod(format_key2, functools.partial(m, atom[1]))
+
+ methods.append(key_method)
+
+ log_format = self.FORMAT_RE.sub(r"%s", log_format)
+ log_format = self.CLEANUP_RE.sub(r"%\1", log_format)
+ return log_format, methods
+
+ @staticmethod
+ def _format_i(
+ key: str, request: BaseRequest, response: StreamResponse, time: float
+ ) -> str:
+ if request is None:
+ return "(no headers)"
+
+ # suboptimal, make istr(key) once
+ return request.headers.get(key, "-")
+
+ @staticmethod
+ def _format_o(
+ key: str, request: BaseRequest, response: StreamResponse, time: float
+ ) -> str:
+ # suboptimal, make istr(key) once
+ return response.headers.get(key, "-")
+
+ @staticmethod
+ def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ if request is None:
+ return "-"
+ ip = request.remote
+ return ip if ip is not None else "-"
+
+ @staticmethod
+ def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ now = datetime.datetime.utcnow()
+ start_time = now - datetime.timedelta(seconds=time)
+ return start_time.strftime("[%d/%b/%Y:%H:%M:%S +0000]")
+
+ @staticmethod
+ def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ return "<%s>" % os.getpid()
+
+ @staticmethod
+ def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ if request is None:
+ return "-"
+ return "{} {} HTTP/{}.{}".format(
+ request.method,
+ request.path_qs,
+ request.version.major,
+ request.version.minor,
+ )
+
+ @staticmethod
+ def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int:
+ return response.status
+
+ @staticmethod
+ def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int:
+ return response.body_length
+
+ @staticmethod
+ def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ return str(round(time))
+
+ @staticmethod
+ def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ return "%06f" % time
+
+ @staticmethod
+ def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str:
+ return str(round(time * 1000000))
+
+ def _format_line(
+ self, request: BaseRequest, response: StreamResponse, time: float
+ ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]:
+ return [(key, method(request, response, time)) for key, method in self._methods]
+
+ def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
+ try:
+ fmt_info = self._format_line(request, response, time)
+
+ values = list()
+ extra = dict()
+ for key, value in fmt_info:
+ values.append(value)
+
+ if key.__class__ is str:
+ extra[key] = value
+ else:
+ k1, k2 = key # type: ignore[misc]
+ dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type]
+ dct[k2] = value # type: ignore[index,has-type]
+ extra[k1] = dct # type: ignore[has-type,assignment]
+
+ self.logger.info(self._log_format % tuple(values), extra=extra)
+ except Exception:
+ self.logger.exception("Error in logging")
diff --git a/contrib/python/aiohttp/aiohttp/web_middlewares.py b/contrib/python/aiohttp/aiohttp/web_middlewares.py
new file mode 100644
index 0000000000..fabcc449a2
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_middlewares.py
@@ -0,0 +1,119 @@
+import re
+from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar
+
+from .typedefs import Handler
+from .web_exceptions import HTTPPermanentRedirect, _HTTPMove
+from .web_request import Request
+from .web_response import StreamResponse
+from .web_urldispatcher import SystemRoute
+
+__all__ = (
+ "middleware",
+ "normalize_path_middleware",
+)
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_app import Application
+
+_Func = TypeVar("_Func")
+
+
+async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
+ alt_request = request.clone(rel_url=path)
+
+ match_info = await request.app.router.resolve(alt_request)
+ alt_request._match_info = match_info
+
+ if match_info.http_exception is None:
+ return True, alt_request
+
+ return False, request
+
+
+def middleware(f: _Func) -> _Func:
+ f.__middleware_version__ = 1 # type: ignore[attr-defined]
+ return f
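+
+# Usage sketch (hedged; the middleware name is illustrative):
+#
+#     @middleware
+#     async def passthrough_middleware(request, handler):
+#         # pre-processing could go here
+#         return await handler(request)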
+
+
+_Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]]
+
+
+def normalize_path_middleware(
+ *,
+ append_slash: bool = True,
+ remove_slash: bool = False,
+ merge_slashes: bool = True,
+ redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect,
+) -> _Middleware:
+ """Factory for producing a middleware that normalizes the path of a request.
+
+ Normalizing means:
+    - Add or remove a trailing slash from the path.
+    - Replace double slashes with a single one.
+
+    The middleware returns as soon as it finds a path that resolves
+    correctly. If both merge and append/remove are enabled, the order is:
+    1) merge slashes
+    2) append/remove slash
+    3) both merge slashes and append/remove slash.
+    If the path resolves with at least one of those conditions, it will
+    redirect to the new path.
+
+    Only one of `append_slash` and `remove_slash` can be enabled. If both
+    are `True`, the factory raises an assertion error.
+
+    If `append_slash` is `True`, the middleware appends a slash when
+    needed: if a resource is defined with a trailing slash and the request
+    comes without one, it is appended automatically.
+
+    If `remove_slash` is `True`, `append_slash` must be `False`. When
+    enabled, the middleware removes trailing slashes and redirects if the
+    resource without the slash is defined.
+
+    If `merge_slashes` is `True`, multiple consecutive slashes in the
+    path are merged into one.
+ """
+ correct_configuration = not (append_slash and remove_slash)
+ assert correct_configuration, "Cannot both remove and append slash"
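+    # Example (illustrative): with merge_slashes=True and append_slash=True,
+    # a request for '//foo//bar' is checked against '/foo/bar', '//foo//bar/'
+    # and '/foo/bar/' (each first stripped of leading double slashes, see
+    # GHSA-v6wp-4m6f-gcjg below) until one of them resolves.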
+
+ @middleware
+ async def impl(request: Request, handler: Handler) -> StreamResponse:
+ if isinstance(request.match_info.route, SystemRoute):
+ paths_to_check = []
+ if "?" in request.raw_path:
+ path, query = request.raw_path.split("?", 1)
+ query = "?" + query
+ else:
+ query = ""
+ path = request.raw_path
+
+ if merge_slashes:
+ paths_to_check.append(re.sub("//+", "/", path))
+ if append_slash and not request.path.endswith("/"):
+ paths_to_check.append(path + "/")
+ if remove_slash and request.path.endswith("/"):
+ paths_to_check.append(path[:-1])
+ if merge_slashes and append_slash:
+ paths_to_check.append(re.sub("//+", "/", path + "/"))
+ if merge_slashes and remove_slash:
+ merged_slashes = re.sub("//+", "/", path)
+ paths_to_check.append(merged_slashes[:-1])
+
+ for path in paths_to_check:
+ path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg
+ resolves, request = await _check_request_resolves(request, path)
+ if resolves:
+ raise redirect_class(request.raw_path + query)
+
+ return await handler(request)
+
+ return impl
+
+
+def _fix_request_current_app(app: "Application") -> _Middleware:
+ @middleware
+ async def impl(request: Request, handler: Handler) -> StreamResponse:
+ with request.match_info.set_current_app(app):
+ return await handler(request)
+
+ return impl
diff --git a/contrib/python/aiohttp/aiohttp/web_protocol.py b/contrib/python/aiohttp/aiohttp/web_protocol.py
new file mode 100644
index 0000000000..ad0c0498e3
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_protocol.py
@@ -0,0 +1,681 @@
+import asyncio
+import asyncio.streams
+import traceback
+import warnings
+from collections import deque
+from contextlib import suppress
+from html import escape as html_escape
+from http import HTTPStatus
+from logging import Logger
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Deque,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+import attr
+import yarl
+
+from .abc import AbstractAccessLogger, AbstractStreamWriter
+from .base_protocol import BaseProtocol
+from .helpers import ceil_timeout
+from .http import (
+ HttpProcessingError,
+ HttpRequestParser,
+ HttpVersion10,
+ RawRequestMessage,
+ StreamWriter,
+)
+from .log import access_logger, server_logger
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .tcp_helpers import tcp_keepalive
+from .web_exceptions import HTTPException
+from .web_log import AccessLogger
+from .web_request import BaseRequest
+from .web_response import Response, StreamResponse
+
+__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_server import Server
+
+
+_RequestFactory = Callable[
+ [
+ RawRequestMessage,
+ StreamReader,
+ "RequestHandler",
+ AbstractStreamWriter,
+ "asyncio.Task[None]",
+ ],
+ BaseRequest,
+]
+
+_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]
+
+ERROR = RawRequestMessage(
+ "UNKNOWN",
+ "/",
+ HttpVersion10,
+ {}, # type: ignore[arg-type]
+ {}, # type: ignore[arg-type]
+ True,
+ None,
+ False,
+ False,
+ yarl.URL("/"),
+)
+
+
+class RequestPayloadError(Exception):
+ """Payload parsing error."""
+
+
+class PayloadAccessError(Exception):
+ """Payload was accessed after response was sent."""
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class _ErrInfo:
+ status: int
+ exc: BaseException
+ message: str
+
+
+_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
+
+
+class RequestHandler(BaseProtocol):
+ """HTTP protocol implementation.
+
+    RequestHandler handles incoming HTTP requests. It reads the request
+    line, request headers and request payload, then calls the
+    handle_request() method. By default it always returns a 404 response.
+
+    RequestHandler handles errors in the incoming request, like a bad
+    status line, bad headers or an incomplete payload. If any error
+    occurs, the connection gets closed.
+
+ keepalive_timeout -- number of seconds before closing
+ keep-alive connection
+
+ tcp_keepalive -- TCP keep-alive is on, default is on
+
+ debug -- enable debug mode
+
+ logger -- custom logger object
+
+ access_log_class -- custom class for access_logger
+
+ access_log -- custom logging object
+
+ access_log_format -- access log format string
+
+ loop -- Optional event loop
+
+ max_line_size -- Optional maximum header line size
+
+ max_field_size -- Optional maximum header field size
+
+ max_headers -- Optional maximum header size
+
+ """
+
+ KEEPALIVE_RESCHEDULE_DELAY = 1
+
+ __slots__ = (
+ "_request_count",
+ "_keepalive",
+ "_manager",
+ "_request_handler",
+ "_request_factory",
+ "_tcp_keepalive",
+ "_keepalive_time",
+ "_keepalive_handle",
+ "_keepalive_timeout",
+ "_lingering_time",
+ "_messages",
+ "_message_tail",
+ "_waiter",
+ "_task_handler",
+ "_upgrade",
+ "_payload_parser",
+ "_request_parser",
+ "_reading_paused",
+ "logger",
+ "debug",
+ "access_log",
+ "access_logger",
+ "_close",
+ "_force_close",
+ "_current_request",
+ )
+
+ def __init__(
+ self,
+ manager: "Server",
+ *,
+ loop: asyncio.AbstractEventLoop,
+ keepalive_timeout: float = 75.0, # NGINX default is 75 secs
+ tcp_keepalive: bool = True,
+ logger: Logger = server_logger,
+ access_log_class: Type[AbstractAccessLogger] = AccessLogger,
+ access_log: Logger = access_logger,
+ access_log_format: str = AccessLogger.LOG_FORMAT,
+ debug: bool = False,
+ max_line_size: int = 8190,
+ max_headers: int = 32768,
+ max_field_size: int = 8190,
+ lingering_time: float = 10.0,
+ read_bufsize: int = 2 ** 16,
+ auto_decompress: bool = True,
+ ):
+ super().__init__(loop)
+
+ self._request_count = 0
+ self._keepalive = False
+ self._current_request = None # type: Optional[BaseRequest]
+ self._manager = manager # type: Optional[Server]
+ self._request_handler: Optional[_RequestHandler] = manager.request_handler
+ self._request_factory: Optional[_RequestFactory] = manager.request_factory
+
+ self._tcp_keepalive = tcp_keepalive
+ # placeholder to be replaced on keepalive timeout setup
+ self._keepalive_time = 0.0
+ self._keepalive_handle = None # type: Optional[asyncio.Handle]
+ self._keepalive_timeout = keepalive_timeout
+ self._lingering_time = float(lingering_time)
+
+ self._messages: Deque[_MsgType] = deque()
+ self._message_tail = b""
+
+ self._waiter = None # type: Optional[asyncio.Future[None]]
+ self._task_handler = None # type: Optional[asyncio.Task[None]]
+
+ self._upgrade = False
+ self._payload_parser = None # type: Any
+ self._request_parser = HttpRequestParser(
+ self,
+ loop,
+ read_bufsize,
+ max_line_size=max_line_size,
+ max_field_size=max_field_size,
+ max_headers=max_headers,
+ payload_exception=RequestPayloadError,
+ auto_decompress=auto_decompress,
+ ) # type: Optional[HttpRequestParser]
+
+ self.logger = logger
+ self.debug = debug
+ self.access_log = access_log
+ if access_log:
+ self.access_logger = access_log_class(
+ access_log, access_log_format
+ ) # type: Optional[AbstractAccessLogger]
+ else:
+ self.access_logger = None
+
+ self._close = False
+ self._force_close = False
+
+ def __repr__(self) -> str:
+ return "<{} {}>".format(
+ self.__class__.__name__,
+ "connected" if self.transport is not None else "disconnected",
+ )
+
+ @property
+ def keepalive_timeout(self) -> float:
+ return self._keepalive_timeout
+
+ async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
+ """Do worker process exit preparations.
+
+ We need to clean up everything and stop accepting requests.
+ It is especially important for keep-alive connections.
+ """
+ self._force_close = True
+
+ if self._keepalive_handle is not None:
+ self._keepalive_handle.cancel()
+
+ if self._waiter:
+ self._waiter.cancel()
+
+ # wait for handlers
+ with suppress(asyncio.CancelledError, asyncio.TimeoutError):
+ async with ceil_timeout(timeout):
+ if self._current_request is not None:
+ self._current_request._cancel(asyncio.CancelledError())
+
+ if self._task_handler is not None and not self._task_handler.done():
+ await self._task_handler
+
+ # force-close non-idle handler
+ if self._task_handler is not None:
+ self._task_handler.cancel()
+
+ if self.transport is not None:
+ self.transport.close()
+ self.transport = None
+
+ def connection_made(self, transport: asyncio.BaseTransport) -> None:
+ super().connection_made(transport)
+
+ real_transport = cast(asyncio.Transport, transport)
+ if self._tcp_keepalive:
+ tcp_keepalive(real_transport)
+
+ self._task_handler = self._loop.create_task(self.start())
+ assert self._manager is not None
+ self._manager.connection_made(self, real_transport)
+
+ def connection_lost(self, exc: Optional[BaseException]) -> None:
+ if self._manager is None:
+ return
+ self._manager.connection_lost(self, exc)
+
+ super().connection_lost(exc)
+
+ self._manager = None
+ self._force_close = True
+ self._request_factory = None
+ self._request_handler = None
+ self._request_parser = None
+
+ if self._keepalive_handle is not None:
+ self._keepalive_handle.cancel()
+
+ if self._current_request is not None:
+ if exc is None:
+ exc = ConnectionResetError("Connection lost")
+ self._current_request._cancel(exc)
+
+ if self._task_handler is not None:
+ self._task_handler.cancel()
+ if self._waiter is not None:
+ self._waiter.cancel()
+
+ self._task_handler = None
+
+ if self._payload_parser is not None:
+ self._payload_parser.feed_eof()
+ self._payload_parser = None
+
+ def set_parser(self, parser: Any) -> None:
+ # Actual type is WebReader
+ assert self._payload_parser is None
+
+ self._payload_parser = parser
+
+ if self._message_tail:
+ self._payload_parser.feed_data(self._message_tail)
+ self._message_tail = b""
+
+ def eof_received(self) -> None:
+ pass
+
+ def data_received(self, data: bytes) -> None:
+ if self._force_close or self._close:
+ return
+ # parse http messages
+ messages: Sequence[_MsgType]
+ if self._payload_parser is None and not self._upgrade:
+ assert self._request_parser is not None
+ try:
+ messages, upgraded, tail = self._request_parser.feed_data(data)
+ except HttpProcessingError as exc:
+ messages = [
+ (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
+ ]
+ upgraded = False
+ tail = b""
+
+ for msg, payload in messages or ():
+ self._request_count += 1
+ self._messages.append((msg, payload))
+
+ waiter = self._waiter
+ if messages and waiter is not None and not waiter.done():
+ # don't set result twice
+ waiter.set_result(None)
+
+ self._upgrade = upgraded
+ if upgraded and tail:
+ self._message_tail = tail
+
+ # no parser, just store
+ elif self._payload_parser is None and self._upgrade and data:
+ self._message_tail += data
+
+ # feed payload
+ elif data:
+ eof, tail = self._payload_parser.feed_data(data)
+ if eof:
+ self.close()
+
+ def keep_alive(self, val: bool) -> None:
+ """Set keep-alive connection mode.
+
+ :param bool val: new state.
+ """
+ self._keepalive = val
+ if self._keepalive_handle:
+ self._keepalive_handle.cancel()
+ self._keepalive_handle = None
+
+ def close(self) -> None:
+ """Close connection.
+
+        Stop accepting new pipelined messages and close the
+        connection when handlers are done processing messages.
+ """
+ self._close = True
+ if self._waiter:
+ self._waiter.cancel()
+
+ def force_close(self) -> None:
+ """Forcefully close connection."""
+ self._force_close = True
+ if self._waiter:
+ self._waiter.cancel()
+ if self.transport is not None:
+ self.transport.close()
+ self.transport = None
+
+ def log_access(
+ self, request: BaseRequest, response: StreamResponse, time: float
+ ) -> None:
+ if self.access_logger is not None:
+ self.access_logger.log(request, response, self._loop.time() - time)
+
+ def log_debug(self, *args: Any, **kw: Any) -> None:
+ if self.debug:
+ self.logger.debug(*args, **kw)
+
+ def log_exception(self, *args: Any, **kw: Any) -> None:
+ self.logger.exception(*args, **kw)
+
+ def _process_keepalive(self) -> None:
+ if self._force_close or not self._keepalive:
+ return
+
+ next = self._keepalive_time + self._keepalive_timeout
+
+ # handler in idle state
+ if self._waiter:
+ if self._loop.time() > next:
+ self.force_close()
+ return
+
+ # not all request handlers are done,
+ # reschedule itself to next second
+ self._keepalive_handle = self._loop.call_later(
+ self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive
+ )
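+        # Example timeline (illustrative): with keepalive_timeout=75.0, an
+        # idle connection is force-closed once loop.time() passes
+        # _keepalive_time + 75; while handlers are busy, the check just
+        # reschedules itself every KEEPALIVE_RESCHEDULE_DELAY seconds.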
+
+ async def _handle_request(
+ self,
+ request: BaseRequest,
+ start_time: float,
+ request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
+ ) -> Tuple[StreamResponse, bool]:
+ assert self._request_handler is not None
+ try:
+ try:
+ self._current_request = request
+ resp = await request_handler(request)
+ finally:
+ self._current_request = None
+ except HTTPException as exc:
+ resp = exc
+ reset = await self.finish_response(request, resp, start_time)
+ except asyncio.CancelledError:
+ raise
+ except asyncio.TimeoutError as exc:
+ self.log_debug("Request handler timed out.", exc_info=exc)
+ resp = self.handle_error(request, 504)
+ reset = await self.finish_response(request, resp, start_time)
+ except Exception as exc:
+ resp = self.handle_error(request, 500, exc)
+ reset = await self.finish_response(request, resp, start_time)
+ else:
+ # Deprecation warning (See #2415)
+ if getattr(resp, "__http_exception__", False):
+ warnings.warn(
+ "returning HTTPException object is deprecated "
+ "(#2415) and will be removed, "
+ "please raise the exception instead",
+ DeprecationWarning,
+ )
+
+ reset = await self.finish_response(request, resp, start_time)
+
+ return resp, reset
+
+ async def start(self) -> None:
+ """Process incoming request.
+
+        It reads the request line, request headers and request payload, then
+        calls the handle_request() method. Subclasses have to override
+        handle_request(). start() handles various exceptions in request
+        or response handling. The connection is always closed unless
+        keep_alive(True) was specified.
+ """
+ loop = self._loop
+ handler = self._task_handler
+ assert handler is not None
+ manager = self._manager
+ assert manager is not None
+ keepalive_timeout = self._keepalive_timeout
+ resp = None
+ assert self._request_factory is not None
+ assert self._request_handler is not None
+
+ while not self._force_close:
+ if not self._messages:
+ try:
+ # wait for next request
+ self._waiter = loop.create_future()
+ await self._waiter
+ except asyncio.CancelledError:
+ break
+ finally:
+ self._waiter = None
+
+ message, payload = self._messages.popleft()
+
+ start = loop.time()
+
+ manager.requests_count += 1
+ writer = StreamWriter(self, loop)
+ if isinstance(message, _ErrInfo):
+ # make request_factory work
+ request_handler = self._make_error_handler(message)
+ message = ERROR
+ else:
+ request_handler = self._request_handler
+
+ request = self._request_factory(message, payload, self, writer, handler)
+ try:
+                # a new task is used to copy context vars (#3406)
+ task = self._loop.create_task(
+ self._handle_request(request, start, request_handler)
+ )
+ try:
+ resp, reset = await task
+ except (asyncio.CancelledError, ConnectionError):
+ self.log_debug("Ignored premature client disconnection")
+ break
+
+ # Drop the processed task from asyncio.Task.all_tasks() early
+ del task
+ if reset:
+ self.log_debug("Ignored premature client disconnection 2")
+ break
+
+ # notify server about keep-alive
+ self._keepalive = bool(resp.keep_alive)
+
+ # check payload
+ if not payload.is_eof():
+ lingering_time = self._lingering_time
+ if not self._force_close and lingering_time:
+ self.log_debug(
+ "Start lingering close timer for %s sec.", lingering_time
+ )
+
+ now = loop.time()
+ end_t = now + lingering_time
+
+ with suppress(asyncio.TimeoutError, asyncio.CancelledError):
+ while not payload.is_eof() and now < end_t:
+ async with ceil_timeout(end_t - now):
+ # read and ignore
+ await payload.readany()
+ now = loop.time()
+
+                    # if payload is still incomplete
+ if not payload.is_eof() and not self._force_close:
+ self.log_debug("Uncompleted request.")
+ self.close()
+
+ payload.set_exception(PayloadAccessError())
+
+ except asyncio.CancelledError:
+ self.log_debug("Ignored premature client disconnection ")
+ break
+ except RuntimeError as exc:
+ if self.debug:
+ self.log_exception("Unhandled runtime exception", exc_info=exc)
+ self.force_close()
+ except Exception as exc:
+ self.log_exception("Unhandled exception", exc_info=exc)
+ self.force_close()
+ finally:
+ if self.transport is None and resp is not None:
+ self.log_debug("Ignored premature client disconnection.")
+ elif not self._force_close:
+ if self._keepalive and not self._close:
+ # start keep-alive timer
+ if keepalive_timeout is not None:
+ now = self._loop.time()
+ self._keepalive_time = now
+ if self._keepalive_handle is None:
+ self._keepalive_handle = loop.call_at(
+ now + keepalive_timeout, self._process_keepalive
+ )
+ else:
+ break
+
+ # remove handler, close transport if no handlers left
+ if not self._force_close:
+ self._task_handler = None
+ if self.transport is not None:
+ self.transport.close()
+
+ async def finish_response(
+ self, request: BaseRequest, resp: StreamResponse, start_time: float
+ ) -> bool:
+ """Prepare the response and write_eof, then log access.
+
+        This has to be called within the context of any exception so the
+        access logger can get exception information. Returns True if the
+        client disconnects prematurely.
+ """
+ if self._request_parser is not None:
+ self._request_parser.set_upgraded(False)
+ self._upgrade = False
+ if self._message_tail:
+ self._request_parser.feed_data(self._message_tail)
+ self._message_tail = b""
+ try:
+ prepare_meth = resp.prepare
+ except AttributeError:
+ if resp is None:
+ raise RuntimeError("Missing return " "statement on request handler")
+ else:
+ raise RuntimeError(
+ "Web-handler should return "
+ "a response instance, "
+ "got {!r}".format(resp)
+ )
+ try:
+ await prepare_meth(request)
+ await resp.write_eof()
+ except ConnectionError:
+ self.log_access(request, resp, start_time)
+ return True
+ else:
+ self.log_access(request, resp, start_time)
+ return False
+
+ def handle_error(
+ self,
+ request: BaseRequest,
+ status: int = 500,
+ exc: Optional[BaseException] = None,
+ message: Optional[str] = None,
+ ) -> StreamResponse:
+ """Handle errors.
+
+        Returns an HTTP response with the given status code. Logs additional
+        information. It always closes the current connection.
+ """
+ self.log_exception("Error handling request", exc_info=exc)
+
+ # some data already got sent, connection is broken
+ if request.writer.output_size > 0:
+ raise ConnectionError(
+ "Response is sent already, cannot send another response "
+ "with the error message"
+ )
+
+ ct = "text/plain"
+ if status == HTTPStatus.INTERNAL_SERVER_ERROR:
+ title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
+ msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
+ tb = None
+ if self.debug:
+ with suppress(Exception):
+ tb = traceback.format_exc()
+
+ if "text/html" in request.headers.get("Accept", ""):
+ if tb:
+ tb = html_escape(tb)
+ msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
+ message = (
+ "<html><head>"
+ "<title>{title}</title>"
+ "</head><body>\n<h1>{title}</h1>"
+ "\n{msg}\n</body></html>\n"
+ ).format(title=title, msg=msg)
+ ct = "text/html"
+ else:
+ if tb:
+ msg = tb
+ message = title + "\n\n" + msg
+
+ resp = Response(status=status, text=message, content_type=ct)
+ resp.force_close()
+
+ return resp
+
+ def _make_error_handler(
+ self, err_info: _ErrInfo
+ ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
+ async def handler(request: BaseRequest) -> StreamResponse:
+ return self.handle_error(
+ request, err_info.status, err_info.exc, err_info.message
+ )
+
+ return handler
diff --git a/contrib/python/aiohttp/aiohttp/web_request.py b/contrib/python/aiohttp/aiohttp/web_request.py
new file mode 100644
index 0000000000..b3574cafb3
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_request.py
@@ -0,0 +1,874 @@
+import asyncio
+import datetime
+import io
+import re
+import socket
+import string
+import tempfile
+import types
+import warnings
+from http.cookies import SimpleCookie
+from types import MappingProxyType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Iterator,
+ Mapping,
+ MutableMapping,
+ Optional,
+ Pattern,
+ Tuple,
+ Union,
+ cast,
+)
+from urllib.parse import parse_qsl
+
+import attr
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
+from yarl import URL
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import (
+ DEBUG,
+ ETAG_ANY,
+ LIST_QUOTED_ETAG_RE,
+ ChainMapProxy,
+ ETag,
+ HeadersMixin,
+ parse_http_date,
+ reify,
+ sentinel,
+)
+from .http_parser import RawRequestMessage
+from .http_writer import HttpVersion
+from .multipart import BodyPartReader, MultipartReader
+from .streams import EmptyStreamReader, StreamReader
+from .typedefs import (
+ DEFAULT_JSON_DECODER,
+ Final,
+ JSONDecoder,
+ LooseHeaders,
+ RawHeaders,
+ StrOrURL,
+)
+from .web_exceptions import HTTPRequestEntityTooLarge
+from .web_response import StreamResponse
+
+__all__ = ("BaseRequest", "FileField", "Request")
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_app import Application
+ from .web_protocol import RequestHandler
+ from .web_urldispatcher import UrlMappingMatchInfo
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class FileField:
+ name: str
+ filename: str
+ file: io.BufferedReader
+ content_type: str
+ headers: "CIMultiDictProxy[str]"
+
+
+_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
+# '-' at the end to prevent interpretation as range in a char class
+
+_TOKEN: Final[str] = fr"[{_TCHAR}]+"
+
+_QDTEXT: Final[str] = r"[{}]".format(
+ r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
+)
+# qdtext includes 0x5C to escape 0x5D ('\]')
+# qdtext excludes obs-text (because obsoleted, and encoding not specified)
+
+_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"
+
+_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format(
+ qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
+)
+
+_FORWARDED_PAIR: Final[
+ str
+] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
+ token=_TOKEN, quoted_string=_QUOTED_STRING
+)
+
+_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")
+# same pattern as _QUOTED_PAIR but contains a capture group
+
+_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)
+
+############################################################
+# HTTP Request
+############################################################
+
+
+class BaseRequest(MutableMapping[str, Any], HeadersMixin):
+
+ POST_METHODS = {
+ hdrs.METH_PATCH,
+ hdrs.METH_POST,
+ hdrs.METH_PUT,
+ hdrs.METH_TRACE,
+ hdrs.METH_DELETE,
+ }
+
+ ATTRS = HeadersMixin.ATTRS | frozenset(
+ [
+ "_message",
+ "_protocol",
+ "_payload_writer",
+ "_payload",
+ "_headers",
+ "_method",
+ "_version",
+ "_rel_url",
+ "_post",
+ "_read_bytes",
+ "_state",
+ "_cache",
+ "_task",
+ "_client_max_size",
+ "_loop",
+ "_transport_sslcontext",
+ "_transport_peername",
+ ]
+ )
+
+ def __init__(
+ self,
+ message: RawRequestMessage,
+ payload: StreamReader,
+ protocol: "RequestHandler",
+ payload_writer: AbstractStreamWriter,
+ task: "asyncio.Task[None]",
+ loop: asyncio.AbstractEventLoop,
+ *,
+ client_max_size: int = 1024 ** 2,
+ state: Optional[Dict[str, Any]] = None,
+ scheme: Optional[str] = None,
+ host: Optional[str] = None,
+ remote: Optional[str] = None,
+ ) -> None:
+ if state is None:
+ state = {}
+ self._message = message
+ self._protocol = protocol
+ self._payload_writer = payload_writer
+
+ self._payload = payload
+ self._headers = message.headers
+ self._method = message.method
+ self._version = message.version
+ self._rel_url = message.url
+ self._post = (
+ None
+ ) # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]]
+ self._read_bytes = None # type: Optional[bytes]
+
+ self._state = state
+ self._cache = {} # type: Dict[str, Any]
+ self._task = task
+ self._client_max_size = client_max_size
+ self._loop = loop
+
+ transport = self._protocol.transport
+ assert transport is not None
+ self._transport_sslcontext = transport.get_extra_info("sslcontext")
+ self._transport_peername = transport.get_extra_info("peername")
+
+ if scheme is not None:
+ self._cache["scheme"] = scheme
+ if host is not None:
+ self._cache["host"] = host
+ if remote is not None:
+ self._cache["remote"] = remote
+
+ def clone(
+ self,
+ *,
+ method: str = sentinel,
+ rel_url: StrOrURL = sentinel,
+ headers: LooseHeaders = sentinel,
+ scheme: str = sentinel,
+ host: str = sentinel,
+ remote: str = sentinel,
+ ) -> "BaseRequest":
+ """Clone itself with replacement some attributes.
+
+ Creates and returns a new instance of Request object. If no parameters
+ are given, an exact copy is returned. If a parameter is not passed, it
+ will reuse the one from the current request object.
+ """
+ if self._read_bytes:
+ raise RuntimeError("Cannot clone request " "after reading its content")
+
+ dct = {} # type: Dict[str, Any]
+ if method is not sentinel:
+ dct["method"] = method
+ if rel_url is not sentinel:
+ new_url = URL(rel_url)
+ dct["url"] = new_url
+ dct["path"] = str(new_url)
+ if headers is not sentinel:
+ # a copy semantic
+ dct["headers"] = CIMultiDictProxy(CIMultiDict(headers))
+ dct["raw_headers"] = tuple(
+ (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
+ )
+
+ message = self._message._replace(**dct)
+
+ kwargs = {}
+ if scheme is not sentinel:
+ kwargs["scheme"] = scheme
+ if host is not sentinel:
+ kwargs["host"] = host
+ if remote is not sentinel:
+ kwargs["remote"] = remote
+
+ return self.__class__(
+ message,
+ self._payload,
+ self._protocol,
+ self._payload_writer,
+ self._task,
+ self._loop,
+ client_max_size=self._client_max_size,
+ state=self._state.copy(),
+ **kwargs,
+ )
+
+ @property
+ def task(self) -> "asyncio.Task[None]":
+ return self._task
+
+ @property
+ def protocol(self) -> "RequestHandler":
+ return self._protocol
+
+ @property
+ def transport(self) -> Optional[asyncio.Transport]:
+ if self._protocol is None:
+ return None
+ return self._protocol.transport
+
+ @property
+ def writer(self) -> AbstractStreamWriter:
+ return self._payload_writer
+
+ @reify
+ def message(self) -> RawRequestMessage:
+ warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
+ return self._message
+
+ @reify
+ def rel_url(self) -> URL:
+ return self._rel_url
+
+ @reify
+ def loop(self) -> asyncio.AbstractEventLoop:
+ warnings.warn(
+ "request.loop property is deprecated", DeprecationWarning, stacklevel=2
+ )
+ return self._loop
+
+ # MutableMapping API
+
+ def __getitem__(self, key: str) -> Any:
+ return self._state[key]
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self._state[key] = value
+
+ def __delitem__(self, key: str) -> None:
+ del self._state[key]
+
+ def __len__(self) -> int:
+ return len(self._state)
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self._state)
+
+ ########
+
+ @reify
+ def secure(self) -> bool:
+ """A bool indicating if the request is handled with SSL."""
+ return self.scheme == "https"
+
+ @reify
+ def forwarded(self) -> Tuple[Mapping[str, str], ...]:
+ """A tuple containing all parsed Forwarded header(s).
+
+ Makes an effort to parse Forwarded headers as specified by RFC 7239:
+
+ - It adds one (immutable) dictionary per Forwarded 'field-value', ie
+ per proxy. The element corresponds to the data in the Forwarded
+ field-value added by the first proxy encountered by the client. Each
+ subsequent item corresponds to those added by later proxies.
+ - It checks that every value has valid syntax in general as specified
+ in section 4: either a 'token' or a 'quoted-string'.
+ - It un-escapes found escape sequences.
+ - It does NOT validate 'by' and 'for' contents as specified in section
+ 6.
+ - It does NOT validate 'host' contents (Host ABNF).
+ - It does NOT validate 'proto' contents for valid URI scheme names.
+
+ Returns a tuple containing one or more immutable dicts
+ """
+ elems = []
+ for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
+ length = len(field_value)
+ pos = 0
+ need_separator = False
+ elem = {} # type: Dict[str, str]
+ elems.append(types.MappingProxyType(elem))
+ while 0 <= pos < length:
+ match = _FORWARDED_PAIR_RE.match(field_value, pos)
+ if match is not None: # got a valid forwarded-pair
+ if need_separator:
+ # bad syntax here, skip to next comma
+ pos = field_value.find(",", pos)
+ else:
+ name, value, port = match.groups()
+ if value[0] == '"':
+ # quoted string: remove quotes and unescape
+ value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
+ if port:
+ value += port
+ elem[name.lower()] = value
+ pos += len(match.group(0))
+ need_separator = True
+ elif field_value[pos] == ",": # next forwarded-element
+ need_separator = False
+ elem = {}
+ elems.append(types.MappingProxyType(elem))
+ pos += 1
+ elif field_value[pos] == ";": # next forwarded-pair
+ need_separator = False
+ pos += 1
+ elif field_value[pos] in " \t":
+ # Allow whitespace even between forwarded-pairs, though
+ # RFC 7239 doesn't. This simplifies code and is in line
+ # with Postel's law.
+ pos += 1
+ else:
+ # bad syntax here, skip to next comma
+ pos = field_value.find(",", pos)
+ return tuple(elems)
+
+ @reify
+ def scheme(self) -> str:
+ """A string representing the scheme of the request.
+
+        The scheme is resolved in this order:
+
+ - overridden value by .clone(scheme=new_scheme) call.
+ - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise.
+
+ 'http' or 'https'.
+ """
+ if self._transport_sslcontext:
+ return "https"
+ else:
+ return "http"
+
+ @reify
+ def method(self) -> str:
+ """Read only property for getting HTTP method.
+
+ The value is upper-cased str like 'GET', 'POST', 'PUT' etc.
+ """
+ return self._method
+
+ @reify
+ def version(self) -> HttpVersion:
+ """Read only property for getting HTTP version of request.
+
+ Returns aiohttp.protocol.HttpVersion instance.
+ """
+ return self._version
+
+ @reify
+ def host(self) -> str:
+ """Hostname of the request.
+
+ Hostname is resolved in this order:
+
+ - overridden value by .clone(host=new_host) call.
+ - HOST HTTP header
+ - socket.getfqdn() value
+ """
+ host = self._message.headers.get(hdrs.HOST)
+ if host is not None:
+ return host
+ return socket.getfqdn()
+
+ @reify
+ def remote(self) -> Optional[str]:
+ """Remote IP of client initiated HTTP request.
+
+ The IP is resolved in this order:
+
+ - overridden value by .clone(remote=new_remote) call.
+ - peername of opened socket
+ """
+ if self._transport_peername is None:
+ return None
+ if isinstance(self._transport_peername, (list, tuple)):
+ return str(self._transport_peername[0])
+ return str(self._transport_peername)
+
+ @reify
+ def url(self) -> URL:
+ url = URL.build(scheme=self.scheme, host=self.host)
+ return url.join(self._rel_url)
+
+ @reify
+ def path(self) -> str:
+ """The URL including *PATH INFO* without the host or scheme.
+
+ E.g., ``/app/blog``
+ """
+ return self._rel_url.path
+
+ @reify
+ def path_qs(self) -> str:
+ """The URL including PATH_INFO and the query string.
+
+        E.g., ``/app/blog?id=10``
+ """
+ return str(self._rel_url)
+
+ @reify
+ def raw_path(self) -> str:
+ """The URL including raw *PATH INFO* without the host or scheme.
+
+        Warning: the path is unquoted and may contain invalid URL characters.
+
+ E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
+ """
+ return self._message.path
+
+ @reify
+ def query(self) -> "MultiDictProxy[str]":
+ """A multidict with all the variables in the query string."""
+ return MultiDictProxy(self._rel_url.query)
+
+ @reify
+ def query_string(self) -> str:
+ """The query string in the URL.
+
+ E.g., id=10
+ """
+ return self._rel_url.query_string
+
+ @reify
+ def headers(self) -> "CIMultiDictProxy[str]":
+ """A case-insensitive multidict proxy with all headers."""
+ return self._headers
+
+ @reify
+ def raw_headers(self) -> RawHeaders:
+ """A sequence of pairs for all headers."""
+ return self._message.raw_headers
+
+ @reify
+ def if_modified_since(self) -> Optional[datetime.datetime]:
+ """The value of If-Modified-Since HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
+
+ @reify
+ def if_unmodified_since(self) -> Optional[datetime.datetime]:
+ """The value of If-Unmodified-Since HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
+
+ @staticmethod
+ def _etag_values(etag_header: str) -> Iterator[ETag]:
+ """Extract `ETag` objects from raw header."""
+ if etag_header == ETAG_ANY:
+ yield ETag(
+ is_weak=False,
+ value=ETAG_ANY,
+ )
+ else:
+ for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
+ is_weak, value, garbage = match.group(2, 3, 4)
+ # Any symbol captured by 4th group means
+ # that the following sequence is invalid.
+ if garbage:
+ break
+
+ yield ETag(
+ is_weak=bool(is_weak),
+ value=value,
+ )
+
+ @classmethod
+ def _if_match_or_none_impl(
+ cls, header_value: Optional[str]
+ ) -> Optional[Tuple[ETag, ...]]:
+ if not header_value:
+ return None
+
+ return tuple(cls._etag_values(header_value))
+
+ @reify
+ def if_match(self) -> Optional[Tuple[ETag, ...]]:
+ """The value of If-Match HTTP header, or None.
+
+ This header is represented as a `tuple` of `ETag` objects.
+ """
+ return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))
+
+ @reify
+ def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
+ """The value of If-None-Match HTTP header, or None.
+
+ This header is represented as a `tuple` of `ETag` objects.
+ """
+ return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))
+
+ @reify
+ def if_range(self) -> Optional[datetime.datetime]:
+ """The value of If-Range HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self.headers.get(hdrs.IF_RANGE))
+
+ @reify
+ def keep_alive(self) -> bool:
+ """Is keepalive enabled by client?"""
+ return not self._message.should_close
+
+ @reify
+ def cookies(self) -> Mapping[str, str]:
+ """Return request cookies.
+
+ A read-only dictionary-like object.
+ """
+ raw = self.headers.get(hdrs.COOKIE, "")
+ parsed = SimpleCookie(raw) # type: SimpleCookie[str]
+ return MappingProxyType({key: val.value for key, val in parsed.items()})
+
+ @reify
+ def http_range(self) -> slice:
+ """The content of Range HTTP header.
+
+ Return a slice instance.
+
+ """
+ rng = self._headers.get(hdrs.RANGE)
+ start, end = None, None
+ if rng is not None:
+ try:
+ pattern = r"^bytes=(\d*)-(\d*)$"
+ start, end = re.findall(pattern, rng)[0]
+ except IndexError: # pattern was not found in header
+ raise ValueError("range not in acceptable format")
+
+ end = int(end) if end else None
+ start = int(start) if start else None
+
+ if start is None and end is not None:
+ # end with no start is to return tail of content
+ start = -end
+ end = None
+
+ if start is not None and end is not None:
+ # end is inclusive in range header, exclusive for slice
+ end += 1
+
+ if start >= end:
+ raise ValueError("start cannot be after end")
+
+ if start is end is None: # No valid range supplied
+ raise ValueError("No start or end of range specified")
+
+ return slice(start, end, 1)
+
+ @reify
+ def content(self) -> StreamReader:
+ """Return raw payload stream."""
+ return self._payload
+
+ @property
+ def has_body(self) -> bool:
+ """Return True if request's HTTP BODY can be read, False otherwise."""
+ warnings.warn(
+ "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2
+ )
+ return not self._payload.at_eof()
+
+ @property
+ def can_read_body(self) -> bool:
+ """Return True if request's HTTP BODY can be read, False otherwise."""
+ return not self._payload.at_eof()
+
+ @reify
+ def body_exists(self) -> bool:
+ """Return True if request has HTTP BODY, False otherwise."""
+ return type(self._payload) is not EmptyStreamReader
+
+ async def release(self) -> None:
+ """Release request.
+
+ Eat unread part of HTTP BODY if present.
+ """
+ while not self._payload.at_eof():
+ await self._payload.readany()
+
+ async def read(self) -> bytes:
+ """Read request body if present.
+
+ Returns bytes object with full request content.
+ """
+ if self._read_bytes is None:
+ body = bytearray()
+ while True:
+ chunk = await self._payload.readany()
+ body.extend(chunk)
+ if self._client_max_size:
+ body_size = len(body)
+ if body_size >= self._client_max_size:
+ raise HTTPRequestEntityTooLarge(
+ max_size=self._client_max_size, actual_size=body_size
+ )
+ if not chunk:
+ break
+ self._read_bytes = bytes(body)
+ return self._read_bytes
+
+ async def text(self) -> str:
+ """Return BODY as text using encoding from .charset."""
+ bytes_body = await self.read()
+ encoding = self.charset or "utf-8"
+ return bytes_body.decode(encoding)
+
+ async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any:
+ """Return BODY as JSON."""
+ body = await self.text()
+ return loads(body)
+
+ async def multipart(self) -> MultipartReader:
+ """Return async iterator to process BODY as multipart."""
+ return MultipartReader(self._headers, self._payload)
+
+ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
+ """Return POST parameters."""
+ if self._post is not None:
+ return self._post
+ if self._method not in self.POST_METHODS:
+ self._post = MultiDictProxy(MultiDict())
+ return self._post
+
+ content_type = self.content_type
+ if content_type not in (
+ "",
+ "application/x-www-form-urlencoded",
+ "multipart/form-data",
+ ):
+ self._post = MultiDictProxy(MultiDict())
+ return self._post
+
+ out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]]
+
+ if content_type == "multipart/form-data":
+ multipart = await self.multipart()
+ max_size = self._client_max_size
+
+ field = await multipart.next()
+ while field is not None:
+ size = 0
+ field_ct = field.headers.get(hdrs.CONTENT_TYPE)
+
+ if isinstance(field, BodyPartReader):
+ assert field.name is not None
+
+ # Note that according to RFC 7578, the Content-Type header
+ # is optional, even for files, so we can't assume it's
+ # present.
+ # https://tools.ietf.org/html/rfc7578#section-4.4
+ if field.filename:
+ # store file in temp file
+ tmp = tempfile.TemporaryFile()
+ chunk = await field.read_chunk(size=2 ** 16)
+ while chunk:
+ chunk = field.decode(chunk)
+ tmp.write(chunk)
+ size += len(chunk)
+ if 0 < max_size < size:
+ tmp.close()
+ raise HTTPRequestEntityTooLarge(
+ max_size=max_size, actual_size=size
+ )
+ chunk = await field.read_chunk(size=2 ** 16)
+ tmp.seek(0)
+
+ if field_ct is None:
+ field_ct = "application/octet-stream"
+
+ ff = FileField(
+ field.name,
+ field.filename,
+ cast(io.BufferedReader, tmp),
+ field_ct,
+ field.headers,
+ )
+ out.add(field.name, ff)
+ else:
+ # deal with ordinary data
+ value = await field.read(decode=True)
+ if field_ct is None or field_ct.startswith("text/"):
+ charset = field.get_charset(default="utf-8")
+ out.add(field.name, value.decode(charset))
+ else:
+ out.add(field.name, value)
+ size += len(value)
+ if 0 < max_size < size:
+ raise HTTPRequestEntityTooLarge(
+ max_size=max_size, actual_size=size
+ )
+ else:
+ raise ValueError(
+ "To decode nested multipart you need " "to use custom reader",
+ )
+
+ field = await multipart.next()
+ else:
+ data = await self.read()
+ if data:
+ charset = self.charset or "utf-8"
+ out.extend(
+ parse_qsl(
+ data.rstrip().decode(charset),
+ keep_blank_values=True,
+ encoding=charset,
+ )
+ )
+
+ self._post = MultiDictProxy(out)
+ return self._post
+
+ def get_extra_info(self, name: str, default: Any = None) -> Any:
+ """Extra info from protocol transport"""
+ protocol = self._protocol
+ if protocol is None:
+ return default
+
+ transport = protocol.transport
+ if transport is None:
+ return default
+
+ return transport.get_extra_info(name, default)
+
+ def __repr__(self) -> str:
+ ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
+ "ascii"
+ )
+ return "<{} {} {} >".format(
+ self.__class__.__name__, self._method, ascii_encodable_path
+ )
+
+ def __eq__(self, other: object) -> bool:
+ return id(self) == id(other)
+
+ def __bool__(self) -> bool:
+ return True
+
+ async def _prepare_hook(self, response: StreamResponse) -> None:
+ return
+
+ def _cancel(self, exc: BaseException) -> None:
+ self._payload.set_exception(exc)
+
+
+class Request(BaseRequest):
+
+ ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"])
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+
+ # matchdict, route_name, handler
+ # or information about traversal lookup
+
+ # initialized after route resolving
+ self._match_info = None # type: Optional[UrlMappingMatchInfo]
+
+ if DEBUG:
+
+ def __setattr__(self, name: str, val: Any) -> None:
+ if name not in self.ATTRS:
+ warnings.warn(
+ "Setting custom {}.{} attribute "
+ "is discouraged".format(self.__class__.__name__, name),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ super().__setattr__(name, val)
+
+ def clone(
+ self,
+ *,
+ method: str = sentinel,
+ rel_url: StrOrURL = sentinel,
+ headers: LooseHeaders = sentinel,
+ scheme: str = sentinel,
+ host: str = sentinel,
+ remote: str = sentinel,
+ ) -> "Request":
+ ret = super().clone(
+ method=method,
+ rel_url=rel_url,
+ headers=headers,
+ scheme=scheme,
+ host=host,
+ remote=remote,
+ )
+ new_ret = cast(Request, ret)
+ new_ret._match_info = self._match_info
+ return new_ret
+
+ @reify
+ def match_info(self) -> "UrlMappingMatchInfo":
+ """Result of route resolving."""
+ match_info = self._match_info
+ assert match_info is not None
+ return match_info
+
+ @property
+ def app(self) -> "Application":
+ """Application instance."""
+ match_info = self._match_info
+ assert match_info is not None
+ return match_info.current_app
+
+ @property
+ def config_dict(self) -> ChainMapProxy:
+ match_info = self._match_info
+ assert match_info is not None
+ lst = match_info.apps
+ app = self.app
+ idx = lst.index(app)
+ sublist = list(reversed(lst[: idx + 1]))
+ return ChainMapProxy(sublist)
+
+ async def _prepare_hook(self, response: StreamResponse) -> None:
+ match_info = self._match_info
+ if match_info is None:
+ return
+ for app in match_info._apps:
+ await app.on_response_prepare.send(self, response)
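
The post() implementation above spools each multipart file field into a
tempfile.TemporaryFile while enforcing the configured max_size, and exposes the
result as a FileField. A minimal handler sketch against that API; the "/upload"
route and the "file" field name are illustrative assumptions, not part of this
diff:

    from aiohttp import web

    async def upload(request: web.Request) -> web.Response:
        data = await request.post()  # parses urlencoded or multipart bodies
        field = data["file"]         # hypothetical form field name
        if isinstance(field, web.FileField):
            # field.file is the temporary file filled in by post()
            body = field.file.read()
            return web.Response(text=f"{field.filename}: {len(body)} bytes")
        return web.Response(text=str(field))
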
diff --git a/contrib/python/aiohttp/aiohttp/web_response.py b/contrib/python/aiohttp/aiohttp/web_response.py
new file mode 100644
index 0000000000..7880ab2d02
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_response.py
@@ -0,0 +1,825 @@
+import asyncio
+import collections.abc
+import datetime
+import enum
+import json
+import math
+import time
+import warnings
+import zlib
+from concurrent.futures import Executor
+from http.cookies import Morsel, SimpleCookie
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Iterator,
+ Mapping,
+ MutableMapping,
+ Optional,
+ Tuple,
+ Union,
+ cast,
+)
+
+from multidict import CIMultiDict, istr
+
+from . import hdrs, payload
+from .abc import AbstractStreamWriter
+from .helpers import (
+ ETAG_ANY,
+ PY_38,
+ QUOTED_ETAG_RE,
+ ETag,
+ HeadersMixin,
+ parse_http_date,
+ rfc822_formatted_time,
+ sentinel,
+ validate_etag_value,
+)
+from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11
+from .payload import Payload
+from .typedefs import JSONEncoder, LooseHeaders
+
+__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_request import BaseRequest
+
+ BaseClass = MutableMapping[str, Any]
+else:
+ BaseClass = collections.abc.MutableMapping
+
+
+if not PY_38:
+ # allow samesite to be used in python < 3.8
+ # already permitted in python 3.8, see https://bugs.python.org/issue29613
+ Morsel._reserved["samesite"] = "SameSite" # type: ignore[attr-defined]
+
+
+class ContentCoding(enum.Enum):
+ # The content codings that we have support for.
+ #
+ # Additional registered codings are listed at:
+ # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
+ deflate = "deflate"
+ gzip = "gzip"
+ identity = "identity"
+
+
+############################################################
+# HTTP Response classes
+############################################################
+
+
+class StreamResponse(BaseClass, HeadersMixin):
+
+ _length_check = True
+
+ def __init__(
+ self,
+ *,
+ status: int = 200,
+ reason: Optional[str] = None,
+ headers: Optional[LooseHeaders] = None,
+ ) -> None:
+ self._body = None
+ self._keep_alive = None # type: Optional[bool]
+ self._chunked = False
+ self._compression = False
+ self._compression_force = None # type: Optional[ContentCoding]
+ self._cookies = SimpleCookie() # type: SimpleCookie[str]
+
+ self._req = None # type: Optional[BaseRequest]
+ self._payload_writer = None # type: Optional[AbstractStreamWriter]
+ self._eof_sent = False
+ self._body_length = 0
+ self._state = {} # type: Dict[str, Any]
+
+ if headers is not None:
+ self._headers = CIMultiDict(headers) # type: CIMultiDict[str]
+ else:
+ self._headers = CIMultiDict()
+
+ self.set_status(status, reason)
+
+ @property
+ def prepared(self) -> bool:
+ return self._payload_writer is not None
+
+ @property
+ def task(self) -> "Optional[asyncio.Task[None]]":
+ if self._req:
+ return self._req.task
+ else:
+ return None
+
+ @property
+ def status(self) -> int:
+ return self._status
+
+ @property
+ def chunked(self) -> bool:
+ return self._chunked
+
+ @property
+ def compression(self) -> bool:
+ return self._compression
+
+ @property
+ def reason(self) -> str:
+ return self._reason
+
+ def set_status(
+ self,
+ status: int,
+ reason: Optional[str] = None,
+ _RESPONSES: Mapping[int, Tuple[str, str]] = RESPONSES,
+ ) -> None:
+ assert not self.prepared, (
+            "Cannot change the response status code after the headers have been sent"
+ )
+ self._status = int(status)
+ if reason is None:
+ try:
+ reason = _RESPONSES[self._status][0]
+ except Exception:
+ reason = ""
+ self._reason = reason
+
+ @property
+ def keep_alive(self) -> Optional[bool]:
+ return self._keep_alive
+
+ def force_close(self) -> None:
+ self._keep_alive = False
+
+ @property
+ def body_length(self) -> int:
+ return self._body_length
+
+ @property
+ def output_length(self) -> int:
+ warnings.warn("output_length is deprecated", DeprecationWarning)
+ assert self._payload_writer
+ return self._payload_writer.buffer_size
+
+ def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
+ """Enables automatic chunked transfer encoding."""
+ self._chunked = True
+
+ if hdrs.CONTENT_LENGTH in self._headers:
+ raise RuntimeError(
+                "You can't enable chunked encoding when a content length is set"
+ )
+ if chunk_size is not None:
+ warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
+
+ def enable_compression(
+ self, force: Optional[Union[bool, ContentCoding]] = None
+ ) -> None:
+ """Enables response compression encoding."""
+ # Backwards compatibility for when force was a bool <0.17.
+ if type(force) == bool:
+ force = ContentCoding.deflate if force else ContentCoding.identity
+ warnings.warn(
+ "Using boolean for force is deprecated #3318", DeprecationWarning
+ )
+ elif force is not None:
+ assert isinstance(force, ContentCoding), (
+                "force should be one of None, bool or ContentCoding"
+ )
+
+ self._compression = True
+ self._compression_force = force
+
+ @property
+ def headers(self) -> "CIMultiDict[str]":
+ return self._headers
+
+ @property
+ def cookies(self) -> "SimpleCookie[str]":
+ return self._cookies
+
+ def set_cookie(
+ self,
+ name: str,
+ value: str,
+ *,
+ expires: Optional[str] = None,
+ domain: Optional[str] = None,
+ max_age: Optional[Union[int, str]] = None,
+ path: str = "/",
+ secure: Optional[bool] = None,
+ httponly: Optional[bool] = None,
+ version: Optional[str] = None,
+ samesite: Optional[str] = None,
+ ) -> None:
+ """Set or update response cookie.
+
+        Sets a new cookie or updates an existing one with a new value.
+        Only parameters that are not None are updated.
+ """
+ old = self._cookies.get(name)
+ if old is not None and old.coded_value == "":
+ # deleted cookie
+ self._cookies.pop(name, None)
+
+ self._cookies[name] = value
+ c = self._cookies[name]
+
+ if expires is not None:
+ c["expires"] = expires
+ elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
+ del c["expires"]
+
+ if domain is not None:
+ c["domain"] = domain
+
+ if max_age is not None:
+ c["max-age"] = str(max_age)
+ elif "max-age" in c:
+ del c["max-age"]
+
+ c["path"] = path
+
+ if secure is not None:
+ c["secure"] = secure
+ if httponly is not None:
+ c["httponly"] = httponly
+ if version is not None:
+ c["version"] = version
+ if samesite is not None:
+ c["samesite"] = samesite
+
+ def del_cookie(
+ self, name: str, *, domain: Optional[str] = None, path: str = "/"
+ ) -> None:
+ """Delete cookie.
+
+        Creates a new, empty, expired cookie.
+ """
+ # TODO: do we need domain/path here?
+ self._cookies.pop(name, None)
+ self.set_cookie(
+ name,
+ "",
+ max_age=0,
+ expires="Thu, 01 Jan 1970 00:00:00 GMT",
+ domain=domain,
+ path=path,
+ )
+
+ @property
+ def content_length(self) -> Optional[int]:
+        # Just a placeholder for adding a setter
+ return super().content_length
+
+ @content_length.setter
+ def content_length(self, value: Optional[int]) -> None:
+ if value is not None:
+ value = int(value)
+ if self._chunked:
+ raise RuntimeError(
+                    "You can't set content length when chunked encoding is enabled"
+ )
+ self._headers[hdrs.CONTENT_LENGTH] = str(value)
+ else:
+ self._headers.pop(hdrs.CONTENT_LENGTH, None)
+
+ @property
+ def content_type(self) -> str:
+        # Just a placeholder for adding a setter
+ return super().content_type
+
+ @content_type.setter
+ def content_type(self, value: str) -> None:
+ self.content_type # read header values if needed
+ self._content_type = str(value)
+ self._generate_content_type_header()
+
+ @property
+ def charset(self) -> Optional[str]:
+        # Just a placeholder for adding a setter
+ return super().charset
+
+ @charset.setter
+ def charset(self, value: Optional[str]) -> None:
+ ctype = self.content_type # read header values if needed
+ if ctype == "application/octet-stream":
+ raise RuntimeError(
+ "Setting charset for application/octet-stream "
+                "doesn't make sense; set up content_type first"
+ )
+ assert self._content_dict is not None
+ if value is None:
+ self._content_dict.pop("charset", None)
+ else:
+ self._content_dict["charset"] = str(value).lower()
+ self._generate_content_type_header()
+
+ @property
+ def last_modified(self) -> Optional[datetime.datetime]:
+ """The value of Last-Modified HTTP header, or None.
+
+ This header is represented as a `datetime` object.
+ """
+ return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
+
+ @last_modified.setter
+ def last_modified(
+ self, value: Optional[Union[int, float, datetime.datetime, str]]
+ ) -> None:
+ if value is None:
+ self._headers.pop(hdrs.LAST_MODIFIED, None)
+ elif isinstance(value, (int, float)):
+ self._headers[hdrs.LAST_MODIFIED] = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
+ )
+ elif isinstance(value, datetime.datetime):
+ self._headers[hdrs.LAST_MODIFIED] = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
+ )
+ elif isinstance(value, str):
+ self._headers[hdrs.LAST_MODIFIED] = value
+
+ @property
+ def etag(self) -> Optional[ETag]:
+ quoted_value = self._headers.get(hdrs.ETAG)
+ if not quoted_value:
+ return None
+ elif quoted_value == ETAG_ANY:
+ return ETag(value=ETAG_ANY)
+ match = QUOTED_ETAG_RE.fullmatch(quoted_value)
+ if not match:
+ return None
+ is_weak, value = match.group(1, 2)
+ return ETag(
+ is_weak=bool(is_weak),
+ value=value,
+ )
+
+ @etag.setter
+ def etag(self, value: Optional[Union[ETag, str]]) -> None:
+ if value is None:
+ self._headers.pop(hdrs.ETAG, None)
+ elif (isinstance(value, str) and value == ETAG_ANY) or (
+ isinstance(value, ETag) and value.value == ETAG_ANY
+ ):
+ self._headers[hdrs.ETAG] = ETAG_ANY
+ elif isinstance(value, str):
+ validate_etag_value(value)
+ self._headers[hdrs.ETAG] = f'"{value}"'
+ elif isinstance(value, ETag) and isinstance(value.value, str):
+ validate_etag_value(value.value)
+ hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
+ self._headers[hdrs.ETAG] = hdr_value
+ else:
+ raise ValueError(
+ f"Unsupported etag type: {type(value)}. "
+ f"etag must be str, ETag or None"
+ )
+
+ def _generate_content_type_header(
+ self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
+ ) -> None:
+ assert self._content_dict is not None
+ assert self._content_type is not None
+ params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
+ if params:
+ ctype = self._content_type + "; " + params
+ else:
+ ctype = self._content_type
+ self._headers[CONTENT_TYPE] = ctype
+
+ async def _do_start_compression(self, coding: ContentCoding) -> None:
+ if coding != ContentCoding.identity:
+ assert self._payload_writer is not None
+ self._headers[hdrs.CONTENT_ENCODING] = coding.value
+ self._payload_writer.enable_compression(coding.value)
+ # Compressed payload may have different content length,
+ # remove the header
+ self._headers.popall(hdrs.CONTENT_LENGTH, None)
+
+ async def _start_compression(self, request: "BaseRequest") -> None:
+ if self._compression_force:
+ await self._do_start_compression(self._compression_force)
+ else:
+ accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
+ for coding in ContentCoding:
+ if coding.value in accept_encoding:
+ await self._do_start_compression(coding)
+ return
+
+ async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
+ if self._eof_sent:
+ return None
+ if self._payload_writer is not None:
+ return self._payload_writer
+
+ return await self._start(request)
+
+ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
+ self._req = request
+ writer = self._payload_writer = request._payload_writer
+
+ await self._prepare_headers()
+ await request._prepare_hook(self)
+ await self._write_headers()
+
+ return writer
+
+ async def _prepare_headers(self) -> None:
+ request = self._req
+ assert request is not None
+ writer = self._payload_writer
+ assert writer is not None
+ keep_alive = self._keep_alive
+ if keep_alive is None:
+ keep_alive = request.keep_alive
+ self._keep_alive = keep_alive
+
+ version = request.version
+
+ headers = self._headers
+ for cookie in self._cookies.values():
+ value = cookie.output(header="")[1:]
+ headers.add(hdrs.SET_COOKIE, value)
+
+ if self._compression:
+ await self._start_compression(request)
+
+ if self._chunked:
+ if version != HttpVersion11:
+ raise RuntimeError(
+ "Using chunked encoding is forbidden "
+ "for HTTP/{0.major}.{0.minor}".format(request.version)
+ )
+ writer.enable_chunking()
+ headers[hdrs.TRANSFER_ENCODING] = "chunked"
+ if hdrs.CONTENT_LENGTH in headers:
+ del headers[hdrs.CONTENT_LENGTH]
+ elif self._length_check:
+ writer.length = self.content_length
+ if writer.length is None:
+ if version >= HttpVersion11 and self.status != 204:
+ writer.enable_chunking()
+ headers[hdrs.TRANSFER_ENCODING] = "chunked"
+ if hdrs.CONTENT_LENGTH in headers:
+ del headers[hdrs.CONTENT_LENGTH]
+ else:
+ keep_alive = False
+ # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
+ # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
+ elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204):
+ del headers[hdrs.CONTENT_LENGTH]
+
+ if self.status not in (204, 304):
+ headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
+ headers.setdefault(hdrs.DATE, rfc822_formatted_time())
+ headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)
+
+ # connection header
+ if hdrs.CONNECTION not in headers:
+ if keep_alive:
+ if version == HttpVersion10:
+ headers[hdrs.CONNECTION] = "keep-alive"
+ else:
+ if version == HttpVersion11:
+ headers[hdrs.CONNECTION] = "close"
+
+ async def _write_headers(self) -> None:
+ request = self._req
+ assert request is not None
+ writer = self._payload_writer
+ assert writer is not None
+ # status line
+ version = request.version
+ status_line = "HTTP/{}.{} {} {}".format(
+ version[0], version[1], self._status, self._reason
+ )
+ await writer.write_headers(status_line, self._headers)
+
+ async def write(self, data: bytes) -> None:
+ assert isinstance(
+ data, (bytes, bytearray, memoryview)
+ ), "data argument must be byte-ish (%r)" % type(data)
+
+ if self._eof_sent:
+ raise RuntimeError("Cannot call write() after write_eof()")
+ if self._payload_writer is None:
+ raise RuntimeError("Cannot call write() before prepare()")
+
+ await self._payload_writer.write(data)
+
+ async def drain(self) -> None:
+ assert not self._eof_sent, "EOF has already been sent"
+ assert self._payload_writer is not None, "Response has not been started"
+ warnings.warn(
+ "drain method is deprecated, use await resp.write()",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ await self._payload_writer.drain()
+
+ async def write_eof(self, data: bytes = b"") -> None:
+ assert isinstance(
+ data, (bytes, bytearray, memoryview)
+ ), "data argument must be byte-ish (%r)" % type(data)
+
+ if self._eof_sent:
+ return
+
+ assert self._payload_writer is not None, "Response has not been started"
+
+ await self._payload_writer.write_eof(data)
+ self._eof_sent = True
+ self._req = None
+ self._body_length = self._payload_writer.output_size
+ self._payload_writer = None
+
+ def __repr__(self) -> str:
+ if self._eof_sent:
+ info = "eof"
+ elif self.prepared:
+ assert self._req is not None
+ info = f"{self._req.method} {self._req.path} "
+ else:
+ info = "not prepared"
+ return f"<{self.__class__.__name__} {self.reason} {info}>"
+
+ def __getitem__(self, key: str) -> Any:
+ return self._state[key]
+
+ def __setitem__(self, key: str, value: Any) -> None:
+ self._state[key] = value
+
+ def __delitem__(self, key: str) -> None:
+ del self._state[key]
+
+ def __len__(self) -> int:
+ return len(self._state)
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self._state)
+
+ def __hash__(self) -> int:
+ return hash(id(self))
+
+ def __eq__(self, other: object) -> bool:
+ return self is other
+
+
+class Response(StreamResponse):
+ def __init__(
+ self,
+ *,
+ body: Any = None,
+ status: int = 200,
+ reason: Optional[str] = None,
+ text: Optional[str] = None,
+ headers: Optional[LooseHeaders] = None,
+ content_type: Optional[str] = None,
+ charset: Optional[str] = None,
+ zlib_executor_size: Optional[int] = None,
+ zlib_executor: Optional[Executor] = None,
+ ) -> None:
+ if body is not None and text is not None:
+ raise ValueError("body and text are not allowed together")
+
+ if headers is None:
+ real_headers = CIMultiDict() # type: CIMultiDict[str]
+ elif not isinstance(headers, CIMultiDict):
+ real_headers = CIMultiDict(headers)
+ else:
+ real_headers = headers # = cast('CIMultiDict[str]', headers)
+
+ if content_type is not None and "charset" in content_type:
+            raise ValueError("charset must not be in the content_type argument")
+
+ if text is not None:
+ if hdrs.CONTENT_TYPE in real_headers:
+ if content_type or charset:
+ raise ValueError(
+ "passing both Content-Type header and "
+ "content_type or charset params "
+ "is forbidden"
+ )
+ else:
+ # fast path for filling headers
+ if not isinstance(text, str):
+ raise TypeError("text argument must be str (%r)" % type(text))
+ if content_type is None:
+ content_type = "text/plain"
+ if charset is None:
+ charset = "utf-8"
+ real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
+ body = text.encode(charset)
+ text = None
+ else:
+ if hdrs.CONTENT_TYPE in real_headers:
+ if content_type is not None or charset is not None:
+ raise ValueError(
+ "passing both Content-Type header and "
+ "content_type or charset params "
+ "is forbidden"
+ )
+ else:
+ if content_type is not None:
+ if charset is not None:
+ content_type += "; charset=" + charset
+ real_headers[hdrs.CONTENT_TYPE] = content_type
+
+ super().__init__(status=status, reason=reason, headers=real_headers)
+
+ if text is not None:
+ self.text = text
+ else:
+ self.body = body
+
+ self._compressed_body = None # type: Optional[bytes]
+ self._zlib_executor_size = zlib_executor_size
+ self._zlib_executor = zlib_executor
+
+ @property
+ def body(self) -> Optional[Union[bytes, Payload]]:
+ return self._body
+
+ @body.setter
+ def body(
+ self,
+ body: bytes,
+ CONTENT_TYPE: istr = hdrs.CONTENT_TYPE,
+ CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
+ ) -> None:
+ if body is None:
+ self._body = None # type: Optional[bytes]
+ self._body_payload = False # type: bool
+ elif isinstance(body, (bytes, bytearray)):
+ self._body = body
+ self._body_payload = False
+ else:
+ try:
+ self._body = body = payload.PAYLOAD_REGISTRY.get(body)
+ except payload.LookupError:
+ raise ValueError("Unsupported body type %r" % type(body))
+
+ self._body_payload = True
+
+ headers = self._headers
+
+ # set content-length header if needed
+ if not self._chunked and CONTENT_LENGTH not in headers:
+ size = body.size
+ if size is not None:
+ headers[CONTENT_LENGTH] = str(size)
+
+ # set content-type
+ if CONTENT_TYPE not in headers:
+ headers[CONTENT_TYPE] = body.content_type
+
+ # copy payload headers
+ if body.headers:
+ for (key, value) in body.headers.items():
+ if key not in headers:
+ headers[key] = value
+
+ self._compressed_body = None
+
+ @property
+ def text(self) -> Optional[str]:
+ if self._body is None:
+ return None
+ return self._body.decode(self.charset or "utf-8")
+
+ @text.setter
+ def text(self, text: str) -> None:
+ assert text is None or isinstance(
+ text, str
+ ), "text argument must be str (%r)" % type(text)
+
+ if self.content_type == "application/octet-stream":
+ self.content_type = "text/plain"
+ if self.charset is None:
+ self.charset = "utf-8"
+
+ self._body = text.encode(self.charset)
+ self._body_payload = False
+ self._compressed_body = None
+
+ @property
+ def content_length(self) -> Optional[int]:
+ if self._chunked:
+ return None
+
+ if hdrs.CONTENT_LENGTH in self._headers:
+ return super().content_length
+
+ if self._compressed_body is not None:
+ # Return length of the compressed body
+ return len(self._compressed_body)
+ elif self._body_payload:
+ # A payload without content length, or a compressed payload
+ return None
+ elif self._body is not None:
+ return len(self._body)
+ else:
+ return 0
+
+ @content_length.setter
+ def content_length(self, value: Optional[int]) -> None:
+ raise RuntimeError("Content length is set automatically")
+
+ async def write_eof(self, data: bytes = b"") -> None:
+ if self._eof_sent:
+ return
+ if self._compressed_body is None:
+ body = self._body # type: Optional[Union[bytes, Payload]]
+ else:
+ body = self._compressed_body
+ assert not data, f"data arg is not supported, got {data!r}"
+ assert self._req is not None
+ assert self._payload_writer is not None
+ if body is not None:
+ if self._req._method == hdrs.METH_HEAD or self._status in [204, 304]:
+ await super().write_eof()
+ elif self._body_payload:
+ payload = cast(Payload, body)
+ await payload.write(self._payload_writer)
+ await super().write_eof()
+ else:
+ await super().write_eof(cast(bytes, body))
+ else:
+ await super().write_eof()
+
+ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
+ if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
+ if not self._body_payload:
+ if self._body is not None:
+ self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body))
+ else:
+ self._headers[hdrs.CONTENT_LENGTH] = "0"
+
+ return await super()._start(request)
+
+ def _compress_body(self, zlib_mode: int) -> None:
+ assert zlib_mode > 0
+ compressobj = zlib.compressobj(wbits=zlib_mode)
+ body_in = self._body
+ assert body_in is not None
+ self._compressed_body = compressobj.compress(body_in) + compressobj.flush()
+
+ async def _do_start_compression(self, coding: ContentCoding) -> None:
+ if self._body_payload or self._chunked:
+ return await super()._do_start_compression(coding)
+
+ if coding != ContentCoding.identity:
+ # Instead of using _payload_writer.enable_compression,
+ # compress the whole body
+ zlib_mode = (
+ 16 + zlib.MAX_WBITS if coding == ContentCoding.gzip else zlib.MAX_WBITS
+ )
+ body_in = self._body
+ assert body_in is not None
+ if (
+ self._zlib_executor_size is not None
+ and len(body_in) > self._zlib_executor_size
+ ):
+ await asyncio.get_event_loop().run_in_executor(
+ self._zlib_executor, self._compress_body, zlib_mode
+ )
+ else:
+ self._compress_body(zlib_mode)
+
+ body_out = self._compressed_body
+ assert body_out is not None
+
+ self._headers[hdrs.CONTENT_ENCODING] = coding.value
+ self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out))
+
+
+def json_response(
+ data: Any = sentinel,
+ *,
+ text: Optional[str] = None,
+ body: Optional[bytes] = None,
+ status: int = 200,
+ reason: Optional[str] = None,
+ headers: Optional[LooseHeaders] = None,
+ content_type: str = "application/json",
+ dumps: JSONEncoder = json.dumps,
+) -> Response:
+ if data is not sentinel:
+ if text or body:
+ raise ValueError("only one of data, text, or body should be specified")
+ else:
+ text = dumps(data)
+ return Response(
+ text=text,
+ body=body,
+ status=status,
+ reason=reason,
+ headers=headers,
+ content_type=content_type,
+ )
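
json_response above is a thin wrapper that serializes data with dumps and hands
the result to Response as text, while StreamResponse.prepare() is where headers,
compression, and chunking get negotiated. A minimal sketch of both paths, with
illustrative handler names and payloads:

    from aiohttp import web

    async def as_json(request: web.Request) -> web.Response:
        resp = web.json_response({"ok": True})  # text=dumps(data), application/json
        resp.enable_compression()  # coding chosen from Accept-Encoding at prepare() time
        return resp

    async def as_stream(request: web.Request) -> web.StreamResponse:
        resp = web.StreamResponse(status=200)
        resp.enable_chunked_encoding()  # incompatible with a Content-Length header
        await resp.prepare(request)     # writes the status line and headers
        await resp.write(b"part 1\n")
        await resp.write(b"part 2\n")
        await resp.write_eof()
        return resp
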
diff --git a/contrib/python/aiohttp/aiohttp/web_routedef.py b/contrib/python/aiohttp/aiohttp/web_routedef.py
new file mode 100644
index 0000000000..671e5c7f46
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_routedef.py
@@ -0,0 +1,213 @@
+import abc
+import os # noqa
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Sequence,
+ Type,
+ Union,
+ overload,
+)
+
+import attr
+
+from . import hdrs
+from .abc import AbstractView
+from .typedefs import Handler, PathLike
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_request import Request
+ from .web_response import StreamResponse
+ from .web_urldispatcher import AbstractRoute, UrlDispatcher
+else:
+ Request = StreamResponse = UrlDispatcher = AbstractRoute = None
+
+
+__all__ = (
+ "AbstractRouteDef",
+ "RouteDef",
+ "StaticDef",
+ "RouteTableDef",
+ "head",
+ "options",
+ "get",
+ "post",
+ "patch",
+ "put",
+ "delete",
+ "route",
+ "view",
+ "static",
+)
+
+
+class AbstractRouteDef(abc.ABC):
+ @abc.abstractmethod
+ def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+ pass # pragma: no cover
+
+
+_HandlerType = Union[Type[AbstractView], Handler]
+
+
+@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
+class RouteDef(AbstractRouteDef):
+ method: str
+ path: str
+ handler: _HandlerType
+ kwargs: Dict[str, Any]
+
+ def __repr__(self) -> str:
+ info = []
+ for name, value in sorted(self.kwargs.items()):
+ info.append(f", {name}={value!r}")
+        return "<RouteDef {method} {path} -> {handler.__name__!r}{info}>".format(
+ method=self.method, path=self.path, handler=self.handler, info="".join(info)
+ )
+
+ def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+ if self.method in hdrs.METH_ALL:
+ reg = getattr(router, "add_" + self.method.lower())
+ return [reg(self.path, self.handler, **self.kwargs)]
+ else:
+ return [
+ router.add_route(self.method, self.path, self.handler, **self.kwargs)
+ ]
+
+
+@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
+class StaticDef(AbstractRouteDef):
+ prefix: str
+ path: PathLike
+ kwargs: Dict[str, Any]
+
+ def __repr__(self) -> str:
+ info = []
+ for name, value in sorted(self.kwargs.items()):
+ info.append(f", {name}={value!r}")
+        return "<StaticDef {prefix} -> {path}{info}>".format(
+ prefix=self.prefix, path=self.path, info="".join(info)
+ )
+
+ def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+ resource = router.add_static(self.prefix, self.path, **self.kwargs)
+ routes = resource.get_info().get("routes", {})
+ return list(routes.values())
+
+
+def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+ return RouteDef(method, path, handler, kwargs)
+
+
+def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+ return route(hdrs.METH_HEAD, path, handler, **kwargs)
+
+
+def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+ return route(hdrs.METH_OPTIONS, path, handler, **kwargs)
+
+
+def get(
+ path: str,
+ handler: _HandlerType,
+ *,
+ name: Optional[str] = None,
+ allow_head: bool = True,
+ **kwargs: Any,
+) -> RouteDef:
+ return route(
+ hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
+ )
+
+
+def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+ return route(hdrs.METH_POST, path, handler, **kwargs)
+
+
+def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+ return route(hdrs.METH_PUT, path, handler, **kwargs)
+
+
+def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+ return route(hdrs.METH_PATCH, path, handler, **kwargs)
+
+
+def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
+ return route(hdrs.METH_DELETE, path, handler, **kwargs)
+
+
+def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
+ return route(hdrs.METH_ANY, path, handler, **kwargs)
+
+
+def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
+ return StaticDef(prefix, path, kwargs)
+
+
+_Deco = Callable[[_HandlerType], _HandlerType]
+
+
+class RouteTableDef(Sequence[AbstractRouteDef]):
+ """Route definition table"""
+
+ def __init__(self) -> None:
+ self._items = [] # type: List[AbstractRouteDef]
+
+ def __repr__(self) -> str:
+ return f"<RouteTableDef count={len(self._items)}>"
+
+ @overload
+ def __getitem__(self, index: int) -> AbstractRouteDef:
+ ...
+
+ @overload
+ def __getitem__(self, index: slice) -> List[AbstractRouteDef]:
+ ...
+
+ def __getitem__(self, index): # type: ignore[no-untyped-def]
+ return self._items[index]
+
+ def __iter__(self) -> Iterator[AbstractRouteDef]:
+ return iter(self._items)
+
+ def __len__(self) -> int:
+ return len(self._items)
+
+ def __contains__(self, item: object) -> bool:
+ return item in self._items
+
+ def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
+ def inner(handler: _HandlerType) -> _HandlerType:
+ self._items.append(RouteDef(method, path, handler, kwargs))
+ return handler
+
+ return inner
+
+ def head(self, path: str, **kwargs: Any) -> _Deco:
+ return self.route(hdrs.METH_HEAD, path, **kwargs)
+
+ def get(self, path: str, **kwargs: Any) -> _Deco:
+ return self.route(hdrs.METH_GET, path, **kwargs)
+
+ def post(self, path: str, **kwargs: Any) -> _Deco:
+ return self.route(hdrs.METH_POST, path, **kwargs)
+
+ def put(self, path: str, **kwargs: Any) -> _Deco:
+ return self.route(hdrs.METH_PUT, path, **kwargs)
+
+ def patch(self, path: str, **kwargs: Any) -> _Deco:
+ return self.route(hdrs.METH_PATCH, path, **kwargs)
+
+ def delete(self, path: str, **kwargs: Any) -> _Deco:
+ return self.route(hdrs.METH_DELETE, path, **kwargs)
+
+ def view(self, path: str, **kwargs: Any) -> _Deco:
+ return self.route(hdrs.METH_ANY, path, **kwargs)
+
+ def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
+ self._items.append(StaticDef(prefix, path, kwargs))
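
RouteTableDef collects RouteDef/StaticDef entries whose register() method later
installs them on a router; the module-level helpers (get, post, ...) build the
same RouteDef objects directly. A minimal sketch of both styles, with
illustrative paths and handlers:

    from aiohttp import web

    routes = web.RouteTableDef()

    @routes.get("/ping")
    async def ping(request: web.Request) -> web.Response:
        return web.Response(text="pong")

    app = web.Application()
    app.add_routes(routes)                     # RouteTableDef is a Sequence[AbstractRouteDef]
    app.add_routes([web.post("/echo", ping)])  # helpers return RouteDef objects
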
diff --git a/contrib/python/aiohttp/aiohttp/web_runner.py b/contrib/python/aiohttp/aiohttp/web_runner.py
new file mode 100644
index 0000000000..f4a64bff66
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_runner.py
@@ -0,0 +1,381 @@
+import asyncio
+import signal
+import socket
+from abc import ABC, abstractmethod
+from typing import Any, List, Optional, Set
+
+from yarl import URL
+
+from .web_app import Application
+from .web_server import Server
+
+try:
+ from ssl import SSLContext
+except ImportError:
+ SSLContext = object # type: ignore[misc,assignment]
+
+
+__all__ = (
+ "BaseSite",
+ "TCPSite",
+ "UnixSite",
+ "NamedPipeSite",
+ "SockSite",
+ "BaseRunner",
+ "AppRunner",
+ "ServerRunner",
+ "GracefulExit",
+)
+
+
+class GracefulExit(SystemExit):
+ code = 1
+
+
+def _raise_graceful_exit() -> None:
+ raise GracefulExit()
+
+
+class BaseSite(ABC):
+ __slots__ = ("_runner", "_shutdown_timeout", "_ssl_context", "_backlog", "_server")
+
+ def __init__(
+ self,
+ runner: "BaseRunner",
+ *,
+ shutdown_timeout: float = 60.0,
+ ssl_context: Optional[SSLContext] = None,
+ backlog: int = 128,
+ ) -> None:
+ if runner.server is None:
+ raise RuntimeError("Call runner.setup() before making a site")
+ self._runner = runner
+ self._shutdown_timeout = shutdown_timeout
+ self._ssl_context = ssl_context
+ self._backlog = backlog
+ self._server = None # type: Optional[asyncio.AbstractServer]
+
+ @property
+ @abstractmethod
+ def name(self) -> str:
+ pass # pragma: no cover
+
+ @abstractmethod
+ async def start(self) -> None:
+ self._runner._reg_site(self)
+
+ async def stop(self) -> None:
+ self._runner._check_site(self)
+ if self._server is None:
+ self._runner._unreg_site(self)
+ return # not started yet
+ self._server.close()
+        # named pipes do not have a wait_closed() method
+ if hasattr(self._server, "wait_closed"):
+ await self._server.wait_closed()
+ await self._runner.shutdown()
+ assert self._runner.server
+ await self._runner.server.shutdown(self._shutdown_timeout)
+ self._runner._unreg_site(self)
+
+
+class TCPSite(BaseSite):
+ __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")
+
+ def __init__(
+ self,
+ runner: "BaseRunner",
+ host: Optional[str] = None,
+ port: Optional[int] = None,
+ *,
+ shutdown_timeout: float = 60.0,
+ ssl_context: Optional[SSLContext] = None,
+ backlog: int = 128,
+ reuse_address: Optional[bool] = None,
+ reuse_port: Optional[bool] = None,
+ ) -> None:
+ super().__init__(
+ runner,
+ shutdown_timeout=shutdown_timeout,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ self._host = host
+ if port is None:
+ port = 8443 if self._ssl_context else 8080
+ self._port = port
+ self._reuse_address = reuse_address
+ self._reuse_port = reuse_port
+
+ @property
+ def name(self) -> str:
+ scheme = "https" if self._ssl_context else "http"
+ host = "0.0.0.0" if self._host is None else self._host
+ return str(URL.build(scheme=scheme, host=host, port=self._port))
+
+ async def start(self) -> None:
+ await super().start()
+ loop = asyncio.get_event_loop()
+ server = self._runner.server
+ assert server is not None
+ self._server = await loop.create_server(
+ server,
+ self._host,
+ self._port,
+ ssl=self._ssl_context,
+ backlog=self._backlog,
+ reuse_address=self._reuse_address,
+ reuse_port=self._reuse_port,
+ )
+
+
+class UnixSite(BaseSite):
+ __slots__ = ("_path",)
+
+ def __init__(
+ self,
+ runner: "BaseRunner",
+ path: str,
+ *,
+ shutdown_timeout: float = 60.0,
+ ssl_context: Optional[SSLContext] = None,
+ backlog: int = 128,
+ ) -> None:
+ super().__init__(
+ runner,
+ shutdown_timeout=shutdown_timeout,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ self._path = path
+
+ @property
+ def name(self) -> str:
+ scheme = "https" if self._ssl_context else "http"
+ return f"{scheme}://unix:{self._path}:"
+
+ async def start(self) -> None:
+ await super().start()
+ loop = asyncio.get_event_loop()
+ server = self._runner.server
+ assert server is not None
+ self._server = await loop.create_unix_server(
+ server, self._path, ssl=self._ssl_context, backlog=self._backlog
+ )
+
+
+class NamedPipeSite(BaseSite):
+ __slots__ = ("_path",)
+
+ def __init__(
+ self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
+ ) -> None:
+ loop = asyncio.get_event_loop()
+ if not isinstance(
+ loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
+ ):
+ raise RuntimeError(
+                "Named Pipes are only available in the proactor loop under Windows"
+ )
+ super().__init__(runner, shutdown_timeout=shutdown_timeout)
+ self._path = path
+
+ @property
+ def name(self) -> str:
+ return self._path
+
+ async def start(self) -> None:
+ await super().start()
+ loop = asyncio.get_event_loop()
+ server = self._runner.server
+ assert server is not None
+ _server = await loop.start_serving_pipe( # type: ignore[attr-defined]
+ server, self._path
+ )
+ self._server = _server[0]
+
+
+class SockSite(BaseSite):
+ __slots__ = ("_sock", "_name")
+
+ def __init__(
+ self,
+ runner: "BaseRunner",
+ sock: socket.socket,
+ *,
+ shutdown_timeout: float = 60.0,
+ ssl_context: Optional[SSLContext] = None,
+ backlog: int = 128,
+ ) -> None:
+ super().__init__(
+ runner,
+ shutdown_timeout=shutdown_timeout,
+ ssl_context=ssl_context,
+ backlog=backlog,
+ )
+ self._sock = sock
+ scheme = "https" if self._ssl_context else "http"
+ if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
+ name = f"{scheme}://unix:{sock.getsockname()}:"
+ else:
+ host, port = sock.getsockname()[:2]
+ name = str(URL.build(scheme=scheme, host=host, port=port))
+ self._name = name
+
+ @property
+ def name(self) -> str:
+ return self._name
+
+ async def start(self) -> None:
+ await super().start()
+ loop = asyncio.get_event_loop()
+ server = self._runner.server
+ assert server is not None
+ self._server = await loop.create_server(
+ server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
+ )
+
+
+class BaseRunner(ABC):
+ __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites")
+
+ def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None:
+ self._handle_signals = handle_signals
+ self._kwargs = kwargs
+ self._server = None # type: Optional[Server]
+ self._sites = [] # type: List[BaseSite]
+
+ @property
+ def server(self) -> Optional[Server]:
+ return self._server
+
+ @property
+ def addresses(self) -> List[Any]:
+ ret = [] # type: List[Any]
+ for site in self._sites:
+ server = site._server
+ if server is not None:
+ sockets = server.sockets
+ if sockets is not None:
+ for sock in sockets:
+ ret.append(sock.getsockname())
+ return ret
+
+ @property
+ def sites(self) -> Set[BaseSite]:
+ return set(self._sites)
+
+ async def setup(self) -> None:
+ loop = asyncio.get_event_loop()
+
+ if self._handle_signals:
+ try:
+ loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
+ loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
+ except NotImplementedError: # pragma: no cover
+ # add_signal_handler is not implemented on Windows
+ pass
+
+ self._server = await self._make_server()
+
+ @abstractmethod
+ async def shutdown(self) -> None:
+ pass # pragma: no cover
+
+ async def cleanup(self) -> None:
+ loop = asyncio.get_event_loop()
+
+        # The loop over sites is intentional: an exception from gather()
+        # would leave self._sites in an unpredictable state.
+        # The loop guarantees that a site is either deleted on success or
+        # still present on failure.
+ for site in list(self._sites):
+ await site.stop()
+ await self._cleanup_server()
+ self._server = None
+ if self._handle_signals:
+ try:
+ loop.remove_signal_handler(signal.SIGINT)
+ loop.remove_signal_handler(signal.SIGTERM)
+ except NotImplementedError: # pragma: no cover
+ # remove_signal_handler is not implemented on Windows
+ pass
+
+ @abstractmethod
+ async def _make_server(self) -> Server:
+ pass # pragma: no cover
+
+ @abstractmethod
+ async def _cleanup_server(self) -> None:
+ pass # pragma: no cover
+
+ def _reg_site(self, site: BaseSite) -> None:
+ if site in self._sites:
+ raise RuntimeError(f"Site {site} is already registered in runner {self}")
+ self._sites.append(site)
+
+ def _check_site(self, site: BaseSite) -> None:
+ if site not in self._sites:
+ raise RuntimeError(f"Site {site} is not registered in runner {self}")
+
+ def _unreg_site(self, site: BaseSite) -> None:
+ if site not in self._sites:
+ raise RuntimeError(f"Site {site} is not registered in runner {self}")
+ self._sites.remove(site)
+
+
+class ServerRunner(BaseRunner):
+ """Low-level web server runner"""
+
+ __slots__ = ("_web_server",)
+
+ def __init__(
+ self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
+ ) -> None:
+ super().__init__(handle_signals=handle_signals, **kwargs)
+ self._web_server = web_server
+
+ async def shutdown(self) -> None:
+ pass
+
+ async def _make_server(self) -> Server:
+ return self._web_server
+
+ async def _cleanup_server(self) -> None:
+ pass
+
+
+class AppRunner(BaseRunner):
+ """Web Application runner"""
+
+ __slots__ = ("_app",)
+
+ def __init__(
+ self, app: Application, *, handle_signals: bool = False, **kwargs: Any
+ ) -> None:
+ super().__init__(handle_signals=handle_signals, **kwargs)
+ if not isinstance(app, Application):
+ raise TypeError(
+                "The first argument should be a web.Application "
+                "instance, got {!r}".format(app)
+ )
+ self._app = app
+
+ @property
+ def app(self) -> Application:
+ return self._app
+
+ async def shutdown(self) -> None:
+ await self._app.shutdown()
+
+ async def _make_server(self) -> Server:
+ loop = asyncio.get_event_loop()
+ self._app._set_loop(loop)
+ self._app.on_startup.freeze()
+ await self._app.startup()
+ self._app.freeze()
+
+ return self._app._make_handler(loop=loop, **self._kwargs)
+
+ async def _cleanup_server(self) -> None:
+ await self._app.cleanup()
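
The runner/site split above separates application lifecycle (AppRunner.setup()
and cleanup()) from listening endpoints (TCPSite, UnixSite, SockSite, ...), so
a single app can serve several sockets. A minimal sketch of the lifecycle; the
host and port are illustrative:

    import asyncio
    from aiohttp import web

    async def main() -> None:
        app = web.Application()
        runner = web.AppRunner(app)
        await runner.setup()  # creates the Server; required before making a site
        site = web.TCPSite(runner, host="127.0.0.1", port=8080)
        await site.start()
        try:
            await asyncio.sleep(3600)  # keep serving
        finally:
            await runner.cleanup()  # stops all sites, then shuts the server down

    asyncio.run(main())
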
diff --git a/contrib/python/aiohttp/aiohttp/web_server.py b/contrib/python/aiohttp/aiohttp/web_server.py
new file mode 100644
index 0000000000..5657ed9c80
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_server.py
@@ -0,0 +1,62 @@
+"""Low level HTTP server."""
+import asyncio
+from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa
+
+from .abc import AbstractStreamWriter
+from .helpers import get_running_loop
+from .http_parser import RawRequestMessage
+from .streams import StreamReader
+from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler
+from .web_request import BaseRequest
+
+__all__ = ("Server",)
+
+
+class Server:
+ def __init__(
+ self,
+ handler: _RequestHandler,
+ *,
+ request_factory: Optional[_RequestFactory] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ **kwargs: Any
+ ) -> None:
+ self._loop = get_running_loop(loop)
+ self._connections = {} # type: Dict[RequestHandler, asyncio.Transport]
+ self._kwargs = kwargs
+ self.requests_count = 0
+ self.request_handler = handler
+ self.request_factory = request_factory or self._make_request
+
+ @property
+ def connections(self) -> List[RequestHandler]:
+ return list(self._connections.keys())
+
+ def connection_made(
+ self, handler: RequestHandler, transport: asyncio.Transport
+ ) -> None:
+ self._connections[handler] = transport
+
+ def connection_lost(
+ self, handler: RequestHandler, exc: Optional[BaseException] = None
+ ) -> None:
+ if handler in self._connections:
+ del self._connections[handler]
+
+ def _make_request(
+ self,
+ message: RawRequestMessage,
+ payload: StreamReader,
+ protocol: RequestHandler,
+ writer: AbstractStreamWriter,
+ task: "asyncio.Task[None]",
+ ) -> BaseRequest:
+ return BaseRequest(message, payload, protocol, writer, task, self._loop)
+
+ async def shutdown(self, timeout: Optional[float] = None) -> None:
+ coros = [conn.shutdown(timeout) for conn in self._connections]
+ await asyncio.gather(*coros)
+ self._connections.clear()
+
+ def __call__(self) -> RequestHandler:
+ return RequestHandler(self, loop=self._loop, **self._kwargs)
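
Server is the protocol factory underneath the runners: calling the instance
yields a RequestHandler per connection, and the default request factory
produces bare BaseRequest objects with no routing attached. A minimal sketch of
driving it directly through ServerRunner; the handler body is illustrative:

    import asyncio
    from aiohttp import web

    async def handler(request: web.BaseRequest) -> web.Response:
        return web.Response(text="OK")  # no router or Application involved

    async def main() -> None:
        runner = web.ServerRunner(web.Server(handler))
        await runner.setup()
        site = web.TCPSite(runner, "127.0.0.1", 8080)
        await site.start()
        await asyncio.sleep(3600)

    asyncio.run(main())
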
diff --git a/contrib/python/aiohttp/aiohttp/web_urldispatcher.py b/contrib/python/aiohttp/aiohttp/web_urldispatcher.py
new file mode 100644
index 0000000000..73ec4c05d0
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_urldispatcher.py
@@ -0,0 +1,1220 @@
+import abc
+import asyncio
+import base64
+import hashlib
+import inspect
+import keyword
+import os
+import re
+import warnings
+from contextlib import contextmanager
+from functools import wraps
+from pathlib import Path
+from types import MappingProxyType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Container,
+ Dict,
+ Generator,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Pattern,
+ Set,
+ Sized,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined]
+
+from . import hdrs
+from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
+from .helpers import DEBUG
+from .http import HttpVersion11
+from .typedefs import Final, Handler, PathLike, TypedDict
+from .web_exceptions import (
+ HTTPException,
+ HTTPExpectationFailed,
+ HTTPForbidden,
+ HTTPMethodNotAllowed,
+ HTTPNotFound,
+)
+from .web_fileresponse import FileResponse
+from .web_request import Request
+from .web_response import Response, StreamResponse
+from .web_routedef import AbstractRouteDef
+
+__all__ = (
+ "UrlDispatcher",
+ "UrlMappingMatchInfo",
+ "AbstractResource",
+ "Resource",
+ "PlainResource",
+ "DynamicResource",
+ "AbstractRoute",
+ "ResourceRoute",
+ "StaticResource",
+ "View",
+)
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from .web_app import Application
+
+ BaseDict = Dict[str, str]
+else:
+ BaseDict = dict
+
+YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2]))
+
+HTTP_METHOD_RE: Final[Pattern[str]] = re.compile(
+ r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$"
+)
+ROUTE_RE: Final[Pattern[str]] = re.compile(
+ r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})"
+)
+PATH_SEP: Final[str] = re.escape("/")
+
+
+_ExpectHandler = Callable[[Request], Awaitable[None]]
+_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]]
+
+
+class _InfoDict(TypedDict, total=False):
+ path: str
+
+ formatter: str
+ pattern: Pattern[str]
+
+ directory: Path
+ prefix: str
+ routes: Mapping[str, "AbstractRoute"]
+
+ app: "Application"
+
+ domain: str
+
+ rule: "AbstractRuleMatching"
+
+ http_exception: HTTPException
+
+
+class AbstractResource(Sized, Iterable["AbstractRoute"]):
+ def __init__(self, *, name: Optional[str] = None) -> None:
+ self._name = name
+
+ @property
+ def name(self) -> Optional[str]:
+ return self._name
+
+ @property
+ @abc.abstractmethod
+ def canonical(self) -> str:
+        """Exposes the resource's canonical path.
+
+        For example: '/foo/bar/{name}'.
+        """
+
+ @abc.abstractmethod # pragma: no branch
+ def url_for(self, **kwargs: str) -> URL:
+ """Construct url for resource with additional params."""
+
+ @abc.abstractmethod # pragma: no branch
+ async def resolve(self, request: Request) -> _Resolve:
+ """Resolve resource.
+
+ Return (UrlMappingMatchInfo, allowed_methods) pair.
+ """
+
+ @abc.abstractmethod
+ def add_prefix(self, prefix: str) -> None:
+ """Add a prefix to processed URLs.
+
+ Required for subapplications support.
+ """
+
+ @abc.abstractmethod
+ def get_info(self) -> _InfoDict:
+ """Return a dict with additional info useful for introspection"""
+
+ def freeze(self) -> None:
+ pass
+
+ @abc.abstractmethod
+ def raw_match(self, path: str) -> bool:
+ """Perform a raw match against path"""
+
+
+class AbstractRoute(abc.ABC):
+ def __init__(
+ self,
+ method: str,
+ handler: Union[Handler, Type[AbstractView]],
+ *,
+ expect_handler: Optional[_ExpectHandler] = None,
+ resource: Optional[AbstractResource] = None,
+ ) -> None:
+
+ if expect_handler is None:
+ expect_handler = _default_expect_handler
+
+ assert asyncio.iscoroutinefunction(
+ expect_handler
+ ), f"Coroutine is expected, got {expect_handler!r}"
+
+ method = method.upper()
+ if not HTTP_METHOD_RE.match(method):
+            raise ValueError(f"{method} is not an allowed HTTP method")
+
+ assert callable(handler), handler
+ if asyncio.iscoroutinefunction(handler):
+ pass
+ elif inspect.isgeneratorfunction(handler):
+ warnings.warn(
+                "Bare generators are deprecated, use the @coroutine wrapper",
+ DeprecationWarning,
+ )
+ elif isinstance(handler, type) and issubclass(handler, AbstractView):
+ pass
+ else:
+ warnings.warn(
+                "Bare functions are deprecated, use async ones", DeprecationWarning
+ )
+
+ @wraps(handler)
+ async def handler_wrapper(request: Request) -> StreamResponse:
+ result = old_handler(request)
+ if asyncio.iscoroutine(result):
+ return await result
+ return result # type: ignore[return-value]
+
+ old_handler = handler
+ handler = handler_wrapper
+
+ self._method = method
+ self._handler = handler
+ self._expect_handler = expect_handler
+ self._resource = resource
+
+ @property
+ def method(self) -> str:
+ return self._method
+
+ @property
+ def handler(self) -> Handler:
+ return self._handler
+
+ @property
+ @abc.abstractmethod
+ def name(self) -> Optional[str]:
+ """Optional route's name, always equals to resource's name."""
+
+ @property
+ def resource(self) -> Optional[AbstractResource]:
+ return self._resource
+
+ @abc.abstractmethod
+ def get_info(self) -> _InfoDict:
+ """Return a dict with additional info useful for introspection"""
+
+ @abc.abstractmethod # pragma: no branch
+ def url_for(self, *args: str, **kwargs: str) -> URL:
+ """Construct url for route with additional params."""
+
+ async def handle_expect_header(self, request: Request) -> None:
+ await self._expect_handler(request)
+
+
+class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
+ def __init__(self, match_dict: Dict[str, str], route: AbstractRoute):
+ super().__init__(match_dict)
+ self._route = route
+ self._apps = [] # type: List[Application]
+ self._current_app = None # type: Optional[Application]
+ self._frozen = False
+
+ @property
+ def handler(self) -> Handler:
+ return self._route.handler
+
+ @property
+ def route(self) -> AbstractRoute:
+ return self._route
+
+ @property
+ def expect_handler(self) -> _ExpectHandler:
+ return self._route.handle_expect_header
+
+ @property
+ def http_exception(self) -> Optional[HTTPException]:
+ return None
+
+ def get_info(self) -> _InfoDict: # type: ignore[override]
+ return self._route.get_info()
+
+ @property
+ def apps(self) -> Tuple["Application", ...]:
+ return tuple(self._apps)
+
+ def add_app(self, app: "Application") -> None:
+ if self._frozen:
+ raise RuntimeError("Cannot change apps stack after .freeze() call")
+ if self._current_app is None:
+ self._current_app = app
+ self._apps.insert(0, app)
+
+ @property
+ def current_app(self) -> "Application":
+ app = self._current_app
+ assert app is not None
+ return app
+
+ @contextmanager
+ def set_current_app(self, app: "Application") -> Generator[None, None, None]:
+ if DEBUG: # pragma: no cover
+ if app not in self._apps:
+ raise RuntimeError(
+ "Expected one of the following apps {!r}, got {!r}".format(
+ self._apps, app
+ )
+ )
+ prev = self._current_app
+ self._current_app = app
+ try:
+ yield
+ finally:
+ self._current_app = prev
+
+ def freeze(self) -> None:
+ self._frozen = True
+
+ def __repr__(self) -> str:
+ return f"<MatchInfo {super().__repr__()}: {self._route}>"
+
+
+class MatchInfoError(UrlMappingMatchInfo):
+ def __init__(self, http_exception: HTTPException) -> None:
+ self._exception = http_exception
+ super().__init__({}, SystemRoute(self._exception))
+
+ @property
+ def http_exception(self) -> HTTPException:
+ return self._exception
+
+ def __repr__(self) -> str:
+ return "<MatchInfoError {}: {}>".format(
+ self._exception.status, self._exception.reason
+ )
+
+
+async def _default_expect_handler(request: Request) -> None:
+ """Default handler for Expect header.
+
+    Just send "100 Continue" to the client.
+    Raise HTTPExpectationFailed if the header value is not "100-continue".
+ """
+ expect = request.headers.get(hdrs.EXPECT, "")
+ if request.version == HttpVersion11:
+ if expect.lower() == "100-continue":
+ await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
+ else:
+ raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
+
+
+class Resource(AbstractResource):
+ def __init__(self, *, name: Optional[str] = None) -> None:
+ super().__init__(name=name)
+ self._routes = [] # type: List[ResourceRoute]
+
+ def add_route(
+ self,
+ method: str,
+ handler: Union[Type[AbstractView], Handler],
+ *,
+ expect_handler: Optional[_ExpectHandler] = None,
+ ) -> "ResourceRoute":
+
+ for route_obj in self._routes:
+ if route_obj.method == method or route_obj.method == hdrs.METH_ANY:
+ raise RuntimeError(
+                    "The added route will never be executed: method "
+                    "{route.method} is already registered".format(route=route_obj)
+ )
+
+ route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)
+ self.register_route(route_obj)
+ return route_obj
+
+ def register_route(self, route: "ResourceRoute") -> None:
+ assert isinstance(
+ route, ResourceRoute
+ ), f"Instance of Route class is required, got {route!r}"
+ self._routes.append(route)
+
+ async def resolve(self, request: Request) -> _Resolve:
+ allowed_methods = set() # type: Set[str]
+
+ match_dict = self._match(request.rel_url.raw_path)
+ if match_dict is None:
+ return None, allowed_methods
+
+ for route_obj in self._routes:
+ route_method = route_obj.method
+ allowed_methods.add(route_method)
+
+ if route_method == request.method or route_method == hdrs.METH_ANY:
+ return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods)
+ else:
+ return None, allowed_methods
+
+ @abc.abstractmethod
+ def _match(self, path: str) -> Optional[Dict[str, str]]:
+ pass # pragma: no cover
+
+ def __len__(self) -> int:
+ return len(self._routes)
+
+ def __iter__(self) -> Iterator[AbstractRoute]:
+ return iter(self._routes)
+
+ # TODO: implement all abstract methods
+
+
+class PlainResource(Resource):
+ def __init__(self, path: str, *, name: Optional[str] = None) -> None:
+ super().__init__(name=name)
+ assert not path or path.startswith("/")
+ self._path = path
+
+ @property
+ def canonical(self) -> str:
+ return self._path
+
+ def freeze(self) -> None:
+ if not self._path:
+ self._path = "/"
+
+ def add_prefix(self, prefix: str) -> None:
+ assert prefix.startswith("/")
+ assert not prefix.endswith("/")
+ assert len(prefix) > 1
+ self._path = prefix + self._path
+
+ def _match(self, path: str) -> Optional[Dict[str, str]]:
+ # string comparison is about 10 times faster than regexp matching
+ if self._path == path:
+ return {}
+ else:
+ return None
+
+ def raw_match(self, path: str) -> bool:
+ return self._path == path
+
+ def get_info(self) -> _InfoDict:
+ return {"path": self._path}
+
+ def url_for(self) -> URL: # type: ignore[override]
+ return URL.build(path=self._path, encoded=True)
+
+ def __repr__(self) -> str:
+ name = "'" + self.name + "' " if self.name is not None else ""
+ return f"<PlainResource {name} {self._path}>"
+
+
+class DynamicResource(Resource):
+
+ DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
+ DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
+ GOOD = r"[^{}/]+"
+
+ def __init__(self, path: str, *, name: Optional[str] = None) -> None:
+ super().__init__(name=name)
+ pattern = ""
+ formatter = ""
+ for part in ROUTE_RE.split(path):
+ match = self.DYN.fullmatch(part)
+ if match:
+ pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)
+ formatter += "{" + match.group("var") + "}"
+ continue
+
+ match = self.DYN_WITH_RE.fullmatch(part)
+ if match:
+ pattern += "(?P<{var}>{re})".format(**match.groupdict())
+ formatter += "{" + match.group("var") + "}"
+ continue
+
+ if "{" in part or "}" in part:
+ raise ValueError(f"Invalid path '{path}'['{part}']")
+
+ part = _requote_path(part)
+ formatter += part
+ pattern += re.escape(part)
+
+ try:
+ compiled = re.compile(pattern)
+ except re.error as exc:
+ raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
+ assert compiled.pattern.startswith(PATH_SEP)
+ assert formatter.startswith("/")
+ self._pattern = compiled
+ self._formatter = formatter
+
+ @property
+ def canonical(self) -> str:
+ return self._formatter
+
+ def add_prefix(self, prefix: str) -> None:
+ assert prefix.startswith("/")
+ assert not prefix.endswith("/")
+ assert len(prefix) > 1
+ self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
+ self._formatter = prefix + self._formatter
+
+ def _match(self, path: str) -> Optional[Dict[str, str]]:
+ match = self._pattern.fullmatch(path)
+ if match is None:
+ return None
+ else:
+ return {
+ key: _unquote_path(value) for key, value in match.groupdict().items()
+ }
+
+ def raw_match(self, path: str) -> bool:
+ return self._formatter == path
+
+ def get_info(self) -> _InfoDict:
+ return {"formatter": self._formatter, "pattern": self._pattern}
+
+ def url_for(self, **parts: str) -> URL:
+ url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})
+ return URL.build(path=url, encoded=True)
+
+ def __repr__(self) -> str:
+ name = "'" + self.name + "' " if self.name is not None else ""
+ return "<DynamicResource {name} {formatter}>".format(
+ name=name, formatter=self._formatter
+ )
+
+
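
PlainResource and DynamicResource above are what route registration compiles
paths into: a plain string comparison for static paths, and a regex built from
the {var} / {var:regex} syntax for dynamic ones, with _formatter retained for
url_for(). A minimal sketch of that syntax, assuming the usual UrlDispatcher
helpers named in this file's __all__; the paths and route name are illustrative:

    from aiohttp import web

    async def user(request: web.Request) -> web.Response:
        return web.Response(text=request.match_info["name"])  # filled by _match()

    app = web.Application()
    app.router.add_get("/users/{name}", user, name="user")  # {var} uses the GOOD pattern
    app.router.add_get(r"/ids/{id:\d+}", user)              # {var:re} embeds a custom regex

    url = app.router["user"].url_for(name="alice")  # URL('/users/alice'), via _formatter
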
+class PrefixResource(AbstractResource):
+ def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
+ assert not prefix or prefix.startswith("/"), prefix
+ assert prefix in ("", "/") or not prefix.endswith("/"), prefix
+ super().__init__(name=name)
+ self._prefix = _requote_path(prefix)
+ self._prefix2 = self._prefix + "/"
+
+ @property
+ def canonical(self) -> str:
+ return self._prefix
+
+ def add_prefix(self, prefix: str) -> None:
+ assert prefix.startswith("/")
+ assert not prefix.endswith("/")
+ assert len(prefix) > 1
+ self._prefix = prefix + self._prefix
+ self._prefix2 = self._prefix + "/"
+
+ def raw_match(self, prefix: str) -> bool:
+ return False
+
+ # TODO: impl missing abstract methods
+
+
+class StaticResource(PrefixResource):
+ VERSION_KEY = "v"
+
+ def __init__(
+ self,
+ prefix: str,
+ directory: PathLike,
+ *,
+ name: Optional[str] = None,
+ expect_handler: Optional[_ExpectHandler] = None,
+ chunk_size: int = 256 * 1024,
+ show_index: bool = False,
+ follow_symlinks: bool = False,
+ append_version: bool = False,
+ ) -> None:
+ super().__init__(prefix, name=name)
+ try:
+ directory = Path(directory)
+ if str(directory).startswith("~"):
+ directory = Path(os.path.expanduser(str(directory)))
+ directory = directory.resolve()
+ if not directory.is_dir():
+ raise ValueError("Not a directory")
+ except (FileNotFoundError, ValueError) as error:
+ raise ValueError(f"No directory exists at '{directory}'") from error
+ self._directory = directory
+ self._show_index = show_index
+ self._chunk_size = chunk_size
+ self._follow_symlinks = follow_symlinks
+ self._expect_handler = expect_handler
+ self._append_version = append_version
+
+ self._routes = {
+ "GET": ResourceRoute(
+ "GET", self._handle, self, expect_handler=expect_handler
+ ),
+ "HEAD": ResourceRoute(
+ "HEAD", self._handle, self, expect_handler=expect_handler
+ ),
+ }
+
+ def url_for( # type: ignore[override]
+ self,
+ *,
+ filename: Union[str, Path],
+ append_version: Optional[bool] = None,
+ ) -> URL:
+ if append_version is None:
+ append_version = self._append_version
+ if isinstance(filename, Path):
+ filename = str(filename)
+ filename = filename.lstrip("/")
+
+ url = URL.build(path=self._prefix, encoded=True)
+ # filename is not encoded
+ if YARL_VERSION < (1, 6):
+ url = url / filename.replace("%", "%25")
+ else:
+ url = url / filename
+
+ if append_version:
+ try:
+ filepath = self._directory.joinpath(filename).resolve()
+ if not self._follow_symlinks:
+ filepath.relative_to(self._directory)
+ except (ValueError, FileNotFoundError):
+            # ValueError is raised when the resolved path escapes the
+            # static directory (e.g. via a symlink) while follow_symlinks
+            # is False
+ return url # relatively safe
+ if filepath.is_file():
+                # TODO: cache file contents, with a file watcher
+                # for cache invalidation
+ with filepath.open("rb") as f:
+ file_bytes = f.read()
+ h = self._get_file_hash(file_bytes)
+ url = url.with_query({self.VERSION_KEY: h})
+ return url
+ return url
+
+ @staticmethod
+ def _get_file_hash(byte_array: bytes) -> str:
+        m = hashlib.sha256()  # TODO: make the hash algorithm a configurable param
+ m.update(byte_array)
+ b64 = base64.urlsafe_b64encode(m.digest())
+ return b64.decode("ascii")
+
+ def get_info(self) -> _InfoDict:
+ return {
+ "directory": self._directory,
+ "prefix": self._prefix,
+ "routes": self._routes,
+ }
+
+ def set_options_route(self, handler: Handler) -> None:
+ if "OPTIONS" in self._routes:
+ raise RuntimeError("OPTIONS route was set already")
+ self._routes["OPTIONS"] = ResourceRoute(
+ "OPTIONS", handler, self, expect_handler=self._expect_handler
+ )
+
+ async def resolve(self, request: Request) -> _Resolve:
+ path = request.rel_url.raw_path
+ method = request.method
+ allowed_methods = set(self._routes)
+ if not path.startswith(self._prefix2) and path != self._prefix:
+ return None, set()
+
+ if method not in allowed_methods:
+ return None, allowed_methods
+
+ match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])}
+ return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods)
+
+ def __len__(self) -> int:
+ return len(self._routes)
+
+ def __iter__(self) -> Iterator[AbstractRoute]:
+ return iter(self._routes.values())
+
+ async def _handle(self, request: Request) -> StreamResponse:
+ rel_url = request.match_info["filename"]
+ try:
+ filename = Path(rel_url)
+ if filename.anchor:
+                # rel_url is an absolute name like
+                # /static/\\machine_name\c$ or /static/D:\path,
+                # i.e. it points outside the static directory entirely
+ raise HTTPForbidden()
+ filepath = self._directory.joinpath(filename).resolve()
+ if not self._follow_symlinks:
+ filepath.relative_to(self._directory)
+ except (ValueError, FileNotFoundError) as error:
+ # relatively safe
+ raise HTTPNotFound() from error
+ except HTTPForbidden:
+ raise
+ except Exception as error:
+            # permission error or another OS-level failure
+ request.app.logger.exception(error)
+ raise HTTPNotFound() from error
+
+ # on opening a dir, load its contents if allowed
+ if filepath.is_dir():
+ if self._show_index:
+ try:
+ return Response(
+ text=self._directory_as_html(filepath), content_type="text/html"
+ )
+ except PermissionError:
+ raise HTTPForbidden()
+ else:
+ raise HTTPForbidden()
+ elif filepath.is_file():
+ return FileResponse(filepath, chunk_size=self._chunk_size)
+ else:
+            raise HTTPNotFound()
+
+ def _directory_as_html(self, filepath: Path) -> str:
+ # returns directory's index as html
+
+ # sanity check
+ assert filepath.is_dir()
+
+ relative_path_to_dir = filepath.relative_to(self._directory).as_posix()
+ index_of = f"Index of /{relative_path_to_dir}"
+ h1 = f"<h1>{index_of}</h1>"
+
+ index_list = []
+ dir_index = filepath.iterdir()
+ for _file in sorted(dir_index):
+ # show file url as relative to static path
+ rel_path = _file.relative_to(self._directory).as_posix()
+ file_url = self._prefix + "/" + rel_path
+
+ # if file is a directory, add '/' to the end of the name
+ if _file.is_dir():
+ file_name = f"{_file.name}/"
+ else:
+ file_name = _file.name
+
+ index_list.append(
+ '<li><a href="{url}">{name}</a></li>'.format(
+ url=file_url, name=file_name
+ )
+ )
+ ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
+ body = f"<body>\n{h1}\n{ul}\n</body>"
+
+ head_str = f"<head>\n<title>{index_of}</title>\n</head>"
+ html = f"<html>\n{head_str}\n{body}\n</html>"
+
+ return html
+
+ def __repr__(self) -> str:
+ name = "'" + self.name + "'" if self.name is not None else ""
+ return "<StaticResource {name} {path} -> {directory!r}>".format(
+ name=name, path=self._prefix, directory=self._directory
+ )
+
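+# Usage sketch (illustrative only, not part of this module): a StaticResource
+# is normally created through UrlDispatcher.add_static(); with
+# append_version=True, url_for() adds a ?v=<hash> query parameter derived
+# from the file contents, enabling long-lived browser caching. The "./static"
+# path and the resource name "static" are assumptions made for the example:
+#
+#     app = web.Application()
+#     app.router.add_static(
+#         "/static", "./static", name="static", append_version=True
+#     )
+#     css_url = app.router["static"].url_for(filename="css/site.css")
+#     # -> URL like /static/css/site.css?v=<urlsafe-base64 sha256 digest>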
+
+class PrefixedSubAppResource(PrefixResource):
+ def __init__(self, prefix: str, app: "Application") -> None:
+ super().__init__(prefix)
+ self._app = app
+ for resource in app.router.resources():
+ resource.add_prefix(prefix)
+
+ def add_prefix(self, prefix: str) -> None:
+ super().add_prefix(prefix)
+ for resource in self._app.router.resources():
+ resource.add_prefix(prefix)
+
+ def url_for(self, *args: str, **kwargs: str) -> URL:
+        raise RuntimeError(".url_for() is not supported by sub-application root")
+
+ def get_info(self) -> _InfoDict:
+ return {"app": self._app, "prefix": self._prefix}
+
+ async def resolve(self, request: Request) -> _Resolve:
+ if (
+ not request.url.raw_path.startswith(self._prefix2)
+ and request.url.raw_path != self._prefix
+ ):
+ return None, set()
+ match_info = await self._app.router.resolve(request)
+ match_info.add_app(self._app)
+ if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
+ methods = match_info.http_exception.allowed_methods
+ else:
+ methods = set()
+ return match_info, methods
+
+ def __len__(self) -> int:
+ return len(self._app.router.routes())
+
+ def __iter__(self) -> Iterator[AbstractRoute]:
+ return iter(self._app.router.routes())
+
+ def __repr__(self) -> str:
+ return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
+ prefix=self._prefix, app=self._app
+ )
+
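+# Usage sketch (illustrative only): a PrefixedSubAppResource is what
+# Application.add_subapp() registers; every resource of the sub-application
+# is re-prefixed, and resolve() delegates to the sub-application's router.
+# The list_users handler below is hypothetical:
+#
+#     admin = web.Application()
+#     admin.router.add_get("/users", list_users)
+#     app.add_subapp("/admin", admin)  # now serves GET /admin/users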
+
+class AbstractRuleMatching(abc.ABC):
+ @abc.abstractmethod # pragma: no branch
+ async def match(self, request: Request) -> bool:
+        """Return True if the request satisfies the criteria, False otherwise."""
+
+ @abc.abstractmethod # pragma: no branch
+ def get_info(self) -> _InfoDict:
+ """Return a dict with additional info useful for introspection"""
+
+ @property
+ @abc.abstractmethod # pragma: no branch
+ def canonical(self) -> str:
+        """Return the canonical representation of the rule as a str."""
+
+
+class Domain(AbstractRuleMatching):
+ re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")
+
+ def __init__(self, domain: str) -> None:
+ super().__init__()
+ self._domain = self.validation(domain)
+
+ @property
+ def canonical(self) -> str:
+ return self._domain
+
+ def validation(self, domain: str) -> str:
+ if not isinstance(domain, str):
+ raise TypeError("Domain must be str")
+ domain = domain.rstrip(".").lower()
+ if not domain:
+ raise ValueError("Domain cannot be empty")
+ elif "://" in domain:
+ raise ValueError("Scheme not supported")
+ url = URL("http://" + domain)
+ assert url.raw_host is not None
+ if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")):
+ raise ValueError("Domain not valid")
+ if url.port == 80:
+ return url.raw_host
+ return f"{url.raw_host}:{url.port}"
+
+ async def match(self, request: Request) -> bool:
+ host = request.headers.get(hdrs.HOST)
+ if not host:
+ return False
+ return self.match_domain(host)
+
+ def match_domain(self, host: str) -> bool:
+ return host.lower() == self._domain
+
+ def get_info(self) -> _InfoDict:
+ return {"domain": self._domain}
+
+
+class MaskDomain(Domain):
+ re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")
+
+ def __init__(self, domain: str) -> None:
+ super().__init__(domain)
+ mask = self._domain.replace(".", r"\.").replace("*", ".*")
+ self._mask = re.compile(mask)
+
+ @property
+ def canonical(self) -> str:
+ return self._mask.pattern
+
+ def match_domain(self, host: str) -> bool:
+ return self._mask.fullmatch(host) is not None
+
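+# Usage sketch (illustrative only): Domain and MaskDomain back
+# Application.add_domain(), which dispatches on the Host header rather than
+# a path prefix; a "*" in the domain selects MaskDomain. The handler and
+# host names below are hypothetical:
+#
+#     api = web.Application()
+#     api.router.add_get("/", api_index)
+#     app.add_domain("api.example.com", api)  # exact host match (Domain)
+#     app.add_domain("*.example.com", api)    # wildcard match (MaskDomain)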
+
+class MatchedSubAppResource(PrefixedSubAppResource):
+ def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
+ AbstractResource.__init__(self)
+ self._prefix = ""
+ self._app = app
+ self._rule = rule
+
+ @property
+ def canonical(self) -> str:
+ return self._rule.canonical
+
+ def get_info(self) -> _InfoDict:
+ return {"app": self._app, "rule": self._rule}
+
+ async def resolve(self, request: Request) -> _Resolve:
+ if not await self._rule.match(request):
+ return None, set()
+ match_info = await self._app.router.resolve(request)
+ match_info.add_app(self._app)
+ if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
+ methods = match_info.http_exception.allowed_methods
+ else:
+ methods = set()
+ return match_info, methods
+
+ def __repr__(self) -> str:
+        return "<MatchedSubAppResource -> {app!r}>".format(app=self._app)
+
+
+class ResourceRoute(AbstractRoute):
+ """A route with resource"""
+
+ def __init__(
+ self,
+ method: str,
+ handler: Union[Handler, Type[AbstractView]],
+ resource: AbstractResource,
+ *,
+ expect_handler: Optional[_ExpectHandler] = None,
+ ) -> None:
+ super().__init__(
+ method, handler, expect_handler=expect_handler, resource=resource
+ )
+
+ def __repr__(self) -> str:
+        return "<ResourceRoute [{method}] {resource} -> {handler!r}>".format(
+ method=self.method, resource=self._resource, handler=self.handler
+ )
+
+ @property
+ def name(self) -> Optional[str]:
+ if self._resource is None:
+ return None
+ return self._resource.name
+
+ def url_for(self, *args: str, **kwargs: str) -> URL:
+ """Construct url for route with additional params."""
+ assert self._resource is not None
+ return self._resource.url_for(*args, **kwargs)
+
+ def get_info(self) -> _InfoDict:
+ assert self._resource is not None
+ return self._resource.get_info()
+
+
+class SystemRoute(AbstractRoute):
+ def __init__(self, http_exception: HTTPException) -> None:
+ super().__init__(hdrs.METH_ANY, self._handle)
+ self._http_exception = http_exception
+
+ def url_for(self, *args: str, **kwargs: str) -> URL:
+ raise RuntimeError(".url_for() is not allowed for SystemRoute")
+
+ @property
+ def name(self) -> Optional[str]:
+ return None
+
+ def get_info(self) -> _InfoDict:
+ return {"http_exception": self._http_exception}
+
+ async def _handle(self, request: Request) -> StreamResponse:
+ raise self._http_exception
+
+ @property
+ def status(self) -> int:
+ return self._http_exception.status
+
+ @property
+ def reason(self) -> str:
+ return self._http_exception.reason
+
+ def __repr__(self) -> str:
+ return "<SystemRoute {self.status}: {self.reason}>".format(self=self)
+
+
+class View(AbstractView):
+ async def _iter(self) -> StreamResponse:
+ if self.request.method not in hdrs.METH_ALL:
+ self._raise_allowed_methods()
+ method: Callable[[], Awaitable[StreamResponse]] = getattr(
+ self, self.request.method.lower(), None
+ )
+ if method is None:
+ self._raise_allowed_methods()
+ resp = await method()
+ return resp
+
+ def __await__(self) -> Generator[Any, None, StreamResponse]:
+ return self._iter().__await__()
+
+ def _raise_allowed_methods(self) -> None:
+ allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
+ raise HTTPMethodNotAllowed(self.request.method, allowed_methods)
+
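+# Usage sketch (illustrative only): View dispatches on the lower-cased HTTP
+# method name, so a class-based view defines one coroutine per method and is
+# registered via add_view() (defined further below). UserView and its route
+# are hypothetical:
+#
+#     class UserView(web.View):
+#         async def get(self) -> web.Response:
+#             return web.Response(text=self.request.match_info["name"])
+#
+#     app.router.add_view("/users/{name}", UserView)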
+
+class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):
+ def __init__(self, resources: List[AbstractResource]) -> None:
+ self._resources = resources
+
+ def __len__(self) -> int:
+ return len(self._resources)
+
+ def __iter__(self) -> Iterator[AbstractResource]:
+ yield from self._resources
+
+ def __contains__(self, resource: object) -> bool:
+ return resource in self._resources
+
+
+class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
+ def __init__(self, resources: List[AbstractResource]):
+ self._routes = [] # type: List[AbstractRoute]
+ for resource in resources:
+ for route in resource:
+ self._routes.append(route)
+
+ def __len__(self) -> int:
+ return len(self._routes)
+
+ def __iter__(self) -> Iterator[AbstractRoute]:
+ yield from self._routes
+
+ def __contains__(self, route: object) -> bool:
+ return route in self._routes
+
+
+class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
+
+ NAME_SPLIT_RE = re.compile(r"[.:-]")
+
+ def __init__(self) -> None:
+ super().__init__()
+ self._resources = [] # type: List[AbstractResource]
+ self._named_resources = {} # type: Dict[str, AbstractResource]
+
+ async def resolve(self, request: Request) -> UrlMappingMatchInfo:
+ method = request.method
+ allowed_methods = set() # type: Set[str]
+
+ for resource in self._resources:
+ match_dict, allowed = await resource.resolve(request)
+ if match_dict is not None:
+ return match_dict
+ else:
+ allowed_methods |= allowed
+
+ if allowed_methods:
+ return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods))
+ else:
+ return MatchInfoError(HTTPNotFound())
+
+ def __iter__(self) -> Iterator[str]:
+ return iter(self._named_resources)
+
+ def __len__(self) -> int:
+ return len(self._named_resources)
+
+ def __contains__(self, resource: object) -> bool:
+ return resource in self._named_resources
+
+ def __getitem__(self, name: str) -> AbstractResource:
+ return self._named_resources[name]
+
+ def resources(self) -> ResourcesView:
+ return ResourcesView(self._resources)
+
+ def routes(self) -> RoutesView:
+ return RoutesView(self._resources)
+
+ def named_resources(self) -> Mapping[str, AbstractResource]:
+ return MappingProxyType(self._named_resources)
+
+ def register_resource(self, resource: AbstractResource) -> None:
+ assert isinstance(
+ resource, AbstractResource
+ ), f"Instance of AbstractResource class is required, got {resource!r}"
+ if self.frozen:
+ raise RuntimeError("Cannot register a resource into frozen router.")
+
+ name = resource.name
+
+ if name is not None:
+ parts = self.NAME_SPLIT_RE.split(name)
+ for part in parts:
+ if keyword.iskeyword(part):
+ raise ValueError(
+ f"Incorrect route name {name!r}, "
+ "python keywords cannot be used "
+ "for route name"
+ )
+ if not part.isidentifier():
+ raise ValueError(
+ "Incorrect route name {!r}, "
+ "the name should be a sequence of "
+ "python identifiers separated "
+                        "by dash, dot or colon".format(name)
+ )
+ if name in self._named_resources:
+ raise ValueError(
+ "Duplicate {!r}, "
+ "already handled by {!r}".format(name, self._named_resources[name])
+ )
+ self._named_resources[name] = resource
+ self._resources.append(resource)
+
+ def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource:
+ if path and not path.startswith("/"):
+            raise ValueError("path should start with / or be empty")
+ # Reuse last added resource if path and name are the same
+ if self._resources:
+ resource = self._resources[-1]
+ if resource.name == name and resource.raw_match(path):
+ return cast(Resource, resource)
+ if not ("{" in path or "}" in path or ROUTE_RE.search(path)):
+ resource = PlainResource(_requote_path(path), name=name)
+ self.register_resource(resource)
+ return resource
+ resource = DynamicResource(path, name=name)
+ self.register_resource(resource)
+ return resource
+
+ def add_route(
+ self,
+ method: str,
+ path: str,
+ handler: Union[Handler, Type[AbstractView]],
+ *,
+ name: Optional[str] = None,
+ expect_handler: Optional[_ExpectHandler] = None,
+ ) -> AbstractRoute:
+ resource = self.add_resource(path, name=name)
+ return resource.add_route(method, handler, expect_handler=expect_handler)
+
+ def add_static(
+ self,
+ prefix: str,
+ path: PathLike,
+ *,
+ name: Optional[str] = None,
+ expect_handler: Optional[_ExpectHandler] = None,
+ chunk_size: int = 256 * 1024,
+ show_index: bool = False,
+ follow_symlinks: bool = False,
+ append_version: bool = False,
+ ) -> AbstractResource:
+        """Add a static files view.
+
+        prefix - URL prefix
+        path - folder with files
+        """
+ assert prefix.startswith("/")
+ if prefix.endswith("/"):
+ prefix = prefix[:-1]
+ resource = StaticResource(
+ prefix,
+ path,
+ name=name,
+ expect_handler=expect_handler,
+ chunk_size=chunk_size,
+ show_index=show_index,
+ follow_symlinks=follow_symlinks,
+ append_version=append_version,
+ )
+ self.register_resource(resource)
+ return resource
+
+ def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method HEAD."""
+ return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)
+
+ def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method OPTIONS."""
+ return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
+
+ def add_get(
+ self,
+ path: str,
+ handler: Handler,
+ *,
+ name: Optional[str] = None,
+ allow_head: bool = True,
+ **kwargs: Any,
+ ) -> AbstractRoute:
+ """Shortcut for add_route with method GET.
+
+        If allow_head is true, another route is added,
+        allowing HEAD requests to the same endpoint.
+ """
+ resource = self.add_resource(path, name=name)
+ if allow_head:
+ resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
+ return resource.add_route(hdrs.METH_GET, handler, **kwargs)
+
+ def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method POST."""
+ return self.add_route(hdrs.METH_POST, path, handler, **kwargs)
+
+ def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method PUT."""
+ return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)
+
+ def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method PATCH."""
+ return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)
+
+ def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
+ """Shortcut for add_route with method DELETE."""
+ return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)
+
+ def add_view(
+ self, path: str, handler: Type[AbstractView], **kwargs: Any
+ ) -> AbstractRoute:
+ """Shortcut for add_route with ANY methods for a class-based view."""
+ return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
+
+ def freeze(self) -> None:
+ super().freeze()
+ for resource in self._resources:
+ resource.freeze()
+
+ def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
+ """Append routes to route table.
+
+ Parameter should be a sequence of RouteDef objects.
+
+ Returns a list of registered AbstractRoute instances.
+ """
+ registered_routes = []
+ for route_def in routes:
+ registered_routes.extend(route_def.register(self))
+ return registered_routes
+
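+# Usage sketch (illustrative only): the typical flow through UrlDispatcher is
+# add_resource()/add_route(), usually via the add_get()/add_post() shortcuts,
+# followed by reverse URL construction on the named resource. The handler and
+# route name are assumptions made for the example:
+#
+#     async def greet(request: web.Request) -> web.Response:
+#         name = request.match_info["name"]
+#         return web.Response(text=f"Hello, {name}")
+#
+#     app.router.add_get("/greet/{name}", greet, name="greet")
+#     url = app.router["greet"].url_for(name="world")  # URL('/greet/world')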
+
+def _quote_path(value: str) -> str:
+ if YARL_VERSION < (1, 6):
+ value = value.replace("%", "%25")
+ return URL.build(path=value, encoded=False).raw_path
+
+
+def _unquote_path(value: str) -> str:
+ return URL.build(path=value, encoded=True).path
+
+
+def _requote_path(value: str) -> str:
+ # Quote non-ascii characters and other characters which must be quoted,
+ # but preserve existing %-sequences.
+ result = _quote_path(value)
+ if "%" in value:
+ result = result.replace("%25", "%")
+ return result
diff --git a/contrib/python/aiohttp/aiohttp/web_ws.py b/contrib/python/aiohttp/aiohttp/web_ws.py
new file mode 100644
index 0000000000..16b0a1747c
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/web_ws.py
@@ -0,0 +1,487 @@
+import asyncio
+import base64
+import binascii
+import hashlib
+import json
+from typing import Any, Iterable, Optional, Tuple, cast
+
+import async_timeout
+import attr
+from multidict import CIMultiDict
+
+from . import hdrs
+from .abc import AbstractStreamWriter
+from .helpers import call_later, set_result
+from .http import (
+ WS_CLOSED_MESSAGE,
+ WS_CLOSING_MESSAGE,
+ WS_KEY,
+ WebSocketError,
+ WebSocketReader,
+ WebSocketWriter,
+ WSCloseCode,
+ WSMessage,
+ WSMsgType as WSMsgType,
+ ws_ext_gen,
+ ws_ext_parse,
+)
+from .log import ws_logger
+from .streams import EofStream, FlowControlDataQueue
+from .typedefs import Final, JSONDecoder, JSONEncoder
+from .web_exceptions import HTTPBadRequest, HTTPException
+from .web_request import BaseRequest
+from .web_response import StreamResponse
+
+__all__ = (
+ "WebSocketResponse",
+ "WebSocketReady",
+ "WSMsgType",
+)
+
+THRESHOLD_CONNLOST_ACCESS: Final[int] = 5
+
+
+@attr.s(auto_attribs=True, frozen=True, slots=True)
+class WebSocketReady:
+ ok: bool
+ protocol: Optional[str]
+
+ def __bool__(self) -> bool:
+ return self.ok
+
+
+class WebSocketResponse(StreamResponse):
+
+ _length_check = False
+
+ def __init__(
+ self,
+ *,
+ timeout: float = 10.0,
+ receive_timeout: Optional[float] = None,
+ autoclose: bool = True,
+ autoping: bool = True,
+ heartbeat: Optional[float] = None,
+ protocols: Iterable[str] = (),
+ compress: bool = True,
+ max_msg_size: int = 4 * 1024 * 1024,
+ ) -> None:
+ super().__init__(status=101)
+ self._protocols = protocols
+ self._ws_protocol = None # type: Optional[str]
+ self._writer = None # type: Optional[WebSocketWriter]
+ self._reader = None # type: Optional[FlowControlDataQueue[WSMessage]]
+ self._closed = False
+ self._closing = False
+ self._conn_lost = 0
+ self._close_code = None # type: Optional[int]
+ self._loop = None # type: Optional[asyncio.AbstractEventLoop]
+ self._waiting = None # type: Optional[asyncio.Future[bool]]
+ self._exception = None # type: Optional[BaseException]
+ self._timeout = timeout
+ self._receive_timeout = receive_timeout
+ self._autoclose = autoclose
+ self._autoping = autoping
+ self._heartbeat = heartbeat
+ self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
+ if heartbeat is not None:
+ self._pong_heartbeat = heartbeat / 2.0
+ self._pong_response_cb: Optional[asyncio.TimerHandle] = None
+ self._compress = compress
+ self._max_msg_size = max_msg_size
+
+ def _cancel_heartbeat(self) -> None:
+ if self._pong_response_cb is not None:
+ self._pong_response_cb.cancel()
+ self._pong_response_cb = None
+
+ if self._heartbeat_cb is not None:
+ self._heartbeat_cb.cancel()
+ self._heartbeat_cb = None
+
+ def _reset_heartbeat(self) -> None:
+ self._cancel_heartbeat()
+
+ if self._heartbeat is not None:
+ assert self._loop is not None
+ self._heartbeat_cb = call_later(
+ self._send_heartbeat, self._heartbeat, self._loop
+ )
+
+ def _send_heartbeat(self) -> None:
+ if self._heartbeat is not None and not self._closed:
+ assert self._loop is not None
+            # Fire-and-forget is not perfect, but is probably good enough
+            # for sending a ping. The alternative would be a long-lived
+            # heartbeat task kept on the class.
+ self._loop.create_task(self._writer.ping()) # type: ignore[union-attr]
+
+ if self._pong_response_cb is not None:
+ self._pong_response_cb.cancel()
+ self._pong_response_cb = call_later(
+ self._pong_not_received, self._pong_heartbeat, self._loop
+ )
+
+ def _pong_not_received(self) -> None:
+ if self._req is not None and self._req.transport is not None:
+ self._closed = True
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._exception = asyncio.TimeoutError()
+ self._req.transport.close()
+
+ async def prepare(self, request: BaseRequest) -> AbstractStreamWriter:
+        # pre-check so that errors are not hidden by do_handshake() exceptions
+ if self._payload_writer is not None:
+ return self._payload_writer
+
+ protocol, writer = self._pre_start(request)
+ payload_writer = await super().prepare(request)
+ assert payload_writer is not None
+ self._post_start(request, protocol, writer)
+ await payload_writer.drain()
+ return payload_writer
+
+ def _handshake(
+ self, request: BaseRequest
+ ) -> Tuple["CIMultiDict[str]", str, bool, bool]:
+ headers = request.headers
+        if headers.get(hdrs.UPGRADE, "").lower().strip() != "websocket":
+ raise HTTPBadRequest(
+ text=(
+ "No WebSocket UPGRADE hdr: {}\n Can "
+ '"Upgrade" only to "WebSocket".'
+ ).format(headers.get(hdrs.UPGRADE))
+ )
+
+ if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower():
+ raise HTTPBadRequest(
+ text="No CONNECTION upgrade hdr: {}".format(
+ headers.get(hdrs.CONNECTION)
+ )
+ )
+
+ # find common sub-protocol between client and server
+ protocol = None
+ if hdrs.SEC_WEBSOCKET_PROTOCOL in headers:
+ req_protocols = [
+ str(proto.strip())
+ for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
+ ]
+
+ for proto in req_protocols:
+ if proto in self._protocols:
+ protocol = proto
+ break
+ else:
+ # No overlap found: Return no protocol as per spec
+ ws_logger.warning(
+ "Client protocols %r don’t overlap server-known ones %r",
+ req_protocols,
+ self._protocols,
+ )
+
+ # check supported version
+ version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "")
+ if version not in ("13", "8", "7"):
+ raise HTTPBadRequest(text=f"Unsupported version: {version}")
+
+ # check client handshake for validity
+ key = headers.get(hdrs.SEC_WEBSOCKET_KEY)
+ try:
+ if not key or len(base64.b64decode(key)) != 16:
+ raise HTTPBadRequest(text=f"Handshake error: {key!r}")
+ except binascii.Error:
+ raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None
+
+ accept_val = base64.b64encode(
+ hashlib.sha1(key.encode() + WS_KEY).digest()
+ ).decode()
+ response_headers = CIMultiDict( # type: ignore[var-annotated]
+ {
+ hdrs.UPGRADE: "websocket", # type: ignore[arg-type]
+ hdrs.CONNECTION: "upgrade",
+ hdrs.SEC_WEBSOCKET_ACCEPT: accept_val,
+ }
+ )
+
+ notakeover = False
+ compress = 0
+ if self._compress:
+ extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
+            # On the server side ws_ext_parse() never raises;
+            # if parsing fails, the compress extension is simply dropped
+ compress, notakeover = ws_ext_parse(extensions, isserver=True)
+ if compress:
+ enabledext = ws_ext_gen(
+ compress=compress, isserver=True, server_notakeover=notakeover
+ )
+ response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext
+
+ if protocol:
+ response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol
+ return (
+ response_headers,
+ protocol,
+ compress,
+ notakeover,
+ ) # type: ignore[return-value]
+
+ def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]:
+ self._loop = request._loop
+
+ headers, protocol, compress, notakeover = self._handshake(request)
+
+ self.set_status(101)
+ self.headers.update(headers)
+ self.force_close()
+ self._compress = compress
+ transport = request._protocol.transport
+ assert transport is not None
+ writer = WebSocketWriter(
+ request._protocol, transport, compress=compress, notakeover=notakeover
+ )
+
+ return protocol, writer
+
+ def _post_start(
+ self, request: BaseRequest, protocol: str, writer: WebSocketWriter
+ ) -> None:
+ self._ws_protocol = protocol
+ self._writer = writer
+
+ self._reset_heartbeat()
+
+ loop = self._loop
+ assert loop is not None
+ self._reader = FlowControlDataQueue(request._protocol, 2 ** 16, loop=loop)
+ request.protocol.set_parser(
+ WebSocketReader(self._reader, self._max_msg_size, compress=self._compress)
+ )
+ # disable HTTP keepalive for WebSocket
+ request.protocol.keep_alive(False)
+
+ def can_prepare(self, request: BaseRequest) -> WebSocketReady:
+ if self._writer is not None:
+ raise RuntimeError("Already started")
+ try:
+ _, protocol, _, _ = self._handshake(request)
+ except HTTPException:
+ return WebSocketReady(False, None)
+ else:
+ return WebSocketReady(True, protocol)
+
+ @property
+ def closed(self) -> bool:
+ return self._closed
+
+ @property
+ def close_code(self) -> Optional[int]:
+ return self._close_code
+
+ @property
+ def ws_protocol(self) -> Optional[str]:
+ return self._ws_protocol
+
+ @property
+ def compress(self) -> bool:
+ return self._compress
+
+ def exception(self) -> Optional[BaseException]:
+ return self._exception
+
+ async def ping(self, message: bytes = b"") -> None:
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+ await self._writer.ping(message)
+
+ async def pong(self, message: bytes = b"") -> None:
+ # unsolicited pong
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+ await self._writer.pong(message)
+
+ async def send_str(self, data: str, compress: Optional[bool] = None) -> None:
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+ if not isinstance(data, str):
+ raise TypeError("data argument must be str (%r)" % type(data))
+ await self._writer.send(data, binary=False, compress=compress)
+
+ async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None:
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+ if not isinstance(data, (bytes, bytearray, memoryview)):
+ raise TypeError("data argument must be byte-ish (%r)" % type(data))
+ await self._writer.send(data, binary=True, compress=compress)
+
+ async def send_json(
+ self,
+ data: Any,
+ compress: Optional[bool] = None,
+ *,
+ dumps: JSONEncoder = json.dumps,
+ ) -> None:
+ await self.send_str(dumps(data), compress=compress)
+
+ async def write_eof(self) -> None: # type: ignore[override]
+ if self._eof_sent:
+ return
+ if self._payload_writer is None:
+ raise RuntimeError("Response has not been started")
+
+ await self.close()
+ self._eof_sent = True
+
+ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
+ if self._writer is None:
+ raise RuntimeError("Call .prepare() first")
+
+ self._cancel_heartbeat()
+ reader = self._reader
+ assert reader is not None
+
+        # we need to break the `receive()` cycle first, since
+        # `close()` may be called from a different task
+ if self._waiting is not None and not self._closed:
+ reader.feed_data(WS_CLOSING_MESSAGE, 0)
+ await self._waiting
+
+ if not self._closed:
+ self._closed = True
+ try:
+ await self._writer.close(code, message)
+ writer = self._payload_writer
+ assert writer is not None
+ await writer.drain()
+ except (asyncio.CancelledError, asyncio.TimeoutError):
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ raise
+ except Exception as exc:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._exception = exc
+ return True
+
+ if self._closing:
+ return True
+
+ reader = self._reader
+ assert reader is not None
+ try:
+ async with async_timeout.timeout(self._timeout):
+ msg = await reader.read()
+ except asyncio.CancelledError:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ raise
+ except Exception as exc:
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._exception = exc
+ return True
+
+ if msg.type == WSMsgType.CLOSE:
+ self._close_code = msg.data
+ return True
+
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ self._exception = asyncio.TimeoutError()
+ return True
+ else:
+ return False
+
+ async def receive(self, timeout: Optional[float] = None) -> WSMessage:
+ if self._reader is None:
+ raise RuntimeError("Call .prepare() first")
+
+ loop = self._loop
+ assert loop is not None
+ while True:
+ if self._waiting is not None:
+ raise RuntimeError("Concurrent call to receive() is not allowed")
+
+ if self._closed:
+ self._conn_lost += 1
+ if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS:
+ raise RuntimeError("WebSocket connection is closed.")
+ return WS_CLOSED_MESSAGE
+ elif self._closing:
+ return WS_CLOSING_MESSAGE
+
+ try:
+ self._waiting = loop.create_future()
+ try:
+ async with async_timeout.timeout(timeout or self._receive_timeout):
+ msg = await self._reader.read()
+ self._reset_heartbeat()
+ finally:
+ waiter = self._waiting
+ set_result(waiter, True)
+ self._waiting = None
+ except (asyncio.CancelledError, asyncio.TimeoutError):
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ raise
+ except EofStream:
+ self._close_code = WSCloseCode.OK
+ await self.close()
+ return WSMessage(WSMsgType.CLOSED, None, None)
+ except WebSocketError as exc:
+ self._close_code = exc.code
+ await self.close(code=exc.code)
+ return WSMessage(WSMsgType.ERROR, exc, None)
+ except Exception as exc:
+ self._exception = exc
+ self._closing = True
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
+ await self.close()
+ return WSMessage(WSMsgType.ERROR, exc, None)
+
+ if msg.type == WSMsgType.CLOSE:
+ self._closing = True
+ self._close_code = msg.data
+ if not self._closed and self._autoclose:
+ await self.close()
+ elif msg.type == WSMsgType.CLOSING:
+ self._closing = True
+ elif msg.type == WSMsgType.PING and self._autoping:
+ await self.pong(msg.data)
+ continue
+ elif msg.type == WSMsgType.PONG and self._autoping:
+ continue
+
+ return msg
+
+ async def receive_str(self, *, timeout: Optional[float] = None) -> str:
+ msg = await self.receive(timeout)
+ if msg.type != WSMsgType.TEXT:
+ raise TypeError(
+ "Received message {}:{!r} is not WSMsgType.TEXT".format(
+ msg.type, msg.data
+ )
+ )
+ return cast(str, msg.data)
+
+ async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
+ msg = await self.receive(timeout)
+ if msg.type != WSMsgType.BINARY:
+ raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
+ return cast(bytes, msg.data)
+
+ async def receive_json(
+ self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None
+ ) -> Any:
+ data = await self.receive_str(timeout=timeout)
+ return loads(data)
+
+ async def write(self, data: bytes) -> None:
+ raise RuntimeError("Cannot call .write() for websocket")
+
+ def __aiter__(self) -> "WebSocketResponse":
+ return self
+
+ async def __anext__(self) -> WSMessage:
+ msg = await self.receive()
+ if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
+ raise StopAsyncIteration
+ return msg
+
+ def _cancel(self, exc: BaseException) -> None:
+ if self._reader is not None:
+ self._reader.set_exception(exc)
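+
+
+# Usage sketch (illustrative only): the canonical server-side handler prepares
+# a WebSocketResponse and iterates over incoming messages; heartbeat=30.0
+# makes the server ping every 30 s and drop the connection if no pong arrives
+# in time. WSMsgType is importable as `from aiohttp import WSMsgType`:
+#
+#     async def ws_handler(request: BaseRequest) -> WebSocketResponse:
+#         ws = WebSocketResponse(heartbeat=30.0)
+#         await ws.prepare(request)
+#         async for msg in ws:
+#             if msg.type == WSMsgType.TEXT:
+#                 await ws.send_str(f"echo: {msg.data}")
+#             elif msg.type == WSMsgType.ERROR:
+#                 break
+#         return ws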
diff --git a/contrib/python/aiohttp/aiohttp/worker.py b/contrib/python/aiohttp/aiohttp/worker.py
new file mode 100644
index 0000000000..08945bcb4b
--- /dev/null
+++ b/contrib/python/aiohttp/aiohttp/worker.py
@@ -0,0 +1,269 @@
+"""Async gunicorn worker for aiohttp.web"""
+
+import asyncio
+import os
+import re
+import signal
+import sys
+from types import FrameType
+from typing import Any, Awaitable, Callable, Optional, Union # noqa
+
+from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
+from gunicorn.workers import base
+
+from aiohttp import web
+
+from .helpers import set_result
+from .web_app import Application
+from .web_log import AccessLogger
+
+try:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+except ImportError: # pragma: no cover
+ ssl = None # type: ignore[assignment]
+ SSLContext = object # type: ignore[misc,assignment]
+
+
+__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker")
+
+
+class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
+
+ DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
+ DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default
+
+ def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover
+ super().__init__(*args, **kw)
+
+ self._task = None # type: Optional[asyncio.Task[None]]
+ self.exit_code = 0
+ self._notify_waiter = None # type: Optional[asyncio.Future[bool]]
+
+ def init_process(self) -> None:
+        # create a new event loop after fork
+ asyncio.get_event_loop().close()
+
+ self.loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(self.loop)
+
+ super().init_process()
+
+ def run(self) -> None:
+ self._task = self.loop.create_task(self._run())
+
+ try: # ignore all finalization problems
+ self.loop.run_until_complete(self._task)
+ except Exception:
+ self.log.exception("Exception in gunicorn worker")
+ self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+ self.loop.close()
+
+ sys.exit(self.exit_code)
+
+ async def _run(self) -> None:
+ runner = None
+ if isinstance(self.wsgi, Application):
+ app = self.wsgi
+ elif asyncio.iscoroutinefunction(self.wsgi):
+ wsgi = await self.wsgi()
+ if isinstance(wsgi, web.AppRunner):
+ runner = wsgi
+ app = runner.app
+ else:
+ app = wsgi
+ else:
+ raise RuntimeError(
+ "wsgi app should be either Application or "
+ "async function returning Application, got {}".format(self.wsgi)
+ )
+
+ if runner is None:
+ access_log = self.log.access_log if self.cfg.accesslog else None
+ runner = web.AppRunner(
+ app,
+ logger=self.log,
+ keepalive_timeout=self.cfg.keepalive,
+ access_log=access_log,
+ access_log_format=self._get_valid_log_format(
+ self.cfg.access_log_format
+ ),
+ )
+ await runner.setup()
+
+ ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
+
+        assert runner is not None
+ server = runner.server
+ assert server is not None
+ for sock in self.sockets:
+ site = web.SockSite(
+ runner,
+ sock,
+ ssl_context=ctx,
+ shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
+ )
+ await site.start()
+
+ # If our parent changed then we shut down.
+ pid = os.getpid()
+ try:
+ while self.alive: # type: ignore[has-type]
+ self.notify()
+
+ cnt = server.requests_count
+ if self.cfg.max_requests and cnt > self.cfg.max_requests:
+ self.alive = False
+ self.log.info("Max requests, shutting down: %s", self)
+
+ elif pid == os.getpid() and self.ppid != os.getppid():
+ self.alive = False
+ self.log.info("Parent changed, shutting down: %s", self)
+ else:
+ await self._wait_next_notify()
+ except BaseException:
+ pass
+
+ await runner.cleanup()
+
+ def _wait_next_notify(self) -> "asyncio.Future[bool]":
+ self._notify_waiter_done()
+
+ loop = self.loop
+ assert loop is not None
+ self._notify_waiter = waiter = loop.create_future()
+ self.loop.call_later(1.0, self._notify_waiter_done, waiter)
+
+ return waiter
+
+ def _notify_waiter_done(
+ self, waiter: Optional["asyncio.Future[bool]"] = None
+ ) -> None:
+ if waiter is None:
+ waiter = self._notify_waiter
+ if waiter is not None:
+ set_result(waiter, True)
+
+ if waiter is self._notify_waiter:
+ self._notify_waiter = None
+
+ def init_signals(self) -> None:
+ # Set up signals through the event loop API.
+
+ self.loop.add_signal_handler(
+ signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
+ )
+
+ self.loop.add_signal_handler(
+ signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
+ )
+
+ self.loop.add_signal_handler(
+ signal.SIGINT, self.handle_quit, signal.SIGINT, None
+ )
+
+ self.loop.add_signal_handler(
+ signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
+ )
+
+ self.loop.add_signal_handler(
+ signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
+ )
+
+ self.loop.add_signal_handler(
+ signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
+ )
+
+ # Don't let SIGTERM and SIGUSR1 disturb active requests
+ # by interrupting system calls
+ signal.siginterrupt(signal.SIGTERM, False)
+ signal.siginterrupt(signal.SIGUSR1, False)
+ # Reset signals so Gunicorn doesn't swallow subprocess return codes
+ # See: https://github.com/aio-libs/aiohttp/issues/6130
+ if sys.version_info < (3, 8):
+            # Starting with Python 3.8 the default child watcher is
+            # ThreadedChildWatcher, which does not depend on the SIGCHLD
+            # signal, so there is no need to reset it.
+ signal.signal(signal.SIGCHLD, signal.SIG_DFL)
+
+ def handle_quit(self, sig: int, frame: FrameType) -> None:
+ self.alive = False
+
+ # worker_int callback
+ self.cfg.worker_int(self)
+
+ # wakeup closing process
+ self._notify_waiter_done()
+
+ def handle_abort(self, sig: int, frame: FrameType) -> None:
+ self.alive = False
+ self.exit_code = 1
+ self.cfg.worker_abort(self)
+ sys.exit(1)
+
+ @staticmethod
+ def _create_ssl_context(cfg: Any) -> "SSLContext":
+ """Creates SSLContext instance for usage in asyncio.create_server.
+
+ See ssl.SSLSocket.__init__ for more details.
+ """
+ if ssl is None: # pragma: no cover
+ raise RuntimeError("SSL is not supported.")
+
+ ctx = ssl.SSLContext(cfg.ssl_version)
+ ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
+ ctx.verify_mode = cfg.cert_reqs
+ if cfg.ca_certs:
+ ctx.load_verify_locations(cfg.ca_certs)
+ if cfg.ciphers:
+ ctx.set_ciphers(cfg.ciphers)
+ return ctx
+
+ def _get_valid_log_format(self, source_format: str) -> str:
+ if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
+ return self.DEFAULT_AIOHTTP_LOG_FORMAT
+ elif re.search(r"%\([^\)]+\)", source_format):
+ raise ValueError(
+ "Gunicorn's style options in form of `%(name)s` are not "
+ "supported for the log formatting. Please use aiohttp's "
+ "format specification to configure access log formatting: "
+ "http://docs.aiohttp.org/en/stable/logging.html"
+ "#format-specification"
+ )
+ else:
+ return source_format
+
+
+class GunicornUVLoopWebWorker(GunicornWebWorker):
+ def init_process(self) -> None:
+ import uvloop
+
+ # Close any existing event loop before setting a
+ # new policy.
+ asyncio.get_event_loop().close()
+
+ # Setup uvloop policy, so that every
+ # asyncio.get_event_loop() will create an instance
+ # of uvloop event loop.
+ asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+
+ super().init_process()
+
+
+class GunicornTokioWebWorker(GunicornWebWorker):
+ def init_process(self) -> None: # pragma: no cover
+ import tokio
+
+ # Close any existing event loop before setting a
+ # new policy.
+ asyncio.get_event_loop().close()
+
+ # Setup tokio policy, so that every
+ # asyncio.get_event_loop() will create an instance
+ # of tokio event loop.
+ asyncio.set_event_loop_policy(tokio.EventLoopPolicy())
+
+ super().init_process()
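+
+
+# Usage sketch (illustrative only): these workers are selected via gunicorn's
+# --worker-class option; "module:app" is a hypothetical module path exposing
+# an aiohttp Application (or an async factory returning one):
+#
+#     gunicorn module:app --bind localhost:8080 \
+#         --worker-class aiohttp.worker.GunicornWebWorker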
diff --git a/contrib/python/aiohttp/ya.make b/contrib/python/aiohttp/ya.make
new file mode 100644
index 0000000000..9fd0c0df74
--- /dev/null
+++ b/contrib/python/aiohttp/ya.make
@@ -0,0 +1,101 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(3.8.1)
+
+LICENSE(Apache-2.0)
+
+PEERDIR(
+ contrib/python/aiosignal
+ contrib/python/async-timeout
+ contrib/python/attrs
+ contrib/python/charset-normalizer
+ contrib/python/frozenlist
+ contrib/python/multidict
+ contrib/python/yarl
+ contrib/restricted/llhttp
+)
+
+ADDINCL(
+ contrib/restricted/llhttp/include
+ contrib/python/aiohttp/aiohttp
+ FOR cython contrib/python/aiohttp
+)
+
+NO_COMPILER_WARNINGS()
+
+NO_LINT()
+
+NO_CHECK_IMPORTS(
+ aiohttp.pytest_plugin
+ aiohttp.worker
+)
+
+SRCS(
+ aiohttp/_find_header.c
+)
+
+PY_SRCS(
+ TOP_LEVEL
+ aiohttp/__init__.py
+ aiohttp/_helpers.pyi
+ aiohttp/abc.py
+ aiohttp/base_protocol.py
+ aiohttp/client.py
+ aiohttp/client_exceptions.py
+ aiohttp/client_proto.py
+ aiohttp/client_reqrep.py
+ aiohttp/client_ws.py
+ aiohttp/connector.py
+ aiohttp/cookiejar.py
+ aiohttp/formdata.py
+ aiohttp/hdrs.py
+ aiohttp/helpers.py
+ aiohttp/http.py
+ aiohttp/http_exceptions.py
+ aiohttp/http_parser.py
+ aiohttp/http_websocket.py
+ aiohttp/http_writer.py
+ aiohttp/locks.py
+ aiohttp/log.py
+ aiohttp/multipart.py
+ aiohttp/payload.py
+ aiohttp/payload_streamer.py
+ aiohttp/pytest_plugin.py
+ aiohttp/resolver.py
+ aiohttp/streams.py
+ aiohttp/tcp_helpers.py
+ aiohttp/test_utils.py
+ aiohttp/tracing.py
+ aiohttp/typedefs.py
+ aiohttp/web.py
+ aiohttp/web_app.py
+ aiohttp/web_exceptions.py
+ aiohttp/web_fileresponse.py
+ aiohttp/web_log.py
+ aiohttp/web_middlewares.py
+ aiohttp/web_protocol.py
+ aiohttp/web_request.py
+ aiohttp/web_response.py
+ aiohttp/web_routedef.py
+ aiohttp/web_runner.py
+ aiohttp/web_server.py
+ aiohttp/web_urldispatcher.py
+ aiohttp/web_ws.py
+ aiohttp/worker.py
+ CYTHON_C
+ aiohttp/_helpers.pyx
+ aiohttp/_http_parser.pyx
+ aiohttp/_http_writer.pyx
+ aiohttp/_websocket.pyx
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/aiohttp/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ aiohttp/py.typed
+)
+
+END()