author     robot-piglet <robot-piglet@yandex-team.com>  2024-09-14 10:37:00 +0300
committer  robot-piglet <robot-piglet@yandex-team.com>  2024-09-14 10:46:08 +0300
commit     abdb8cda9345851fdea911b3016fafc5780c12f3 (patch)
tree       0db33ad3afffc1f34446772b4c4a934e32fe6c5a
parent     a63c2ac5dc364d4e90b3361cf85506ba4d28ebc0 (diff)
download   ydb-abdb8cda9345851fdea911b3016fafc5780c12f3.tar.gz
Intermediate changes
commit_hash:5944a2fbbc868679014342004c11cc515be60192
-rw-r--r--  contrib/python/httpx/.dist-info/METADATA            24
-rw-r--r--  contrib/python/httpx/README.md                      11
-rw-r--r--  contrib/python/httpx/httpx/__init__.py              55
-rw-r--r--  contrib/python/httpx/httpx/__version__.py            2
-rw-r--r--  contrib/python/httpx/httpx/_api.py                  32
-rw-r--r--  contrib/python/httpx/httpx/_auth.py                  3
-rw-r--r--  contrib/python/httpx/httpx/_client.py               83
-rw-r--r--  contrib/python/httpx/httpx/_compat.py               22
-rw-r--r--  contrib/python/httpx/httpx/_config.py                6
-rw-r--r--  contrib/python/httpx/httpx/_content.py               2
-rw-r--r--  contrib/python/httpx/httpx/_decoders.py             44
-rw-r--r--  contrib/python/httpx/httpx/_exceptions.py           32
-rw-r--r--  contrib/python/httpx/httpx/_models.py                6
-rw-r--r--  contrib/python/httpx/httpx/_status_codes.py          2
-rw-r--r--  contrib/python/httpx/httpx/_transports/__init__.py  15
-rw-r--r--  contrib/python/httpx/httpx/_transports/asgi.py      21
-rw-r--r--  contrib/python/httpx/httpx/_transports/base.py       2
-rw-r--r--  contrib/python/httpx/httpx/_transports/default.py    4
-rw-r--r--  contrib/python/httpx/httpx/_transports/mock.py       3
-rw-r--r--  contrib/python/httpx/httpx/_transports/wsgi.py       3
-rw-r--r--  contrib/python/httpx/httpx/_types.py                 6
-rw-r--r--  contrib/python/httpx/httpx/_urlparse.py             95
-rw-r--r--  contrib/python/httpx/ya.make                         2
24 files changed, 303 insertions, 178 deletions
diff --git a/contrib/python/httpx/.dist-info/METADATA b/contrib/python/httpx/.dist-info/METADATA
index b5ec37c7d9..4c6a080034 100644
--- a/contrib/python/httpx/.dist-info/METADATA
+++ b/contrib/python/httpx/.dist-info/METADATA
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
Name: httpx
-Version: 0.27.0
+Version: 0.27.2
Summary: The next generation HTTP client.
Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md
Project-URL: Documentation, https://www.python-httpx.org
@@ -41,6 +41,8 @@ Provides-Extra: http2
Requires-Dist: h2<5,>=3; extra == 'http2'
Provides-Extra: socks
Requires-Dist: socksio==1.*; extra == 'socks'
+Provides-Extra: zstd
+Requires-Dist: zstandard>=0.18.0; extra == 'zstd'
Description-Content-Type: text/markdown
<p align="center">
@@ -67,7 +69,7 @@ and async APIs**.
Install HTTPX using pip:
```shell
-$ pip install httpx
+pip install httpx
```
Now, let's get started:
@@ -88,7 +90,7 @@ Now, let's get started:
Or, using the command-line client.
```shell
-$ pip install 'httpx[cli]' # The command line client is an optional dependency.
+pip install 'httpx[cli]' # The command line client is an optional dependency.
```
Which now allows us to use HTTPX directly from the command-line...
@@ -111,7 +113,7 @@ HTTPX builds on the well-established usability of `requests`, and gives you:
* An integrated command-line client.
* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/).
* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/).
-* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/#calling-into-python-web-apps) or [ASGI applications](https://www.python-httpx.org/async/#calling-into-python-web-apps).
+* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/transports/#wsgi-transport) or [ASGI applications](https://www.python-httpx.org/advanced/transports/#asgi-transport).
* Strict timeouts everywhere.
* Fully type annotated.
* 100% test coverage.
@@ -139,13 +141,13 @@ Plus all the standard features of `requests`...
Install with pip:
```shell
-$ pip install httpx
+pip install httpx
```
Or, to include the optional HTTP/2 support, use:
```shell
-$ pip install httpx[http2]
+pip install httpx[http2]
```
HTTPX requires Python 3.8+.
@@ -183,6 +185,7 @@ As well as these optional installs:
* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)*
* `click` - Command line client support. *(Optional, with `httpx[cli]`)*
* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)*
+* `zstandard` - Decoding for "zstd" compressed responses. *(Optional, with `httpx[zstd]`)*
A huge amount of credit is due to `requests` for the API layout that
much of this work follows, as well as to `urllib3` for plenty of design
@@ -194,14 +197,9 @@ inspiration around the lower-level networking details.
## Release Information
-### Deprecated
-
-* The `app=...` shortcut has been deprecated. Use the explicit style of `transport=httpx.WSGITransport()` or `transport=httpx.ASGITransport()` instead.
-
### Fixed
-* Respect the `http1` argument while configuring proxy transports. (#3023)
-* Fix RFC 2069 mode digest authentication. (#3045)
+* Reintroduced supposedly-private `URLTypes` shortcut. (#2673)
---
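
Note: the only fix listed in the 0.27.2 release notes above restores the `URLTypes` alias for code that imported it from the private `_types` module; new code can simply use `httpx.URL | str`. A sketch under that assumption (private-module import, shown only because the changelog entry is about it):

```python
# URLTypes lives in a private module; importing it is shown here only because
# the release note above concerns restoring it. Prefer `httpx.URL | str`.
from httpx._types import URLTypes  # assumed restored in 0.27.2


def fetch(url: URLTypes) -> None:
    ...
```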
diff --git a/contrib/python/httpx/README.md b/contrib/python/httpx/README.md
index 62fb295d17..5e459a2814 100644
--- a/contrib/python/httpx/README.md
+++ b/contrib/python/httpx/README.md
@@ -22,7 +22,7 @@ and async APIs**.
Install HTTPX using pip:
```shell
-$ pip install httpx
+pip install httpx
```
Now, let's get started:
@@ -43,7 +43,7 @@ Now, let's get started:
Or, using the command-line client.
```shell
-$ pip install 'httpx[cli]' # The command line client is an optional dependency.
+pip install 'httpx[cli]' # The command line client is an optional dependency.
```
Which now allows us to use HTTPX directly from the command-line...
@@ -66,7 +66,7 @@ HTTPX builds on the well-established usability of `requests`, and gives you:
* An integrated command-line client.
* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/).
* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/).
-* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/#calling-into-python-web-apps) or [ASGI applications](https://www.python-httpx.org/async/#calling-into-python-web-apps).
+* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/transports/#wsgi-transport) or [ASGI applications](https://www.python-httpx.org/advanced/transports/#asgi-transport).
* Strict timeouts everywhere.
* Fully type annotated.
* 100% test coverage.
@@ -94,13 +94,13 @@ Plus all the standard features of `requests`...
Install with pip:
```shell
-$ pip install httpx
+pip install httpx
```
Or, to include the optional HTTP/2 support, use:
```shell
-$ pip install httpx[http2]
+pip install httpx[http2]
```
HTTPX requires Python 3.8+.
@@ -138,6 +138,7 @@ As well as these optional installs:
* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)*
* `click` - Command line client support. *(Optional, with `httpx[cli]`)*
* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)*
+* `zstandard` - Decoding for "zstd" compressed responses. *(Optional, with `httpx[zstd]`)*
A huge amount of credit is due to `requests` for the API layout that
much of this work follows, as well as to `urllib3` for plenty of design
diff --git a/contrib/python/httpx/httpx/__init__.py b/contrib/python/httpx/httpx/__init__.py
index f61112f8b2..e9addde071 100644
--- a/contrib/python/httpx/httpx/__init__.py
+++ b/contrib/python/httpx/httpx/__init__.py
@@ -1,48 +1,15 @@
from .__version__ import __description__, __title__, __version__
-from ._api import delete, get, head, options, patch, post, put, request, stream
-from ._auth import Auth, BasicAuth, DigestAuth, NetRCAuth
-from ._client import USE_CLIENT_DEFAULT, AsyncClient, Client
-from ._config import Limits, Proxy, Timeout, create_ssl_context
-from ._content import ByteStream
-from ._exceptions import (
- CloseError,
- ConnectError,
- ConnectTimeout,
- CookieConflict,
- DecodingError,
- HTTPError,
- HTTPStatusError,
- InvalidURL,
- LocalProtocolError,
- NetworkError,
- PoolTimeout,
- ProtocolError,
- ProxyError,
- ReadError,
- ReadTimeout,
- RemoteProtocolError,
- RequestError,
- RequestNotRead,
- ResponseNotRead,
- StreamClosed,
- StreamConsumed,
- StreamError,
- TimeoutException,
- TooManyRedirects,
- TransportError,
- UnsupportedProtocol,
- WriteError,
- WriteTimeout,
-)
-from ._models import Cookies, Headers, Request, Response
-from ._status_codes import codes
-from ._transports.asgi import ASGITransport
-from ._transports.base import AsyncBaseTransport, BaseTransport
-from ._transports.default import AsyncHTTPTransport, HTTPTransport
-from ._transports.mock import MockTransport
-from ._transports.wsgi import WSGITransport
-from ._types import AsyncByteStream, SyncByteStream
-from ._urls import URL, QueryParams
+from ._api import *
+from ._auth import *
+from ._client import *
+from ._config import *
+from ._content import *
+from ._exceptions import *
+from ._models import *
+from ._status_codes import *
+from ._transports import *
+from ._types import *
+from ._urls import *
try:
from ._main import main
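
Note: the `__init__.py` rewrite above swaps the long explicit import list for wildcard imports gated by each submodule's `__all__`. A quick sanity check (a sketch, assuming httpx 0.27.2 is installed) that the public surface is unchanged:

```python
import httpx

# Wildcard re-exports are filtered by each submodule's __all__, so the public
# names from 0.27.0 remain importable from the top-level package.
for name in ("Client", "AsyncClient", "ASGITransport", "HTTPStatusError", "URL"):
    assert hasattr(httpx, name), name

client = httpx.Client()  # same public API as before
client.close()
```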
diff --git a/contrib/python/httpx/httpx/__version__.py b/contrib/python/httpx/httpx/__version__.py
index c121a898de..5eaaddbac9 100644
--- a/contrib/python/httpx/httpx/__version__.py
+++ b/contrib/python/httpx/httpx/__version__.py
@@ -1,3 +1,3 @@
__title__ = "httpx"
__description__ = "A next generation HTTP client, for Python 3."
-__version__ = "0.27.0"
+__version__ = "0.27.2"
diff --git a/contrib/python/httpx/httpx/_api.py b/contrib/python/httpx/httpx/_api.py
index b5821cc49e..4e98b60694 100644
--- a/contrib/python/httpx/httpx/_api.py
+++ b/contrib/python/httpx/httpx/_api.py
@@ -18,14 +18,26 @@ from ._types import (
RequestData,
RequestFiles,
TimeoutTypes,
- URLTypes,
VerifyTypes,
)
+from ._urls import URL
+
+__all__ = [
+ "delete",
+ "get",
+ "head",
+ "options",
+ "patch",
+ "post",
+ "put",
+ "request",
+ "stream",
+]
def request(
method: str,
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
content: RequestContent | None = None,
@@ -120,7 +132,7 @@ def request(
@contextmanager
def stream(
method: str,
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
content: RequestContent | None = None,
@@ -173,7 +185,7 @@ def stream(
def get(
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
@@ -213,7 +225,7 @@ def get(
def options(
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
@@ -253,7 +265,7 @@ def options(
def head(
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
@@ -293,7 +305,7 @@ def head(
def post(
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -338,7 +350,7 @@ def post(
def put(
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -383,7 +395,7 @@ def put(
def patch(
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -428,7 +440,7 @@ def patch(
def delete(
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
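
Note: throughout `_api.py` (and `_client.py` below) the `URLTypes` alias in signatures is replaced by the explicit `URL | str` union. A small sketch of what that means for callers, using `build_request` so no network is involved:

```python
import httpx

client = httpx.Client(base_url="https://www.example.org")

# The url parameter is annotated as httpx.URL | str; both forms behave the
# same and are merged against base_url.
req_from_str = client.build_request("GET", "/path")
req_from_url = client.build_request("GET", httpx.URL("/path"))
print(req_from_str.url)  # expected: https://www.example.org/path
print(req_from_url.url)  # expected: same as above

client.close()
```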
diff --git a/contrib/python/httpx/httpx/_auth.py b/contrib/python/httpx/httpx/_auth.py
index 903e399617..b03971ab4b 100644
--- a/contrib/python/httpx/httpx/_auth.py
+++ b/contrib/python/httpx/httpx/_auth.py
@@ -16,6 +16,9 @@ if typing.TYPE_CHECKING: # pragma: no cover
from hashlib import _Hash
+__all__ = ["Auth", "BasicAuth", "DigestAuth", "NetRCAuth"]
+
+
class Auth:
"""
Base class for all authentication schemes.
diff --git a/contrib/python/httpx/httpx/_client.py b/contrib/python/httpx/httpx/_client.py
index e2c6702e0c..26610f6e87 100644
--- a/contrib/python/httpx/httpx/_client.py
+++ b/contrib/python/httpx/httpx/_client.py
@@ -46,7 +46,6 @@ from ._types import (
RequestFiles,
SyncByteStream,
TimeoutTypes,
- URLTypes,
VerifyTypes,
)
from ._urls import URL, QueryParams
@@ -58,6 +57,8 @@ from ._utils import (
same_origin,
)
+__all__ = ["USE_CLIENT_DEFAULT", "AsyncClient", "Client"]
+
# The type annotation for @classmethod and context managers here follows PEP 484
# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods
T = typing.TypeVar("T", bound="Client")
@@ -170,7 +171,7 @@ class BaseClient:
follow_redirects: bool = False,
max_redirects: int = DEFAULT_MAX_REDIRECTS,
event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None,
- base_url: URLTypes = "",
+ base_url: URL | str = "",
trust_env: bool = True,
default_encoding: str | typing.Callable[[bytes], str] = "utf-8",
) -> None:
@@ -271,7 +272,7 @@ class BaseClient:
return self._base_url
@base_url.setter
- def base_url(self, url: URLTypes) -> None:
+ def base_url(self, url: URL | str) -> None:
self._base_url = self._enforce_trailing_slash(URL(url))
@property
@@ -319,7 +320,7 @@ class BaseClient:
def build_request(
self,
method: str,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -340,7 +341,7 @@ class BaseClient:
See also: [Request instances][0]
- [0]: /advanced/#request-instances
+ [0]: /advanced/clients/#request-instances
"""
url = self._merge_url(url)
headers = self._merge_headers(headers)
@@ -367,7 +368,7 @@ class BaseClient:
extensions=extensions,
)
- def _merge_url(self, url: URLTypes) -> URL:
+ def _merge_url(self, url: URL | str) -> URL:
"""
Merge a URL argument together with any 'base_url' on the client,
to create the URL used for the outgoing request.
@@ -560,6 +561,15 @@ class BaseClient:
return request.stream
+ def _set_timeout(self, request: Request) -> None:
+ if "timeout" not in request.extensions:
+ timeout = (
+ self.timeout
+ if isinstance(self.timeout, UseClientDefault)
+ else Timeout(self.timeout)
+ )
+ request.extensions = dict(**request.extensions, timeout=timeout.as_dict())
+
class Client(BaseClient):
"""
@@ -634,7 +644,7 @@ class Client(BaseClient):
limits: Limits = DEFAULT_LIMITS,
max_redirects: int = DEFAULT_MAX_REDIRECTS,
event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None,
- base_url: URLTypes = "",
+ base_url: URL | str = "",
transport: BaseTransport | None = None,
app: typing.Callable[..., typing.Any] | None = None,
trust_env: bool = True,
@@ -773,7 +783,7 @@ class Client(BaseClient):
def request(
self,
method: str,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -801,7 +811,7 @@ class Client(BaseClient):
[Merging of configuration][0] for how the various parameters
are merged with client-level configuration.
- [0]: /advanced/#merging-of-configuration
+ [0]: /advanced/clients/#merging-of-configuration
"""
if cookies is not None:
message = (
@@ -830,7 +840,7 @@ class Client(BaseClient):
def stream(
self,
method: str,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -897,7 +907,7 @@ class Client(BaseClient):
See also: [Request instances][0]
- [0]: /advanced/#request-instances
+ [0]: /advanced/clients/#request-instances
"""
if self._state == ClientState.CLOSED:
raise RuntimeError("Cannot send a request, as the client has been closed.")
@@ -909,6 +919,8 @@ class Client(BaseClient):
else follow_redirects
)
+ self._set_timeout(request)
+
auth = self._build_request_auth(request, auth)
response = self._send_handling_auth(
@@ -1036,12 +1048,12 @@ class Client(BaseClient):
def get(
self,
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
cookies: CookieTypes | None = None,
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
+ auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
extensions: RequestExtensions | None = None,
@@ -1065,7 +1077,7 @@ class Client(BaseClient):
def options(
self,
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
@@ -1094,7 +1106,7 @@ class Client(BaseClient):
def head(
self,
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
@@ -1123,7 +1135,7 @@ class Client(BaseClient):
def post(
self,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -1160,7 +1172,7 @@ class Client(BaseClient):
def put(
self,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -1197,7 +1209,7 @@ class Client(BaseClient):
def patch(
self,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -1234,7 +1246,7 @@ class Client(BaseClient):
def delete(
self,
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
@@ -1378,9 +1390,8 @@ class AsyncClient(BaseClient):
follow_redirects: bool = False,
limits: Limits = DEFAULT_LIMITS,
max_redirects: int = DEFAULT_MAX_REDIRECTS,
- event_hooks: None
- | (typing.Mapping[str, list[typing.Callable[..., typing.Any]]]) = None,
- base_url: URLTypes = "",
+ event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None,
+ base_url: URL | str = "",
transport: AsyncBaseTransport | None = None,
app: typing.Callable[..., typing.Any] | None = None,
trust_env: bool = True,
@@ -1425,7 +1436,7 @@ class AsyncClient(BaseClient):
)
warnings.warn(message, DeprecationWarning)
- allow_env_proxies = trust_env and transport is None
+ allow_env_proxies = trust_env and app is None and transport is None
proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies)
self._transport = self._init_transport(
@@ -1519,7 +1530,7 @@ class AsyncClient(BaseClient):
async def request(
self,
method: str,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -1547,7 +1558,7 @@ class AsyncClient(BaseClient):
and [Merging of configuration][0] for how the various parameters
are merged with client-level configuration.
- [0]: /advanced/#merging-of-configuration
+ [0]: /advanced/clients/#merging-of-configuration
"""
if cookies is not None: # pragma: no cover
@@ -1577,7 +1588,7 @@ class AsyncClient(BaseClient):
async def stream(
self,
method: str,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -1586,7 +1597,7 @@ class AsyncClient(BaseClient):
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
cookies: CookieTypes | None = None,
- auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
+ auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
extensions: RequestExtensions | None = None,
@@ -1644,7 +1655,7 @@ class AsyncClient(BaseClient):
See also: [Request instances][0]
- [0]: /advanced/#request-instances
+ [0]: /advanced/clients/#request-instances
"""
if self._state == ClientState.CLOSED:
raise RuntimeError("Cannot send a request, as the client has been closed.")
@@ -1656,6 +1667,8 @@ class AsyncClient(BaseClient):
else follow_redirects
)
+ self._set_timeout(request)
+
auth = self._build_request_auth(request, auth)
response = await self._send_handling_auth(
@@ -1783,7 +1796,7 @@ class AsyncClient(BaseClient):
async def get(
self,
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
@@ -1812,7 +1825,7 @@ class AsyncClient(BaseClient):
async def options(
self,
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
@@ -1841,7 +1854,7 @@ class AsyncClient(BaseClient):
async def head(
self,
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
@@ -1870,7 +1883,7 @@ class AsyncClient(BaseClient):
async def post(
self,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -1907,7 +1920,7 @@ class AsyncClient(BaseClient):
async def put(
self,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -1944,7 +1957,7 @@ class AsyncClient(BaseClient):
async def patch(
self,
- url: URLTypes,
+ url: URL | str,
*,
content: RequestContent | None = None,
data: RequestData | None = None,
@@ -1981,7 +1994,7 @@ class AsyncClient(BaseClient):
async def delete(
self,
- url: URLTypes,
+ url: URL | str,
*,
params: QueryParamTypes | None = None,
headers: HeaderTypes | None = None,
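
Note: besides the signature changes, the client diff above adds `_set_timeout`, which copies the effective timeout into `request.extensions` inside `send()`, so transports see it even for manually constructed requests. A sketch of observing that through a `MockTransport` handler (assumes httpx 0.27.2; the echoed dict shape comes from `Timeout.as_dict()`):

```python
import httpx


def handler(request: httpx.Request) -> httpx.Response:
    # The client records its effective timeout in the request extensions
    # before handing the request to the transport.
    return httpx.Response(200, json=request.extensions.get("timeout"))


client = httpx.Client(transport=httpx.MockTransport(handler), timeout=5.0)
print(client.get("http://testserver/").json())               # client default, e.g. all 5.0
print(client.get("http://testserver/", timeout=1.0).json())  # per-request override
client.close()
```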
diff --git a/contrib/python/httpx/httpx/_compat.py b/contrib/python/httpx/httpx/_compat.py
index 493e621087..7d86dced46 100644
--- a/contrib/python/httpx/httpx/_compat.py
+++ b/contrib/python/httpx/httpx/_compat.py
@@ -2,8 +2,12 @@
The _compat module is used for code which requires branching between different
Python environments. It is excluded from the code coverage checks.
"""
+
+import re
import ssl
import sys
+from types import ModuleType
+from typing import Optional
# Brotli support is optional
# The C bindings in `brotli` are recommended for CPython.
@@ -16,6 +20,24 @@ except ImportError: # pragma: no cover
except ImportError:
brotli = None
+# Zstandard support is optional
+zstd: Optional[ModuleType] = None
+try:
+ import zstandard as zstd
+except (AttributeError, ImportError, ValueError): # Defensive:
+ zstd = None
+else:
+ # The package 'zstandard' added the 'eof' property starting
+ # in v0.18.0 which we require to ensure a complete and
+ # valid zstd stream was fed into the ZstdDecoder.
+ # See: https://github.com/urllib3/urllib3/pull/2624
+ _zstd_version = tuple(
+ map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr]
+ )
+ if _zstd_version < (0, 18): # Defensive:
+ zstd = None
+
+
if sys.version_info >= (3, 10) or ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7):
def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None:
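
Note: the `_compat.py` hunk gates zstd support on `zstandard>=0.18.0`, the first release whose streaming decompressor exposes `eof` (needed to detect truncated streams). A standalone sketch of the same gate:

```python
import re

# Mirror of the gate httpx applies in _compat.py: use zstandard only if it
# imports cleanly and is at least version 0.18.0.
try:
    import zstandard
except ImportError:
    zstd_enabled = False
else:
    match = re.match(r"(\d+)\.(\d+)", zstandard.__version__)
    zstd_enabled = match is not None and tuple(map(int, match.groups())) >= (0, 18)

print("zstd decoding available:", zstd_enabled)
```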
diff --git a/contrib/python/httpx/httpx/_config.py b/contrib/python/httpx/httpx/_config.py
index 6a3ae8022c..f9fbf917e0 100644
--- a/contrib/python/httpx/httpx/_config.py
+++ b/contrib/python/httpx/httpx/_config.py
@@ -10,10 +10,12 @@ import certifi
from ._compat import set_minimum_tls_version_1_2
from ._models import Headers
-from ._types import CertTypes, HeaderTypes, TimeoutTypes, URLTypes, VerifyTypes
+from ._types import CertTypes, HeaderTypes, TimeoutTypes, VerifyTypes
from ._urls import URL
from ._utils import get_ca_bundle_from_env
+__all__ = ["Limits", "Proxy", "Timeout", "create_ssl_context"]
+
DEFAULT_CIPHERS = ":".join(
[
"ECDHE+AESGCM",
@@ -330,7 +332,7 @@ class Limits:
class Proxy:
def __init__(
self,
- url: URLTypes,
+ url: URL | str,
*,
ssl_context: ssl.SSLContext | None = None,
auth: tuple[str, str] | None = None,
diff --git a/contrib/python/httpx/httpx/_content.py b/contrib/python/httpx/httpx/_content.py
index 10b574bb3d..786699f38f 100644
--- a/contrib/python/httpx/httpx/_content.py
+++ b/contrib/python/httpx/httpx/_content.py
@@ -25,6 +25,8 @@ from ._types import (
)
from ._utils import peek_filelike_length, primitive_value_to_str
+__all__ = ["ByteStream"]
+
class ByteStream(AsyncByteStream, SyncByteStream):
def __init__(self, stream: bytes) -> None:
diff --git a/contrib/python/httpx/httpx/_decoders.py b/contrib/python/httpx/httpx/_decoders.py
index 31c72c7f7a..62f2c0b911 100644
--- a/contrib/python/httpx/httpx/_decoders.py
+++ b/contrib/python/httpx/httpx/_decoders.py
@@ -3,6 +3,7 @@ Handlers for Content-Encoding.
See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
"""
+
from __future__ import annotations
import codecs
@@ -10,7 +11,7 @@ import io
import typing
import zlib
-from ._compat import brotli
+from ._compat import brotli, zstd
from ._exceptions import DecodingError
@@ -139,6 +140,44 @@ class BrotliDecoder(ContentDecoder):
raise DecodingError(str(exc)) from exc
+class ZStandardDecoder(ContentDecoder):
+ """
+ Handle 'zstd' RFC 8878 decoding.
+
+ Requires `pip install zstandard`.
+ Can be installed as a dependency of httpx using `pip install httpx[zstd]`.
+ """
+
+ # inspired by the ZstdDecoder implementation in urllib3
+ def __init__(self) -> None:
+ if zstd is None: # pragma: no cover
+ raise ImportError(
+ "Using 'ZStandardDecoder', ..."
+ "Make sure to install httpx using `pip install httpx[zstd]`."
+ ) from None
+
+ self.decompressor = zstd.ZstdDecompressor().decompressobj()
+
+ def decode(self, data: bytes) -> bytes:
+ assert zstd is not None
+ output = io.BytesIO()
+ try:
+ output.write(self.decompressor.decompress(data))
+ while self.decompressor.eof and self.decompressor.unused_data:
+ unused_data = self.decompressor.unused_data
+ self.decompressor = zstd.ZstdDecompressor().decompressobj()
+ output.write(self.decompressor.decompress(unused_data))
+ except zstd.ZstdError as exc:
+ raise DecodingError(str(exc)) from exc
+ return output.getvalue()
+
+ def flush(self) -> bytes:
+ ret = self.decompressor.flush() # note: this is a no-op
+ if not self.decompressor.eof:
+ raise DecodingError("Zstandard data is incomplete") # pragma: no cover
+ return bytes(ret)
+
+
class MultiDecoder(ContentDecoder):
"""
Handle the case where multiple encodings have been applied.
@@ -322,8 +361,11 @@ SUPPORTED_DECODERS = {
"gzip": GZipDecoder,
"deflate": DeflateDecoder,
"br": BrotliDecoder,
+ "zstd": ZStandardDecoder,
}
if brotli is None:
SUPPORTED_DECODERS.pop("br") # pragma: no cover
+if zstd is None:
+ SUPPORTED_DECODERS.pop("zstd") # pragma: no cover
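
Note: `ZStandardDecoder` above wraps `zstandard`'s streaming decompressor and treats a frame as complete only when `eof` is set. A round-trip sketch of that behaviour using the `zstandard` API directly (assumes `zstandard>=0.18.0` is installed):

```python
import zstandard

payload = b"hello zstd" * 100
frame = zstandard.ZstdCompressor().compress(payload)

# Same streaming object the decoder uses; eof flips to True only once a
# complete frame has been consumed.
decoder = zstandard.ZstdDecompressor().decompressobj()
out = decoder.decompress(frame)

assert out == payload
assert decoder.eof      # a truncated stream would leave this False
print(decoder.flush())  # effectively a no-op, as noted in the decoder above
```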
diff --git a/contrib/python/httpx/httpx/_exceptions.py b/contrib/python/httpx/httpx/_exceptions.py
index 11424621c0..77f45a6d39 100644
--- a/contrib/python/httpx/httpx/_exceptions.py
+++ b/contrib/python/httpx/httpx/_exceptions.py
@@ -30,6 +30,7 @@ Our exception hierarchy:
x ResponseNotRead
x RequestNotRead
"""
+
from __future__ import annotations
import contextlib
@@ -38,6 +39,37 @@ import typing
if typing.TYPE_CHECKING:
from ._models import Request, Response # pragma: no cover
+__all__ = [
+ "CloseError",
+ "ConnectError",
+ "ConnectTimeout",
+ "CookieConflict",
+ "DecodingError",
+ "HTTPError",
+ "HTTPStatusError",
+ "InvalidURL",
+ "LocalProtocolError",
+ "NetworkError",
+ "PoolTimeout",
+ "ProtocolError",
+ "ProxyError",
+ "ReadError",
+ "ReadTimeout",
+ "RemoteProtocolError",
+ "RequestError",
+ "RequestNotRead",
+ "ResponseNotRead",
+ "StreamClosed",
+ "StreamConsumed",
+ "StreamError",
+ "TimeoutException",
+ "TooManyRedirects",
+ "TransportError",
+ "UnsupportedProtocol",
+ "WriteError",
+ "WriteTimeout",
+]
+
class HTTPError(Exception):
"""
diff --git a/contrib/python/httpx/httpx/_models.py b/contrib/python/httpx/httpx/_models.py
index cd76705f1a..01d9583bc5 100644
--- a/contrib/python/httpx/httpx/_models.py
+++ b/contrib/python/httpx/httpx/_models.py
@@ -53,6 +53,8 @@ from ._utils import (
parse_header_links,
)
+__all__ = ["Cookies", "Headers", "Request", "Response"]
+
class Headers(typing.MutableMapping[str, str]):
"""
@@ -816,7 +818,7 @@ class Response:
def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]:
"""
A byte-iterator over the decoded response content.
- This allows us to handle gzip, deflate, and brotli encoded responses.
+ This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
"""
if hasattr(self, "_content"):
chunk_size = len(self._content) if chunk_size is None else chunk_size
@@ -916,7 +918,7 @@ class Response:
) -> typing.AsyncIterator[bytes]:
"""
A byte-iterator over the decoded response content.
- This allows us to handle gzip, deflate, and brotli encoded responses.
+ This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
"""
if hasattr(self, "_content"):
chunk_size = len(self._content) if chunk_size is None else chunk_size
diff --git a/contrib/python/httpx/httpx/_status_codes.py b/contrib/python/httpx/httpx/_status_codes.py
index 4cde4e6845..133a6231a5 100644
--- a/contrib/python/httpx/httpx/_status_codes.py
+++ b/contrib/python/httpx/httpx/_status_codes.py
@@ -2,6 +2,8 @@ from __future__ import annotations
from enum import IntEnum
+__all__ = ["codes"]
+
class codes(IntEnum):
"""HTTP status codes and reason phrases
diff --git a/contrib/python/httpx/httpx/_transports/__init__.py b/contrib/python/httpx/httpx/_transports/__init__.py
index e69de29bb2..7a321053b2 100644
--- a/contrib/python/httpx/httpx/_transports/__init__.py
+++ b/contrib/python/httpx/httpx/_transports/__init__.py
@@ -0,0 +1,15 @@
+from .asgi import *
+from .base import *
+from .default import *
+from .mock import *
+from .wsgi import *
+
+__all__ = [
+ "ASGITransport",
+ "AsyncBaseTransport",
+ "BaseTransport",
+ "AsyncHTTPTransport",
+ "HTTPTransport",
+ "MockTransport",
+ "WSGITransport",
+]
diff --git a/contrib/python/httpx/httpx/_transports/asgi.py b/contrib/python/httpx/httpx/_transports/asgi.py
index 9543a12861..8578d4aeff 100644
--- a/contrib/python/httpx/httpx/_transports/asgi.py
+++ b/contrib/python/httpx/httpx/_transports/asgi.py
@@ -16,15 +16,17 @@ if typing.TYPE_CHECKING: # pragma: no cover
Event = typing.Union[asyncio.Event, trio.Event]
-_Message = typing.Dict[str, typing.Any]
+_Message = typing.MutableMapping[str, typing.Any]
_Receive = typing.Callable[[], typing.Awaitable[_Message]]
_Send = typing.Callable[
- [typing.Dict[str, typing.Any]], typing.Coroutine[None, None, None]
+ [typing.MutableMapping[str, typing.Any]], typing.Awaitable[None]
]
_ASGIApp = typing.Callable[
- [typing.Dict[str, typing.Any], _Receive, _Send], typing.Coroutine[None, None, None]
+ [typing.MutableMapping[str, typing.Any], _Receive, _Send], typing.Awaitable[None]
]
+__all__ = ["ASGITransport"]
+
def create_event() -> Event:
if sniffio.current_async_library() == "trio":
@@ -48,17 +50,8 @@ class ASGIResponseStream(AsyncByteStream):
class ASGITransport(AsyncBaseTransport):
"""
A custom AsyncTransport that handles sending requests directly to an ASGI app.
- The simplest way to use this functionality is to use the `app` argument.
-
- ```
- client = httpx.AsyncClient(app=app)
- ```
-
- Alternatively, you can setup the transport instance explicitly.
- This allows you to include any additional configuration arguments specific
- to the ASGITransport class:
- ```
+ ```python
transport = httpx.ASGITransport(
app=app,
root_path="/submount",
@@ -139,7 +132,7 @@ class ASGITransport(AsyncBaseTransport):
return {"type": "http.request", "body": b"", "more_body": False}
return {"type": "http.request", "body": body, "more_body": True}
- async def send(message: dict[str, typing.Any]) -> None:
+ async def send(message: typing.MutableMapping[str, typing.Any]) -> None:
nonlocal status_code, response_headers, response_started
if message["type"] == "http.response.start":
diff --git a/contrib/python/httpx/httpx/_transports/base.py b/contrib/python/httpx/httpx/_transports/base.py
index 8b6dc3c239..66fd99d702 100644
--- a/contrib/python/httpx/httpx/_transports/base.py
+++ b/contrib/python/httpx/httpx/_transports/base.py
@@ -8,6 +8,8 @@ from .._models import Request, Response
T = typing.TypeVar("T", bound="BaseTransport")
A = typing.TypeVar("A", bound="AsyncBaseTransport")
+__all__ = ["AsyncBaseTransport", "BaseTransport"]
+
class BaseTransport:
def __enter__(self: T) -> T:
diff --git a/contrib/python/httpx/httpx/_transports/default.py b/contrib/python/httpx/httpx/_transports/default.py
index 14476a3ce3..33db416dd1 100644
--- a/contrib/python/httpx/httpx/_transports/default.py
+++ b/contrib/python/httpx/httpx/_transports/default.py
@@ -23,6 +23,7 @@ client = httpx.Client(transport=transport)
transport = httpx.HTTPTransport(uds="socket.uds")
client = httpx.Client(transport=transport)
"""
+
from __future__ import annotations
import contextlib
@@ -62,6 +63,8 @@ SOCKET_OPTION = typing.Union[
typing.Tuple[int, int, None, int],
]
+__all__ = ["AsyncHTTPTransport", "HTTPTransport"]
+
@contextlib.contextmanager
def map_httpcore_exceptions() -> typing.Iterator[None]:
@@ -300,6 +303,7 @@ class AsyncHTTPTransport(AsyncBaseTransport):
),
proxy_auth=proxy.raw_auth,
proxy_headers=proxy.headers.raw,
+ proxy_ssl_context=proxy.ssl_context,
ssl_context=ssl_context,
max_connections=limits.max_connections,
max_keepalive_connections=limits.max_keepalive_connections,
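
Note: the one functional change in `default.py` is forwarding `proxy_ssl_context` to the async proxy pool, so TLS settings configured on `httpx.Proxy` now reach the connection to the proxy itself. A configuration sketch (the proxy URL is a hypothetical placeholder; no request is made):

```python
import ssl

import httpx

# TLS context used for the connection *to the proxy*, not to the origin.
proxy_ssl = ssl.create_default_context()
proxy = httpx.Proxy("https://proxy.example.com:8443", ssl_context=proxy_ssl)

transport = httpx.AsyncHTTPTransport(proxy=proxy)
client = httpx.AsyncClient(transport=transport)
```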
diff --git a/contrib/python/httpx/httpx/_transports/mock.py b/contrib/python/httpx/httpx/_transports/mock.py
index 5abea83731..8c418f59e0 100644
--- a/contrib/python/httpx/httpx/_transports/mock.py
+++ b/contrib/python/httpx/httpx/_transports/mock.py
@@ -9,6 +9,9 @@ SyncHandler = typing.Callable[[Request], Response]
AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]]
+__all__ = ["MockTransport"]
+
+
class MockTransport(AsyncBaseTransport, BaseTransport):
def __init__(self, handler: SyncHandler | AsyncHandler) -> None:
self.handler = handler
diff --git a/contrib/python/httpx/httpx/_transports/wsgi.py b/contrib/python/httpx/httpx/_transports/wsgi.py
index cd03a9417b..8592ffe017 100644
--- a/contrib/python/httpx/httpx/_transports/wsgi.py
+++ b/contrib/python/httpx/httpx/_transports/wsgi.py
@@ -16,6 +16,9 @@ if typing.TYPE_CHECKING:
_T = typing.TypeVar("_T")
+__all__ = ["WSGITransport"]
+
+
def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]:
body = iter(body)
for chunk in body:
diff --git a/contrib/python/httpx/httpx/_types.py b/contrib/python/httpx/httpx/_types.py
index 649d101d54..661af262e7 100644
--- a/contrib/python/httpx/httpx/_types.py
+++ b/contrib/python/httpx/httpx/_types.py
@@ -78,8 +78,8 @@ TimeoutTypes = Union[
Tuple[Optional[float], Optional[float], Optional[float], Optional[float]],
"Timeout",
]
-ProxyTypes = Union[URLTypes, "Proxy"]
-ProxiesTypes = Union[ProxyTypes, Dict[URLTypes, Union[None, ProxyTypes]]]
+ProxyTypes = Union["URL", str, "Proxy"]
+ProxiesTypes = Union[ProxyTypes, Dict[Union["URL", str], Union[None, ProxyTypes]]]
AuthTypes = Union[
Tuple[Union[str, bytes], Union[str, bytes]],
@@ -108,6 +108,8 @@ RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]]
RequestExtensions = MutableMapping[str, Any]
+__all__ = ["AsyncByteStream", "SyncByteStream"]
+
class SyncByteStream:
def __iter__(self) -> Iterator[bytes]:
diff --git a/contrib/python/httpx/httpx/_urlparse.py b/contrib/python/httpx/httpx/_urlparse.py
index 6a4b55b38c..479c2ef8a1 100644
--- a/contrib/python/httpx/httpx/_urlparse.py
+++ b/contrib/python/httpx/httpx/_urlparse.py
@@ -15,6 +15,7 @@ Previously we relied on the excellent `rfc3986` package to handle URL parsing an
validation, but this module provides a simpler alternative, with less indirection
required.
"""
+
from __future__ import annotations
import ipaddress
@@ -159,7 +160,12 @@ def urlparse(url: str = "", **kwargs: str | None) -> ParseResult:
# If a URL includes any ASCII control characters including \t, \r, \n,
# then treat it as invalid.
if any(char.isascii() and not char.isprintable() for char in url):
- raise InvalidURL("Invalid non-printable ASCII character in URL")
+ char = next(char for char in url if char.isascii() and not char.isprintable())
+ idx = url.find(char)
+ error = (
+ f"Invalid non-printable ASCII character in URL, {char!r} at position {idx}."
+ )
+ raise InvalidURL(error)
# Some keyword arguments require special handling.
# ------------------------------------------------
@@ -204,9 +210,15 @@ def urlparse(url: str = "", **kwargs: str | None) -> ParseResult:
# If a component includes any ASCII control characters including \t, \r, \n,
# then treat it as invalid.
if any(char.isascii() and not char.isprintable() for char in value):
- raise InvalidURL(
- f"Invalid non-printable ASCII character in URL component '{key}'"
+ char = next(
+ char for char in value if char.isascii() and not char.isprintable()
)
+ idx = value.find(char)
+ error = (
+ f"Invalid non-printable ASCII character in URL {key} component, "
+ f"{char!r} at position {idx}."
+ )
+ raise InvalidURL(error)
# Ensure that keyword arguments match as a valid regex.
if not COMPONENT_REGEX[key].fullmatch(value):
@@ -252,22 +264,27 @@ def urlparse(url: str = "", **kwargs: str | None) -> ParseResult:
parsed_userinfo != "" or parsed_host != "" or parsed_port is not None
)
validate_path(path, has_scheme=has_scheme, has_authority=has_authority)
- if has_authority:
+ if has_scheme or has_authority:
path = normalize_path(path)
# The GEN_DELIMS set is... : / ? # [ ] @
# These do not need to be percent-quoted unless they serve as delimiters for the
# specific component.
+ WHATWG_SAFE = '`{}%|^\\"'
# For 'path' we need to drop ? and # from the GEN_DELIMS set.
- parsed_path: str = quote(path, safe=SUB_DELIMS + ":/[]@")
+ parsed_path: str = quote(path, safe=SUB_DELIMS + WHATWG_SAFE + ":/[]@")
# For 'query' we need to drop '#' from the GEN_DELIMS set.
parsed_query: str | None = (
- None if query is None else quote(query, safe=SUB_DELIMS + ":/?[]@")
+ None
+ if query is None
+ else quote(query, safe=SUB_DELIMS + WHATWG_SAFE + ":/?[]@")
)
# For 'fragment' we can include all of the GEN_DELIMS set.
parsed_fragment: str | None = (
- None if fragment is None else quote(fragment, safe=SUB_DELIMS + ":/?#[]@")
+ None
+ if fragment is None
+ else quote(fragment, safe=SUB_DELIMS + WHATWG_SAFE + ":/?#[]@")
)
# The parsed ASCII bytestrings are our canonical form.
@@ -320,7 +337,8 @@ def encode_host(host: str) -> str:
# From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2
#
# reg-name = *( unreserved / pct-encoded / sub-delims )
- return quote(host.lower(), safe=SUB_DELIMS)
+ WHATWG_SAFE = '"`{}%|\\'
+ return quote(host.lower(), safe=SUB_DELIMS + WHATWG_SAFE)
# IDNA hostnames
try:
@@ -368,19 +386,17 @@ def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None:
# must either be empty or begin with a slash ("/") character."
if path and not path.startswith("/"):
raise InvalidURL("For absolute URLs, path must be empty or begin with '/'")
- else:
+
+ if not has_scheme and not has_authority:
# If a URI does not contain an authority component, then the path cannot begin
# with two slash characters ("//").
if path.startswith("//"):
- raise InvalidURL(
- "URLs with no authority component cannot have a path starting with '//'"
- )
+ raise InvalidURL("Relative URLs cannot have a path starting with '//'")
+
# In addition, a URI reference (Section 4.1) may be a relative-path reference,
# in which case the first path segment cannot contain a colon (":") character.
- if path.startswith(":") and not has_scheme:
- raise InvalidURL(
- "URLs with no scheme component cannot have a path starting with ':'"
- )
+ if path.startswith(":"):
+ raise InvalidURL("Relative URLs cannot have a path starting with ':'")
def normalize_path(path: str) -> str:
@@ -391,8 +407,17 @@ def normalize_path(path: str) -> str:
normalize_path("/path/./to/somewhere/..") == "/path/to"
"""
- # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4
+ # Fast return when no '.' characters in the path.
+ if "." not in path:
+ return path
+
components = path.split("/")
+
+ # Fast return when no '.' or '..' components in the path.
+ if "." not in components and ".." not in components:
+ return path
+
+ # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4
output: list[str] = []
for component in components:
if component == ".":
@@ -405,44 +430,22 @@ def normalize_path(path: str) -> str:
return "/".join(output)
-def percent_encode(char: str) -> str:
- """
- Replace a single character with the percent-encoded representation.
-
- Characters outside the ASCII range are represented with their a percent-encoded
- representation of their UTF-8 byte sequence.
-
- For example:
-
- percent_encode(" ") == "%20"
- """
- return "".join([f"%{byte:02x}" for byte in char.encode("utf-8")]).upper()
-
-
-def is_safe(string: str, safe: str = "/") -> bool:
- """
- Determine if a given string is already quote-safe.
- """
- NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + "%"
-
- # All characters must already be non-escaping or '%'
- for char in string:
- if char not in NON_ESCAPED_CHARS:
- return False
-
- return True
+def PERCENT(string: str) -> str:
+ return "".join([f"%{byte:02X}" for byte in string.encode("utf-8")])
def percent_encoded(string: str, safe: str = "/") -> str:
"""
Use percent-encoding to quote a string.
"""
- if is_safe(string, safe=safe):
+ NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe
+
+ # Fast path for strings that don't need escaping.
+ if not string.rstrip(NON_ESCAPED_CHARS):
return string
- NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe
return "".join(
- [char if char in NON_ESCAPED_CHARS else percent_encode(char) for char in string]
+ [char if char in NON_ESCAPED_CHARS else PERCENT(char) for char in string]
)
diff --git a/contrib/python/httpx/httpx/_urls.py b/contrib/python/httpx/httpx/_urls.py
index 43dedd5644..ec4ea6b399 100644
--- a/contrib/python/httpx/httpx/_urls.py
+++ b/contrib/python/httpx/httpx/_urls.py
@@ -5,10 +5,12 @@ from urllib.parse import parse_qs, unquote
import idna
-from ._types import QueryParamTypes, RawURL, URLTypes
+from ._types import QueryParamTypes, RawURL
from ._urlparse import urlencode, urlparse
from ._utils import primitive_value_to_str
+__all__ = ["URL", "QueryParams"]
+
class URL:
"""
@@ -365,7 +367,7 @@ class URL:
def copy_merge_params(self, params: QueryParamTypes) -> URL:
return self.copy_with(params=self.params.merge(params))
- def join(self, url: URLTypes) -> URL:
+ def join(self, url: URL | str) -> URL:
"""
Return an absolute URL, using this URL as the base.
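
Note: `URL.join` keeps its behaviour; only the annotation changes to `URL | str`. For reference, both argument forms resolve against the base (a sketch; results follow RFC 3986 reference resolution):

```python
import httpx

base = httpx.URL("https://www.example.org/some/path")

# Either a str or an httpx.URL is accepted.
print(base.join("../other"))              # expected: https://www.example.org/other
print(base.join(httpx.URL("/absolute")))  # expected: https://www.example.org/absolute
```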
diff --git a/contrib/python/httpx/ya.make b/contrib/python/httpx/ya.make
index fe32d75035..26977a0c76 100644
--- a/contrib/python/httpx/ya.make
+++ b/contrib/python/httpx/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(0.27.0)
+VERSION(0.27.2)
LICENSE(BSD-3-Clause)