author     robot-piglet <robot-piglet@yandex-team.com>  2024-11-30 12:58:42 +0300
committer  robot-piglet <robot-piglet@yandex-team.com>  2024-11-30 13:09:50 +0300
commit     9dc6b2fddb37a98c860a013c641c112d5030c7a7 (patch)
tree       6678c90833d76fbed73b10363f1b0ec668eacafe
parent     df64d4599e151698c173343d73e12328aa9ce1ef (diff)
download   ydb-9dc6b2fddb37a98c860a013c641c112d5030c7a7.tar.gz
Intermediate changes
commit_hash:f785655b6e4979e4b61af2cb8227296a279f7ab6
-rw-r--r--  contrib/python/httpcore/.dist-info/METADATA | 9
-rw-r--r--  contrib/python/httpcore/httpcore/__init__.py | 5
-rw-r--r--  contrib/python/httpcore/httpcore/_api.py | 26
-rw-r--r--  contrib/python/httpcore/httpcore/_async/connection.py | 30
-rw-r--r--  contrib/python/httpcore/httpcore/_async/connection_pool.py | 98
-rw-r--r--  contrib/python/httpcore/httpcore/_async/http11.py | 59
-rw-r--r--  contrib/python/httpcore/httpcore/_async/http2.py | 50
-rw-r--r--  contrib/python/httpcore/httpcore/_async/http_proxy.py | 73
-rw-r--r--  contrib/python/httpcore/httpcore/_async/interfaces.py | 26
-rw-r--r--  contrib/python/httpcore/httpcore/_async/socks_proxy.py | 61
-rw-r--r--  contrib/python/httpcore/httpcore/_backends/anyio.py | 24
-rw-r--r--  contrib/python/httpcore/httpcore/_backends/auto.py | 13
-rw-r--r--  contrib/python/httpcore/httpcore/_backends/base.py | 46
-rw-r--r--  contrib/python/httpcore/httpcore/_backends/mock.py | 47
-rw-r--r--  contrib/python/httpcore/httpcore/_backends/sync.py | 40
-rw-r--r--  contrib/python/httpcore/httpcore/_backends/trio.py | 24
-rw-r--r--  contrib/python/httpcore/httpcore/_exceptions.py | 6
-rw-r--r--  contrib/python/httpcore/httpcore/_models.py | 146
-rw-r--r--  contrib/python/httpcore/httpcore/_sync/connection.py | 30
-rw-r--r--  contrib/python/httpcore/httpcore/_sync/connection_pool.py | 98
-rw-r--r--  contrib/python/httpcore/httpcore/_sync/http11.py | 59
-rw-r--r--  contrib/python/httpcore/httpcore/_sync/http2.py | 50
-rw-r--r--  contrib/python/httpcore/httpcore/_sync/http_proxy.py | 73
-rw-r--r--  contrib/python/httpcore/httpcore/_sync/interfaces.py | 26
-rw-r--r--  contrib/python/httpcore/httpcore/_sync/socks_proxy.py | 61
-rw-r--r--  contrib/python/httpcore/httpcore/_synchronization.py | 57
-rw-r--r--  contrib/python/httpcore/httpcore/_trace.py | 32
-rw-r--r--  contrib/python/httpcore/httpcore/_utils.py | 5
-rw-r--r--  contrib/python/httpcore/ya.make | 2
29 files changed, 683 insertions, 593 deletions
diff --git a/contrib/python/httpcore/.dist-info/METADATA b/contrib/python/httpcore/.dist-info/METADATA
index 6be8e4bb68..99be2236cd 100644
--- a/contrib/python/httpcore/.dist-info/METADATA
+++ b/contrib/python/httpcore/.dist-info/METADATA
@@ -1,13 +1,12 @@
Metadata-Version: 2.3
Name: httpcore
-Version: 1.0.6
+Version: 1.0.7
Summary: A minimal low-level HTTP client.
Project-URL: Documentation, https://www.encode.io/httpcore
Project-URL: Homepage, https://www.encode.io/httpcore/
Project-URL: Source, https://github.com/encode/httpcore
Author-email: Tom Christie <tom@tomchristie.com>
-License-Expression: BSD-3-Clause
-License-File: LICENSE.md
+License: BSD-3-Clause
Classifier: Development Status :: 3 - Alpha
Classifier: Environment :: Web Environment
Classifier: Framework :: AsyncIO
@@ -153,6 +152,10 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+## Version 1.0.7 (November 15th, 2024)
+
+- Support `proxy=…` configuration on `ConnectionPool()`. (#974)
+
## Version 1.0.6 (October 1st, 2024)
- Relax `trio` dependency pinning. (#956)
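The 1.0.7 entry above adds a `proxy=...` option to `ConnectionPool()`. A minimal usage sketch, assuming the new `Proxy` model accepts `url` (and optionally `auth` and `headers`) keyword arguments, as suggested by the attributes read in `create_connection()` later in this diff:

    import httpcore

    # Route all pool traffic through an HTTP proxy. The Proxy(...) keyword
    # arguments are inferred from this diff (proxy.url, proxy.auth,
    # proxy.headers, proxy.ssl_context) rather than taken from documentation.
    proxy = httpcore.Proxy(url="http://localhost:8080/")
    with httpcore.ConnectionPool(proxy=proxy) as pool:
        response = pool.request("GET", "https://www.example.com/")
        print(response.status)
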
diff --git a/contrib/python/httpcore/httpcore/__init__.py b/contrib/python/httpcore/httpcore/__init__.py
index 330745a5dc..662b1563a1 100644
--- a/contrib/python/httpcore/httpcore/__init__.py
+++ b/contrib/python/httpcore/httpcore/__init__.py
@@ -34,7 +34,7 @@ from ._exceptions import (
WriteError,
WriteTimeout,
)
-from ._models import URL, Origin, Request, Response
+from ._models import URL, Origin, Proxy, Request, Response
from ._ssl import default_ssl_context
from ._sync import (
ConnectionInterface,
@@ -79,6 +79,7 @@ __all__ = [
"URL",
"Request",
"Response",
+ "Proxy",
# async
"AsyncHTTPConnection",
"AsyncConnectionPool",
@@ -130,7 +131,7 @@ __all__ = [
"WriteError",
]
-__version__ = "1.0.6"
+__version__ = "1.0.7"
__locals = locals()
diff --git a/contrib/python/httpcore/httpcore/_api.py b/contrib/python/httpcore/httpcore/_api.py
index 854235f5f6..38b961d10d 100644
--- a/contrib/python/httpcore/httpcore/_api.py
+++ b/contrib/python/httpcore/httpcore/_api.py
@@ -1,17 +1,19 @@
-from contextlib import contextmanager
-from typing import Iterator, Optional, Union
+from __future__ import annotations
+
+import contextlib
+import typing
from ._models import URL, Extensions, HeaderTypes, Response
from ._sync.connection_pool import ConnectionPool
def request(
- method: Union[bytes, str],
- url: Union[URL, bytes, str],
+ method: bytes | str,
+ url: URL | bytes | str,
*,
headers: HeaderTypes = None,
- content: Union[bytes, Iterator[bytes], None] = None,
- extensions: Optional[Extensions] = None,
+ content: bytes | typing.Iterator[bytes] | None = None,
+ extensions: Extensions | None = None,
) -> Response:
"""
Sends an HTTP request, returning the response.
@@ -45,15 +47,15 @@ def request(
)
-@contextmanager
+@contextlib.contextmanager
def stream(
- method: Union[bytes, str],
- url: Union[URL, bytes, str],
+ method: bytes | str,
+ url: URL | bytes | str,
*,
headers: HeaderTypes = None,
- content: Union[bytes, Iterator[bytes], None] = None,
- extensions: Optional[Extensions] = None,
-) -> Iterator[Response]:
+ content: bytes | typing.Iterator[bytes] | None = None,
+ extensions: Extensions | None = None,
+) -> typing.Iterator[Response]:
"""
Sends an HTTP request, returning the response within a content manager.
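The module-level helpers retyped above are httpcore's shorthand API. For orientation, a typical call pattern (a sketch based on the `request()` and `stream()` helpers shown in this file; the target URL is a placeholder and `iter_stream()` is assumed from httpcore's documented `Response` API):

    import httpcore

    # One-shot request; the response body is fully read before returning.
    response = httpcore.request("GET", "https://www.example.com/")
    print(response.status, response.headers)

    # Streaming variant; the body is consumed inside the context manager.
    with httpcore.stream("GET", "https://www.example.com/") as response:
        for chunk in response.iter_stream():
            print(f"downloaded {len(chunk)} bytes")
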
diff --git a/contrib/python/httpcore/httpcore/_async/connection.py b/contrib/python/httpcore/httpcore/_async/connection.py
index 2f439cf09c..b42581dff8 100644
--- a/contrib/python/httpcore/httpcore/_async/connection.py
+++ b/contrib/python/httpcore/httpcore/_async/connection.py
@@ -1,8 +1,10 @@
+from __future__ import annotations
+
import itertools
import logging
import ssl
-from types import TracebackType
-from typing import Iterable, Iterator, Optional, Type
+import types
+import typing
from .._backends.auto import AutoBackend
from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream
@@ -20,7 +22,7 @@ RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc.
logger = logging.getLogger("httpcore.connection")
-def exponential_backoff(factor: float) -> Iterator[float]:
+def exponential_backoff(factor: float) -> typing.Iterator[float]:
"""
Generate a geometric sequence that has a ratio of 2 and starts with 0.
@@ -37,15 +39,15 @@ class AsyncHTTPConnection(AsyncConnectionInterface):
def __init__(
self,
origin: Origin,
- ssl_context: Optional[ssl.SSLContext] = None,
- keepalive_expiry: Optional[float] = None,
+ ssl_context: ssl.SSLContext | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
retries: int = 0,
- local_address: Optional[str] = None,
- uds: Optional[str] = None,
- network_backend: Optional[AsyncNetworkBackend] = None,
- socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+ local_address: str | None = None,
+ uds: str | None = None,
+ network_backend: AsyncNetworkBackend | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> None:
self._origin = origin
self._ssl_context = ssl_context
@@ -59,7 +61,7 @@ class AsyncHTTPConnection(AsyncConnectionInterface):
self._network_backend: AsyncNetworkBackend = (
AutoBackend() if network_backend is None else network_backend
)
- self._connection: Optional[AsyncConnectionInterface] = None
+ self._connection: AsyncConnectionInterface | None = None
self._connect_failed: bool = False
self._request_lock = AsyncLock()
self._socket_options = socket_options
@@ -208,13 +210,13 @@ class AsyncHTTPConnection(AsyncConnectionInterface):
# These context managers are not used in the standard flow, but are
# useful for testing or working with connection instances directly.
- async def __aenter__(self) -> "AsyncHTTPConnection":
+ async def __aenter__(self) -> AsyncHTTPConnection:
return self
async def __aexit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
await self.aclose()
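The `exponential_backoff()` generator retyped above produces the retry delays described by the `RETRIES_BACKOFF_FACTOR = 0.5  # 0s, 0.5s, 1s, 2s, 4s, etc.` comment. A plausible shape of that generator, sketched only to illustrate the docstring (the actual body lies outside this hunk):

    import itertools
    import typing

    def exponential_backoff(factor: float) -> typing.Iterator[float]:
        # First retry fires immediately; afterwards the delay doubles each time.
        yield 0
        for n in itertools.count():
            yield factor * 2**n

    print(list(itertools.islice(exponential_backoff(0.5), 5)))  # [0, 0.5, 1, 2, 4]
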
diff --git a/contrib/python/httpcore/httpcore/_async/connection_pool.py b/contrib/python/httpcore/httpcore/_async/connection_pool.py
index 214dfc4be4..96e973d0ce 100644
--- a/contrib/python/httpcore/httpcore/_async/connection_pool.py
+++ b/contrib/python/httpcore/httpcore/_async/connection_pool.py
@@ -1,12 +1,14 @@
+from __future__ import annotations
+
import ssl
import sys
-from types import TracebackType
-from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type
+import types
+import typing
from .._backends.auto import AutoBackend
from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend
from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol
-from .._models import Origin, Request, Response
+from .._models import Origin, Proxy, Request, Response
from .._synchronization import AsyncEvent, AsyncShieldCancellation, AsyncThreadLock
from .connection import AsyncHTTPConnection
from .interfaces import AsyncConnectionInterface, AsyncRequestInterface
@@ -15,12 +17,10 @@ from .interfaces import AsyncConnectionInterface, AsyncRequestInterface
class AsyncPoolRequest:
def __init__(self, request: Request) -> None:
self.request = request
- self.connection: Optional[AsyncConnectionInterface] = None
+ self.connection: AsyncConnectionInterface | None = None
self._connection_acquired = AsyncEvent()
- def assign_to_connection(
- self, connection: Optional[AsyncConnectionInterface]
- ) -> None:
+ def assign_to_connection(self, connection: AsyncConnectionInterface | None) -> None:
self.connection = connection
self._connection_acquired.set()
@@ -29,7 +29,7 @@ class AsyncPoolRequest:
self._connection_acquired = AsyncEvent()
async def wait_for_connection(
- self, timeout: Optional[float] = None
+ self, timeout: float | None = None
) -> AsyncConnectionInterface:
if self.connection is None:
await self._connection_acquired.wait(timeout=timeout)
@@ -47,17 +47,18 @@ class AsyncConnectionPool(AsyncRequestInterface):
def __init__(
self,
- ssl_context: Optional[ssl.SSLContext] = None,
- max_connections: Optional[int] = 10,
- max_keepalive_connections: Optional[int] = None,
- keepalive_expiry: Optional[float] = None,
+ ssl_context: ssl.SSLContext | None = None,
+ proxy: Proxy | None = None,
+ max_connections: int | None = 10,
+ max_keepalive_connections: int | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
retries: int = 0,
- local_address: Optional[str] = None,
- uds: Optional[str] = None,
- network_backend: Optional[AsyncNetworkBackend] = None,
- socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+ local_address: str | None = None,
+ uds: str | None = None,
+ network_backend: AsyncNetworkBackend | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> None:
"""
A connection pool for making HTTP requests.
@@ -89,7 +90,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
in the TCP socket when the connection was established.
"""
self._ssl_context = ssl_context
-
+ self._proxy = proxy
self._max_connections = (
sys.maxsize if max_connections is None else max_connections
)
@@ -116,8 +117,8 @@ class AsyncConnectionPool(AsyncRequestInterface):
# The mutable state on a connection pool is the queue of incoming requests,
# and the set of connections that are servicing those requests.
- self._connections: List[AsyncConnectionInterface] = []
- self._requests: List[AsyncPoolRequest] = []
+ self._connections: list[AsyncConnectionInterface] = []
+ self._requests: list[AsyncPoolRequest] = []
# We only mutate the state of the connection pool within an 'optional_thread_lock'
# context. This holds a threading lock unless we're running in async mode,
@@ -125,6 +126,45 @@ class AsyncConnectionPool(AsyncRequestInterface):
self._optional_thread_lock = AsyncThreadLock()
def create_connection(self, origin: Origin) -> AsyncConnectionInterface:
+ if self._proxy is not None:
+ if self._proxy.url.scheme in (b"socks5", b"socks5h"):
+ from .socks_proxy import AsyncSocks5Connection
+
+ return AsyncSocks5Connection(
+ proxy_origin=self._proxy.url.origin,
+ proxy_auth=self._proxy.auth,
+ remote_origin=origin,
+ ssl_context=self._ssl_context,
+ keepalive_expiry=self._keepalive_expiry,
+ http1=self._http1,
+ http2=self._http2,
+ network_backend=self._network_backend,
+ )
+ elif origin.scheme == b"http":
+ from .http_proxy import AsyncForwardHTTPConnection
+
+ return AsyncForwardHTTPConnection(
+ proxy_origin=self._proxy.url.origin,
+ proxy_headers=self._proxy.headers,
+ proxy_ssl_context=self._proxy.ssl_context,
+ remote_origin=origin,
+ keepalive_expiry=self._keepalive_expiry,
+ network_backend=self._network_backend,
+ )
+ from .http_proxy import AsyncTunnelHTTPConnection
+
+ return AsyncTunnelHTTPConnection(
+ proxy_origin=self._proxy.url.origin,
+ proxy_headers=self._proxy.headers,
+ proxy_ssl_context=self._proxy.ssl_context,
+ remote_origin=origin,
+ ssl_context=self._ssl_context,
+ keepalive_expiry=self._keepalive_expiry,
+ http1=self._http1,
+ http2=self._http2,
+ network_backend=self._network_backend,
+ )
+
return AsyncHTTPConnection(
origin=origin,
ssl_context=self._ssl_context,
@@ -139,7 +179,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
)
@property
- def connections(self) -> List[AsyncConnectionInterface]:
+ def connections(self) -> list[AsyncConnectionInterface]:
"""
Return a list of the connections currently in the pool.
@@ -217,7 +257,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
# Return the response. Note that in this case we still have to manage
# the point at which the response is closed.
- assert isinstance(response.stream, AsyncIterable)
+ assert isinstance(response.stream, typing.AsyncIterable)
return Response(
status=response.status,
headers=response.headers,
@@ -227,7 +267,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
extensions=response.extensions,
)
- def _assign_requests_to_connections(self) -> List[AsyncConnectionInterface]:
+ def _assign_requests_to_connections(self) -> list[AsyncConnectionInterface]:
"""
Manage the state of the connection pool, assigning incoming
requests to connections as available.
@@ -298,7 +338,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
return closing_connections
- async def _close_connections(self, closing: List[AsyncConnectionInterface]) -> None:
+ async def _close_connections(self, closing: list[AsyncConnectionInterface]) -> None:
# Close connections which have been removed from the pool.
with AsyncShieldCancellation():
for connection in closing:
@@ -312,14 +352,14 @@ class AsyncConnectionPool(AsyncRequestInterface):
self._connections = []
await self._close_connections(closing_connections)
- async def __aenter__(self) -> "AsyncConnectionPool":
+ async def __aenter__(self) -> AsyncConnectionPool:
return self
async def __aexit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
await self.aclose()
@@ -349,7 +389,7 @@ class AsyncConnectionPool(AsyncRequestInterface):
class PoolByteStream:
def __init__(
self,
- stream: AsyncIterable[bytes],
+ stream: typing.AsyncIterable[bytes],
pool_request: AsyncPoolRequest,
pool: AsyncConnectionPool,
) -> None:
@@ -358,7 +398,7 @@ class PoolByteStream:
self._pool = pool
self._closed = False
- async def __aiter__(self) -> AsyncIterator[bytes]:
+ async def __aiter__(self) -> typing.AsyncIterator[bytes]:
try:
async for part in self._stream:
yield part
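Per the new `create_connection()` branching above, a pool configured with an `http://` proxy URL forwards plain `http://` origins and tunnels `https://` origins via CONNECT, while `socks5`/`socks5h` proxy URLs are handed to `AsyncSocks5Connection`. An async sketch of the tunnel case (same caveats on the `Proxy(...)` arguments as in the earlier example):

    import asyncio

    import httpcore

    async def main() -> None:
        proxy = httpcore.Proxy(url="http://localhost:8080/")
        async with httpcore.AsyncConnectionPool(proxy=proxy) as pool:
            # https:// target behind an http proxy -> AsyncTunnelHTTPConnection.
            response = await pool.request("GET", "https://www.example.com/")
            print(response.status)

    asyncio.run(main())
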
diff --git a/contrib/python/httpcore/httpcore/_async/http11.py b/contrib/python/httpcore/httpcore/_async/http11.py
index 0493a923dc..e6d6d70985 100644
--- a/contrib/python/httpcore/httpcore/_async/http11.py
+++ b/contrib/python/httpcore/httpcore/_async/http11.py
@@ -1,18 +1,11 @@
+from __future__ import annotations
+
import enum
import logging
import ssl
import time
-from types import TracebackType
-from typing import (
- Any,
- AsyncIterable,
- AsyncIterator,
- List,
- Optional,
- Tuple,
- Type,
- Union,
-)
+import types
+import typing
import h11
@@ -33,7 +26,7 @@ logger = logging.getLogger("httpcore.http11")
# A subset of `h11.Event` types supported by `_send_event`
-H11SendEvent = Union[
+H11SendEvent = typing.Union[
h11.Request,
h11.Data,
h11.EndOfMessage,
@@ -55,12 +48,12 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
self,
origin: Origin,
stream: AsyncNetworkStream,
- keepalive_expiry: Optional[float] = None,
+ keepalive_expiry: float | None = None,
) -> None:
self._origin = origin
self._network_stream = stream
- self._keepalive_expiry: Optional[float] = keepalive_expiry
- self._expire_at: Optional[float] = None
+ self._keepalive_expiry: float | None = keepalive_expiry
+ self._expire_at: float | None = None
self._state = HTTPConnectionState.NEW
self._state_lock = AsyncLock()
self._request_count = 0
@@ -160,16 +153,14 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
timeouts = request.extensions.get("timeout", {})
timeout = timeouts.get("write", None)
- assert isinstance(request.stream, AsyncIterable)
+ assert isinstance(request.stream, typing.AsyncIterable)
async for chunk in request.stream:
event = h11.Data(data=chunk)
await self._send_event(event, timeout=timeout)
await self._send_event(h11.EndOfMessage(), timeout=timeout)
- async def _send_event(
- self, event: h11.Event, timeout: Optional[float] = None
- ) -> None:
+ async def _send_event(self, event: h11.Event, timeout: float | None = None) -> None:
bytes_to_send = self._h11_state.send(event)
if bytes_to_send is not None:
await self._network_stream.write(bytes_to_send, timeout=timeout)
@@ -178,7 +169,7 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
async def _receive_response_headers(
self, request: Request
- ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]:
+ ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]:
timeouts = request.extensions.get("timeout", {})
timeout = timeouts.get("read", None)
@@ -202,7 +193,9 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
return http_version, event.status_code, event.reason, headers, trailing_data
- async def _receive_response_body(self, request: Request) -> AsyncIterator[bytes]:
+ async def _receive_response_body(
+ self, request: Request
+ ) -> typing.AsyncIterator[bytes]:
timeouts = request.extensions.get("timeout", {})
timeout = timeouts.get("read", None)
@@ -214,8 +207,8 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
break
async def _receive_event(
- self, timeout: Optional[float] = None
- ) -> Union[h11.Event, Type[h11.PAUSED]]:
+ self, timeout: float | None = None
+ ) -> h11.Event | type[h11.PAUSED]:
while True:
with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
event = self._h11_state.next_event()
@@ -316,14 +309,14 @@ class AsyncHTTP11Connection(AsyncConnectionInterface):
# These context managers are not used in the standard flow, but are
# useful for testing or working with connection instances directly.
- async def __aenter__(self) -> "AsyncHTTP11Connection":
+ async def __aenter__(self) -> AsyncHTTP11Connection:
return self
async def __aexit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
await self.aclose()
@@ -334,7 +327,7 @@ class HTTP11ConnectionByteStream:
self._request = request
self._closed = False
- async def __aiter__(self) -> AsyncIterator[bytes]:
+ async def __aiter__(self) -> typing.AsyncIterator[bytes]:
kwargs = {"request": self._request}
try:
async with Trace("receive_response_body", logger, self._request, kwargs):
@@ -360,7 +353,7 @@ class AsyncHTTP11UpgradeStream(AsyncNetworkStream):
self._stream = stream
self._leading_data = leading_data
- async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
+ async def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
if self._leading_data:
buffer = self._leading_data[:max_bytes]
self._leading_data = self._leading_data[max_bytes:]
@@ -368,7 +361,7 @@ class AsyncHTTP11UpgradeStream(AsyncNetworkStream):
else:
return await self._stream.read(max_bytes, timeout)
- async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None:
+ async def write(self, buffer: bytes, timeout: float | None = None) -> None:
await self._stream.write(buffer, timeout)
async def aclose(self) -> None:
@@ -377,10 +370,10 @@ class AsyncHTTP11UpgradeStream(AsyncNetworkStream):
async def start_tls(
self,
ssl_context: ssl.SSLContext,
- server_hostname: Optional[str] = None,
- timeout: Optional[float] = None,
+ server_hostname: str | None = None,
+ timeout: float | None = None,
) -> AsyncNetworkStream:
return await self._stream.start_tls(ssl_context, server_hostname, timeout)
- def get_extra_info(self, info: str) -> Any:
+ def get_extra_info(self, info: str) -> typing.Any:
return self._stream.get_extra_info(info)
diff --git a/contrib/python/httpcore/httpcore/_async/http2.py b/contrib/python/httpcore/httpcore/_async/http2.py
index c201ee4cbc..c6434a0496 100644
--- a/contrib/python/httpcore/httpcore/_async/http2.py
+++ b/contrib/python/httpcore/httpcore/_async/http2.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import enum
import logging
import time
@@ -45,14 +47,14 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
self,
origin: Origin,
stream: AsyncNetworkStream,
- keepalive_expiry: typing.Optional[float] = None,
+ keepalive_expiry: float | None = None,
):
self._origin = origin
self._network_stream = stream
- self._keepalive_expiry: typing.Optional[float] = keepalive_expiry
+ self._keepalive_expiry: float | None = keepalive_expiry
self._h2_state = h2.connection.H2Connection(config=self.CONFIG)
self._state = HTTPConnectionState.IDLE
- self._expire_at: typing.Optional[float] = None
+ self._expire_at: float | None = None
self._request_count = 0
self._init_lock = AsyncLock()
self._state_lock = AsyncLock()
@@ -63,24 +65,20 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
self._connection_error = False
# Mapping from stream ID to response stream events.
- self._events: typing.Dict[
+ self._events: dict[
int,
- typing.Union[
- h2.events.ResponseReceived,
- h2.events.DataReceived,
- h2.events.StreamEnded,
- h2.events.StreamReset,
- ],
+ h2.events.ResponseReceived
+ | h2.events.DataReceived
+ | h2.events.StreamEnded
+ | h2.events.StreamReset,
] = {}
# Connection terminated events are stored as state since
# we need to handle them for all streams.
- self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = (
- None
- )
+ self._connection_terminated: h2.events.ConnectionTerminated | None = None
- self._read_exception: typing.Optional[Exception] = None
- self._write_exception: typing.Optional[Exception] = None
+ self._read_exception: Exception | None = None
+ self._write_exception: Exception | None = None
async def handle_async_request(self, request: Request) -> Response:
if not self.can_handle_request(request.url.origin):
@@ -284,7 +282,7 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
async def _receive_response(
self, request: Request, stream_id: int
- ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]:
+ ) -> tuple[int, list[tuple[bytes, bytes]]]:
"""
Return the response status code and headers for a given stream ID.
"""
@@ -321,9 +319,7 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
async def _receive_stream_event(
self, request: Request, stream_id: int
- ) -> typing.Union[
- h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded
- ]:
+ ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded:
"""
Return the next available event for a given stream ID.
@@ -337,7 +333,7 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
return event
async def _receive_events(
- self, request: Request, stream_id: typing.Optional[int] = None
+ self, request: Request, stream_id: int | None = None
) -> None:
"""
Read some data from the network until we see one or more events
@@ -425,9 +421,7 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
# Wrappers around network read/write operations...
- async def _read_incoming_data(
- self, request: Request
- ) -> typing.List[h2.events.Event]:
+ async def _read_incoming_data(self, request: Request) -> list[h2.events.Event]:
timeouts = request.extensions.get("timeout", {})
timeout = timeouts.get("read", None)
@@ -451,7 +445,7 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
self._connection_error = True
raise exc
- events: typing.List[h2.events.Event] = self._h2_state.receive_data(data)
+ events: list[h2.events.Event] = self._h2_state.receive_data(data)
return events
@@ -544,14 +538,14 @@ class AsyncHTTP2Connection(AsyncConnectionInterface):
# These context managers are not used in the standard flow, but are
# useful for testing or working with connection instances directly.
- async def __aenter__(self) -> "AsyncHTTP2Connection":
+ async def __aenter__(self) -> AsyncHTTP2Connection:
return self
async def __aexit__(
self,
- exc_type: typing.Optional[typing.Type[BaseException]] = None,
- exc_value: typing.Optional[BaseException] = None,
- traceback: typing.Optional[types.TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
await self.aclose()
diff --git a/contrib/python/httpcore/httpcore/_async/http_proxy.py b/contrib/python/httpcore/httpcore/_async/http_proxy.py
index 4aa7d8741a..cc9d92066e 100644
--- a/contrib/python/httpcore/httpcore/_async/http_proxy.py
+++ b/contrib/python/httpcore/httpcore/_async/http_proxy.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
+import base64
import logging
import ssl
-from base64 import b64encode
-from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union
+import typing
from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend
from .._exceptions import ProxyError
@@ -22,17 +24,18 @@ from .connection_pool import AsyncConnectionPool
from .http11 import AsyncHTTP11Connection
from .interfaces import AsyncConnectionInterface
-HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]]
-HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]]
+ByteOrStr = typing.Union[bytes, str]
+HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]]
+HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr]
logger = logging.getLogger("httpcore.proxy")
def merge_headers(
- default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
- override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
-) -> List[Tuple[bytes, bytes]]:
+ default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None,
+ override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None,
+) -> list[tuple[bytes, bytes]]:
"""
Append default_headers and override_headers, de-duplicating if a key exists
in both cases.
@@ -48,33 +51,28 @@ def merge_headers(
return default_headers + override_headers
-def build_auth_header(username: bytes, password: bytes) -> bytes:
- userpass = username + b":" + password
- return b"Basic " + b64encode(userpass)
-
-
-class AsyncHTTPProxy(AsyncConnectionPool):
+class AsyncHTTPProxy(AsyncConnectionPool): # pragma: nocover
"""
A connection pool that sends requests via an HTTP proxy.
"""
def __init__(
self,
- proxy_url: Union[URL, bytes, str],
- proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None,
- proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
- ssl_context: Optional[ssl.SSLContext] = None,
- proxy_ssl_context: Optional[ssl.SSLContext] = None,
- max_connections: Optional[int] = 10,
- max_keepalive_connections: Optional[int] = None,
- keepalive_expiry: Optional[float] = None,
+ proxy_url: URL | bytes | str,
+ proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+ proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ proxy_ssl_context: ssl.SSLContext | None = None,
+ max_connections: int | None = 10,
+ max_keepalive_connections: int | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
retries: int = 0,
- local_address: Optional[str] = None,
- uds: Optional[str] = None,
- network_backend: Optional[AsyncNetworkBackend] = None,
- socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+ local_address: str | None = None,
+ uds: str | None = None,
+ network_backend: AsyncNetworkBackend | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> None:
"""
A connection pool for making HTTP requests.
@@ -139,7 +137,8 @@ class AsyncHTTPProxy(AsyncConnectionPool):
if proxy_auth is not None:
username = enforce_bytes(proxy_auth[0], name="proxy_auth")
password = enforce_bytes(proxy_auth[1], name="proxy_auth")
- authorization = build_auth_header(username, password)
+ userpass = username + b":" + password
+ authorization = b"Basic " + base64.b64encode(userpass)
self._proxy_headers = [
(b"Proxy-Authorization", authorization)
] + self._proxy_headers
@@ -172,11 +171,11 @@ class AsyncForwardHTTPConnection(AsyncConnectionInterface):
self,
proxy_origin: Origin,
remote_origin: Origin,
- proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
- keepalive_expiry: Optional[float] = None,
- network_backend: Optional[AsyncNetworkBackend] = None,
- socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
- proxy_ssl_context: Optional[ssl.SSLContext] = None,
+ proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+ keepalive_expiry: float | None = None,
+ network_backend: AsyncNetworkBackend | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
+ proxy_ssl_context: ssl.SSLContext | None = None,
) -> None:
self._connection = AsyncHTTPConnection(
origin=proxy_origin,
@@ -236,14 +235,14 @@ class AsyncTunnelHTTPConnection(AsyncConnectionInterface):
self,
proxy_origin: Origin,
remote_origin: Origin,
- ssl_context: Optional[ssl.SSLContext] = None,
- proxy_ssl_context: Optional[ssl.SSLContext] = None,
- proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
- keepalive_expiry: Optional[float] = None,
+ ssl_context: ssl.SSLContext | None = None,
+ proxy_ssl_context: ssl.SSLContext | None = None,
+ proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
- network_backend: Optional[AsyncNetworkBackend] = None,
- socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+ network_backend: AsyncNetworkBackend | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> None:
self._connection: AsyncConnectionInterface = AsyncHTTPConnection(
origin=proxy_origin,
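The removed `build_auth_header()` helper is simply inlined above; the `Proxy-Authorization` value is the usual basic-auth encoding of `username:password`. A quick standalone check of the same recipe (plain `base64`, not an httpcore API):

    import base64

    username, password = b"user", b"pass"
    authorization = b"Basic " + base64.b64encode(username + b":" + password)
    assert authorization == b"Basic dXNlcjpwYXNz"
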
diff --git a/contrib/python/httpcore/httpcore/_async/interfaces.py b/contrib/python/httpcore/httpcore/_async/interfaces.py
index c998dd2763..361583bede 100644
--- a/contrib/python/httpcore/httpcore/_async/interfaces.py
+++ b/contrib/python/httpcore/httpcore/_async/interfaces.py
@@ -1,5 +1,7 @@
-from contextlib import asynccontextmanager
-from typing import AsyncIterator, Optional, Union
+from __future__ import annotations
+
+import contextlib
+import typing
from .._models import (
URL,
@@ -18,12 +20,12 @@ from .._models import (
class AsyncRequestInterface:
async def request(
self,
- method: Union[bytes, str],
- url: Union[URL, bytes, str],
+ method: bytes | str,
+ url: URL | bytes | str,
*,
headers: HeaderTypes = None,
- content: Union[bytes, AsyncIterator[bytes], None] = None,
- extensions: Optional[Extensions] = None,
+ content: bytes | typing.AsyncIterator[bytes] | None = None,
+ extensions: Extensions | None = None,
) -> Response:
# Strict type checking on our parameters.
method = enforce_bytes(method, name="method")
@@ -47,16 +49,16 @@ class AsyncRequestInterface:
await response.aclose()
return response
- @asynccontextmanager
+ @contextlib.asynccontextmanager
async def stream(
self,
- method: Union[bytes, str],
- url: Union[URL, bytes, str],
+ method: bytes | str,
+ url: URL | bytes | str,
*,
headers: HeaderTypes = None,
- content: Union[bytes, AsyncIterator[bytes], None] = None,
- extensions: Optional[Extensions] = None,
- ) -> AsyncIterator[Response]:
+ content: bytes | typing.AsyncIterator[bytes] | None = None,
+ extensions: Extensions | None = None,
+ ) -> typing.AsyncIterator[Response]:
# Strict type checking on our parameters.
method = enforce_bytes(method, name="method")
url = enforce_url(url, name="url")
diff --git a/contrib/python/httpcore/httpcore/_async/socks_proxy.py b/contrib/python/httpcore/httpcore/_async/socks_proxy.py
index f839603fe5..b363f55a0b 100644
--- a/contrib/python/httpcore/httpcore/_async/socks_proxy.py
+++ b/contrib/python/httpcore/httpcore/_async/socks_proxy.py
@@ -1,8 +1,9 @@
+from __future__ import annotations
+
import logging
import ssl
-import typing
-from socksio import socks5
+import socksio
from .._backends.auto import AutoBackend
from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream
@@ -43,24 +44,24 @@ async def _init_socks5_connection(
*,
host: bytes,
port: int,
- auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
+ auth: tuple[bytes, bytes] | None = None,
) -> None:
- conn = socks5.SOCKS5Connection()
+ conn = socksio.socks5.SOCKS5Connection()
# Auth method request
auth_method = (
- socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
+ socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
if auth is None
- else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
+ else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
)
- conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method]))
+ conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method]))
outgoing_bytes = conn.data_to_send()
await stream.write(outgoing_bytes)
# Auth method response
incoming_bytes = await stream.read(max_bytes=4096)
response = conn.receive_data(incoming_bytes)
- assert isinstance(response, socks5.SOCKS5AuthReply)
+ assert isinstance(response, socksio.socks5.SOCKS5AuthReply)
if response.method != auth_method:
requested = AUTH_METHODS.get(auth_method, "UNKNOWN")
responded = AUTH_METHODS.get(response.method, "UNKNOWN")
@@ -68,25 +69,25 @@ async def _init_socks5_connection(
f"Requested {requested} from proxy server, but got {responded}."
)
- if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
+ if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
# Username/password request
assert auth is not None
username, password = auth
- conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password))
+ conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password))
outgoing_bytes = conn.data_to_send()
await stream.write(outgoing_bytes)
# Username/password response
incoming_bytes = await stream.read(max_bytes=4096)
response = conn.receive_data(incoming_bytes)
- assert isinstance(response, socks5.SOCKS5UsernamePasswordReply)
+ assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply)
if not response.success:
raise ProxyError("Invalid username/password")
# Connect request
conn.send(
- socks5.SOCKS5CommandRequest.from_address(
- socks5.SOCKS5Command.CONNECT, (host, port)
+ socksio.socks5.SOCKS5CommandRequest.from_address(
+ socksio.socks5.SOCKS5Command.CONNECT, (host, port)
)
)
outgoing_bytes = conn.data_to_send()
@@ -95,31 +96,29 @@ async def _init_socks5_connection(
# Connect response
incoming_bytes = await stream.read(max_bytes=4096)
response = conn.receive_data(incoming_bytes)
- assert isinstance(response, socks5.SOCKS5Reply)
- if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED:
+ assert isinstance(response, socksio.socks5.SOCKS5Reply)
+ if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
reply_code = REPLY_CODES.get(response.reply_code, "UNKOWN")
raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
-class AsyncSOCKSProxy(AsyncConnectionPool):
+class AsyncSOCKSProxy(AsyncConnectionPool): # pragma: nocover
"""
A connection pool that sends requests via an HTTP proxy.
"""
def __init__(
self,
- proxy_url: typing.Union[URL, bytes, str],
- proxy_auth: typing.Optional[
- typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]]
- ] = None,
- ssl_context: typing.Optional[ssl.SSLContext] = None,
- max_connections: typing.Optional[int] = 10,
- max_keepalive_connections: typing.Optional[int] = None,
- keepalive_expiry: typing.Optional[float] = None,
+ proxy_url: URL | bytes | str,
+ proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ max_connections: int | None = 10,
+ max_keepalive_connections: int | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
retries: int = 0,
- network_backend: typing.Optional[AsyncNetworkBackend] = None,
+ network_backend: AsyncNetworkBackend | None = None,
) -> None:
"""
A connection pool for making HTTP requests.
@@ -167,7 +166,7 @@ class AsyncSOCKSProxy(AsyncConnectionPool):
username, password = proxy_auth
username_bytes = enforce_bytes(username, name="proxy_auth")
password_bytes = enforce_bytes(password, name="proxy_auth")
- self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = (
+ self._proxy_auth: tuple[bytes, bytes] | None = (
username_bytes,
password_bytes,
)
@@ -192,12 +191,12 @@ class AsyncSocks5Connection(AsyncConnectionInterface):
self,
proxy_origin: Origin,
remote_origin: Origin,
- proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
- ssl_context: typing.Optional[ssl.SSLContext] = None,
- keepalive_expiry: typing.Optional[float] = None,
+ proxy_auth: tuple[bytes, bytes] | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
- network_backend: typing.Optional[AsyncNetworkBackend] = None,
+ network_backend: AsyncNetworkBackend | None = None,
) -> None:
self._proxy_origin = proxy_origin
self._remote_origin = remote_origin
@@ -211,7 +210,7 @@ class AsyncSocks5Connection(AsyncConnectionInterface):
AutoBackend() if network_backend is None else network_backend
)
self._connect_lock = AsyncLock()
- self._connection: typing.Optional[AsyncConnectionInterface] = None
+ self._connection: AsyncConnectionInterface | None = None
self._connect_failed = False
async def handle_async_request(self, request: Request) -> Response:
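SOCKS usage is unchanged through `AsyncSOCKSProxy(proxy_url=...)`, and with 1.0.7 a `socks5://` proxy URL can also be passed to the generic pool, which `create_connection()` routes to `AsyncSocks5Connection`. A hedged sketch of the latter (the optional `socksio` dependency, i.e. `httpcore[socks]`, is required at runtime):

    import asyncio

    import httpcore

    async def main() -> None:
        proxy = httpcore.Proxy(url="socks5://localhost:1080/")
        async with httpcore.AsyncConnectionPool(proxy=proxy) as pool:
            response = await pool.request("GET", "https://www.example.com/")
            print(response.status)

    asyncio.run(main())
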
diff --git a/contrib/python/httpcore/httpcore/_backends/anyio.py b/contrib/python/httpcore/httpcore/_backends/anyio.py
index d469e0084c..a140095e1b 100644
--- a/contrib/python/httpcore/httpcore/_backends/anyio.py
+++ b/contrib/python/httpcore/httpcore/_backends/anyio.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import ssl
import typing
@@ -20,9 +22,7 @@ class AnyIOStream(AsyncNetworkStream):
def __init__(self, stream: anyio.abc.ByteStream) -> None:
self._stream = stream
- async def read(
- self, max_bytes: int, timeout: typing.Optional[float] = None
- ) -> bytes:
+ async def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
exc_map = {
TimeoutError: ReadTimeout,
anyio.BrokenResourceError: ReadError,
@@ -36,9 +36,7 @@ class AnyIOStream(AsyncNetworkStream):
except anyio.EndOfStream: # pragma: nocover
return b""
- async def write(
- self, buffer: bytes, timeout: typing.Optional[float] = None
- ) -> None:
+ async def write(self, buffer: bytes, timeout: float | None = None) -> None:
if not buffer:
return
@@ -57,8 +55,8 @@ class AnyIOStream(AsyncNetworkStream):
async def start_tls(
self,
ssl_context: ssl.SSLContext,
- server_hostname: typing.Optional[str] = None,
- timeout: typing.Optional[float] = None,
+ server_hostname: str | None = None,
+ timeout: float | None = None,
) -> AsyncNetworkStream:
exc_map = {
TimeoutError: ConnectTimeout,
@@ -101,9 +99,9 @@ class AnyIOBackend(AsyncNetworkBackend):
self,
host: str,
port: int,
- timeout: typing.Optional[float] = None,
- local_address: typing.Optional[str] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ local_address: str | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> AsyncNetworkStream: # pragma: nocover
if socket_options is None:
socket_options = []
@@ -127,8 +125,8 @@ class AnyIOBackend(AsyncNetworkBackend):
async def connect_unix_socket(
self,
path: str,
- timeout: typing.Optional[float] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> AsyncNetworkStream: # pragma: nocover
if socket_options is None:
socket_options = []
diff --git a/contrib/python/httpcore/httpcore/_backends/auto.py b/contrib/python/httpcore/httpcore/_backends/auto.py
index 3ac05f4da0..49f0e698c9 100644
--- a/contrib/python/httpcore/httpcore/_backends/auto.py
+++ b/contrib/python/httpcore/httpcore/_backends/auto.py
@@ -1,5 +1,6 @@
+from __future__ import annotations
+
import typing
-from typing import Optional
from .._synchronization import current_async_library
from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream
@@ -22,9 +23,9 @@ class AutoBackend(AsyncNetworkBackend):
self,
host: str,
port: int,
- timeout: Optional[float] = None,
- local_address: Optional[str] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ local_address: str | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> AsyncNetworkStream:
await self._init_backend()
return await self._backend.connect_tcp(
@@ -38,8 +39,8 @@ class AutoBackend(AsyncNetworkBackend):
async def connect_unix_socket(
self,
path: str,
- timeout: Optional[float] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> AsyncNetworkStream: # pragma: nocover
await self._init_backend()
return await self._backend.connect_unix_socket(
diff --git a/contrib/python/httpcore/httpcore/_backends/base.py b/contrib/python/httpcore/httpcore/_backends/base.py
index 6cadedb5f9..cf55c8b10e 100644
--- a/contrib/python/httpcore/httpcore/_backends/base.py
+++ b/contrib/python/httpcore/httpcore/_backends/base.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import ssl
import time
import typing
@@ -10,10 +12,10 @@ SOCKET_OPTION = typing.Union[
class NetworkStream:
- def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes:
+ def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
raise NotImplementedError() # pragma: nocover
- def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None:
+ def write(self, buffer: bytes, timeout: float | None = None) -> None:
raise NotImplementedError() # pragma: nocover
def close(self) -> None:
@@ -22,9 +24,9 @@ class NetworkStream:
def start_tls(
self,
ssl_context: ssl.SSLContext,
- server_hostname: typing.Optional[str] = None,
- timeout: typing.Optional[float] = None,
- ) -> "NetworkStream":
+ server_hostname: str | None = None,
+ timeout: float | None = None,
+ ) -> NetworkStream:
raise NotImplementedError() # pragma: nocover
def get_extra_info(self, info: str) -> typing.Any:
@@ -36,17 +38,17 @@ class NetworkBackend:
self,
host: str,
port: int,
- timeout: typing.Optional[float] = None,
- local_address: typing.Optional[str] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ local_address: str | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> NetworkStream:
raise NotImplementedError() # pragma: nocover
def connect_unix_socket(
self,
path: str,
- timeout: typing.Optional[float] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> NetworkStream:
raise NotImplementedError() # pragma: nocover
@@ -55,14 +57,10 @@ class NetworkBackend:
class AsyncNetworkStream:
- async def read(
- self, max_bytes: int, timeout: typing.Optional[float] = None
- ) -> bytes:
+ async def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
raise NotImplementedError() # pragma: nocover
- async def write(
- self, buffer: bytes, timeout: typing.Optional[float] = None
- ) -> None:
+ async def write(self, buffer: bytes, timeout: float | None = None) -> None:
raise NotImplementedError() # pragma: nocover
async def aclose(self) -> None:
@@ -71,9 +69,9 @@ class AsyncNetworkStream:
async def start_tls(
self,
ssl_context: ssl.SSLContext,
- server_hostname: typing.Optional[str] = None,
- timeout: typing.Optional[float] = None,
- ) -> "AsyncNetworkStream":
+ server_hostname: str | None = None,
+ timeout: float | None = None,
+ ) -> AsyncNetworkStream:
raise NotImplementedError() # pragma: nocover
def get_extra_info(self, info: str) -> typing.Any:
@@ -85,17 +83,17 @@ class AsyncNetworkBackend:
self,
host: str,
port: int,
- timeout: typing.Optional[float] = None,
- local_address: typing.Optional[str] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ local_address: str | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> AsyncNetworkStream:
raise NotImplementedError() # pragma: nocover
async def connect_unix_socket(
self,
path: str,
- timeout: typing.Optional[float] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> AsyncNetworkStream:
raise NotImplementedError() # pragma: nocover
diff --git a/contrib/python/httpcore/httpcore/_backends/mock.py b/contrib/python/httpcore/httpcore/_backends/mock.py
index f7aefebf51..9b6edca03d 100644
--- a/contrib/python/httpcore/httpcore/_backends/mock.py
+++ b/contrib/python/httpcore/httpcore/_backends/mock.py
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
import ssl
import typing
-from typing import Optional
from .._exceptions import ReadError
from .base import (
@@ -21,19 +22,19 @@ class MockSSLObject:
class MockStream(NetworkStream):
- def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
+ def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
self._buffer = buffer
self._http2 = http2
self._closed = False
- def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
+ def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
if self._closed:
raise ReadError("Connection closed")
if not self._buffer:
return b""
return self._buffer.pop(0)
- def write(self, buffer: bytes, timeout: Optional[float] = None) -> None:
+ def write(self, buffer: bytes, timeout: float | None = None) -> None:
pass
def close(self) -> None:
@@ -42,8 +43,8 @@ class MockStream(NetworkStream):
def start_tls(
self,
ssl_context: ssl.SSLContext,
- server_hostname: Optional[str] = None,
- timeout: Optional[float] = None,
+ server_hostname: str | None = None,
+ timeout: float | None = None,
) -> NetworkStream:
return self
@@ -55,7 +56,7 @@ class MockStream(NetworkStream):
class MockBackend(NetworkBackend):
- def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
+ def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
self._buffer = buffer
self._http2 = http2
@@ -63,17 +64,17 @@ class MockBackend(NetworkBackend):
self,
host: str,
port: int,
- timeout: Optional[float] = None,
- local_address: Optional[str] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ local_address: str | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> NetworkStream:
return MockStream(list(self._buffer), http2=self._http2)
def connect_unix_socket(
self,
path: str,
- timeout: Optional[float] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> NetworkStream:
return MockStream(list(self._buffer), http2=self._http2)
@@ -82,19 +83,19 @@ class MockBackend(NetworkBackend):
class AsyncMockStream(AsyncNetworkStream):
- def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
+ def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
self._buffer = buffer
self._http2 = http2
self._closed = False
- async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
+ async def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
if self._closed:
raise ReadError("Connection closed")
if not self._buffer:
return b""
return self._buffer.pop(0)
- async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None:
+ async def write(self, buffer: bytes, timeout: float | None = None) -> None:
pass
async def aclose(self) -> None:
@@ -103,8 +104,8 @@ class AsyncMockStream(AsyncNetworkStream):
async def start_tls(
self,
ssl_context: ssl.SSLContext,
- server_hostname: Optional[str] = None,
- timeout: Optional[float] = None,
+ server_hostname: str | None = None,
+ timeout: float | None = None,
) -> AsyncNetworkStream:
return self
@@ -116,7 +117,7 @@ class AsyncMockStream(AsyncNetworkStream):
class AsyncMockBackend(AsyncNetworkBackend):
- def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
+ def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
self._buffer = buffer
self._http2 = http2
@@ -124,17 +125,17 @@ class AsyncMockBackend(AsyncNetworkBackend):
self,
host: str,
port: int,
- timeout: Optional[float] = None,
- local_address: Optional[str] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ local_address: str | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> AsyncNetworkStream:
return AsyncMockStream(list(self._buffer), http2=self._http2)
async def connect_unix_socket(
self,
path: str,
- timeout: Optional[float] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> AsyncNetworkStream:
return AsyncMockStream(list(self._buffer), http2=self._http2)
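The mock streams above are what httpcore's own tests plug into the pool via the `network_backend=` argument. A sketch of that pattern, serving a canned HTTP/1.1 response without touching the network (`MockBackend` is an internal helper, so the import path may change):

    import httpcore
    from httpcore._backends.mock import MockBackend

    # Each list item is returned by one read() call on the mock stream.
    network_backend = MockBackend(
        [
            b"HTTP/1.1 200 OK\r\n",
            b"Content-Length: 13\r\n",
            b"\r\n",
            b"Hello, world!",
        ]
    )
    with httpcore.ConnectionPool(network_backend=network_backend) as pool:
        response = pool.request("GET", "http://example.com/")
        print(response.status, response.content)
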
diff --git a/contrib/python/httpcore/httpcore/_backends/sync.py b/contrib/python/httpcore/httpcore/_backends/sync.py
index 7b7b417dc1..4018a09c6f 100644
--- a/contrib/python/httpcore/httpcore/_backends/sync.py
+++ b/contrib/python/httpcore/httpcore/_backends/sync.py
@@ -1,8 +1,10 @@
+from __future__ import annotations
+
+import functools
import socket
import ssl
import sys
import typing
-from functools import partial
from .._exceptions import (
ConnectError,
@@ -33,8 +35,8 @@ class TLSinTLSStream(NetworkStream): # pragma: no cover
self,
sock: socket.socket,
ssl_context: ssl.SSLContext,
- server_hostname: typing.Optional[str] = None,
- timeout: typing.Optional[float] = None,
+ server_hostname: str | None = None,
+ timeout: float | None = None,
):
self._sock = sock
self._incoming = ssl.MemoryBIO()
@@ -74,20 +76,20 @@ class TLSinTLSStream(NetworkStream): # pragma: no cover
if errno is None:
return ret
- def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes:
+ def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError}
with map_exceptions(exc_map):
self._sock.settimeout(timeout)
return typing.cast(
- bytes, self._perform_io(partial(self.ssl_obj.read, max_bytes))
+ bytes, self._perform_io(functools.partial(self.ssl_obj.read, max_bytes))
)
- def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None:
+ def write(self, buffer: bytes, timeout: float | None = None) -> None:
exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError}
with map_exceptions(exc_map):
self._sock.settimeout(timeout)
while buffer:
- nsent = self._perform_io(partial(self.ssl_obj.write, buffer))
+ nsent = self._perform_io(functools.partial(self.ssl_obj.write, buffer))
buffer = buffer[nsent:]
def close(self) -> None:
@@ -96,9 +98,9 @@ class TLSinTLSStream(NetworkStream): # pragma: no cover
def start_tls(
self,
ssl_context: ssl.SSLContext,
- server_hostname: typing.Optional[str] = None,
- timeout: typing.Optional[float] = None,
- ) -> "NetworkStream":
+ server_hostname: str | None = None,
+ timeout: float | None = None,
+ ) -> NetworkStream:
raise NotImplementedError()
def get_extra_info(self, info: str) -> typing.Any:
@@ -119,13 +121,13 @@ class SyncStream(NetworkStream):
def __init__(self, sock: socket.socket) -> None:
self._sock = sock
- def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes:
+ def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError}
with map_exceptions(exc_map):
self._sock.settimeout(timeout)
return self._sock.recv(max_bytes)
- def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None:
+ def write(self, buffer: bytes, timeout: float | None = None) -> None:
if not buffer:
return
@@ -142,8 +144,8 @@ class SyncStream(NetworkStream):
def start_tls(
self,
ssl_context: ssl.SSLContext,
- server_hostname: typing.Optional[str] = None,
- timeout: typing.Optional[float] = None,
+ server_hostname: str | None = None,
+ timeout: float | None = None,
) -> NetworkStream:
exc_map: ExceptionMapping = {
socket.timeout: ConnectTimeout,
@@ -187,9 +189,9 @@ class SyncBackend(NetworkBackend):
self,
host: str,
port: int,
- timeout: typing.Optional[float] = None,
- local_address: typing.Optional[str] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ local_address: str | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> NetworkStream:
# Note that we automatically include `TCP_NODELAY`
# in addition to any other custom socket options.
@@ -216,8 +218,8 @@ class SyncBackend(NetworkBackend):
def connect_unix_socket(
self,
path: str,
- timeout: typing.Optional[float] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> NetworkStream: # pragma: nocover
if sys.platform == "win32":
raise RuntimeError(
diff --git a/contrib/python/httpcore/httpcore/_backends/trio.py b/contrib/python/httpcore/httpcore/_backends/trio.py
index b1626d28e2..6f53f5f2a0 100644
--- a/contrib/python/httpcore/httpcore/_backends/trio.py
+++ b/contrib/python/httpcore/httpcore/_backends/trio.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import ssl
import typing
@@ -20,9 +22,7 @@ class TrioStream(AsyncNetworkStream):
def __init__(self, stream: trio.abc.Stream) -> None:
self._stream = stream
- async def read(
- self, max_bytes: int, timeout: typing.Optional[float] = None
- ) -> bytes:
+ async def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
timeout_or_inf = float("inf") if timeout is None else timeout
exc_map: ExceptionMapping = {
trio.TooSlowError: ReadTimeout,
@@ -34,9 +34,7 @@ class TrioStream(AsyncNetworkStream):
data: bytes = await self._stream.receive_some(max_bytes=max_bytes)
return data
- async def write(
- self, buffer: bytes, timeout: typing.Optional[float] = None
- ) -> None:
+ async def write(self, buffer: bytes, timeout: float | None = None) -> None:
if not buffer:
return
@@ -56,8 +54,8 @@ class TrioStream(AsyncNetworkStream):
async def start_tls(
self,
ssl_context: ssl.SSLContext,
- server_hostname: typing.Optional[str] = None,
- timeout: typing.Optional[float] = None,
+ server_hostname: str | None = None,
+ timeout: float | None = None,
) -> AsyncNetworkStream:
timeout_or_inf = float("inf") if timeout is None else timeout
exc_map: ExceptionMapping = {
@@ -113,9 +111,9 @@ class TrioBackend(AsyncNetworkBackend):
self,
host: str,
port: int,
- timeout: typing.Optional[float] = None,
- local_address: typing.Optional[str] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ local_address: str | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> AsyncNetworkStream:
# By default for TCP sockets, trio enables TCP_NODELAY.
# https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream
@@ -139,8 +137,8 @@ class TrioBackend(AsyncNetworkBackend):
async def connect_unix_socket(
self,
path: str,
- timeout: typing.Optional[float] = None,
- socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
+ timeout: float | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> AsyncNetworkStream: # pragma: nocover
if socket_options is None:
socket_options = []
diff --git a/contrib/python/httpcore/httpcore/_exceptions.py b/contrib/python/httpcore/httpcore/_exceptions.py
index 81e7fc61dd..bc28d44f55 100644
--- a/contrib/python/httpcore/httpcore/_exceptions.py
+++ b/contrib/python/httpcore/httpcore/_exceptions.py
@@ -1,11 +1,11 @@
import contextlib
-from typing import Iterator, Mapping, Type
+import typing
-ExceptionMapping = Mapping[Type[Exception], Type[Exception]]
+ExceptionMapping = typing.Mapping[typing.Type[Exception], typing.Type[Exception]]
@contextlib.contextmanager
-def map_exceptions(map: ExceptionMapping) -> Iterator[None]:
+def map_exceptions(map: ExceptionMapping) -> typing.Iterator[None]:
try:
yield
except Exception as exc: # noqa: PIE786
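The ExceptionMapping/map_exceptions pair is what the backends above use to translate low-level socket errors into httpcore's own exception types. A minimal sketch of the pattern, using only names that appear in this diff plus httpcore's package-level exception exports:

import socket

import httpcore
from httpcore._exceptions import ExceptionMapping, map_exceptions

exc_map: ExceptionMapping = {
    socket.timeout: httpcore.ReadTimeout,
    OSError: httpcore.ReadError,
}

sock = socket.create_connection(("example.com", 80), timeout=5.0)
sock.settimeout(0.001)
try:
    # Any socket.timeout / OSError raised inside the block is re-raised
    # as the mapped httpcore exception type.
    with map_exceptions(exc_map):
        sock.recv(4096)
except httpcore.ReadTimeout:
    print("timed out, surfaced as httpcore.ReadTimeout")
sock.close()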
diff --git a/contrib/python/httpcore/httpcore/_models.py b/contrib/python/httpcore/httpcore/_models.py
index dadee79f69..8a65f13347 100644
--- a/contrib/python/httpcore/httpcore/_models.py
+++ b/contrib/python/httpcore/httpcore/_models.py
@@ -1,30 +1,22 @@
-from typing import (
- Any,
- AsyncIterable,
- AsyncIterator,
- Iterable,
- Iterator,
- List,
- Mapping,
- MutableMapping,
- Optional,
- Sequence,
- Tuple,
- Union,
-)
-from urllib.parse import urlparse
+from __future__ import annotations
+
+import base64
+import ssl
+import typing
+import urllib.parse
# Functions for typechecking...
-HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]]
-HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]]
-HeaderTypes = Union[HeadersAsSequence, HeadersAsMapping, None]
+ByteOrStr = typing.Union[bytes, str]
+HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]]
+HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr]
+HeaderTypes = typing.Union[HeadersAsSequence, HeadersAsMapping, None]
-Extensions = MutableMapping[str, Any]
+Extensions = typing.MutableMapping[str, typing.Any]
-def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes:
+def enforce_bytes(value: bytes | str, *, name: str) -> bytes:
"""
Any arguments that are ultimately represented as bytes can be specified
either as bytes or as strings.
@@ -45,7 +37,7 @@ def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes:
raise TypeError(f"{name} must be bytes or str, but got {seen_type}.")
-def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL":
+def enforce_url(value: URL | bytes | str, *, name: str) -> URL:
"""
Type check for URL parameters.
"""
@@ -59,15 +51,15 @@ def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL":
def enforce_headers(
- value: Union[HeadersAsMapping, HeadersAsSequence, None] = None, *, name: str
-) -> List[Tuple[bytes, bytes]]:
+ value: HeadersAsMapping | HeadersAsSequence | None = None, *, name: str
+) -> list[tuple[bytes, bytes]]:
"""
Convenience function that ensures all items in request or response headers
are either bytes or strings in the plain ASCII range.
"""
if value is None:
return []
- elif isinstance(value, Mapping):
+ elif isinstance(value, typing.Mapping):
return [
(
enforce_bytes(k, name="header name"),
@@ -75,7 +67,7 @@ def enforce_headers(
)
for k, v in value.items()
]
- elif isinstance(value, Sequence):
+ elif isinstance(value, typing.Sequence):
return [
(
enforce_bytes(k, name="header name"),
@@ -91,8 +83,10 @@ def enforce_headers(
def enforce_stream(
- value: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None], *, name: str
-) -> Union[Iterable[bytes], AsyncIterable[bytes]]:
+ value: bytes | typing.Iterable[bytes] | typing.AsyncIterable[bytes] | None,
+ *,
+ name: str,
+) -> typing.Iterable[bytes] | typing.AsyncIterable[bytes]:
if value is None:
return ByteStream(b"")
elif isinstance(value, bytes):
@@ -113,11 +107,11 @@ DEFAULT_PORTS = {
def include_request_headers(
- headers: List[Tuple[bytes, bytes]],
+ headers: list[tuple[bytes, bytes]],
*,
url: "URL",
- content: Union[None, bytes, Iterable[bytes], AsyncIterable[bytes]],
-) -> List[Tuple[bytes, bytes]]:
+ content: None | bytes | typing.Iterable[bytes] | typing.AsyncIterable[bytes],
+) -> list[tuple[bytes, bytes]]:
headers_set = set(k.lower() for k, v in headers)
if b"host" not in headers_set:
@@ -154,10 +148,10 @@ class ByteStream:
def __init__(self, content: bytes) -> None:
self._content = content
- def __iter__(self) -> Iterator[bytes]:
+ def __iter__(self) -> typing.Iterator[bytes]:
yield self._content
- async def __aiter__(self) -> AsyncIterator[bytes]:
+ async def __aiter__(self) -> typing.AsyncIterator[bytes]:
yield self._content
def __repr__(self) -> str:
@@ -170,7 +164,7 @@ class Origin:
self.host = host
self.port = port
- def __eq__(self, other: Any) -> bool:
+ def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, Origin)
and self.scheme == other.scheme
@@ -254,12 +248,12 @@ class URL:
def __init__(
self,
- url: Union[bytes, str] = "",
+ url: bytes | str = "",
*,
- scheme: Union[bytes, str] = b"",
- host: Union[bytes, str] = b"",
- port: Optional[int] = None,
- target: Union[bytes, str] = b"",
+ scheme: bytes | str = b"",
+ host: bytes | str = b"",
+ port: int | None = None,
+ target: bytes | str = b"",
) -> None:
"""
Parameters:
@@ -271,7 +265,7 @@ class URL:
target: The target of the HTTP request. Such as `"/items?search=red"`.
"""
if url:
- parsed = urlparse(enforce_bytes(url, name="url"))
+ parsed = urllib.parse.urlparse(enforce_bytes(url, name="url"))
self.scheme = parsed.scheme
self.host = parsed.hostname or b""
self.port = parsed.port
@@ -292,12 +286,13 @@ class URL:
b"ws": 80,
b"wss": 443,
b"socks5": 1080,
+ b"socks5h": 1080,
}[self.scheme]
return Origin(
scheme=self.scheme, host=self.host, port=self.port or default_port
)
- def __eq__(self, other: Any) -> bool:
+ def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, URL)
and other.scheme == self.scheme
@@ -325,12 +320,15 @@ class Request:
def __init__(
self,
- method: Union[bytes, str],
- url: Union[URL, bytes, str],
+ method: bytes | str,
+ url: URL | bytes | str,
*,
headers: HeaderTypes = None,
- content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None,
- extensions: Optional[Extensions] = None,
+ content: bytes
+ | typing.Iterable[bytes]
+ | typing.AsyncIterable[bytes]
+ | None = None,
+ extensions: Extensions | None = None,
) -> None:
"""
Parameters:
@@ -345,11 +343,11 @@ class Request:
"""
self.method: bytes = enforce_bytes(method, name="method")
self.url: URL = enforce_url(url, name="url")
- self.headers: List[Tuple[bytes, bytes]] = enforce_headers(
+ self.headers: list[tuple[bytes, bytes]] = enforce_headers(
headers, name="headers"
)
- self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream(
- content, name="content"
+ self.stream: typing.Iterable[bytes] | typing.AsyncIterable[bytes] = (
+ enforce_stream(content, name="content")
)
self.extensions = {} if extensions is None else extensions
@@ -375,8 +373,11 @@ class Response:
status: int,
*,
headers: HeaderTypes = None,
- content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None,
- extensions: Optional[Extensions] = None,
+ content: bytes
+ | typing.Iterable[bytes]
+ | typing.AsyncIterable[bytes]
+ | None = None,
+ extensions: Extensions | None = None,
) -> None:
"""
Parameters:
@@ -388,11 +389,11 @@ class Response:
`"reason_phrase"`, and `"network_stream"`.
"""
self.status: int = status
- self.headers: List[Tuple[bytes, bytes]] = enforce_headers(
+ self.headers: list[tuple[bytes, bytes]] = enforce_headers(
headers, name="headers"
)
- self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream(
- content, name="content"
+ self.stream: typing.Iterable[bytes] | typing.AsyncIterable[bytes] = (
+ enforce_stream(content, name="content")
)
self.extensions = {} if extensions is None else extensions
@@ -401,7 +402,7 @@ class Response:
@property
def content(self) -> bytes:
if not hasattr(self, "_content"):
- if isinstance(self.stream, Iterable):
+ if isinstance(self.stream, typing.Iterable):
raise RuntimeError(
"Attempted to access 'response.content' on a streaming response. "
"Call 'response.read()' first."
@@ -419,7 +420,7 @@ class Response:
# Sync interface...
def read(self) -> bytes:
- if not isinstance(self.stream, Iterable): # pragma: nocover
+ if not isinstance(self.stream, typing.Iterable): # pragma: nocover
raise RuntimeError(
"Attempted to read an asynchronous response using 'response.read()'. "
"You should use 'await response.aread()' instead."
@@ -428,8 +429,8 @@ class Response:
self._content = b"".join([part for part in self.iter_stream()])
return self._content
- def iter_stream(self) -> Iterator[bytes]:
- if not isinstance(self.stream, Iterable): # pragma: nocover
+ def iter_stream(self) -> typing.Iterator[bytes]:
+ if not isinstance(self.stream, typing.Iterable): # pragma: nocover
raise RuntimeError(
"Attempted to stream an asynchronous response using 'for ... in "
"response.iter_stream()'. "
@@ -444,7 +445,7 @@ class Response:
yield chunk
def close(self) -> None:
- if not isinstance(self.stream, Iterable): # pragma: nocover
+ if not isinstance(self.stream, typing.Iterable): # pragma: nocover
raise RuntimeError(
"Attempted to close an asynchronous response using 'response.close()'. "
"You should use 'await response.aclose()' instead."
@@ -455,7 +456,7 @@ class Response:
# Async interface...
async def aread(self) -> bytes:
- if not isinstance(self.stream, AsyncIterable): # pragma: nocover
+ if not isinstance(self.stream, typing.AsyncIterable): # pragma: nocover
raise RuntimeError(
"Attempted to read an synchronous response using "
"'await response.aread()'. "
@@ -465,8 +466,8 @@ class Response:
self._content = b"".join([part async for part in self.aiter_stream()])
return self._content
- async def aiter_stream(self) -> AsyncIterator[bytes]:
- if not isinstance(self.stream, AsyncIterable): # pragma: nocover
+ async def aiter_stream(self) -> typing.AsyncIterator[bytes]:
+ if not isinstance(self.stream, typing.AsyncIterable): # pragma: nocover
raise RuntimeError(
"Attempted to stream an synchronous response using 'async for ... in "
"response.aiter_stream()'. "
@@ -482,7 +483,7 @@ class Response:
yield chunk
async def aclose(self) -> None:
- if not isinstance(self.stream, AsyncIterable): # pragma: nocover
+ if not isinstance(self.stream, typing.AsyncIterable): # pragma: nocover
raise RuntimeError(
"Attempted to close a synchronous response using "
"'await response.aclose()'. "
@@ -490,3 +491,26 @@ class Response:
)
if hasattr(self.stream, "aclose"):
await self.stream.aclose()
+
+
+class Proxy:
+ def __init__(
+ self,
+ url: URL | bytes | str,
+ auth: tuple[bytes | str, bytes | str] | None = None,
+ headers: HeadersAsMapping | HeadersAsSequence | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ ):
+ self.url = enforce_url(url, name="url")
+ self.headers = enforce_headers(headers, name="headers")
+ self.ssl_context = ssl_context
+
+ if auth is not None:
+ username = enforce_bytes(auth[0], name="auth")
+ password = enforce_bytes(auth[1], name="auth")
+ userpass = username + b":" + password
+ authorization = b"Basic " + base64.b64encode(userpass)
+ self.auth: tuple[bytes, bytes] | None = (username, password)
+ self.headers = [(b"Proxy-Authorization", authorization)] + self.headers
+ else:
+ self.auth = None
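The new Proxy model added above carries everything the pool needs: a parsed proxy URL, optional TLS context, any extra proxy headers, and, when auth is given, a pre-computed Proxy-Authorization Basic header. A minimal construction sketch, assuming Proxy is re-exported at the package level as part of this release:

import httpcore

proxy = httpcore.Proxy(
    url="http://127.0.0.1:8080/",
    auth=("user", "s3cret"),                  # str or bytes are both accepted
    headers={"X-Forwarded-For": "10.0.0.1"},  # extra headers sent to the proxy
)

# `auth` is normalised to bytes and also folded into the headers list
# as a Basic credential, mirroring the __init__ body shown above.
print(proxy.url)      # httpcore.URL for the proxy endpoint
print(proxy.auth)     # (b'user', b's3cret')
print(proxy.headers)  # [(b'Proxy-Authorization', b'Basic ...'), (b'X-Forwarded-For', b'10.0.0.1')]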
diff --git a/contrib/python/httpcore/httpcore/_sync/connection.py b/contrib/python/httpcore/httpcore/_sync/connection.py
index c3890f340c..363f8be819 100644
--- a/contrib/python/httpcore/httpcore/_sync/connection.py
+++ b/contrib/python/httpcore/httpcore/_sync/connection.py
@@ -1,8 +1,10 @@
+from __future__ import annotations
+
import itertools
import logging
import ssl
-from types import TracebackType
-from typing import Iterable, Iterator, Optional, Type
+import types
+import typing
from .._backends.sync import SyncBackend
from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream
@@ -20,7 +22,7 @@ RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc.
logger = logging.getLogger("httpcore.connection")
-def exponential_backoff(factor: float) -> Iterator[float]:
+def exponential_backoff(factor: float) -> typing.Iterator[float]:
"""
Generate a geometric sequence that has a ratio of 2 and starts with 0.
@@ -37,15 +39,15 @@ class HTTPConnection(ConnectionInterface):
def __init__(
self,
origin: Origin,
- ssl_context: Optional[ssl.SSLContext] = None,
- keepalive_expiry: Optional[float] = None,
+ ssl_context: ssl.SSLContext | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
retries: int = 0,
- local_address: Optional[str] = None,
- uds: Optional[str] = None,
- network_backend: Optional[NetworkBackend] = None,
- socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+ local_address: str | None = None,
+ uds: str | None = None,
+ network_backend: NetworkBackend | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> None:
self._origin = origin
self._ssl_context = ssl_context
@@ -59,7 +61,7 @@ class HTTPConnection(ConnectionInterface):
self._network_backend: NetworkBackend = (
SyncBackend() if network_backend is None else network_backend
)
- self._connection: Optional[ConnectionInterface] = None
+ self._connection: ConnectionInterface | None = None
self._connect_failed: bool = False
self._request_lock = Lock()
self._socket_options = socket_options
@@ -208,13 +210,13 @@ class HTTPConnection(ConnectionInterface):
# These context managers are not used in the standard flow, but are
# useful for testing or working with connection instances directly.
- def __enter__(self) -> "HTTPConnection":
+ def __enter__(self) -> HTTPConnection:
return self
def __exit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
self.close()
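The retry behaviour referenced above hinges on exponential_backoff(): with the default factor of 0.5 it produces delays of 0s, 0.5s, 1s, 2s, 4s, ... between connection attempts. A short generator equivalent to the docstring's description (an illustration, not necessarily the exact implementation):

import itertools
import typing


def exponential_backoff(factor: float) -> typing.Iterator[float]:
    # First retry happens immediately; each subsequent delay doubles.
    yield 0
    for n in itertools.count():
        yield factor * 2**n


delays = exponential_backoff(0.5)
print([next(delays) for _ in range(5)])  # [0, 0.5, 1.0, 2.0, 4.0]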
diff --git a/contrib/python/httpcore/httpcore/_sync/connection_pool.py b/contrib/python/httpcore/httpcore/_sync/connection_pool.py
index 01bec59e88..9ccfa53e59 100644
--- a/contrib/python/httpcore/httpcore/_sync/connection_pool.py
+++ b/contrib/python/httpcore/httpcore/_sync/connection_pool.py
@@ -1,12 +1,14 @@
+from __future__ import annotations
+
import ssl
import sys
-from types import TracebackType
-from typing import Iterable, Iterator, Iterable, List, Optional, Type
+import types
+import typing
from .._backends.sync import SyncBackend
from .._backends.base import SOCKET_OPTION, NetworkBackend
from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol
-from .._models import Origin, Request, Response
+from .._models import Origin, Proxy, Request, Response
from .._synchronization import Event, ShieldCancellation, ThreadLock
from .connection import HTTPConnection
from .interfaces import ConnectionInterface, RequestInterface
@@ -15,12 +17,10 @@ from .interfaces import ConnectionInterface, RequestInterface
class PoolRequest:
def __init__(self, request: Request) -> None:
self.request = request
- self.connection: Optional[ConnectionInterface] = None
+ self.connection: ConnectionInterface | None = None
self._connection_acquired = Event()
- def assign_to_connection(
- self, connection: Optional[ConnectionInterface]
- ) -> None:
+ def assign_to_connection(self, connection: ConnectionInterface | None) -> None:
self.connection = connection
self._connection_acquired.set()
@@ -29,7 +29,7 @@ class PoolRequest:
self._connection_acquired = Event()
def wait_for_connection(
- self, timeout: Optional[float] = None
+ self, timeout: float | None = None
) -> ConnectionInterface:
if self.connection is None:
self._connection_acquired.wait(timeout=timeout)
@@ -47,17 +47,18 @@ class ConnectionPool(RequestInterface):
def __init__(
self,
- ssl_context: Optional[ssl.SSLContext] = None,
- max_connections: Optional[int] = 10,
- max_keepalive_connections: Optional[int] = None,
- keepalive_expiry: Optional[float] = None,
+ ssl_context: ssl.SSLContext | None = None,
+ proxy: Proxy | None = None,
+ max_connections: int | None = 10,
+ max_keepalive_connections: int | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
retries: int = 0,
- local_address: Optional[str] = None,
- uds: Optional[str] = None,
- network_backend: Optional[NetworkBackend] = None,
- socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+ local_address: str | None = None,
+ uds: str | None = None,
+ network_backend: NetworkBackend | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> None:
"""
A connection pool for making HTTP requests.
@@ -89,7 +90,7 @@ class ConnectionPool(RequestInterface):
in the TCP socket when the connection was established.
"""
self._ssl_context = ssl_context
-
+ self._proxy = proxy
self._max_connections = (
sys.maxsize if max_connections is None else max_connections
)
@@ -116,8 +117,8 @@ class ConnectionPool(RequestInterface):
# The mutable state on a connection pool is the queue of incoming requests,
# and the set of connections that are servicing those requests.
- self._connections: List[ConnectionInterface] = []
- self._requests: List[PoolRequest] = []
+ self._connections: list[ConnectionInterface] = []
+ self._requests: list[PoolRequest] = []
# We only mutate the state of the connection pool within an 'optional_thread_lock'
# context. This holds a threading lock unless we're running in async mode,
@@ -125,6 +126,45 @@ class ConnectionPool(RequestInterface):
self._optional_thread_lock = ThreadLock()
def create_connection(self, origin: Origin) -> ConnectionInterface:
+ if self._proxy is not None:
+ if self._proxy.url.scheme in (b"socks5", b"socks5h"):
+ from .socks_proxy import Socks5Connection
+
+ return Socks5Connection(
+ proxy_origin=self._proxy.url.origin,
+ proxy_auth=self._proxy.auth,
+ remote_origin=origin,
+ ssl_context=self._ssl_context,
+ keepalive_expiry=self._keepalive_expiry,
+ http1=self._http1,
+ http2=self._http2,
+ network_backend=self._network_backend,
+ )
+ elif origin.scheme == b"http":
+ from .http_proxy import ForwardHTTPConnection
+
+ return ForwardHTTPConnection(
+ proxy_origin=self._proxy.url.origin,
+ proxy_headers=self._proxy.headers,
+ proxy_ssl_context=self._proxy.ssl_context,
+ remote_origin=origin,
+ keepalive_expiry=self._keepalive_expiry,
+ network_backend=self._network_backend,
+ )
+ from .http_proxy import TunnelHTTPConnection
+
+ return TunnelHTTPConnection(
+ proxy_origin=self._proxy.url.origin,
+ proxy_headers=self._proxy.headers,
+ proxy_ssl_context=self._proxy.ssl_context,
+ remote_origin=origin,
+ ssl_context=self._ssl_context,
+ keepalive_expiry=self._keepalive_expiry,
+ http1=self._http1,
+ http2=self._http2,
+ network_backend=self._network_backend,
+ )
+
return HTTPConnection(
origin=origin,
ssl_context=self._ssl_context,
@@ -139,7 +179,7 @@ class ConnectionPool(RequestInterface):
)
@property
- def connections(self) -> List[ConnectionInterface]:
+ def connections(self) -> list[ConnectionInterface]:
"""
Return a list of the connections currently in the pool.
@@ -217,7 +257,7 @@ class ConnectionPool(RequestInterface):
# Return the response. Note that in this case we still have to manage
# the point at which the response is closed.
- assert isinstance(response.stream, Iterable)
+ assert isinstance(response.stream, typing.Iterable)
return Response(
status=response.status,
headers=response.headers,
@@ -227,7 +267,7 @@ class ConnectionPool(RequestInterface):
extensions=response.extensions,
)
- def _assign_requests_to_connections(self) -> List[ConnectionInterface]:
+ def _assign_requests_to_connections(self) -> list[ConnectionInterface]:
"""
Manage the state of the connection pool, assigning incoming
requests to connections as available.
@@ -298,7 +338,7 @@ class ConnectionPool(RequestInterface):
return closing_connections
- def _close_connections(self, closing: List[ConnectionInterface]) -> None:
+ def _close_connections(self, closing: list[ConnectionInterface]) -> None:
# Close connections which have been removed from the pool.
with ShieldCancellation():
for connection in closing:
@@ -312,14 +352,14 @@ class ConnectionPool(RequestInterface):
self._connections = []
self._close_connections(closing_connections)
- def __enter__(self) -> "ConnectionPool":
+ def __enter__(self) -> ConnectionPool:
return self
def __exit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
self.close()
@@ -349,7 +389,7 @@ class ConnectionPool(RequestInterface):
class PoolByteStream:
def __init__(
self,
- stream: Iterable[bytes],
+ stream: typing.Iterable[bytes],
pool_request: PoolRequest,
pool: ConnectionPool,
) -> None:
@@ -358,7 +398,7 @@ class PoolByteStream:
self._pool = pool
self._closed = False
- def __iter__(self) -> Iterator[bytes]:
+ def __iter__(self) -> typing.Iterator[bytes]:
try:
for part in self._stream:
yield part
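Putting the pieces together, the new proxy= argument routes every connection the pool opens through create_connection() above: socks5/socks5h URLs go to Socks5Connection, plain-http targets are forwarded through the proxy, and anything else is tunnelled with CONNECT. A minimal usage sketch, assuming the package-level re-exports:

import httpcore

proxy = httpcore.Proxy("http://127.0.0.1:8080/")

with httpcore.ConnectionPool(proxy=proxy) as pool:
    # An https:// target takes the TunnelHTTPConnection branch above;
    # an http:// target would take the ForwardHTTPConnection branch.
    response = pool.request("GET", "https://www.example.com/")
    print(response.status)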
diff --git a/contrib/python/httpcore/httpcore/_sync/http11.py b/contrib/python/httpcore/httpcore/_sync/http11.py
index a74ff8e809..ebd3a97480 100644
--- a/contrib/python/httpcore/httpcore/_sync/http11.py
+++ b/contrib/python/httpcore/httpcore/_sync/http11.py
@@ -1,18 +1,11 @@
+from __future__ import annotations
+
import enum
import logging
import ssl
import time
-from types import TracebackType
-from typing import (
- Any,
- Iterable,
- Iterator,
- List,
- Optional,
- Tuple,
- Type,
- Union,
-)
+import types
+import typing
import h11
@@ -33,7 +26,7 @@ logger = logging.getLogger("httpcore.http11")
# A subset of `h11.Event` types supported by `_send_event`
-H11SendEvent = Union[
+H11SendEvent = typing.Union[
h11.Request,
h11.Data,
h11.EndOfMessage,
@@ -55,12 +48,12 @@ class HTTP11Connection(ConnectionInterface):
self,
origin: Origin,
stream: NetworkStream,
- keepalive_expiry: Optional[float] = None,
+ keepalive_expiry: float | None = None,
) -> None:
self._origin = origin
self._network_stream = stream
- self._keepalive_expiry: Optional[float] = keepalive_expiry
- self._expire_at: Optional[float] = None
+ self._keepalive_expiry: float | None = keepalive_expiry
+ self._expire_at: float | None = None
self._state = HTTPConnectionState.NEW
self._state_lock = Lock()
self._request_count = 0
@@ -160,16 +153,14 @@ class HTTP11Connection(ConnectionInterface):
timeouts = request.extensions.get("timeout", {})
timeout = timeouts.get("write", None)
- assert isinstance(request.stream, Iterable)
+ assert isinstance(request.stream, typing.Iterable)
for chunk in request.stream:
event = h11.Data(data=chunk)
self._send_event(event, timeout=timeout)
self._send_event(h11.EndOfMessage(), timeout=timeout)
- def _send_event(
- self, event: h11.Event, timeout: Optional[float] = None
- ) -> None:
+ def _send_event(self, event: h11.Event, timeout: float | None = None) -> None:
bytes_to_send = self._h11_state.send(event)
if bytes_to_send is not None:
self._network_stream.write(bytes_to_send, timeout=timeout)
@@ -178,7 +169,7 @@ class HTTP11Connection(ConnectionInterface):
def _receive_response_headers(
self, request: Request
- ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]:
+ ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]:
timeouts = request.extensions.get("timeout", {})
timeout = timeouts.get("read", None)
@@ -202,7 +193,9 @@ class HTTP11Connection(ConnectionInterface):
return http_version, event.status_code, event.reason, headers, trailing_data
- def _receive_response_body(self, request: Request) -> Iterator[bytes]:
+ def _receive_response_body(
+ self, request: Request
+ ) -> typing.Iterator[bytes]:
timeouts = request.extensions.get("timeout", {})
timeout = timeouts.get("read", None)
@@ -214,8 +207,8 @@ class HTTP11Connection(ConnectionInterface):
break
def _receive_event(
- self, timeout: Optional[float] = None
- ) -> Union[h11.Event, Type[h11.PAUSED]]:
+ self, timeout: float | None = None
+ ) -> h11.Event | type[h11.PAUSED]:
while True:
with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
event = self._h11_state.next_event()
@@ -316,14 +309,14 @@ class HTTP11Connection(ConnectionInterface):
# These context managers are not used in the standard flow, but are
# useful for testing or working with connection instances directly.
- def __enter__(self) -> "HTTP11Connection":
+ def __enter__(self) -> HTTP11Connection:
return self
def __exit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
self.close()
@@ -334,7 +327,7 @@ class HTTP11ConnectionByteStream:
self._request = request
self._closed = False
- def __iter__(self) -> Iterator[bytes]:
+ def __iter__(self) -> typing.Iterator[bytes]:
kwargs = {"request": self._request}
try:
with Trace("receive_response_body", logger, self._request, kwargs):
@@ -360,7 +353,7 @@ class HTTP11UpgradeStream(NetworkStream):
self._stream = stream
self._leading_data = leading_data
- def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
+ def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
if self._leading_data:
buffer = self._leading_data[:max_bytes]
self._leading_data = self._leading_data[max_bytes:]
@@ -368,7 +361,7 @@ class HTTP11UpgradeStream(NetworkStream):
else:
return self._stream.read(max_bytes, timeout)
- def write(self, buffer: bytes, timeout: Optional[float] = None) -> None:
+ def write(self, buffer: bytes, timeout: float | None = None) -> None:
self._stream.write(buffer, timeout)
def close(self) -> None:
@@ -377,10 +370,10 @@ class HTTP11UpgradeStream(NetworkStream):
def start_tls(
self,
ssl_context: ssl.SSLContext,
- server_hostname: Optional[str] = None,
- timeout: Optional[float] = None,
+ server_hostname: str | None = None,
+ timeout: float | None = None,
) -> NetworkStream:
return self._stream.start_tls(ssl_context, server_hostname, timeout)
- def get_extra_info(self, info: str) -> Any:
+ def get_extra_info(self, info: str) -> typing.Any:
return self._stream.get_extra_info(info)
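The _send_event / _receive_event helpers above are thin wrappers around the h11 state machine: outgoing events are serialised with h11's send() and written to the stream, incoming bytes are fed to receive_data() and drained via next_event(). A rough sketch of that underlying pattern in isolation (standalone h11 usage, not httpcore's internal API):

import socket

import h11

conn = h11.Connection(our_role=h11.CLIENT)
sock = socket.create_connection(("example.com", 80), timeout=10.0)

# Send: each h11 event is serialised to bytes and written out,
# mirroring _send_event() above.
for event in (
    h11.Request(
        method="GET",
        target="/",
        headers=[("Host", "example.com"), ("Connection", "close")],
    ),
    h11.EndOfMessage(),
):
    data = conn.send(event)
    if data is not None:
        sock.sendall(data)

# Receive: read from the network until the state machine can hand back
# an event, mirroring _receive_event() above.
while True:
    event = conn.next_event()
    if event is h11.NEED_DATA:
        conn.receive_data(sock.recv(4096))
        continue
    print(type(event).__name__)  # Response, Data, EndOfMessage, ...
    if isinstance(event, h11.EndOfMessage):
        break

sock.close()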
diff --git a/contrib/python/httpcore/httpcore/_sync/http2.py b/contrib/python/httpcore/httpcore/_sync/http2.py
index 1ee4bbb34f..ca4dd72432 100644
--- a/contrib/python/httpcore/httpcore/_sync/http2.py
+++ b/contrib/python/httpcore/httpcore/_sync/http2.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import enum
import logging
import time
@@ -45,14 +47,14 @@ class HTTP2Connection(ConnectionInterface):
self,
origin: Origin,
stream: NetworkStream,
- keepalive_expiry: typing.Optional[float] = None,
+ keepalive_expiry: float | None = None,
):
self._origin = origin
self._network_stream = stream
- self._keepalive_expiry: typing.Optional[float] = keepalive_expiry
+ self._keepalive_expiry: float | None = keepalive_expiry
self._h2_state = h2.connection.H2Connection(config=self.CONFIG)
self._state = HTTPConnectionState.IDLE
- self._expire_at: typing.Optional[float] = None
+ self._expire_at: float | None = None
self._request_count = 0
self._init_lock = Lock()
self._state_lock = Lock()
@@ -63,24 +65,20 @@ class HTTP2Connection(ConnectionInterface):
self._connection_error = False
# Mapping from stream ID to response stream events.
- self._events: typing.Dict[
+ self._events: dict[
int,
- typing.Union[
- h2.events.ResponseReceived,
- h2.events.DataReceived,
- h2.events.StreamEnded,
- h2.events.StreamReset,
- ],
+ h2.events.ResponseReceived
+ | h2.events.DataReceived
+ | h2.events.StreamEnded
+ | h2.events.StreamReset,
] = {}
# Connection terminated events are stored as state since
# we need to handle them for all streams.
- self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = (
- None
- )
+ self._connection_terminated: h2.events.ConnectionTerminated | None = None
- self._read_exception: typing.Optional[Exception] = None
- self._write_exception: typing.Optional[Exception] = None
+ self._read_exception: Exception | None = None
+ self._write_exception: Exception | None = None
def handle_request(self, request: Request) -> Response:
if not self.can_handle_request(request.url.origin):
@@ -284,7 +282,7 @@ class HTTP2Connection(ConnectionInterface):
def _receive_response(
self, request: Request, stream_id: int
- ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]:
+ ) -> tuple[int, list[tuple[bytes, bytes]]]:
"""
Return the response status code and headers for a given stream ID.
"""
@@ -321,9 +319,7 @@ class HTTP2Connection(ConnectionInterface):
def _receive_stream_event(
self, request: Request, stream_id: int
- ) -> typing.Union[
- h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded
- ]:
+ ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded:
"""
Return the next available event for a given stream ID.
@@ -337,7 +333,7 @@ class HTTP2Connection(ConnectionInterface):
return event
def _receive_events(
- self, request: Request, stream_id: typing.Optional[int] = None
+ self, request: Request, stream_id: int | None = None
) -> None:
"""
Read some data from the network until we see one or more events
@@ -425,9 +421,7 @@ class HTTP2Connection(ConnectionInterface):
# Wrappers around network read/write operations...
- def _read_incoming_data(
- self, request: Request
- ) -> typing.List[h2.events.Event]:
+ def _read_incoming_data(self, request: Request) -> list[h2.events.Event]:
timeouts = request.extensions.get("timeout", {})
timeout = timeouts.get("read", None)
@@ -451,7 +445,7 @@ class HTTP2Connection(ConnectionInterface):
self._connection_error = True
raise exc
- events: typing.List[h2.events.Event] = self._h2_state.receive_data(data)
+ events: list[h2.events.Event] = self._h2_state.receive_data(data)
return events
@@ -544,14 +538,14 @@ class HTTP2Connection(ConnectionInterface):
# These context managers are not used in the standard flow, but are
# useful for testing or working with connection instances directly.
- def __enter__(self) -> "HTTP2Connection":
+ def __enter__(self) -> HTTP2Connection:
return self
def __exit__(
self,
- exc_type: typing.Optional[typing.Type[BaseException]] = None,
- exc_value: typing.Optional[BaseException] = None,
- traceback: typing.Optional[types.TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
self.close()
diff --git a/contrib/python/httpcore/httpcore/_sync/http_proxy.py b/contrib/python/httpcore/httpcore/_sync/http_proxy.py
index 6acac9a7cd..ecca88f7dc 100644
--- a/contrib/python/httpcore/httpcore/_sync/http_proxy.py
+++ b/contrib/python/httpcore/httpcore/_sync/http_proxy.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
+import base64
import logging
import ssl
-from base64 import b64encode
-from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union
+import typing
from .._backends.base import SOCKET_OPTION, NetworkBackend
from .._exceptions import ProxyError
@@ -22,17 +24,18 @@ from .connection_pool import ConnectionPool
from .http11 import HTTP11Connection
from .interfaces import ConnectionInterface
-HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]]
-HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]]
+ByteOrStr = typing.Union[bytes, str]
+HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]]
+HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr]
logger = logging.getLogger("httpcore.proxy")
def merge_headers(
- default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
- override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
-) -> List[Tuple[bytes, bytes]]:
+ default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None,
+ override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None,
+) -> list[tuple[bytes, bytes]]:
"""
Append default_headers and override_headers, de-duplicating if a key exists
in both cases.
@@ -48,33 +51,28 @@ def merge_headers(
return default_headers + override_headers
-def build_auth_header(username: bytes, password: bytes) -> bytes:
- userpass = username + b":" + password
- return b"Basic " + b64encode(userpass)
-
-
-class HTTPProxy(ConnectionPool):
+class HTTPProxy(ConnectionPool): # pragma: nocover
"""
A connection pool that sends requests via an HTTP proxy.
"""
def __init__(
self,
- proxy_url: Union[URL, bytes, str],
- proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None,
- proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
- ssl_context: Optional[ssl.SSLContext] = None,
- proxy_ssl_context: Optional[ssl.SSLContext] = None,
- max_connections: Optional[int] = 10,
- max_keepalive_connections: Optional[int] = None,
- keepalive_expiry: Optional[float] = None,
+ proxy_url: URL | bytes | str,
+ proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+ proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ proxy_ssl_context: ssl.SSLContext | None = None,
+ max_connections: int | None = 10,
+ max_keepalive_connections: int | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
retries: int = 0,
- local_address: Optional[str] = None,
- uds: Optional[str] = None,
- network_backend: Optional[NetworkBackend] = None,
- socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+ local_address: str | None = None,
+ uds: str | None = None,
+ network_backend: NetworkBackend | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> None:
"""
A connection pool for making HTTP requests.
@@ -139,7 +137,8 @@ class HTTPProxy(ConnectionPool):
if proxy_auth is not None:
username = enforce_bytes(proxy_auth[0], name="proxy_auth")
password = enforce_bytes(proxy_auth[1], name="proxy_auth")
- authorization = build_auth_header(username, password)
+ userpass = username + b":" + password
+ authorization = b"Basic " + base64.b64encode(userpass)
self._proxy_headers = [
(b"Proxy-Authorization", authorization)
] + self._proxy_headers
@@ -172,11 +171,11 @@ class ForwardHTTPConnection(ConnectionInterface):
self,
proxy_origin: Origin,
remote_origin: Origin,
- proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None,
- keepalive_expiry: Optional[float] = None,
- network_backend: Optional[NetworkBackend] = None,
- socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
- proxy_ssl_context: Optional[ssl.SSLContext] = None,
+ proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+ keepalive_expiry: float | None = None,
+ network_backend: NetworkBackend | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
+ proxy_ssl_context: ssl.SSLContext | None = None,
) -> None:
self._connection = HTTPConnection(
origin=proxy_origin,
@@ -236,14 +235,14 @@ class TunnelHTTPConnection(ConnectionInterface):
self,
proxy_origin: Origin,
remote_origin: Origin,
- ssl_context: Optional[ssl.SSLContext] = None,
- proxy_ssl_context: Optional[ssl.SSLContext] = None,
- proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None,
- keepalive_expiry: Optional[float] = None,
+ ssl_context: ssl.SSLContext | None = None,
+ proxy_ssl_context: ssl.SSLContext | None = None,
+ proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
- network_backend: Optional[NetworkBackend] = None,
- socket_options: Optional[Iterable[SOCKET_OPTION]] = None,
+ network_backend: NetworkBackend | None = None,
+ socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
) -> None:
self._connection: ConnectionInterface = HTTPConnection(
origin=proxy_origin,
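The removal of build_auth_header() above simply inlines the Basic credential construction at both call sites (here and in the new Proxy model). The computation itself is a couple of lines:

import base64

username, password = b"user", b"s3cret"
userpass = username + b":" + password
authorization = b"Basic " + base64.b64encode(userpass)
print(authorization)  # b'Basic dXNlcjpzM2NyZXQ='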
diff --git a/contrib/python/httpcore/httpcore/_sync/interfaces.py b/contrib/python/httpcore/httpcore/_sync/interfaces.py
index 5e95be1ec7..e673d4cc1b 100644
--- a/contrib/python/httpcore/httpcore/_sync/interfaces.py
+++ b/contrib/python/httpcore/httpcore/_sync/interfaces.py
@@ -1,5 +1,7 @@
-from contextlib import contextmanager
-from typing import Iterator, Optional, Union
+from __future__ import annotations
+
+import contextlib
+import typing
from .._models import (
URL,
@@ -18,12 +20,12 @@ from .._models import (
class RequestInterface:
def request(
self,
- method: Union[bytes, str],
- url: Union[URL, bytes, str],
+ method: bytes | str,
+ url: URL | bytes | str,
*,
headers: HeaderTypes = None,
- content: Union[bytes, Iterator[bytes], None] = None,
- extensions: Optional[Extensions] = None,
+ content: bytes | typing.Iterator[bytes] | None = None,
+ extensions: Extensions | None = None,
) -> Response:
# Strict type checking on our parameters.
method = enforce_bytes(method, name="method")
@@ -47,16 +49,16 @@ class RequestInterface:
response.close()
return response
- @contextmanager
+ @contextlib.contextmanager
def stream(
self,
- method: Union[bytes, str],
- url: Union[URL, bytes, str],
+ method: bytes | str,
+ url: URL | bytes | str,
*,
headers: HeaderTypes = None,
- content: Union[bytes, Iterator[bytes], None] = None,
- extensions: Optional[Extensions] = None,
- ) -> Iterator[Response]:
+ content: bytes | typing.Iterator[bytes] | None = None,
+ extensions: Extensions | None = None,
+ ) -> typing.Iterator[Response]:
# Strict type checking on our parameters.
method = enforce_bytes(method, name="method")
url = enforce_url(url, name="url")
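Both entry points above normalise their arguments through enforce_bytes/enforce_url/enforce_headers and then delegate to handle_request(); the difference is that request() reads and closes the response for you, while stream() is a context manager that leaves the body to be iterated lazily. A minimal sketch of the streaming form:

import httpcore

with httpcore.ConnectionPool() as pool:
    with pool.stream("GET", "https://www.example.com/") as response:
        # The body is consumed chunk by chunk rather than read eagerly.
        for chunk in response.iter_stream():
            print(len(chunk))
    print(response.status)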
diff --git a/contrib/python/httpcore/httpcore/_sync/socks_proxy.py b/contrib/python/httpcore/httpcore/_sync/socks_proxy.py
index 502e4d7fef..0ca96ddfb5 100644
--- a/contrib/python/httpcore/httpcore/_sync/socks_proxy.py
+++ b/contrib/python/httpcore/httpcore/_sync/socks_proxy.py
@@ -1,8 +1,9 @@
+from __future__ import annotations
+
import logging
import ssl
-import typing
-from socksio import socks5
+import socksio
from .._backends.sync import SyncBackend
from .._backends.base import NetworkBackend, NetworkStream
@@ -43,24 +44,24 @@ def _init_socks5_connection(
*,
host: bytes,
port: int,
- auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
+ auth: tuple[bytes, bytes] | None = None,
) -> None:
- conn = socks5.SOCKS5Connection()
+ conn = socksio.socks5.SOCKS5Connection()
# Auth method request
auth_method = (
- socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
+ socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
if auth is None
- else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
+ else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
)
- conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method]))
+ conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method]))
outgoing_bytes = conn.data_to_send()
stream.write(outgoing_bytes)
# Auth method response
incoming_bytes = stream.read(max_bytes=4096)
response = conn.receive_data(incoming_bytes)
- assert isinstance(response, socks5.SOCKS5AuthReply)
+ assert isinstance(response, socksio.socks5.SOCKS5AuthReply)
if response.method != auth_method:
requested = AUTH_METHODS.get(auth_method, "UNKNOWN")
responded = AUTH_METHODS.get(response.method, "UNKNOWN")
@@ -68,25 +69,25 @@ def _init_socks5_connection(
f"Requested {requested} from proxy server, but got {responded}."
)
- if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
+ if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
# Username/password request
assert auth is not None
username, password = auth
- conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password))
+ conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password))
outgoing_bytes = conn.data_to_send()
stream.write(outgoing_bytes)
# Username/password response
incoming_bytes = stream.read(max_bytes=4096)
response = conn.receive_data(incoming_bytes)
- assert isinstance(response, socks5.SOCKS5UsernamePasswordReply)
+ assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply)
if not response.success:
raise ProxyError("Invalid username/password")
# Connect request
conn.send(
- socks5.SOCKS5CommandRequest.from_address(
- socks5.SOCKS5Command.CONNECT, (host, port)
+ socksio.socks5.SOCKS5CommandRequest.from_address(
+ socksio.socks5.SOCKS5Command.CONNECT, (host, port)
)
)
outgoing_bytes = conn.data_to_send()
@@ -95,31 +96,29 @@ def _init_socks5_connection(
# Connect response
incoming_bytes = stream.read(max_bytes=4096)
response = conn.receive_data(incoming_bytes)
- assert isinstance(response, socks5.SOCKS5Reply)
- if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED:
+ assert isinstance(response, socksio.socks5.SOCKS5Reply)
+ if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
-class SOCKSProxy(ConnectionPool):
+class SOCKSProxy(ConnectionPool): # pragma: nocover
"""
A connection pool that sends requests via a SOCKS proxy.
"""
def __init__(
self,
- proxy_url: typing.Union[URL, bytes, str],
- proxy_auth: typing.Optional[
- typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]]
- ] = None,
- ssl_context: typing.Optional[ssl.SSLContext] = None,
- max_connections: typing.Optional[int] = 10,
- max_keepalive_connections: typing.Optional[int] = None,
- keepalive_expiry: typing.Optional[float] = None,
+ proxy_url: URL | bytes | str,
+ proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ max_connections: int | None = 10,
+ max_keepalive_connections: int | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
retries: int = 0,
- network_backend: typing.Optional[NetworkBackend] = None,
+ network_backend: NetworkBackend | None = None,
) -> None:
"""
A connection pool for making HTTP requests.
@@ -167,7 +166,7 @@ class SOCKSProxy(ConnectionPool):
username, password = proxy_auth
username_bytes = enforce_bytes(username, name="proxy_auth")
password_bytes = enforce_bytes(password, name="proxy_auth")
- self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = (
+ self._proxy_auth: tuple[bytes, bytes] | None = (
username_bytes,
password_bytes,
)
@@ -192,12 +191,12 @@ class Socks5Connection(ConnectionInterface):
self,
proxy_origin: Origin,
remote_origin: Origin,
- proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None,
- ssl_context: typing.Optional[ssl.SSLContext] = None,
- keepalive_expiry: typing.Optional[float] = None,
+ proxy_auth: tuple[bytes, bytes] | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ keepalive_expiry: float | None = None,
http1: bool = True,
http2: bool = False,
- network_backend: typing.Optional[NetworkBackend] = None,
+ network_backend: NetworkBackend | None = None,
) -> None:
self._proxy_origin = proxy_origin
self._remote_origin = remote_origin
@@ -211,7 +210,7 @@ class Socks5Connection(ConnectionInterface):
SyncBackend() if network_backend is None else network_backend
)
self._connect_lock = Lock()
- self._connection: typing.Optional[ConnectionInterface] = None
+ self._connection: ConnectionInterface | None = None
self._connect_failed = False
def handle_request(self, request: Request) -> Response:
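SOCKSProxy keeps its existing constructor; what changes here is the annotation style, the fully-qualified socksio.socks5.* references, and the fact that the new pool-level proxy= option recognises both socks5:// and socks5h:// schemes. A minimal usage sketch, assuming the package-level re-exports and a SOCKS5 proxy listening locally:

import httpcore

# Dedicated SOCKS pool, as before...
proxy_pool = httpcore.SOCKSProxy(
    proxy_url="socks5://127.0.0.1:1080/",
    proxy_auth=("user", "s3cret"),  # str or bytes, normalised via enforce_bytes()
)

# ...or, with this release, the same thing via the generic pool option.
pool = httpcore.ConnectionPool(proxy=httpcore.Proxy("socks5://127.0.0.1:1080/"))

response = proxy_pool.request("GET", "https://www.example.com/")
print(response.status)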
diff --git a/contrib/python/httpcore/httpcore/_synchronization.py b/contrib/python/httpcore/httpcore/_synchronization.py
index 50cfefe0a2..2ecc9e9c36 100644
--- a/contrib/python/httpcore/httpcore/_synchronization.py
+++ b/contrib/python/httpcore/httpcore/_synchronization.py
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
import threading
-from types import TracebackType
-from typing import Optional, Type
+import types
from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions
@@ -66,7 +67,7 @@ class AsyncLock:
elif self._backend == "asyncio":
self._anyio_lock = anyio.Lock()
- async def __aenter__(self) -> "AsyncLock":
+ async def __aenter__(self) -> AsyncLock:
if not self._backend:
self.setup()
@@ -79,9 +80,9 @@ class AsyncLock:
async def __aexit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
if self._backend == "trio":
self._trio_lock.release()
@@ -97,14 +98,14 @@ class AsyncThreadLock:
In the async case `AsyncThreadLock` is a no-op.
"""
- def __enter__(self) -> "AsyncThreadLock":
+ def __enter__(self) -> AsyncThreadLock:
return self
def __exit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
pass
@@ -133,7 +134,7 @@ class AsyncEvent:
elif self._backend == "asyncio":
self._anyio_event.set()
- async def wait(self, timeout: Optional[float] = None) -> None:
+ async def wait(self, timeout: float | None = None) -> None:
if not self._backend:
self.setup()
@@ -206,7 +207,7 @@ class AsyncShieldCancellation:
elif self._backend == "asyncio":
self._anyio_shield = anyio.CancelScope(shield=True)
- def __enter__(self) -> "AsyncShieldCancellation":
+ def __enter__(self) -> AsyncShieldCancellation:
if self._backend == "trio":
self._trio_shield.__enter__()
elif self._backend == "asyncio":
@@ -215,9 +216,9 @@ class AsyncShieldCancellation:
def __exit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
if self._backend == "trio":
self._trio_shield.__exit__(exc_type, exc_value, traceback)
@@ -239,15 +240,15 @@ class Lock:
def __init__(self) -> None:
self._lock = threading.Lock()
- def __enter__(self) -> "Lock":
+ def __enter__(self) -> Lock:
self._lock.acquire()
return self
def __exit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
self._lock.release()
@@ -263,15 +264,15 @@ class ThreadLock:
def __init__(self) -> None:
self._lock = threading.Lock()
- def __enter__(self) -> "ThreadLock":
+ def __enter__(self) -> ThreadLock:
self._lock.acquire()
return self
def __exit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
self._lock.release()
@@ -283,7 +284,7 @@ class Event:
def set(self) -> None:
self._event.set()
- def wait(self, timeout: Optional[float] = None) -> None:
+ def wait(self, timeout: float | None = None) -> None:
if timeout == float("inf"): # pragma: no cover
timeout = None
if not self._event.wait(timeout=timeout):
@@ -305,13 +306,13 @@ class ShieldCancellation:
# Thread-synchronous codebases don't support cancellation semantics.
# We have this class because we need to mirror the async and sync
# cases within our package, but it's just a no-op.
- def __enter__(self) -> "ShieldCancellation":
+ def __enter__(self) -> ShieldCancellation:
return self
def __exit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
pass
diff --git a/contrib/python/httpcore/httpcore/_trace.py b/contrib/python/httpcore/httpcore/_trace.py
index b122a53e88..5f1cd7c478 100644
--- a/contrib/python/httpcore/httpcore/_trace.py
+++ b/contrib/python/httpcore/httpcore/_trace.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
import inspect
import logging
-from types import TracebackType
-from typing import Any, Dict, Optional, Type
+import types
+import typing
from ._models import Request
@@ -11,8 +13,8 @@ class Trace:
self,
name: str,
logger: logging.Logger,
- request: Optional[Request] = None,
- kwargs: Optional[Dict[str, Any]] = None,
+ request: Request | None = None,
+ kwargs: dict[str, typing.Any] | None = None,
) -> None:
self.name = name
self.logger = logger
@@ -21,11 +23,11 @@ class Trace:
)
self.debug = self.logger.isEnabledFor(logging.DEBUG)
self.kwargs = kwargs or {}
- self.return_value: Any = None
+ self.return_value: typing.Any = None
self.should_trace = self.debug or self.trace_extension is not None
self.prefix = self.logger.name.split(".")[-1]
- def trace(self, name: str, info: Dict[str, Any]) -> None:
+ def trace(self, name: str, info: dict[str, typing.Any]) -> None:
if self.trace_extension is not None:
prefix_and_name = f"{self.prefix}.{name}"
ret = self.trace_extension(prefix_and_name, info)
@@ -44,7 +46,7 @@ class Trace:
message = f"{name} {args}"
self.logger.debug(message)
- def __enter__(self) -> "Trace":
+ def __enter__(self) -> Trace:
if self.should_trace:
info = self.kwargs
self.trace(f"{self.name}.started", info)
@@ -52,9 +54,9 @@ class Trace:
def __exit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
if self.should_trace:
if exc_value is None:
@@ -64,7 +66,7 @@ class Trace:
info = {"exception": exc_value}
self.trace(f"{self.name}.failed", info)
- async def atrace(self, name: str, info: Dict[str, Any]) -> None:
+ async def atrace(self, name: str, info: dict[str, typing.Any]) -> None:
if self.trace_extension is not None:
prefix_and_name = f"{self.prefix}.{name}"
coro = self.trace_extension(prefix_and_name, info)
@@ -84,7 +86,7 @@ class Trace:
message = f"{name} {args}"
self.logger.debug(message)
- async def __aenter__(self) -> "Trace":
+ async def __aenter__(self) -> Trace:
if self.should_trace:
info = self.kwargs
await self.atrace(f"{self.name}.started", info)
@@ -92,9 +94,9 @@ class Trace:
async def __aexit__(
self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
+ exc_type: type[BaseException] | None = None,
+ exc_value: BaseException | None = None,
+ traceback: types.TracebackType | None = None,
) -> None:
if self.should_trace:
if exc_value is None:
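Trace is the context manager used throughout the connection classes (e.g. `with Trace("receive_response_body", logger, request, kwargs): ...` earlier in this diff); it emits `*.started` / `*.complete` / `*.failed` events either to the debug logger or to a trace callable supplied via request extensions. A minimal sketch of hooking that callable, assuming the "trace" extension key used upstream and the package-level request() helper:

import httpcore

def log_event(event_name: str, info: dict) -> None:
    # Receives e.g. "connection.connect_tcp.started" / ".complete",
    # with the keyword arguments of the traced call as `info`.
    print(event_name, info)

response = httpcore.request(
    "GET",
    "https://www.example.com/",
    extensions={"trace": log_event},
)
print(response.status)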
diff --git a/contrib/python/httpcore/httpcore/_utils.py b/contrib/python/httpcore/httpcore/_utils.py
index df5dea8fe4..c44ff93cb2 100644
--- a/contrib/python/httpcore/httpcore/_utils.py
+++ b/contrib/python/httpcore/httpcore/_utils.py
@@ -1,10 +1,11 @@
+from __future__ import annotations
+
import select
import socket
import sys
-import typing
-def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool:
+def is_socket_readable(sock: socket.socket | None) -> bool:
"""
Return whether a socket, as identified by its file descriptor, is readable.
"A socket is readable" means that the read buffer isn't empty, i.e. that calling
diff --git a/contrib/python/httpcore/ya.make b/contrib/python/httpcore/ya.make
index 30d63fe784..6d4f3507ca 100644
--- a/contrib/python/httpcore/ya.make
+++ b/contrib/python/httpcore/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(1.0.6)
+VERSION(1.0.7)
LICENSE(BSD-3-Clause)