author | robot-piglet <robot-piglet@yandex-team.com> | 2024-10-29 09:54:58 +0300
---|---|---
committer | robot-piglet <robot-piglet@yandex-team.com> | 2024-10-29 10:05:47 +0300
commit | 951b23b132d746894dce79549b664284d84ca912 (patch) |
tree | 49a51797f20100c557d3d80f8dbf1ad7e7a8c00a /contrib/python/anyio |
parent | c138ba0fd95b01235efb454180c5edb1bbe11834 (diff) |
download | ydb-951b23b132d746894dce79549b664284d84ca912.tar.gz |
Intermediate changes
commit_hash:5e0e414190745fa0359941d411052207c42514ab
Diffstat (limited to 'contrib/python/anyio')
-rw-r--r-- | contrib/python/anyio/.dist-info/METADATA | 5
-rw-r--r-- | contrib/python/anyio/anyio/_backends/_asyncio.py | 277
-rw-r--r-- | contrib/python/anyio/anyio/_backends/_trio.py | 28
-rw-r--r-- | contrib/python/anyio/anyio/_core/_fileio.py | 3
-rw-r--r-- | contrib/python/anyio/anyio/_core/_signals.py | 6
-rw-r--r-- | contrib/python/anyio/anyio/_core/_streams.py | 4
-rw-r--r-- | contrib/python/anyio/anyio/_core/_subprocesses.py | 21
-rw-r--r-- | contrib/python/anyio/anyio/abc/_eventloop.py | 8
-rw-r--r-- | contrib/python/anyio/anyio/abc/_sockets.py | 8
-rw-r--r-- | contrib/python/anyio/anyio/from_thread.py | 16
-rw-r--r-- | contrib/python/anyio/anyio/pytest_plugin.py | 4
-rw-r--r-- | contrib/python/anyio/anyio/streams/tls.py | 6
-rw-r--r-- | contrib/python/anyio/ya.make | 2 |
13 files changed, 228 insertions, 160 deletions
diff --git a/contrib/python/anyio/.dist-info/METADATA b/contrib/python/anyio/.dist-info/METADATA
index e28bbd52d0..10d7aafc77 100644
--- a/contrib/python/anyio/.dist-info/METADATA
+++ b/contrib/python/anyio/.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: anyio
-Version: 4.6.2
+Version: 4.6.2.post1
 Summary: High level compatibility layer for multiple asynchronous event loop implementations
 Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
 License: MIT
@@ -15,13 +15,12 @@ Classifier: Framework :: AnyIO
 Classifier: Typing :: Typed
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
-Requires-Python: >=3.8
+Requires-Python: >=3.9
 Description-Content-Type: text/x-rst
 License-File: LICENSE
 Requires-Dist: idna >=2.8
diff --git a/contrib/python/anyio/anyio/_backends/_asyncio.py b/contrib/python/anyio/anyio/_backends/_asyncio.py
index fa5349a8c2..0a69e7ac61 100644
--- a/contrib/python/anyio/anyio/_backends/_asyncio.py
+++ b/contrib/python/anyio/anyio/_backends/_asyncio.py
@@ -20,9 +20,18 @@ from asyncio import (
 )
 from asyncio.base_events import _run_until_complete_cb  # type: ignore[attr-defined]
 from collections import OrderedDict, deque
-from collections.abc import AsyncIterator, Iterable
+from collections.abc import (
+    AsyncGenerator,
+    AsyncIterator,
+    Awaitable,
+    Callable,
+    Collection,
+    Coroutine,
+    Iterable,
+    Sequence,
+)
 from concurrent.futures import Future
-from contextlib import suppress
+from contextlib import AbstractContextManager, suppress
 from contextvars import Context, copy_context
 from dataclasses import dataclass
 from functools import partial, wraps
@@ -42,15 +51,7 @@ from types import TracebackType
 from typing import (
     IO,
     Any,
-    AsyncGenerator,
-    Awaitable,
-    Callable,
-    Collection,
-    ContextManager,
-    Coroutine,
     Optional,
-    Sequence,
-    Tuple,
     TypeVar,
     cast,
 )
@@ -358,6 +359,14 @@ def _task_started(task: asyncio.Task) -> bool:
 #


+def is_anyio_cancellation(exc: CancelledError) -> bool:
+    return (
+        bool(exc.args)
+        and isinstance(exc.args[0], str)
+        and exc.args[0].startswith("Cancelled by cancel scope ")
+    )
+
+
 class CancelScope(BaseCancelScope):
     def __new__(
         cls, *, deadline: float = math.inf, shield: bool = False
@@ -416,6 +425,8 @@ class CancelScope(BaseCancelScope):
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> bool | None:
+        del exc_tb
+
         if not self._active:
             raise RuntimeError("This cancel scope is not active")
         if current_task() is not self._host_task:
@@ -432,47 +443,93 @@ class CancelScope(BaseCancelScope):
                 "current cancel scope"
             )

-        self._active = False
-        if self._timeout_handle:
-            self._timeout_handle.cancel()
-            self._timeout_handle = None
-
-        self._tasks.remove(self._host_task)
-        if self._parent_scope is not None:
-            self._parent_scope._child_scopes.remove(self)
-            self._parent_scope._tasks.add(self._host_task)
+        try:
+            self._active = False
+            if self._timeout_handle:
+                self._timeout_handle.cancel()
+                self._timeout_handle = None

-        host_task_state.cancel_scope = self._parent_scope
+            self._tasks.remove(self._host_task)
+            if self._parent_scope is not None:
+                self._parent_scope._child_scopes.remove(self)
+                self._parent_scope._tasks.add(self._host_task)

-        # Restart the cancellation effort in the closest directly cancelled parent
-        # scope if this one was shielded
-        self._restart_cancellation_in_parent()
+            host_task_state.cancel_scope = self._parent_scope

-        if self._cancel_called and exc_val is not None:
-            for exc in iterate_exceptions(exc_val):
-                if isinstance(exc, CancelledError):
-                    self._cancelled_caught = self._uncancel(exc)
-                    if self._cancelled_caught:
+            # Undo all cancellations done by this scope
+            if self._cancelling is not None:
+                while self._cancel_calls:
+                    self._cancel_calls -= 1
+                    if self._host_task.uncancel() <= self._cancelling:
                         break

-            return self._cancelled_caught
+            # We only swallow the exception iff it was an AnyIO CancelledError, either
+            # directly as exc_val or inside an exception group and there are no cancelled
+            # parent cancel scopes visible to us here
+            not_swallowed_exceptions = 0
+            swallow_exception = False
+            if exc_val is not None:
+                for exc in iterate_exceptions(exc_val):
+                    if self._cancel_called and isinstance(exc, CancelledError):
+                        if not (swallow_exception := self._uncancel(exc)):
+                            not_swallowed_exceptions += 1
+                    else:
+                        not_swallowed_exceptions += 1

-        return None
+            # Restart the cancellation effort in the closest visible, cancelled parent
+            # scope if necessary
+            self._restart_cancellation_in_parent()
+            return swallow_exception and not not_swallowed_exceptions
+        finally:
+            self._host_task = None
+            del exc_val
+
+    @property
+    def _effectively_cancelled(self) -> bool:
+        cancel_scope: CancelScope | None = self
+        while cancel_scope is not None:
+            if cancel_scope._cancel_called:
+                return True
+
+            if cancel_scope.shield:
+                return False
+
+            cancel_scope = cancel_scope._parent_scope
+
+        return False
+
+    @property
+    def _parent_cancellation_is_visible_to_us(self) -> bool:
+        return (
+            self._parent_scope is not None
+            and not self.shield
+            and self._parent_scope._effectively_cancelled
+        )

     def _uncancel(self, cancelled_exc: CancelledError) -> bool:
-        if sys.version_info < (3, 9) or self._host_task is None:
+        if self._host_task is None:
             self._cancel_calls = 0
             return True

-        # Undo all cancellations done by this scope
-        if self._cancelling is not None:
-            while self._cancel_calls:
-                self._cancel_calls -= 1
-                if self._host_task.uncancel() <= self._cancelling:
-                    return True
+        while True:
+            if is_anyio_cancellation(cancelled_exc):
+                # Only swallow the cancellation exception if it's an AnyIO cancel
+                # exception and there are no other cancel scopes down the line pending
+                # cancellation
+                self._cancelled_caught = (
+                    self._effectively_cancelled
+                    and not self._parent_cancellation_is_visible_to_us
+                )
+                return self._cancelled_caught

-        self._cancel_calls = 0
-        return f"Cancelled by cancel scope {id(self):x}" in cancelled_exc.args
+            # Sometimes third party frameworks catch a CancelledError and raise a new
+            # one, so as a workaround we have to look at the previous ones in
+            # __context__ too for a matching cancel message
+            if isinstance(cancelled_exc.__context__, CancelledError):
+                cancelled_exc = cancelled_exc.__context__
+                continue
+
+            return False

     def _timeout(self) -> None:
         if self._deadline != math.inf:
@@ -496,19 +553,17 @@ class CancelScope(BaseCancelScope):
         should_retry = False
         current = current_task()
         for task in self._tasks:
+            should_retry = True
             if task._must_cancel:  # type: ignore[attr-defined]
                 continue

             # The task is eligible for cancellation if it has started
-            should_retry = True
             if task is not current and (task is self._host_task or _task_started(task)):
                 waiter = task._fut_waiter  # type: ignore[attr-defined]
                 if not isinstance(waiter, asyncio.Future) or not waiter.done():
-                    origin._cancel_calls += 1
-                    if sys.version_info >= (3, 9):
-                        task.cancel(f"Cancelled by cancel scope {id(origin):x}")
-                    else:
-                        task.cancel()
+                    task.cancel(f"Cancelled by cancel scope {id(origin):x}")
+                    if task is origin._host_task:
+                        origin._cancel_calls += 1

         # Deliver cancellation to child scopes that aren't shielded or running their own
         # cancellation callbacks
@@ -546,17 +601,6 @@ class CancelScope(BaseCancelScope):

             scope = scope._parent_scope

-    def _parent_cancelled(self) -> bool:
-        # Check whether any parent has been cancelled
-        cancel_scope = self._parent_scope
-        while cancel_scope is not None and not cancel_scope._shield:
-            if cancel_scope._cancel_called:
-                return True
-            else:
-                cancel_scope = cancel_scope._parent_scope
-
-        return False
-
     def cancel(self) -> None:
         if not self._cancel_called:
             if self._timeout_handle:
@@ -645,6 +689,26 @@ class _AsyncioTaskStatus(abc.TaskStatus):
             _task_states[task].parent_id = self._parent_id


+async def _wait(tasks: Iterable[asyncio.Task[object]]) -> None:
+    tasks = set(tasks)
+    waiter = get_running_loop().create_future()
+
+    def on_completion(task: asyncio.Task[object]) -> None:
+        tasks.discard(task)
+        if not tasks and not waiter.done():
+            waiter.set_result(None)
+
+    for task in tasks:
+        task.add_done_callback(on_completion)
+        del task
+
+    try:
+        await waiter
+    finally:
+        while tasks:
+            tasks.pop().remove_done_callback(on_completion)
+
+
 class TaskGroup(abc.TaskGroup):
     def __init__(self) -> None:
         self.cancel_scope: CancelScope = CancelScope()
@@ -663,38 +727,53 @@ class TaskGroup(abc.TaskGroup):
         exc_val: BaseException | None,
         exc_tb: TracebackType | None,
     ) -> bool | None:
-        ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb)
-        if exc_val is not None:
-            self.cancel_scope.cancel()
-            if not isinstance(exc_val, CancelledError):
-                self._exceptions.append(exc_val)
-
-        cancelled_exc_while_waiting_tasks: CancelledError | None = None
-        while self._tasks:
-            try:
-                await asyncio.wait(self._tasks)
-            except CancelledError as exc:
-                # This task was cancelled natively; reraise the CancelledError later
-                # unless this task was already interrupted by another exception
+        try:
+            if exc_val is not None:
                 self.cancel_scope.cancel()
-                if cancelled_exc_while_waiting_tasks is None:
-                    cancelled_exc_while_waiting_tasks = exc
+                if not isinstance(exc_val, CancelledError):
+                    self._exceptions.append(exc_val)

-        self._active = False
-        if self._exceptions:
-            raise BaseExceptionGroup(
-                "unhandled errors in a TaskGroup", self._exceptions
-            )
+            try:
+                if self._tasks:
+                    with CancelScope() as wait_scope:
+                        while self._tasks:
+                            try:
+                                await _wait(self._tasks)
+                            except CancelledError as exc:
+                                # Shield the scope against further cancellation attempts,
+                                # as they're not productive (#695)
+                                wait_scope.shield = True
+                                self.cancel_scope.cancel()
+
+                                # Set exc_val from the cancellation exception if it was
+                                # previously unset. However, we should not replace a native
+                                # cancellation exception with one raise by a cancel scope.
+                                if exc_val is None or (
+                                    isinstance(exc_val, CancelledError)
+                                    and not is_anyio_cancellation(exc)
+                                ):
+                                    exc_val = exc
+                else:
+                    # If there are no child tasks to wait on, run at least one checkpoint
+                    # anyway
+                    await AsyncIOBackend.cancel_shielded_checkpoint()

-        # Raise the CancelledError received while waiting for child tasks to exit,
-        # unless the context manager itself was previously exited with another
-        # exception, or if any of the child tasks raised an exception other than
-        # CancelledError
-        if cancelled_exc_while_waiting_tasks:
-            if exc_val is None or ignore_exception:
-                raise cancelled_exc_while_waiting_tasks
+                self._active = False
+                if self._exceptions:
+                    raise BaseExceptionGroup(
+                        "unhandled errors in a TaskGroup", self._exceptions
+                    )
+                elif exc_val:
+                    raise exc_val
+            except BaseException as exc:
+                if self.cancel_scope.__exit__(type(exc), exc, exc.__traceback__):
+                    return True

-        return ignore_exception
+                raise
+
+            return self.cancel_scope.__exit__(exc_type, exc_val, exc_tb)
+        finally:
+            del exc_val, exc_tb, self._exceptions

     def _spawn(
         self,
@@ -730,7 +809,7 @@ class TaskGroup(abc.TaskGroup):
                 if not isinstance(exc, CancelledError):
                     self._exceptions.append(exc)

-                if not self.cancel_scope._parent_cancelled():
+                if not self.cancel_scope._effectively_cancelled:
                     self.cancel_scope.cancel()
             else:
                 task_status_future.set_exception(exc)
@@ -806,7 +885,7 @@ class TaskGroup(abc.TaskGroup):
 # Threads
 #

-_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]]
+_Retval_Queue_Type = tuple[Optional[T_Retval], Optional[BaseException]]


 class WorkerThread(Thread):
@@ -955,7 +1034,7 @@ class Process(abc.Process):
     _stderr: StreamReaderWrapper | None

     async def aclose(self) -> None:
-        with CancelScope(shield=True):
+        with CancelScope(shield=True) as scope:
             if self._stdin:
                 await self._stdin.aclose()
             if self._stdout:
@@ -963,14 +1042,14 @@ class Process(abc.Process):
             if self._stderr:
                 await self._stderr.aclose()

-        try:
-            await self.wait()
-        except BaseException:
-            self.kill()
-            with CancelScope(shield=True):
+            scope.shield = False
+            try:
                 await self.wait()
-
-            raise
+            except BaseException:
+                scope.shield = True
+                self.kill()
+                await self.wait()
+                raise

     async def wait(self) -> int:
         return await self._process.wait()
@@ -2022,9 +2101,7 @@ class AsyncIOTaskInfo(TaskInfo):

         if task_state := _task_states.get(task):
             if cancel_scope := task_state.cancel_scope:
-                return cancel_scope.cancel_called or (
-                    not cancel_scope.shield and cancel_scope._parent_cancelled()
-                )
+                return cancel_scope._effectively_cancelled

         return False
@@ -2118,7 +2195,7 @@ class TestRunner(abc.TestRunner):
     ) -> T_Retval:
         if not self._runner_task:
             self._send_stream, receive_stream = create_memory_object_stream[
-                Tuple[Awaitable[Any], asyncio.Future]
+                tuple[Awaitable[Any], asyncio.Future]
             ](1)
             self._runner_task = self.get_loop().create_task(
                 self._run_tests_and_fixtures(receive_stream)
@@ -2480,7 +2557,7 @@ class AsyncIOBackend(AsyncBackend):
         cls, host: str, port: int, local_address: IPSockAddrType | None = None
     ) -> abc.SocketStream:
         transport, protocol = cast(
-            Tuple[asyncio.Transport, StreamProtocol],
+            tuple[asyncio.Transport, StreamProtocol],
             await get_running_loop().create_connection(
                 StreamProtocol, host, port, local_addr=local_address
             ),
@@ -2659,7 +2736,7 @@ class AsyncIOBackend(AsyncBackend):
     @classmethod
     def open_signal_receiver(
         cls, *signals: Signals
-    ) -> ContextManager[AsyncIterator[Signals]]:
+    ) -> AbstractContextManager[AsyncIterator[Signals]]:
         return _SignalReceiver(signals)

     @classmethod
diff --git a/contrib/python/anyio/anyio/_backends/_trio.py b/contrib/python/anyio/anyio/_backends/_trio.py
index aee974deb6..24dcd74446 100644
--- a/contrib/python/anyio/anyio/_backends/_trio.py
+++ b/contrib/python/anyio/anyio/_backends/_trio.py
@@ -7,8 +7,18 @@ import socket
 import sys
 import types
 import weakref
-from collections.abc import AsyncIterator, Iterable
+from collections.abc import (
+    AsyncGenerator,
+    AsyncIterator,
+    Awaitable,
+    Callable,
+    Collection,
+    Coroutine,
+    Iterable,
+    Sequence,
+)
 from concurrent.futures import Future
+from contextlib import AbstractContextManager
 from dataclasses import dataclass
 from functools import partial
 from io import IOBase
@@ -19,15 +29,8 @@ from types import TracebackType
 from typing import (
     IO,
     Any,
-    AsyncGenerator,
-    Awaitable,
-    Callable,
-    Collection,
-    ContextManager,
-    Coroutine,
     Generic,
     NoReturn,
-    Sequence,
     TypeVar,
     cast,
     overload,
@@ -183,13 +186,12 @@ class TaskGroup(abc.TaskGroup):
         try:
             return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb)
         except BaseExceptionGroup as exc:
-            _, rest = exc.split(trio.Cancelled)
-            if not rest:
-                cancelled_exc = trio.Cancelled._create()
-                raise cancelled_exc from exc
+            if not exc.split(trio.Cancelled)[1]:
+                raise trio.Cancelled._create() from exc

             raise
         finally:
+            del exc_val, exc_tb
             self._active = False

     def start_soon(
@@ -1289,7 +1291,7 @@ class TrioBackend(AsyncBackend):
     @classmethod
     def open_signal_receiver(
         cls, *signals: Signals
-    ) -> ContextManager[AsyncIterator[Signals]]:
+    ) -> AbstractContextManager[AsyncIterator[Signals]]:
         return _SignalReceiver(signals)

     @classmethod
diff --git a/contrib/python/anyio/anyio/_core/_fileio.py b/contrib/python/anyio/anyio/_core/_fileio.py
index 214a90bfd8..53d3288c29 100644
--- a/contrib/python/anyio/anyio/_core/_fileio.py
+++ b/contrib/python/anyio/anyio/_core/_fileio.py
@@ -3,7 +3,7 @@ from __future__ import annotations
 import os
 import pathlib
 import sys
-from collections.abc import Callable, Iterable, Iterator, Sequence
+from collections.abc import AsyncIterator, Callable, Iterable, Iterator, Sequence
 from dataclasses import dataclass
 from functools import partial
 from os import PathLike
@@ -12,7 +12,6 @@ from typing import (
     TYPE_CHECKING,
     Any,
     AnyStr,
-    AsyncIterator,
     Final,
     Generic,
     overload,
diff --git a/contrib/python/anyio/anyio/_core/_signals.py b/contrib/python/anyio/anyio/_core/_signals.py
index 115c749bd9..f3451d302f 100644
--- a/contrib/python/anyio/anyio/_core/_signals.py
+++ b/contrib/python/anyio/anyio/_core/_signals.py
@@ -1,13 +1,15 @@
 from __future__ import annotations

 from collections.abc import AsyncIterator
+from contextlib import AbstractContextManager
 from signal import Signals
-from typing import ContextManager

 from ._eventloop import get_async_backend


-def open_signal_receiver(*signals: Signals) -> ContextManager[AsyncIterator[Signals]]:
+def open_signal_receiver(
+    *signals: Signals,
+) -> AbstractContextManager[AsyncIterator[Signals]]:
     """
     Start receiving operating system signals.

diff --git a/contrib/python/anyio/anyio/_core/_streams.py b/contrib/python/anyio/anyio/_core/_streams.py
index aa6b0c222a..6a9814e5a9 100644
--- a/contrib/python/anyio/anyio/_core/_streams.py
+++ b/contrib/python/anyio/anyio/_core/_streams.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

 import math
-from typing import Tuple, TypeVar
+from typing import TypeVar
 from warnings import warn

 from ..streams.memory import (
@@ -14,7 +14,7 @@ T_Item = TypeVar("T_Item")


 class create_memory_object_stream(
-    Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
+    tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
 ):
     """
     Create a memory object stream.
diff --git a/contrib/python/anyio/anyio/_core/_subprocesses.py b/contrib/python/anyio/anyio/_core/_subprocesses.py
index 1ac2d549df..7ba41a5b03 100644
--- a/contrib/python/anyio/anyio/_core/_subprocesses.py
+++ b/contrib/python/anyio/anyio/_core/_subprocesses.py
@@ -160,38 +160,25 @@ async def open_process(
         child process prior to the execution of the subprocess. (POSIX only)
     :param pass_fds: sequence of file descriptors to keep open between the parent and
         child processes. (POSIX only)
-    :param user: effective user to run the process as (Python >= 3.9; POSIX only)
-    :param group: effective group to run the process as (Python >= 3.9; POSIX only)
-    :param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9;
-        POSIX only)
+    :param user: effective user to run the process as (POSIX only)
+    :param group: effective group to run the process as (POSIX only)
+    :param extra_groups: supplementary groups to set in the subprocess (POSIX only)
     :param umask: if not negative, this umask is applied in the child process before
-        running the given command (Python >= 3.9; POSIX only)
+        running the given command (POSIX only)
     :return: an asynchronous process object

     """
     kwargs: dict[str, Any] = {}
     if user is not None:
-        if sys.version_info < (3, 9):
-            raise TypeError("the 'user' argument requires Python 3.9 or later")
-
         kwargs["user"] = user

     if group is not None:
-        if sys.version_info < (3, 9):
-            raise TypeError("the 'group' argument requires Python 3.9 or later")
-
         kwargs["group"] = group

     if extra_groups is not None:
-        if sys.version_info < (3, 9):
-            raise TypeError("the 'extra_groups' argument requires Python 3.9 or later")
-
         kwargs["extra_groups"] = group

     if umask >= 0:
-        if sys.version_info < (3, 9):
-            raise TypeError("the 'umask' argument requires Python 3.9 or later")
-
         kwargs["umask"] = umask

     return await get_async_backend().open_process(
diff --git a/contrib/python/anyio/anyio/abc/_eventloop.py b/contrib/python/anyio/anyio/abc/_eventloop.py
index 2c73bb9ffb..93d0e9d25b 100644
--- a/contrib/python/anyio/anyio/abc/_eventloop.py
+++ b/contrib/python/anyio/anyio/abc/_eventloop.py
@@ -3,7 +3,8 @@ from __future__ import annotations
 import math
 import sys
 from abc import ABCMeta, abstractmethod
-from collections.abc import AsyncIterator, Awaitable
+from collections.abc import AsyncIterator, Awaitable, Callable, Sequence
+from contextlib import AbstractContextManager
 from os import PathLike
 from signal import Signals
 from socket import AddressFamily, SocketKind, socket
@@ -11,9 +12,6 @@ from typing import (
     IO,
     TYPE_CHECKING,
     Any,
-    Callable,
-    ContextManager,
-    Sequence,
     TypeVar,
     Union,
     overload,
@@ -352,7 +350,7 @@ class AsyncBackend(metaclass=ABCMeta):
     @abstractmethod
     def open_signal_receiver(
         cls, *signals: Signals
-    ) -> ContextManager[AsyncIterator[Signals]]:
+    ) -> AbstractContextManager[AsyncIterator[Signals]]:
         pass

     @classmethod
diff --git a/contrib/python/anyio/anyio/abc/_sockets.py b/contrib/python/anyio/anyio/abc/_sockets.py
index b321225a7b..1c6a450cdc 100644
--- a/contrib/python/anyio/anyio/abc/_sockets.py
+++ b/contrib/python/anyio/anyio/abc/_sockets.py
@@ -8,7 +8,7 @@ from io import IOBase
 from ipaddress import IPv4Address, IPv6Address
 from socket import AddressFamily
 from types import TracebackType
-from typing import Any, Tuple, TypeVar, Union
+from typing import Any, TypeVar, Union

 from .._core._typedattr import (
     TypedAttributeProvider,
@@ -19,10 +19,10 @@ from ._streams import ByteStream, Listener, UnreliableObjectStream
 from ._tasks import TaskGroup

 IPAddressType = Union[str, IPv4Address, IPv6Address]
-IPSockAddrType = Tuple[str, int]
+IPSockAddrType = tuple[str, int]
 SockAddrType = Union[IPSockAddrType, str]
-UDPPacketType = Tuple[bytes, IPSockAddrType]
-UNIXDatagramPacketType = Tuple[bytes, str]
+UDPPacketType = tuple[bytes, IPSockAddrType]
+UNIXDatagramPacketType = tuple[bytes, str]
 T_Retval = TypeVar("T_Retval")
diff --git a/contrib/python/anyio/anyio/from_thread.py b/contrib/python/anyio/anyio/from_thread.py
index b8785845ba..93a4cfe8e4 100644
--- a/contrib/python/anyio/anyio/from_thread.py
+++ b/contrib/python/anyio/anyio/from_thread.py
@@ -3,15 +3,17 @@ from __future__ import annotations
 import sys
 from collections.abc import Awaitable, Callable, Generator
 from concurrent.futures import Future
-from contextlib import AbstractContextManager, contextmanager
+from contextlib import (
+    AbstractAsyncContextManager,
+    AbstractContextManager,
+    contextmanager,
+)
 from dataclasses import dataclass, field
 from inspect import isawaitable
 from threading import Lock, Thread, get_ident
 from types import TracebackType
 from typing import (
     Any,
-    AsyncContextManager,
-    ContextManager,
     Generic,
     TypeVar,
     cast,
@@ -87,7 +89,9 @@ class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager):
         type[BaseException] | None, BaseException | None, TracebackType | None
     ] = (None, None, None)

-    def __init__(self, async_cm: AsyncContextManager[T_co], portal: BlockingPortal):
+    def __init__(
+        self, async_cm: AbstractAsyncContextManager[T_co], portal: BlockingPortal
+    ):
         self._async_cm = async_cm
         self._portal = portal

@@ -374,8 +378,8 @@ class BlockingPortal:
         return f, task_status_future.result()

     def wrap_async_context_manager(
-        self, cm: AsyncContextManager[T_co]
-    ) -> ContextManager[T_co]:
+        self, cm: AbstractAsyncContextManager[T_co]
+    ) -> AbstractContextManager[T_co]:
         """
         Wrap an async context manager as a synchronous context manager via this portal.

diff --git a/contrib/python/anyio/anyio/pytest_plugin.py b/contrib/python/anyio/anyio/pytest_plugin.py
index b7d9305614..4a0d59dd06 100644
--- a/contrib/python/anyio/anyio/pytest_plugin.py
+++ b/contrib/python/anyio/anyio/pytest_plugin.py
@@ -4,7 +4,7 @@ import sys
 from collections.abc import Generator, Iterator
 from contextlib import ExitStack, contextmanager
 from inspect import isasyncgenfunction, iscoroutinefunction, ismethod
-from typing import Any, Dict, Tuple, cast
+from typing import Any, cast

 import pytest
 import sniffio
@@ -28,7 +28,7 @@ def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]:
         return backend, {}
     elif isinstance(backend, tuple) and len(backend) == 2:
         if isinstance(backend[0], str) and isinstance(backend[1], dict):
-            return cast(Tuple[str, Dict[str, Any]], backend)
+            return cast(tuple[str, dict[str, Any]], backend)

     raise TypeError("anyio_backend must be either a string or tuple of (string, dict)")
diff --git a/contrib/python/anyio/anyio/streams/tls.py b/contrib/python/anyio/anyio/streams/tls.py
index d01c8e6f4c..b6961bee16 100644
--- a/contrib/python/anyio/anyio/streams/tls.py
+++ b/contrib/python/anyio/anyio/streams/tls.py
@@ -7,7 +7,7 @@ import sys
 from collections.abc import Callable, Mapping
 from dataclasses import dataclass
 from functools import wraps
-from typing import Any, Tuple, TypeVar
+from typing import Any, TypeVar

 from .. import (
     BrokenResourceError,
@@ -25,8 +25,8 @@ else:

 T_Retval = TypeVar("T_Retval")
 PosArgsT = TypeVarTuple("PosArgsT")
-_PCTRTT = Tuple[Tuple[str, str], ...]
-_PCTRTTT = Tuple[_PCTRTT, ...]
+_PCTRTT = tuple[tuple[str, str], ...]
+_PCTRTTT = tuple[_PCTRTT, ...]


 class TLSAttribute(TypedAttributeSet):
diff --git a/contrib/python/anyio/ya.make b/contrib/python/anyio/ya.make
index bb56a53ce5..aadbb5b297 100644
--- a/contrib/python/anyio/ya.make
+++ b/contrib/python/anyio/ya.make
@@ -2,7 +2,7 @@

 PY3_LIBRARY()

-VERSION(4.6.2)
+VERSION(4.6.2.post1)

 LICENSE(MIT)
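The bulk of the `_asyncio.py` changes above revolve around one convention: `CancelScope._deliver_cancellation()` now always calls `task.cancel()` with a `"Cancelled by cancel scope <id>"` message, and the new `is_anyio_cancellation()` helper inspects `exc.args` for that prefix to tell AnyIO-initiated cancellations apart from native ones. The following is a minimal, self-contained sketch of that mechanism on plain asyncio; it is not part of the commit, requires no AnyIO import, and the scope id `deadbeef` is a made-up placeholder for what would be `id(scope)` in the real code.

```python
import asyncio


async def sleeper() -> None:
    await asyncio.sleep(10)


async def main() -> None:
    task = asyncio.create_task(sleeper())
    await asyncio.sleep(0)  # let the child task start

    # AnyIO cancels tasks with a marker message naming the cancel scope;
    # "deadbeef" stands in for the real id(scope) value.
    task.cancel("Cancelled by cancel scope deadbeef")

    try:
        await task
    except asyncio.CancelledError as exc:
        # The same check the new helper performs to decide whether the
        # cancellation originated from an AnyIO cancel scope
        is_anyio = (
            bool(exc.args)
            and isinstance(exc.args[0], str)
            and exc.args[0].startswith("Cancelled by cancel scope ")
        )
        print("AnyIO-style cancellation:", is_anyio)  # True


asyncio.run(main())
```

Because the marker message travels with the `CancelledError` (Python 3.9+ `Task.cancel(msg)`), a cancel scope can later decide whether to swallow the exception or let a foreign cancellation propagate, which is also why the diff drops the remaining Python 3.8 fallbacks.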