path: root/contrib/tools/python3/src/Lib/typing.py
author      shadchin <shadchin@yandex-team.ru>            2022-02-10 16:44:39 +0300
committer   Daniil Cherednik <dcherednik@yandex-team.ru>   2022-02-10 16:44:39 +0300
commit      e9656aae26e0358d5378e5b63dcac5c8dbe0e4d0 (patch)
tree        64175d5cadab313b3e7039ebaa06c5bc3295e274 /contrib/tools/python3/src/Lib/typing.py
parent      2598ef1d0aee359b4b6d5fdd1758916d5907d04f (diff)
download    ydb-e9656aae26e0358d5378e5b63dcac5c8dbe0e4d0.tar.gz
Restoring authorship annotation for <shadchin@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/tools/python3/src/Lib/typing.py')
-rw-r--r--  contrib/tools/python3/src/Lib/typing.py | 2336
1 file changed, 1168 insertions, 1168 deletions
diff --git a/contrib/tools/python3/src/Lib/typing.py b/contrib/tools/python3/src/Lib/typing.py
index 7b687082a4..da70d4115f 100644
--- a/contrib/tools/python3/src/Lib/typing.py
+++ b/contrib/tools/python3/src/Lib/typing.py
@@ -9,15 +9,15 @@ At large scale, the structure of the module is following:
* The core of internal generics API: _GenericAlias and _VariadicGenericAlias, the latter is
currently only used by Tuple and Callable. All subscripted types like X[int], Union[int, str],
etc., are instances of either of these classes.
-* The public counterpart of the generics API consists of two classes: Generic and Protocol.
+* The public counterpart of the generics API consists of two classes: Generic and Protocol.
* Public helper functions: get_type_hints, overload, cast, no_type_check,
no_type_check_decorator.
* Generic aliases for collections.abc ABCs and few additional protocols.
-* Special types: NewType, NamedTuple, TypedDict.
+* Special types: NewType, NamedTuple, TypedDict.
* Wrapper submodules for re and io related types.
"""
-from abc import abstractmethod, ABCMeta
+from abc import abstractmethod, ABCMeta
import collections
import collections.abc
import contextlib
@@ -26,21 +26,21 @@ import operator
import re as stdlib_re # Avoid confusion with the re we export.
import sys
import types
-from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType, GenericAlias
+from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType, GenericAlias
# Please keep __all__ alphabetized within each category.
__all__ = [
# Super-special typing primitives.
- 'Annotated',
+ 'Annotated',
'Any',
'Callable',
'ClassVar',
- 'Final',
- 'ForwardRef',
+ 'Final',
+ 'ForwardRef',
'Generic',
- 'Literal',
+ 'Literal',
'Optional',
- 'Protocol',
+ 'Protocol',
'Tuple',
'Type',
'TypeVar',
@@ -78,44 +78,44 @@ __all__ = [
'SupportsBytes',
'SupportsComplex',
'SupportsFloat',
- 'SupportsIndex',
+ 'SupportsIndex',
'SupportsInt',
'SupportsRound',
# Concrete collection types.
- 'ChainMap',
+ 'ChainMap',
'Counter',
'Deque',
'Dict',
'DefaultDict',
'List',
- 'OrderedDict',
+ 'OrderedDict',
'Set',
'FrozenSet',
'NamedTuple', # Not really a type.
- 'TypedDict', # Not really a type.
+ 'TypedDict', # Not really a type.
'Generator',
-
- # Other concrete types.
- 'BinaryIO',
- 'IO',
- 'Match',
- 'Pattern',
- 'TextIO',
+
+ # Other concrete types.
+ 'BinaryIO',
+ 'IO',
+ 'Match',
+ 'Pattern',
+ 'TextIO',
# One-off things.
'AnyStr',
'cast',
- 'final',
- 'get_args',
- 'get_origin',
+ 'final',
+ 'get_args',
+ 'get_origin',
'get_type_hints',
'NewType',
'no_type_check',
'no_type_check_decorator',
'NoReturn',
'overload',
- 'runtime_checkable',
+ 'runtime_checkable',
'Text',
'TYPE_CHECKING',
]
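
For reference, a minimal sketch of the introspection helpers exported above (get_origin and get_args); behaviour assumed here is that of CPython 3.9-era typing, for illustration only:

    from typing import Dict, Union, get_args, get_origin

    # get_origin()/get_args() expose the __origin__/__args__ bookkeeping
    # carried by subscripted typing constructs.
    assert get_origin(Dict[str, int]) is dict
    assert get_args(Dict[str, int]) == (str, int)
    assert get_origin(Union[int, str]) is Union
    assert get_args(Union[int, str]) == (int, str)
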
@@ -125,16 +125,16 @@ __all__ = [
# legitimate imports of those modules.
-def _type_convert(arg, module=None):
- """For converting None to type(None), and strings to ForwardRef."""
- if arg is None:
- return type(None)
- if isinstance(arg, str):
- return ForwardRef(arg, module=module)
- return arg
-
-
-def _type_check(arg, msg, is_argument=True, module=None, *, is_class=False):
+def _type_convert(arg, module=None):
+ """For converting None to type(None), and strings to ForwardRef."""
+ if arg is None:
+ return type(None)
+ if isinstance(arg, str):
+ return ForwardRef(arg, module=module)
+ return arg
+
+
+def _type_check(arg, msg, is_argument=True, module=None, *, is_class=False):
"""Check that the argument is a type, and return it (internal helper).
As a special case, accept None and return type(None) instead. Also wrap strings
@@ -146,19 +146,19 @@ def _type_check(arg, msg, is_argument=True, module=None, *, is_class=False):
We append the repr() of the actual value (truncated to 100 chars).
"""
- invalid_generic_forms = (Generic, Protocol)
- if not is_class:
- invalid_generic_forms += (ClassVar,)
- if is_argument:
- invalid_generic_forms += (Final,)
+ invalid_generic_forms = (Generic, Protocol)
+ if not is_class:
+ invalid_generic_forms += (ClassVar,)
+ if is_argument:
+ invalid_generic_forms += (Final,)
- arg = _type_convert(arg, module=module)
+ arg = _type_convert(arg, module=module)
if (isinstance(arg, _GenericAlias) and
arg.__origin__ in invalid_generic_forms):
raise TypeError(f"{arg} is not valid as type argument")
- if arg in (Any, NoReturn, Final):
- return arg
- if isinstance(arg, _SpecialForm) or arg in (Generic, Protocol):
+ if arg in (Any, NoReturn, Final):
+ return arg
+ if isinstance(arg, _SpecialForm) or arg in (Generic, Protocol):
raise TypeError(f"Plain {arg} is not valid as type argument")
if isinstance(arg, (type, TypeVar, ForwardRef)):
return arg
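
As a rough usage sketch of the conversion rules above (None becomes type(None), bare strings become ForwardRef), assuming CPython 3.9-era behaviour:

    from typing import ForwardRef, Optional, Union, get_args

    # None in a type position is normalized to type(None) ...
    assert Union[int, None] == Optional[int]
    # ... and a bare string is wrapped in a ForwardRef
    assert ForwardRef("MyClass") in get_args(Optional["MyClass"])
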
@@ -175,8 +175,8 @@ def _type_repr(obj):
typically enough to uniquely identify a type. For everything
else, we fall back on repr(obj).
"""
- if isinstance(obj, types.GenericAlias):
- return repr(obj)
+ if isinstance(obj, types.GenericAlias):
+ return repr(obj)
if isinstance(obj, type):
if obj.__module__ == 'builtins':
return obj.__qualname__
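
A small illustration of the repr behaviour handled here: typing aliases keep the 'typing.' prefix, while builtin generic aliases (types.GenericAlias) keep their own repr. Assumes Python 3.9:

    import typing

    assert repr(typing.List[int]) == 'typing.List[int]'   # typing alias
    assert repr(list[int]) == 'list[int]'                 # types.GenericAlias
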
@@ -198,16 +198,16 @@ def _collect_type_vars(types):
for t in types:
if isinstance(t, TypeVar) and t not in tvars:
tvars.append(t)
- if isinstance(t, (_GenericAlias, GenericAlias)):
+ if isinstance(t, (_GenericAlias, GenericAlias)):
tvars.extend([t for t in t.__parameters__ if t not in tvars])
return tuple(tvars)
-def _check_generic(cls, parameters, elen):
+def _check_generic(cls, parameters, elen):
"""Check correct count for parameters of a generic cls (internal helper).
This gives a nice error message in case of count mismatch.
"""
- if not elen:
+ if not elen:
raise TypeError(f"{cls} is not a generic class")
alen = len(parameters)
if alen != elen:
@@ -215,20 +215,20 @@ def _check_generic(cls, parameters, elen):
f" actual {alen}, expected {elen}")
-def _deduplicate(params):
- # Weed out strict duplicates, preserving the first of each occurrence.
- all_params = set(params)
- if len(all_params) < len(params):
- new_params = []
- for t in params:
- if t in all_params:
- new_params.append(t)
- all_params.remove(t)
- params = new_params
- assert not all_params, all_params
- return params
-
-
+def _deduplicate(params):
+ # Weed out strict duplicates, preserving the first of each occurrence.
+ all_params = set(params)
+ if len(all_params) < len(params):
+ new_params = []
+ for t in params:
+ if t in all_params:
+ new_params.append(t)
+ all_params.remove(t)
+ params = new_params
+ assert not all_params, all_params
+ return params
+
+
def _remove_dups_flatten(parameters):
"""An internal helper for Union creation and substitution: flatten Unions
among parameters, then remove duplicates.
@@ -236,68 +236,68 @@ def _remove_dups_flatten(parameters):
# Flatten out Union[Union[...], ...].
params = []
for p in parameters:
- if isinstance(p, _UnionGenericAlias):
+ if isinstance(p, _UnionGenericAlias):
params.extend(p.__args__)
elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
params.extend(p[1:])
else:
params.append(p)
-
- return tuple(_deduplicate(params))
-
-
-def _flatten_literal_params(parameters):
- """An internal helper for Literal creation: flatten Literals among parameters"""
- params = []
- for p in parameters:
- if isinstance(p, _LiteralGenericAlias):
- params.extend(p.__args__)
- else:
- params.append(p)
+
+ return tuple(_deduplicate(params))
+
+
+def _flatten_literal_params(parameters):
+ """An internal helper for Literal creation: flatten Literals among parameters"""
+ params = []
+ for p in parameters:
+ if isinstance(p, _LiteralGenericAlias):
+ params.extend(p.__args__)
+ else:
+ params.append(p)
return tuple(params)
_cleanups = []
-def _tp_cache(func=None, /, *, typed=False):
+def _tp_cache(func=None, /, *, typed=False):
"""Internal wrapper caching __getitem__ of generic types with a fallback to
original function for non-hashable arguments.
"""
- def decorator(func):
- cached = functools.lru_cache(typed=typed)(func)
- _cleanups.append(cached.cache_clear)
-
- @functools.wraps(func)
- def inner(*args, **kwds):
- try:
- return cached(*args, **kwds)
- except TypeError:
- pass # All real errors (not unhashable args) are raised below.
- return func(*args, **kwds)
- return inner
-
- if func is not None:
- return decorator(func)
-
- return decorator
-
-def _eval_type(t, globalns, localns, recursive_guard=frozenset()):
- """Evaluate all forward references in the given type t.
+ def decorator(func):
+ cached = functools.lru_cache(typed=typed)(func)
+ _cleanups.append(cached.cache_clear)
+
+ @functools.wraps(func)
+ def inner(*args, **kwds):
+ try:
+ return cached(*args, **kwds)
+ except TypeError:
+ pass # All real errors (not unhashable args) are raised below.
+ return func(*args, **kwds)
+ return inner
+
+ if func is not None:
+ return decorator(func)
+
+ return decorator
+
+def _eval_type(t, globalns, localns, recursive_guard=frozenset()):
+ """Evaluate all forward references in the given type t.
For use of globalns and localns see the docstring for get_type_hints().
-    recursive_guard is used to prevent infinite recursion
- with recursive ForwardRef.
+    recursive_guard is used to prevent infinite recursion
+ with recursive ForwardRef.
"""
if isinstance(t, ForwardRef):
- return t._evaluate(globalns, localns, recursive_guard)
- if isinstance(t, (_GenericAlias, GenericAlias)):
- ev_args = tuple(_eval_type(a, globalns, localns, recursive_guard) for a in t.__args__)
+ return t._evaluate(globalns, localns, recursive_guard)
+ if isinstance(t, (_GenericAlias, GenericAlias)):
+ ev_args = tuple(_eval_type(a, globalns, localns, recursive_guard) for a in t.__args__)
if ev_args == t.__args__:
return t
- if isinstance(t, GenericAlias):
- return GenericAlias(t.__origin__, ev_args)
- else:
- return t.copy_with(ev_args)
+ if isinstance(t, GenericAlias):
+ return GenericAlias(t.__origin__, ev_args)
+ else:
+ return t.copy_with(ev_args)
return t
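
A minimal sketch of how this evaluation is exercised through the public API: get_type_hints() resolves string annotations stored as ForwardRef. Illustration only; the class is assumed to be defined at module level:

    from typing import get_type_hints

    class Node:
        next: "Node"        # stored as a ForwardRef until evaluated

    # get_type_hints() drives the evaluation and resolves the string to the class
    assert get_type_hints(Node)["next"] is Node
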
@@ -306,13 +306,13 @@ class _Final:
__slots__ = ('__weakref__',)
- def __init_subclass__(self, /, *args, **kwds):
+ def __init_subclass__(self, /, *args, **kwds):
if '_root' not in kwds:
raise TypeError("Cannot subclass special typing classes")
class _Immutable:
"""Mixin to indicate that object should not be copied."""
- __slots__ = ()
+ __slots__ = ()
def __copy__(self):
return self
@@ -321,18 +321,18 @@ class _Immutable:
return self
-# Internal indicator of special typing constructs.
-# See __doc__ instance attribute for specific docs.
-class _SpecialForm(_Final, _root=True):
- __slots__ = ('_name', '__doc__', '_getitem')
+# Internal indicator of special typing constructs.
+# See __doc__ instance attribute for specific docs.
+class _SpecialForm(_Final, _root=True):
+ __slots__ = ('_name', '__doc__', '_getitem')
- def __init__(self, getitem):
- self._getitem = getitem
- self._name = getitem.__name__
- self.__doc__ = getitem.__doc__
+ def __init__(self, getitem):
+ self._getitem = getitem
+ self._name = getitem.__name__
+ self.__doc__ = getitem.__doc__
- def __mro_entries__(self, bases):
- raise TypeError(f"Cannot subclass {self!r}")
+ def __mro_entries__(self, bases):
+ raise TypeError(f"Cannot subclass {self!r}")
def __repr__(self):
return 'typing.' + self._name
@@ -351,18 +351,18 @@ class _SpecialForm(_Final, _root=True):
@_tp_cache
def __getitem__(self, parameters):
- return self._getitem(self, parameters)
-
-
-class _LiteralSpecialForm(_SpecialForm, _root=True):
- def __getitem__(self, parameters):
- if not isinstance(parameters, tuple):
- parameters = (parameters,)
- return self._getitem(self, *parameters)
-
-
-@_SpecialForm
-def Any(self, parameters):
+ return self._getitem(self, parameters)
+
+
+class _LiteralSpecialForm(_SpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ return self._getitem(self, *parameters)
+
+
+@_SpecialForm
+def Any(self, parameters):
"""Special type indicating an unconstrained type.
- Any is compatible with every type.
@@ -372,11 +372,11 @@ def Any(self, parameters):
Note that all the above statements are true from the point of view of
static type checkers. At runtime, Any should not be used with instance
or class checks.
- """
- raise TypeError(f"{self} is not subscriptable")
+ """
+ raise TypeError(f"{self} is not subscriptable")
-@_SpecialForm
-def NoReturn(self, parameters):
+@_SpecialForm
+def NoReturn(self, parameters):
"""Special type indicating functions that never return.
Example::
@@ -387,11 +387,11 @@ def NoReturn(self, parameters):
This type is invalid in other positions, e.g., ``List[NoReturn]``
will fail in static type checkers.
- """
- raise TypeError(f"{self} is not subscriptable")
+ """
+ raise TypeError(f"{self} is not subscriptable")
-@_SpecialForm
-def ClassVar(self, parameters):
+@_SpecialForm
+def ClassVar(self, parameters):
"""Special type construct to mark class variables.
An annotation wrapped in ClassVar indicates that a given
@@ -406,33 +406,33 @@ def ClassVar(self, parameters):
Note that ClassVar is not a class itself, and should not
be used with isinstance() or issubclass().
- """
- item = _type_check(parameters, f'{self} accepts only single type.')
- return _GenericAlias(self, (item,))
-
-@_SpecialForm
-def Final(self, parameters):
- """Special typing construct to indicate final names to type checkers.
-
- A final name cannot be re-assigned or overridden in a subclass.
- For example:
-
- MAX_SIZE: Final = 9000
- MAX_SIZE += 1 # Error reported by type checker
-
- class Connection:
- TIMEOUT: Final[int] = 10
-
- class FastConnector(Connection):
- TIMEOUT = 1 # Error reported by type checker
-
- There is no runtime checking of these properties.
- """
- item = _type_check(parameters, f'{self} accepts only single type.')
- return _GenericAlias(self, (item,))
-
-@_SpecialForm
-def Union(self, parameters):
+ """
+ item = _type_check(parameters, f'{self} accepts only single type.')
+ return _GenericAlias(self, (item,))
+
+@_SpecialForm
+def Final(self, parameters):
+ """Special typing construct to indicate final names to type checkers.
+
+ A final name cannot be re-assigned or overridden in a subclass.
+ For example:
+
+ MAX_SIZE: Final = 9000
+ MAX_SIZE += 1 # Error reported by type checker
+
+ class Connection:
+ TIMEOUT: Final[int] = 10
+
+ class FastConnector(Connection):
+ TIMEOUT = 1 # Error reported by type checker
+
+ There is no runtime checking of these properties.
+ """
+ item = _type_check(parameters, f'{self} accepts only single type.')
+ return _GenericAlias(self, (item,))
+
+@_SpecialForm
+def Union(self, parameters):
"""Union type; Union[X, Y] means either X or Y.
To define a union, use e.g. Union[int, str]. Details:
@@ -457,71 +457,71 @@ def Union(self, parameters):
- You cannot subclass or instantiate a union.
- You can use Optional[X] as a shorthand for Union[X, None].
- """
- if parameters == ():
- raise TypeError("Cannot take a Union of no types.")
- if not isinstance(parameters, tuple):
- parameters = (parameters,)
- msg = "Union[arg, ...]: each arg must be a type."
- parameters = tuple(_type_check(p, msg) for p in parameters)
- parameters = _remove_dups_flatten(parameters)
- if len(parameters) == 1:
- return parameters[0]
- return _UnionGenericAlias(self, parameters)
-
-@_SpecialForm
-def Optional(self, parameters):
+ """
+ if parameters == ():
+ raise TypeError("Cannot take a Union of no types.")
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ msg = "Union[arg, ...]: each arg must be a type."
+ parameters = tuple(_type_check(p, msg) for p in parameters)
+ parameters = _remove_dups_flatten(parameters)
+ if len(parameters) == 1:
+ return parameters[0]
+ return _UnionGenericAlias(self, parameters)
+
+@_SpecialForm
+def Optional(self, parameters):
"""Optional type.
Optional[X] is equivalent to Union[X, None].
- """
- arg = _type_check(parameters, f"{self} requires a single type.")
- return Union[arg, type(None)]
-
-@_LiteralSpecialForm
-@_tp_cache(typed=True)
-def Literal(self, *parameters):
- """Special typing form to define literal types (a.k.a. value types).
-
- This form can be used to indicate to type checkers that the corresponding
- variable or function parameter has a value equivalent to the provided
- literal (or one of several literals):
-
- def validate_simple(data: Any) -> Literal[True]: # always returns True
- ...
-
- MODE = Literal['r', 'rb', 'w', 'wb']
- def open_helper(file: str, mode: MODE) -> str:
- ...
-
- open_helper('/some/path', 'r') # Passes type check
- open_helper('/other/path', 'typo') # Error in type checker
-
- Literal[...] cannot be subclassed. At runtime, an arbitrary value
- is allowed as type argument to Literal[...], but type checkers may
- impose restrictions.
- """
- # There is no '_type_check' call because arguments to Literal[...] are
- # values, not types.
- parameters = _flatten_literal_params(parameters)
-
- try:
- parameters = tuple(p for p, _ in _deduplicate(list(_value_and_type_iter(parameters))))
- except TypeError: # unhashable parameters
- pass
-
- return _LiteralGenericAlias(self, parameters)
-
-
+ """
+ arg = _type_check(parameters, f"{self} requires a single type.")
+ return Union[arg, type(None)]
+
+@_LiteralSpecialForm
+@_tp_cache(typed=True)
+def Literal(self, *parameters):
+ """Special typing form to define literal types (a.k.a. value types).
+
+ This form can be used to indicate to type checkers that the corresponding
+ variable or function parameter has a value equivalent to the provided
+ literal (or one of several literals):
+
+ def validate_simple(data: Any) -> Literal[True]: # always returns True
+ ...
+
+ MODE = Literal['r', 'rb', 'w', 'wb']
+ def open_helper(file: str, mode: MODE) -> str:
+ ...
+
+ open_helper('/some/path', 'r') # Passes type check
+ open_helper('/other/path', 'typo') # Error in type checker
+
+ Literal[...] cannot be subclassed. At runtime, an arbitrary value
+ is allowed as type argument to Literal[...], but type checkers may
+ impose restrictions.
+ """
+ # There is no '_type_check' call because arguments to Literal[...] are
+ # values, not types.
+ parameters = _flatten_literal_params(parameters)
+
+ try:
+ parameters = tuple(p for p, _ in _deduplicate(list(_value_and_type_iter(parameters))))
+ except TypeError: # unhashable parameters
+ pass
+
+ return _LiteralGenericAlias(self, parameters)
+
+
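
A short usage sketch for the Literal form defined above (assuming CPython 3.9.1+ semantics, where nested Literals are flattened and duplicate values removed); not part of the patch:

    from typing import Literal, get_args

    Mode = Literal["r", "rb", "w", "wb"]

    def open_helper(file: str, mode: Mode) -> str:
        ...

    # Nested Literals are flattened and duplicate values removed
    assert get_args(Literal["r", Literal["r", "w"]]) == ("r", "w")
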
class ForwardRef(_Final, _root=True):
"""Internal wrapper to hold a forward reference."""
__slots__ = ('__forward_arg__', '__forward_code__',
'__forward_evaluated__', '__forward_value__',
- '__forward_is_argument__', '__forward_is_class__',
- '__forward_module__')
+ '__forward_is_argument__', '__forward_is_class__',
+ '__forward_module__')
- def __init__(self, arg, is_argument=True, module=None, *, is_class=False):
+ def __init__(self, arg, is_argument=True, module=None, *, is_class=False):
if not isinstance(arg, str):
raise TypeError(f"Forward reference must be a string -- got {arg!r}")
try:
@@ -533,12 +533,12 @@ class ForwardRef(_Final, _root=True):
self.__forward_evaluated__ = False
self.__forward_value__ = None
self.__forward_is_argument__ = is_argument
- self.__forward_is_class__ = is_class
- self.__forward_module__ = module
+ self.__forward_is_class__ = is_class
+ self.__forward_module__ = module
- def _evaluate(self, globalns, localns, recursive_guard):
- if self.__forward_arg__ in recursive_guard:
- return self
+ def _evaluate(self, globalns, localns, recursive_guard):
+ if self.__forward_arg__ in recursive_guard:
+ return self
if not self.__forward_evaluated__ or localns is not globalns:
if globalns is None and localns is None:
globalns = localns = {}
@@ -546,32 +546,32 @@ class ForwardRef(_Final, _root=True):
globalns = localns
elif localns is None:
localns = globalns
- if self.__forward_module__ is not None:
- globalns = getattr(
- sys.modules.get(self.__forward_module__, None), '__dict__', globalns
- )
- type_ = _type_check(
+ if self.__forward_module__ is not None:
+ globalns = getattr(
+ sys.modules.get(self.__forward_module__, None), '__dict__', globalns
+ )
+ type_ = _type_check(
eval(self.__forward_code__, globalns, localns),
"Forward references must evaluate to types.",
- is_argument=self.__forward_is_argument__,
- is_class=self.__forward_is_class__,
- )
- self.__forward_value__ = _eval_type(
- type_, globalns, localns, recursive_guard | {self.__forward_arg__}
- )
+ is_argument=self.__forward_is_argument__,
+ is_class=self.__forward_is_class__,
+ )
+ self.__forward_value__ = _eval_type(
+ type_, globalns, localns, recursive_guard | {self.__forward_arg__}
+ )
self.__forward_evaluated__ = True
return self.__forward_value__
def __eq__(self, other):
if not isinstance(other, ForwardRef):
return NotImplemented
- if self.__forward_evaluated__ and other.__forward_evaluated__:
- return (self.__forward_arg__ == other.__forward_arg__ and
- self.__forward_value__ == other.__forward_value__)
- return self.__forward_arg__ == other.__forward_arg__
+ if self.__forward_evaluated__ and other.__forward_evaluated__:
+ return (self.__forward_arg__ == other.__forward_arg__ and
+ self.__forward_value__ == other.__forward_value__)
+ return self.__forward_arg__ == other.__forward_arg__
def __hash__(self):
- return hash(self.__forward_arg__)
+ return hash(self.__forward_arg__)
def __repr__(self):
return f'ForwardRef({self.__forward_arg__!r})'
@@ -622,7 +622,7 @@ class TypeVar(_Final, _Immutable, _root=True):
"""
__slots__ = ('__name__', '__bound__', '__constraints__',
- '__covariant__', '__contravariant__', '__dict__')
+ '__covariant__', '__contravariant__', '__dict__')
def __init__(self, name, *constraints, bound=None,
covariant=False, contravariant=False):
@@ -641,10 +641,10 @@ class TypeVar(_Final, _Immutable, _root=True):
self.__bound__ = _type_check(bound, "Bound must be a type.")
else:
self.__bound__ = None
- try:
- def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') # for pickling
- except (AttributeError, ValueError):
- def_mod = None
+ try:
+ def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') # for pickling
+ except (AttributeError, ValueError):
+ def_mod = None
if def_mod != 'typing':
self.__module__ = def_mod
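
For context, a minimal TypeVar usage sketch matching the attributes set up in __init__ above:

    from typing import List, TypeVar

    T = TypeVar("T")                 # plain, invariant type variable
    S = TypeVar("S", bound=str)      # upper bound; stored in __bound__
    N = TypeVar("N", int, float)     # constrained; stored in __constraints__

    assert S.__bound__ is str
    assert N.__constraints__ == (int, float)
    assert List[T].__parameters__ == (T,)
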
@@ -664,7 +664,7 @@ class TypeVar(_Final, _Immutable, _root=True):
def _is_dunder(attr):
return attr.startswith('__') and attr.endswith('__')
-class _BaseGenericAlias(_Final, _root=True):
+class _BaseGenericAlias(_Final, _root=True):
"""The central part of internal API.
This represents a generic version of type 'origin' with type arguments 'params'.
@@ -673,70 +673,70 @@ class _BaseGenericAlias(_Final, _root=True):
have 'name' always set. If 'inst' is False, then the alias can't be instantiated,
this is used by e.g. typing.List and typing.Dict.
"""
- def __init__(self, origin, *, inst=True, name=None):
+ def __init__(self, origin, *, inst=True, name=None):
self._inst = inst
self._name = name
- self.__origin__ = origin
- self.__slots__ = None # This is not documented.
-
- def __call__(self, *args, **kwargs):
- if not self._inst:
- raise TypeError(f"Type {self._name} cannot be instantiated; "
- f"use {self.__origin__.__name__}() instead")
- result = self.__origin__(*args, **kwargs)
- try:
- result.__orig_class__ = self
- except AttributeError:
- pass
- return result
-
- def __mro_entries__(self, bases):
- res = []
- if self.__origin__ not in bases:
- res.append(self.__origin__)
- i = bases.index(self)
- for b in bases[i+1:]:
- if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic):
- break
- else:
- res.append(Generic)
- return tuple(res)
-
- def __getattr__(self, attr):
- # We are careful for copy and pickle.
- # Also for simplicity we just don't relay all dunder names
- if '__origin__' in self.__dict__ and not _is_dunder(attr):
- return getattr(self.__origin__, attr)
- raise AttributeError(attr)
-
- def __setattr__(self, attr, val):
- if _is_dunder(attr) or attr in ('_name', '_inst', '_nparams'):
- super().__setattr__(attr, val)
- else:
- setattr(self.__origin__, attr, val)
-
- def __instancecheck__(self, obj):
- return self.__subclasscheck__(type(obj))
-
- def __subclasscheck__(self, cls):
- raise TypeError("Subscripted generics cannot be used with"
- " class and instance checks")
-
-
-# Special typing constructs Union, Optional, Generic, Callable and Tuple
-# use three special attributes for internal bookkeeping of generic types:
-# * __parameters__ is a tuple of unique free type parameters of a generic
-# type, for example, Dict[T, T].__parameters__ == (T,);
-# * __origin__ keeps a reference to a type that was subscripted,
-# e.g., Union[T, int].__origin__ == Union, or the non-generic version of
-# the type.
-# * __args__ is a tuple of all arguments used in subscripting,
-# e.g., Dict[T, int].__args__ == (T, int).
-
-
-class _GenericAlias(_BaseGenericAlias, _root=True):
- def __init__(self, origin, params, *, inst=True, name=None):
- super().__init__(origin, inst=inst, name=name)
+ self.__origin__ = origin
+ self.__slots__ = None # This is not documented.
+
+ def __call__(self, *args, **kwargs):
+ if not self._inst:
+ raise TypeError(f"Type {self._name} cannot be instantiated; "
+ f"use {self.__origin__.__name__}() instead")
+ result = self.__origin__(*args, **kwargs)
+ try:
+ result.__orig_class__ = self
+ except AttributeError:
+ pass
+ return result
+
+ def __mro_entries__(self, bases):
+ res = []
+ if self.__origin__ not in bases:
+ res.append(self.__origin__)
+ i = bases.index(self)
+ for b in bases[i+1:]:
+ if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic):
+ break
+ else:
+ res.append(Generic)
+ return tuple(res)
+
+ def __getattr__(self, attr):
+ # We are careful for copy and pickle.
+ # Also for simplicity we just don't relay all dunder names
+ if '__origin__' in self.__dict__ and not _is_dunder(attr):
+ return getattr(self.__origin__, attr)
+ raise AttributeError(attr)
+
+ def __setattr__(self, attr, val):
+ if _is_dunder(attr) or attr in ('_name', '_inst', '_nparams'):
+ super().__setattr__(attr, val)
+ else:
+ setattr(self.__origin__, attr, val)
+
+ def __instancecheck__(self, obj):
+ return self.__subclasscheck__(type(obj))
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Subscripted generics cannot be used with"
+ " class and instance checks")
+
+
+# Special typing constructs Union, Optional, Generic, Callable and Tuple
+# use three special attributes for internal bookkeeping of generic types:
+# * __parameters__ is a tuple of unique free type parameters of a generic
+# type, for example, Dict[T, T].__parameters__ == (T,);
+# * __origin__ keeps a reference to a type that was subscripted,
+# e.g., Union[T, int].__origin__ == Union, or the non-generic version of
+# the type.
+# * __args__ is a tuple of all arguments used in subscripting,
+# e.g., Dict[T, int].__args__ == (T, int).
+
+
+class _GenericAlias(_BaseGenericAlias, _root=True):
+ def __init__(self, origin, params, *, inst=True, name=None):
+ super().__init__(origin, inst=inst, name=name)
if not isinstance(params, tuple):
params = (params,)
self.__args__ = tuple(... if a is _TypingEllipsis else
@@ -746,218 +746,218 @@ class _GenericAlias(_BaseGenericAlias, _root=True):
if not name:
self.__module__ = origin.__module__
- def __eq__(self, other):
- if not isinstance(other, _GenericAlias):
- return NotImplemented
- return (self.__origin__ == other.__origin__
- and self.__args__ == other.__args__)
-
- def __hash__(self):
- return hash((self.__origin__, self.__args__))
-
+ def __eq__(self, other):
+ if not isinstance(other, _GenericAlias):
+ return NotImplemented
+ return (self.__origin__ == other.__origin__
+ and self.__args__ == other.__args__)
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__args__))
+
@_tp_cache
def __getitem__(self, params):
- if self.__origin__ in (Generic, Protocol):
- # Can't subscript Generic[...] or Protocol[...].
+ if self.__origin__ in (Generic, Protocol):
+ # Can't subscript Generic[...] or Protocol[...].
raise TypeError(f"Cannot subscript already-subscripted {self}")
if not isinstance(params, tuple):
params = (params,)
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
- _check_generic(self, params, len(self.__parameters__))
-
- subst = dict(zip(self.__parameters__, params))
- new_args = []
- for arg in self.__args__:
- if isinstance(arg, TypeVar):
- arg = subst[arg]
- elif isinstance(arg, (_GenericAlias, GenericAlias)):
- subparams = arg.__parameters__
- if subparams:
- subargs = tuple(subst[x] for x in subparams)
- arg = arg[subargs]
- new_args.append(arg)
- return self.copy_with(tuple(new_args))
-
+ _check_generic(self, params, len(self.__parameters__))
+
+ subst = dict(zip(self.__parameters__, params))
+ new_args = []
+ for arg in self.__args__:
+ if isinstance(arg, TypeVar):
+ arg = subst[arg]
+ elif isinstance(arg, (_GenericAlias, GenericAlias)):
+ subparams = arg.__parameters__
+ if subparams:
+ subargs = tuple(subst[x] for x in subparams)
+ arg = arg[subargs]
+ new_args.append(arg)
+ return self.copy_with(tuple(new_args))
+
def copy_with(self, params):
- return self.__class__(self.__origin__, params, name=self._name, inst=self._inst)
+ return self.__class__(self.__origin__, params, name=self._name, inst=self._inst)
def __repr__(self):
- if self._name:
- name = 'typing.' + self._name
- else:
- name = _type_repr(self.__origin__)
- args = ", ".join([_type_repr(a) for a in self.__args__])
- return f'{name}[{args}]'
-
- def __reduce__(self):
- if self._name:
- origin = globals()[self._name]
- else:
- origin = self.__origin__
- args = tuple(self.__args__)
- if len(args) == 1 and not isinstance(args[0], tuple):
- args, = args
- return operator.getitem, (origin, args)
+ if self._name:
+ name = 'typing.' + self._name
+ else:
+ name = _type_repr(self.__origin__)
+ args = ", ".join([_type_repr(a) for a in self.__args__])
+ return f'{name}[{args}]'
+
+ def __reduce__(self):
+ if self._name:
+ origin = globals()[self._name]
+ else:
+ origin = self.__origin__
+ args = tuple(self.__args__)
+ if len(args) == 1 and not isinstance(args[0], tuple):
+ args, = args
+ return operator.getitem, (origin, args)
def __mro_entries__(self, bases):
if self._name: # generic version of an ABC or built-in class
- return super().__mro_entries__(bases)
+ return super().__mro_entries__(bases)
if self.__origin__ is Generic:
- if Protocol in bases:
- return ()
+ if Protocol in bases:
+ return ()
i = bases.index(self)
for b in bases[i+1:]:
- if isinstance(b, _BaseGenericAlias) and b is not self:
+ if isinstance(b, _BaseGenericAlias) and b is not self:
return ()
return (self.__origin__,)
-# _nparams is the number of accepted parameters, e.g. 0 for Hashable,
-# 1 for List and 2 for Dict. It may be -1 if variable number of
-# parameters are accepted (needs custom __getitem__).
-
-class _SpecialGenericAlias(_BaseGenericAlias, _root=True):
- def __init__(self, origin, nparams, *, inst=True, name=None):
- if name is None:
- name = origin.__name__
- super().__init__(origin, inst=inst, name=name)
- self._nparams = nparams
- if origin.__module__ == 'builtins':
- self.__doc__ = f'A generic version of {origin.__qualname__}.'
+# _nparams is the number of accepted parameters, e.g. 0 for Hashable,
+# 1 for List and 2 for Dict. It may be -1 if variable number of
+# parameters are accepted (needs custom __getitem__).
+
+class _SpecialGenericAlias(_BaseGenericAlias, _root=True):
+ def __init__(self, origin, nparams, *, inst=True, name=None):
+ if name is None:
+ name = origin.__name__
+ super().__init__(origin, inst=inst, name=name)
+ self._nparams = nparams
+ if origin.__module__ == 'builtins':
+ self.__doc__ = f'A generic version of {origin.__qualname__}.'
else:
- self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.'
-
- @_tp_cache
- def __getitem__(self, params):
- if not isinstance(params, tuple):
- params = (params,)
- msg = "Parameters to generic types must be types."
- params = tuple(_type_check(p, msg) for p in params)
- _check_generic(self, params, self._nparams)
- return self.copy_with(params)
-
- def copy_with(self, params):
- return _GenericAlias(self.__origin__, params,
- name=self._name, inst=self._inst)
-
- def __repr__(self):
- return 'typing.' + self._name
-
+ self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.'
+
+ @_tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ msg = "Parameters to generic types must be types."
+ params = tuple(_type_check(p, msg) for p in params)
+ _check_generic(self, params, self._nparams)
+ return self.copy_with(params)
+
+ def copy_with(self, params):
+ return _GenericAlias(self.__origin__, params,
+ name=self._name, inst=self._inst)
+
+ def __repr__(self):
+ return 'typing.' + self._name
+
def __subclasscheck__(self, cls):
- if isinstance(cls, _SpecialGenericAlias):
- return issubclass(cls.__origin__, self.__origin__)
- if not isinstance(cls, _GenericAlias):
- return issubclass(cls, self.__origin__)
- return super().__subclasscheck__(cls)
+ if isinstance(cls, _SpecialGenericAlias):
+ return issubclass(cls.__origin__, self.__origin__)
+ if not isinstance(cls, _GenericAlias):
+ return issubclass(cls, self.__origin__)
+ return super().__subclasscheck__(cls)
def __reduce__(self):
- return self._name
-
-
-class _CallableGenericAlias(_GenericAlias, _root=True):
- def __repr__(self):
- assert self._name == 'Callable'
- if len(self.__args__) == 2 and self.__args__[0] is Ellipsis:
- return super().__repr__()
- return (f'typing.Callable'
- f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], '
- f'{_type_repr(self.__args__[-1])}]')
-
- def __reduce__(self):
- args = self.__args__
- if not (len(args) == 2 and args[0] is ...):
- args = list(args[:-1]), args[-1]
- return operator.getitem, (Callable, args)
-
-
-class _CallableType(_SpecialGenericAlias, _root=True):
- def copy_with(self, params):
- return _CallableGenericAlias(self.__origin__, params,
- name=self._name, inst=self._inst)
-
+ return self._name
+
+
+class _CallableGenericAlias(_GenericAlias, _root=True):
+ def __repr__(self):
+ assert self._name == 'Callable'
+ if len(self.__args__) == 2 and self.__args__[0] is Ellipsis:
+ return super().__repr__()
+ return (f'typing.Callable'
+ f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], '
+ f'{_type_repr(self.__args__[-1])}]')
+
+ def __reduce__(self):
+ args = self.__args__
+ if not (len(args) == 2 and args[0] is ...):
+ args = list(args[:-1]), args[-1]
+ return operator.getitem, (Callable, args)
+
+
+class _CallableType(_SpecialGenericAlias, _root=True):
+ def copy_with(self, params):
+ return _CallableGenericAlias(self.__origin__, params,
+ name=self._name, inst=self._inst)
+
def __getitem__(self, params):
if not isinstance(params, tuple) or len(params) != 2:
raise TypeError("Callable must be used as "
"Callable[[arg, ...], result].")
args, result = params
- # This relaxes what args can be on purpose to allow things like
- # PEP 612 ParamSpec. Responsibility for whether a user is using
- # Callable[...] properly is deferred to static type checkers.
- if isinstance(args, list):
- params = (tuple(args), result)
+ # This relaxes what args can be on purpose to allow things like
+ # PEP 612 ParamSpec. Responsibility for whether a user is using
+ # Callable[...] properly is deferred to static type checkers.
+ if isinstance(args, list):
+ params = (tuple(args), result)
else:
- params = (args, result)
+ params = (args, result)
return self.__getitem_inner__(params)
@_tp_cache
def __getitem_inner__(self, params):
- args, result = params
- msg = "Callable[args, result]: result must be a type."
- result = _type_check(result, msg)
- if args is Ellipsis:
- return self.copy_with((_TypingEllipsis, result))
- if not isinstance(args, tuple):
- args = (args,)
- args = tuple(_type_convert(arg) for arg in args)
- params = args + (result,)
- return self.copy_with(params)
-
-
-class _TupleType(_SpecialGenericAlias, _root=True):
- @_tp_cache
- def __getitem__(self, params):
- if params == ():
- return self.copy_with((_TypingEmpty,))
- if not isinstance(params, tuple):
- params = (params,)
- if len(params) == 2 and params[1] is ...:
- msg = "Tuple[t, ...]: t must be a type."
- p = _type_check(params[0], msg)
- return self.copy_with((p, _TypingEllipsis))
- msg = "Tuple[t0, t1, ...]: each t must be a type."
- params = tuple(_type_check(p, msg) for p in params)
- return self.copy_with(params)
-
-
-class _UnionGenericAlias(_GenericAlias, _root=True):
- def copy_with(self, params):
- return Union[params]
-
- def __eq__(self, other):
- if not isinstance(other, _UnionGenericAlias):
- return NotImplemented
- return set(self.__args__) == set(other.__args__)
-
- def __hash__(self):
- return hash(frozenset(self.__args__))
-
- def __repr__(self):
- args = self.__args__
- if len(args) == 2:
- if args[0] is type(None):
- return f'typing.Optional[{_type_repr(args[1])}]'
- elif args[1] is type(None):
- return f'typing.Optional[{_type_repr(args[0])}]'
- return super().__repr__()
-
-
-def _value_and_type_iter(parameters):
- return ((p, type(p)) for p in parameters)
-
-
-class _LiteralGenericAlias(_GenericAlias, _root=True):
-
- def __eq__(self, other):
- if not isinstance(other, _LiteralGenericAlias):
- return NotImplemented
-
- return set(_value_and_type_iter(self.__args__)) == set(_value_and_type_iter(other.__args__))
-
- def __hash__(self):
- return hash(frozenset(_value_and_type_iter(self.__args__)))
-
-
+ args, result = params
+ msg = "Callable[args, result]: result must be a type."
+ result = _type_check(result, msg)
+ if args is Ellipsis:
+ return self.copy_with((_TypingEllipsis, result))
+ if not isinstance(args, tuple):
+ args = (args,)
+ args = tuple(_type_convert(arg) for arg in args)
+ params = args + (result,)
+ return self.copy_with(params)
+
+
+class _TupleType(_SpecialGenericAlias, _root=True):
+ @_tp_cache
+ def __getitem__(self, params):
+ if params == ():
+ return self.copy_with((_TypingEmpty,))
+ if not isinstance(params, tuple):
+ params = (params,)
+ if len(params) == 2 and params[1] is ...:
+ msg = "Tuple[t, ...]: t must be a type."
+ p = _type_check(params[0], msg)
+ return self.copy_with((p, _TypingEllipsis))
+ msg = "Tuple[t0, t1, ...]: each t must be a type."
+ params = tuple(_type_check(p, msg) for p in params)
+ return self.copy_with(params)
+
+
+class _UnionGenericAlias(_GenericAlias, _root=True):
+ def copy_with(self, params):
+ return Union[params]
+
+ def __eq__(self, other):
+ if not isinstance(other, _UnionGenericAlias):
+ return NotImplemented
+ return set(self.__args__) == set(other.__args__)
+
+ def __hash__(self):
+ return hash(frozenset(self.__args__))
+
+ def __repr__(self):
+ args = self.__args__
+ if len(args) == 2:
+ if args[0] is type(None):
+ return f'typing.Optional[{_type_repr(args[1])}]'
+ elif args[1] is type(None):
+ return f'typing.Optional[{_type_repr(args[0])}]'
+ return super().__repr__()
+
+
+def _value_and_type_iter(parameters):
+ return ((p, type(p)) for p in parameters)
+
+
+class _LiteralGenericAlias(_GenericAlias, _root=True):
+
+ def __eq__(self, other):
+ if not isinstance(other, _LiteralGenericAlias):
+ return NotImplemented
+
+ return set(_value_and_type_iter(self.__args__)) == set(_value_and_type_iter(other.__args__))
+
+ def __hash__(self):
+ return hash(frozenset(_value_and_type_iter(self.__args__)))
+
+
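
A hedged sketch of the substitution, Callable and Union behaviour implemented by the alias classes above (assuming CPython 3.9-era semantics):

    from typing import Callable, Dict, TypeVar, Union, get_args

    T = TypeVar("T")

    # Subscripting a parameterized alias substitutes its free type variables
    assert Dict[str, T][int] == Dict[str, int]

    # Callable stores a flattened args/result tuple; get_args() restores the list form
    assert get_args(Callable[[int, str], bool]) == ([int, str], bool)

    # A two-member Union with NoneType is rendered as Optional
    assert repr(Union[int, None]) == 'typing.Optional[int]'
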
class Generic:
"""Abstract base class for generic types.
@@ -979,7 +979,7 @@ class Generic:
return default
"""
__slots__ = ()
- _is_protocol = False
+ _is_protocol = False
@_tp_cache
def __class_getitem__(cls, params):
@@ -990,17 +990,17 @@ class Generic:
f"Parameter list to {cls.__qualname__}[...] cannot be empty")
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
- if cls in (Generic, Protocol):
- # Generic and Protocol can only be subscripted with unique type variables.
+ if cls in (Generic, Protocol):
+ # Generic and Protocol can only be subscripted with unique type variables.
if not all(isinstance(p, TypeVar) for p in params):
raise TypeError(
- f"Parameters to {cls.__name__}[...] must all be type variables")
+ f"Parameters to {cls.__name__}[...] must all be type variables")
if len(set(params)) != len(params):
raise TypeError(
- f"Parameters to {cls.__name__}[...] must all be unique")
+ f"Parameters to {cls.__name__}[...] must all be unique")
else:
# Subscripting a regular Generic subclass.
- _check_generic(cls, params, len(cls.__parameters__))
+ _check_generic(cls, params, len(cls.__parameters__))
return _GenericAlias(cls, params)
def __init_subclass__(cls, *args, **kwargs):
@@ -1009,7 +1009,7 @@ class Generic:
if '__orig_bases__' in cls.__dict__:
error = Generic in cls.__orig_bases__
else:
- error = Generic in cls.__bases__ and cls.__name__ != 'Protocol'
+ error = Generic in cls.__bases__ and cls.__name__ != 'Protocol'
if error:
raise TypeError("Cannot inherit from plain Generic")
if '__orig_bases__' in cls.__dict__:
@@ -1027,7 +1027,7 @@ class Generic:
raise TypeError(
"Cannot inherit from Generic[...] multiple types.")
gvars = base.__parameters__
- if gvars is not None:
+ if gvars is not None:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
@@ -1050,327 +1050,327 @@ class _TypingEllipsis:
"""Internal placeholder for ... (ellipsis)."""
-_TYPING_INTERNALS = ['__parameters__', '__orig_bases__', '__orig_class__',
- '_is_protocol', '_is_runtime_protocol']
-
-_SPECIAL_NAMES = ['__abstractmethods__', '__annotations__', '__dict__', '__doc__',
- '__init__', '__module__', '__new__', '__slots__',
- '__subclasshook__', '__weakref__', '__class_getitem__']
-
-# These special attributes will be not collected as protocol members.
-EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS + _SPECIAL_NAMES + ['_MutableMapping__marker']
-
-
-def _get_protocol_attrs(cls):
- """Collect protocol members from a protocol class objects.
-
- This includes names actually defined in the class dictionary, as well
- as names that appear in annotations. Special names (above) are skipped.
- """
- attrs = set()
- for base in cls.__mro__[:-1]: # without object
- if base.__name__ in ('Protocol', 'Generic'):
- continue
- annotations = getattr(base, '__annotations__', {})
- for attr in list(base.__dict__.keys()) + list(annotations.keys()):
- if not attr.startswith('_abc_') and attr not in EXCLUDED_ATTRIBUTES:
- attrs.add(attr)
- return attrs
-
-
-def _is_callable_members_only(cls):
- # PEP 544 prohibits using issubclass() with protocols that have non-method members.
- return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
-
-
-def _no_init_or_replace_init(self, *args, **kwargs):
- cls = type(self)
-
- if cls._is_protocol:
- raise TypeError('Protocols cannot be instantiated')
-
- # Already using a custom `__init__`. No need to calculate correct
- # `__init__` to call. This can lead to RecursionError. See bpo-45121.
- if cls.__init__ is not _no_init_or_replace_init:
- return
-
- # Initially, `__init__` of a protocol subclass is set to `_no_init_or_replace_init`.
- # The first instantiation of the subclass will call `_no_init_or_replace_init` which
- # searches for a proper new `__init__` in the MRO. The new `__init__`
- # replaces the subclass' old `__init__` (ie `_no_init_or_replace_init`). Subsequent
- # instantiation of the protocol subclass will thus use the new
- # `__init__` and no longer call `_no_init_or_replace_init`.
- for base in cls.__mro__:
- init = base.__dict__.get('__init__', _no_init_or_replace_init)
- if init is not _no_init_or_replace_init:
- cls.__init__ = init
- break
- else:
- # should not happen
- cls.__init__ = object.__init__
-
- cls.__init__(self, *args, **kwargs)
-
-
-
-def _allow_reckless_class_cheks():
- """Allow instance and class checks for special stdlib modules.
-
- The abc and functools modules indiscriminately call isinstance() and
- issubclass() on the whole MRO of a user class, which may contain protocols.
- """
- try:
- return sys._getframe(3).f_globals['__name__'] in ['abc', 'functools']
- except (AttributeError, ValueError): # For platforms without _getframe().
- return True
-
-
-_PROTO_WHITELIST = {
- 'collections.abc': [
- 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
- 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
- ],
- 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
-}
-
-
-class _ProtocolMeta(ABCMeta):
- # This metaclass is really unfortunate and exists only because of
- # the lack of __instancehook__.
- def __instancecheck__(cls, instance):
- # We need this method for situations where attributes are
- # assigned in __init__.
- if ((not getattr(cls, '_is_protocol', False) or
- _is_callable_members_only(cls)) and
- issubclass(instance.__class__, cls)):
- return True
- if cls._is_protocol:
- if all(hasattr(instance, attr) and
- # All *methods* can be blocked by setting them to None.
- (not callable(getattr(cls, attr, None)) or
- getattr(instance, attr) is not None)
- for attr in _get_protocol_attrs(cls)):
- return True
- return super().__instancecheck__(instance)
-
-
-class Protocol(Generic, metaclass=_ProtocolMeta):
- """Base class for protocol classes.
-
- Protocol classes are defined as::
-
- class Proto(Protocol):
- def meth(self) -> int:
- ...
-
- Such classes are primarily used with static type checkers that recognize
- structural subtyping (static duck-typing), for example::
-
- class C:
- def meth(self) -> int:
- return 0
-
- def func(x: Proto) -> int:
- return x.meth()
-
- func(C()) # Passes static type check
-
- See PEP 544 for details. Protocol classes decorated with
- @typing.runtime_checkable act as simple-minded runtime protocols that check
- only the presence of given attributes, ignoring their type signatures.
- Protocol classes can be generic, they are defined as::
-
- class GenProto(Protocol[T]):
- def meth(self) -> T:
- ...
- """
- __slots__ = ()
- _is_protocol = True
- _is_runtime_protocol = False
-
- def __init_subclass__(cls, *args, **kwargs):
- super().__init_subclass__(*args, **kwargs)
-
- # Determine if this is a protocol or a concrete subclass.
- if not cls.__dict__.get('_is_protocol', False):
- cls._is_protocol = any(b is Protocol for b in cls.__bases__)
-
- # Set (or override) the protocol subclass hook.
- def _proto_hook(other):
- if not cls.__dict__.get('_is_protocol', False):
- return NotImplemented
-
- # First, perform various sanity checks.
- if not getattr(cls, '_is_runtime_protocol', False):
- if _allow_reckless_class_cheks():
- return NotImplemented
- raise TypeError("Instance and class checks can only be used with"
- " @runtime_checkable protocols")
- if not _is_callable_members_only(cls):
- if _allow_reckless_class_cheks():
- return NotImplemented
- raise TypeError("Protocols with non-method members"
- " don't support issubclass()")
- if not isinstance(other, type):
- # Same error message as for issubclass(1, int).
- raise TypeError('issubclass() arg 1 must be a class')
-
- # Second, perform the actual structural compatibility check.
- for attr in _get_protocol_attrs(cls):
- for base in other.__mro__:
- # Check if the members appears in the class dictionary...
- if attr in base.__dict__:
- if base.__dict__[attr] is None:
- return NotImplemented
- break
-
- # ...or in annotations, if it is a sub-protocol.
- annotations = getattr(base, '__annotations__', {})
- if (isinstance(annotations, collections.abc.Mapping) and
- attr in annotations and
- issubclass(other, Generic) and other._is_protocol):
- break
- else:
- return NotImplemented
- return True
-
- if '__subclasshook__' not in cls.__dict__:
- cls.__subclasshook__ = _proto_hook
-
- # We have nothing more to do for non-protocols...
- if not cls._is_protocol:
- return
-
- # ... otherwise check consistency of bases, and prohibit instantiation.
- for base in cls.__bases__:
- if not (base in (object, Generic) or
- base.__module__ in _PROTO_WHITELIST and
- base.__name__ in _PROTO_WHITELIST[base.__module__] or
- issubclass(base, Generic) and base._is_protocol):
- raise TypeError('Protocols can only inherit from other'
- ' protocols, got %r' % base)
- cls.__init__ = _no_init_or_replace_init
-
-
-class _AnnotatedAlias(_GenericAlias, _root=True):
- """Runtime representation of an annotated type.
-
- At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
- with extra annotations. The alias behaves like a normal typing alias,
- instantiating is the same as instantiating the underlying type, binding
- it to types is also the same.
- """
- def __init__(self, origin, metadata):
- if isinstance(origin, _AnnotatedAlias):
- metadata = origin.__metadata__ + metadata
- origin = origin.__origin__
- super().__init__(origin, origin)
- self.__metadata__ = metadata
-
- def copy_with(self, params):
- assert len(params) == 1
- new_type = params[0]
- return _AnnotatedAlias(new_type, self.__metadata__)
-
- def __repr__(self):
- return "typing.Annotated[{}, {}]".format(
- _type_repr(self.__origin__),
- ", ".join(repr(a) for a in self.__metadata__)
- )
-
- def __reduce__(self):
- return operator.getitem, (
- Annotated, (self.__origin__,) + self.__metadata__
- )
-
- def __eq__(self, other):
- if not isinstance(other, _AnnotatedAlias):
- return NotImplemented
- return (self.__origin__ == other.__origin__
- and self.__metadata__ == other.__metadata__)
-
- def __hash__(self):
- return hash((self.__origin__, self.__metadata__))
-
-
-class Annotated:
- """Add context specific metadata to a type.
-
- Example: Annotated[int, runtime_check.Unsigned] indicates to the
- hypothetical runtime_check module that this type is an unsigned int.
- Every other consumer of this type can ignore this metadata and treat
- this type as int.
-
- The first argument to Annotated must be a valid type.
-
- Details:
-
- - It's an error to call `Annotated` with less than two arguments.
- - Nested Annotated are flattened::
-
- Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
-
- - Instantiating an annotated type is equivalent to instantiating the
- underlying type::
-
- Annotated[C, Ann1](5) == C(5)
-
- - Annotated can be used as a generic type alias::
-
- Optimized = Annotated[T, runtime.Optimize()]
- Optimized[int] == Annotated[int, runtime.Optimize()]
-
- OptimizedList = Annotated[List[T], runtime.Optimize()]
- OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
- """
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwargs):
- raise TypeError("Type Annotated cannot be instantiated.")
-
- @_tp_cache
- def __class_getitem__(cls, params):
- if not isinstance(params, tuple) or len(params) < 2:
- raise TypeError("Annotated[...] should be used "
- "with at least two arguments (a type and an "
- "annotation).")
- msg = "Annotated[t, ...]: t must be a type."
- origin = _type_check(params[0], msg)
- metadata = tuple(params[1:])
- return _AnnotatedAlias(origin, metadata)
-
- def __init_subclass__(cls, *args, **kwargs):
- raise TypeError(
- "Cannot subclass {}.Annotated".format(cls.__module__)
- )
-
-
-def runtime_checkable(cls):
- """Mark a protocol class as a runtime protocol.
-
- Such protocol can be used with isinstance() and issubclass().
- Raise TypeError if applied to a non-protocol class.
- This allows a simple-minded structural check very similar to
- one trick ponies in collections.abc such as Iterable.
- For example::
-
- @runtime_checkable
- class Closable(Protocol):
- def close(self): ...
-
- assert isinstance(open('/some/file'), Closable)
-
- Warning: this will check only the presence of the required methods,
- not their type signatures!
- """
- if not issubclass(cls, Generic) or not cls._is_protocol:
- raise TypeError('@runtime_checkable can be only applied to protocol classes,'
- ' got %r' % cls)
- cls._is_runtime_protocol = True
- return cls
-
-
+_TYPING_INTERNALS = ['__parameters__', '__orig_bases__', '__orig_class__',
+ '_is_protocol', '_is_runtime_protocol']
+
+_SPECIAL_NAMES = ['__abstractmethods__', '__annotations__', '__dict__', '__doc__',
+ '__init__', '__module__', '__new__', '__slots__',
+ '__subclasshook__', '__weakref__', '__class_getitem__']
+
+# These special attributes will be not collected as protocol members.
+EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS + _SPECIAL_NAMES + ['_MutableMapping__marker']
+
+
+def _get_protocol_attrs(cls):
+ """Collect protocol members from a protocol class objects.
+
+ This includes names actually defined in the class dictionary, as well
+ as names that appear in annotations. Special names (above) are skipped.
+ """
+ attrs = set()
+ for base in cls.__mro__[:-1]: # without object
+ if base.__name__ in ('Protocol', 'Generic'):
+ continue
+ annotations = getattr(base, '__annotations__', {})
+ for attr in list(base.__dict__.keys()) + list(annotations.keys()):
+ if not attr.startswith('_abc_') and attr not in EXCLUDED_ATTRIBUTES:
+ attrs.add(attr)
+ return attrs
+
+
+def _is_callable_members_only(cls):
+ # PEP 544 prohibits using issubclass() with protocols that have non-method members.
+ return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
+
+
+def _no_init_or_replace_init(self, *args, **kwargs):
+ cls = type(self)
+
+ if cls._is_protocol:
+ raise TypeError('Protocols cannot be instantiated')
+
+ # Already using a custom `__init__`. No need to calculate correct
+ # `__init__` to call. This can lead to RecursionError. See bpo-45121.
+ if cls.__init__ is not _no_init_or_replace_init:
+ return
+
+ # Initially, `__init__` of a protocol subclass is set to `_no_init_or_replace_init`.
+ # The first instantiation of the subclass will call `_no_init_or_replace_init` which
+ # searches for a proper new `__init__` in the MRO. The new `__init__`
+ # replaces the subclass' old `__init__` (ie `_no_init_or_replace_init`). Subsequent
+ # instantiation of the protocol subclass will thus use the new
+ # `__init__` and no longer call `_no_init_or_replace_init`.
+ for base in cls.__mro__:
+ init = base.__dict__.get('__init__', _no_init_or_replace_init)
+ if init is not _no_init_or_replace_init:
+ cls.__init__ = init
+ break
+ else:
+ # should not happen
+ cls.__init__ = object.__init__
+
+ cls.__init__(self, *args, **kwargs)
+
+
+
+def _allow_reckless_class_cheks():
+ """Allow instance and class checks for special stdlib modules.
+
+ The abc and functools modules indiscriminately call isinstance() and
+ issubclass() on the whole MRO of a user class, which may contain protocols.
+ """
+ try:
+ return sys._getframe(3).f_globals['__name__'] in ['abc', 'functools']
+ except (AttributeError, ValueError): # For platforms without _getframe().
+ return True
+
+
+_PROTO_WHITELIST = {
+ 'collections.abc': [
+ 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
+ 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
+ ],
+ 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
+}
+
+
+class _ProtocolMeta(ABCMeta):
+ # This metaclass is really unfortunate and exists only because of
+ # the lack of __instancehook__.
+ def __instancecheck__(cls, instance):
+ # We need this method for situations where attributes are
+ # assigned in __init__.
+ if ((not getattr(cls, '_is_protocol', False) or
+ _is_callable_members_only(cls)) and
+ issubclass(instance.__class__, cls)):
+ return True
+ if cls._is_protocol:
+ if all(hasattr(instance, attr) and
+ # All *methods* can be blocked by setting them to None.
+ (not callable(getattr(cls, attr, None)) or
+ getattr(instance, attr) is not None)
+ for attr in _get_protocol_attrs(cls)):
+ return True
+ return super().__instancecheck__(instance)
+
+
+class Protocol(Generic, metaclass=_ProtocolMeta):
+ """Base class for protocol classes.
+
+ Protocol classes are defined as::
+
+ class Proto(Protocol):
+ def meth(self) -> int:
+ ...
+
+ Such classes are primarily used with static type checkers that recognize
+ structural subtyping (static duck-typing), for example::
+
+ class C:
+ def meth(self) -> int:
+ return 0
+
+ def func(x: Proto) -> int:
+ return x.meth()
+
+ func(C()) # Passes static type check
+
+ See PEP 544 for details. Protocol classes decorated with
+ @typing.runtime_checkable act as simple-minded runtime protocols that check
+ only the presence of given attributes, ignoring their type signatures.
+ Protocol classes can be generic, they are defined as::
+
+ class GenProto(Protocol[T]):
+ def meth(self) -> T:
+ ...
+ """
+ __slots__ = ()
+ _is_protocol = True
+ _is_runtime_protocol = False
+
+ def __init_subclass__(cls, *args, **kwargs):
+ super().__init_subclass__(*args, **kwargs)
+
+ # Determine if this is a protocol or a concrete subclass.
+ if not cls.__dict__.get('_is_protocol', False):
+ cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+ # Set (or override) the protocol subclass hook.
+ def _proto_hook(other):
+ if not cls.__dict__.get('_is_protocol', False):
+ return NotImplemented
+
+ # First, perform various sanity checks.
+ if not getattr(cls, '_is_runtime_protocol', False):
+ if _allow_reckless_class_checks():
+ return NotImplemented
+ raise TypeError("Instance and class checks can only be used with"
+ " @runtime_checkable protocols")
+ if not _is_callable_members_only(cls):
+ if _allow_reckless_class_checks():
+ return NotImplemented
+ raise TypeError("Protocols with non-method members"
+ " don't support issubclass()")
+ if not isinstance(other, type):
+ # Same error message as for issubclass(1, int).
+ raise TypeError('issubclass() arg 1 must be a class')
+
+ # Second, perform the actual structural compatibility check.
+ for attr in _get_protocol_attrs(cls):
+ for base in other.__mro__:
+ # Check if the member appears in the class dictionary...
+ if attr in base.__dict__:
+ if base.__dict__[attr] is None:
+ return NotImplemented
+ break
+
+ # ...or in annotations, if it is a sub-protocol.
+ annotations = getattr(base, '__annotations__', {})
+ if (isinstance(annotations, collections.abc.Mapping) and
+ attr in annotations and
+ issubclass(other, Generic) and other._is_protocol):
+ break
+ else:
+ return NotImplemented
+ return True
+
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = _proto_hook
+
+ # We have nothing more to do for non-protocols...
+ if not cls._is_protocol:
+ return
+
+ # ... otherwise check consistency of bases, and prohibit instantiation.
+ for base in cls.__bases__:
+ if not (base in (object, Generic) or
+ base.__module__ in _PROTO_WHITELIST and
+ base.__name__ in _PROTO_WHITELIST[base.__module__] or
+ issubclass(base, Generic) and base._is_protocol):
+ raise TypeError('Protocols can only inherit from other'
+ ' protocols, got %r' % base)
+ cls.__init__ = _no_init_or_replace_init
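
Illustrative sketch (hypothetical `Closable`/`File`/`Blocked` classes) of the structural
`__subclasshook__` installed above, including the rule that setting a member to None opts a class
out of the protocol::

    from typing import Protocol, runtime_checkable

    @runtime_checkable
    class Closable(Protocol):
        def close(self) -> None: ...

    class File:
        def close(self) -> None: ...

    class Blocked:
        close = None                    # explicit opt-out

    assert issubclass(File, Closable)
    assert not issubclass(Blocked, Closable)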
+
+
+class _AnnotatedAlias(_GenericAlias, _root=True):
+ """Runtime representation of an annotated type.
+
+ At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+ with extra annotations. The alias behaves like a normal typing alias:
+ instantiating it is the same as instantiating the underlying type, and
+ binding it to types is also the same.
+ """
+ def __init__(self, origin, metadata):
+ if isinstance(origin, _AnnotatedAlias):
+ metadata = origin.__metadata__ + metadata
+ origin = origin.__origin__
+ super().__init__(origin, origin)
+ self.__metadata__ = metadata
+
+ def copy_with(self, params):
+ assert len(params) == 1
+ new_type = params[0]
+ return _AnnotatedAlias(new_type, self.__metadata__)
+
+ def __repr__(self):
+ return "typing.Annotated[{}, {}]".format(
+ _type_repr(self.__origin__),
+ ", ".join(repr(a) for a in self.__metadata__)
+ )
+
+ def __reduce__(self):
+ return operator.getitem, (
+ Annotated, (self.__origin__,) + self.__metadata__
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, _AnnotatedAlias):
+ return NotImplemented
+ return (self.__origin__ == other.__origin__
+ and self.__metadata__ == other.__metadata__)
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__metadata__))
+
+
+class Annotated:
+ """Add context specific metadata to a type.
+
+ Example: Annotated[int, runtime_check.Unsigned] indicates to the
+ hypothetical runtime_check module that this type is an unsigned int.
+ Every other consumer of this type can ignore this metadata and treat
+ this type as int.
+
+ The first argument to Annotated must be a valid type.
+
+ Details:
+
+ - It's an error to call `Annotated` with fewer than two arguments.
+ - Nested Annotated are flattened::
+
+ Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+ - Instantiating an annotated type is equivalent to instantiating the
+ underlying type::
+
+ Annotated[C, Ann1](5) == C(5)
+
+ - Annotated can be used as a generic type alias::
+
+ Optimized = Annotated[T, runtime.Optimize()]
+ Optimized[int] == Annotated[int, runtime.Optimize()]
+
+ OptimizedList = Annotated[List[T], runtime.Optimize()]
+ OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwargs):
+ raise TypeError("Type Annotated cannot be instantiated.")
+
+ @_tp_cache
+ def __class_getitem__(cls, params):
+ if not isinstance(params, tuple) or len(params) < 2:
+ raise TypeError("Annotated[...] should be used "
+ "with at least two arguments (a type and an "
+ "annotation).")
+ msg = "Annotated[t, ...]: t must be a type."
+ origin = _type_check(params[0], msg)
+ metadata = tuple(params[1:])
+ return _AnnotatedAlias(origin, metadata)
+
+ def __init_subclass__(cls, *args, **kwargs):
+ raise TypeError(
+ "Cannot subclass {}.Annotated".format(cls.__module__)
+ )
+
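Illustrative sketch (hypothetical `Speed` alias and `run` function) of how Annotated metadata is
carried at runtime and stripped by get_type_hints() unless include_extras=True is passed::

    from typing import Annotated, get_args, get_origin, get_type_hints

    Speed = Annotated[float, "metres per second"]

    def run(velocity: Speed) -> None: ...

    assert get_origin(Speed) is Annotated
    assert get_args(Speed) == (float, "metres per second")
    assert get_type_hints(run) == {'velocity': float, 'return': type(None)}
    assert get_type_hints(run, include_extras=True)['velocity'] == Speed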
+
+def runtime_checkable(cls):
+ """Mark a protocol class as a runtime protocol.
+
+ Such a protocol can be used with isinstance() and issubclass().
+ Raise TypeError if applied to a non-protocol class.
+ This allows a simple-minded structural check, very similar to the
+ one-trick ponies in collections.abc such as Iterable.
+ For example::
+
+ @runtime_checkable
+ class Closable(Protocol):
+ def close(self): ...
+
+ assert isinstance(open('/some/file'), Closable)
+
+ Warning: this will check only the presence of the required methods,
+ not their type signatures!
+ """
+ if not issubclass(cls, Generic) or not cls._is_protocol:
+ raise TypeError('@runtime_checkable can be only applied to protocol classes,'
+ ' got %r' % cls)
+ cls._is_runtime_protocol = True
+ return cls
+
+
def cast(typ, val):
"""Cast a value to a type.
@@ -1407,13 +1407,13 @@ _allowed_types = (types.FunctionType, types.BuiltinFunctionType,
WrapperDescriptorType, MethodWrapperType, MethodDescriptorType)
-def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
"""Return type hints for an object.
This is often the same as obj.__annotations__, but it handles
- forward references encoded as string literals, adds Optional[t] if a
- default value equal to None is set and recursively replaces all
- 'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
+ forward references encoded as string literals, adds Optional[t] if a
+ default value equal to None is set and recursively replaces all
+ 'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
The argument may be a module, class, method, or function. The annotations
are returned as a dictionary. For classes, annotations include also
@@ -1454,20 +1454,20 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
if value is None:
value = type(None)
if isinstance(value, str):
- value = ForwardRef(value, is_argument=False, is_class=True)
+ value = ForwardRef(value, is_argument=False, is_class=True)
value = _eval_type(value, base_globals, localns)
hints[name] = value
- return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
+ return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
if globalns is None:
if isinstance(obj, types.ModuleType):
globalns = obj.__dict__
else:
- nsobj = obj
- # Find globalns for the unwrapped object.
- while hasattr(nsobj, '__wrapped__'):
- nsobj = nsobj.__wrapped__
- globalns = getattr(nsobj, '__globals__', {})
+ nsobj = obj
+ # Find globalns for the unwrapped object.
+ while hasattr(nsobj, '__wrapped__'):
+ nsobj = nsobj.__wrapped__
+ globalns = getattr(nsobj, '__globals__', {})
if localns is None:
localns = globalns
elif localns is None:
@@ -1486,82 +1486,82 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
if value is None:
value = type(None)
if isinstance(value, str):
- # class-level forward refs were handled above, this must be either
- # a module-level annotation or a function argument annotation
- value = ForwardRef(
- value,
- is_argument=not isinstance(obj, types.ModuleType),
- is_class=False,
- )
+ # class-level forward refs were handled above, this must be either
+ # a module-level annotation or a function argument annotation
+ value = ForwardRef(
+ value,
+ is_argument=not isinstance(obj, types.ModuleType),
+ is_class=False,
+ )
value = _eval_type(value, globalns, localns)
if name in defaults and defaults[name] is None:
value = Optional[value]
hints[name] = value
- return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
-
-
-def _strip_annotations(t):
- """Strips the annotations from a given type.
- """
- if isinstance(t, _AnnotatedAlias):
- return _strip_annotations(t.__origin__)
- if isinstance(t, _GenericAlias):
- stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
- if stripped_args == t.__args__:
- return t
- return t.copy_with(stripped_args)
- if isinstance(t, GenericAlias):
- stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
- if stripped_args == t.__args__:
- return t
- return GenericAlias(t.__origin__, stripped_args)
- return t
-
-
-def get_origin(tp):
- """Get the unsubscripted version of a type.
-
- This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
- and Annotated. Return None for unsupported types. Examples::
-
- get_origin(Literal[42]) is Literal
- get_origin(int) is None
- get_origin(ClassVar[int]) is ClassVar
- get_origin(Generic) is Generic
- get_origin(Generic[T]) is Generic
- get_origin(Union[T, int]) is Union
- get_origin(List[Tuple[T, T]][int]) == list
- """
- if isinstance(tp, _AnnotatedAlias):
- return Annotated
- if isinstance(tp, (_BaseGenericAlias, GenericAlias)):
- return tp.__origin__
- if tp is Generic:
- return Generic
- return None
-
-
-def get_args(tp):
- """Get type arguments with all substitutions performed.
-
- For unions, basic simplifications used by Union constructor are performed.
- Examples::
- get_args(Dict[str, int]) == (str, int)
- get_args(int) == ()
- get_args(Union[int, Union[T, int], str][int]) == (int, str)
- get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
- get_args(Callable[[], T][int]) == ([], int)
- """
- if isinstance(tp, _AnnotatedAlias):
- return (tp.__origin__,) + tp.__metadata__
- if isinstance(tp, (_GenericAlias, GenericAlias)):
- res = tp.__args__
- if tp.__origin__ is collections.abc.Callable and res[0] is not Ellipsis:
- res = (list(res[:-1]), res[-1])
- return res
- return ()
-
-
+ return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
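
Illustrative sketch (hypothetical `greet` function): string annotations are resolved as forward
references and a None default adds Optional[...]::

    from typing import Optional, get_type_hints

    def greet(name: "str", prefix: str = None) -> str:
        return f"{prefix or ''}{name}"

    hints = get_type_hints(greet)
    assert hints['name'] is str                 # forward reference resolved
    assert hints['prefix'] == Optional[str]     # None default adds Optional
    assert hints['return'] is str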
+
+
+def _strip_annotations(t):
+ """Strips the annotations from a given type.
+ """
+ if isinstance(t, _AnnotatedAlias):
+ return _strip_annotations(t.__origin__)
+ if isinstance(t, _GenericAlias):
+ stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return t.copy_with(stripped_args)
+ if isinstance(t, GenericAlias):
+ stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return GenericAlias(t.__origin__, stripped_args)
+ return t
+
+
+def get_origin(tp):
+ """Get the unsubscripted version of a type.
+
+ This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+ and Annotated. Return None for unsupported types. Examples::
+
+ get_origin(Literal[42]) is Literal
+ get_origin(int) is None
+ get_origin(ClassVar[int]) is ClassVar
+ get_origin(Generic) is Generic
+ get_origin(Generic[T]) is Generic
+ get_origin(Union[T, int]) is Union
+ get_origin(List[Tuple[T, T]][int]) == list
+ """
+ if isinstance(tp, _AnnotatedAlias):
+ return Annotated
+ if isinstance(tp, (_BaseGenericAlias, GenericAlias)):
+ return tp.__origin__
+ if tp is Generic:
+ return Generic
+ return None
+
+
+def get_args(tp):
+ """Get type arguments with all substitutions performed.
+
+ For unions, basic simplifications used by Union constructor are performed.
+ Examples::
+ get_args(Dict[str, int]) == (str, int)
+ get_args(int) == ()
+ get_args(Union[int, Union[T, int], str][int]) == (int, str)
+ get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+ get_args(Callable[[], T][int]) == ([], int)
+ """
+ if isinstance(tp, _AnnotatedAlias):
+ return (tp.__origin__,) + tp.__metadata__
+ if isinstance(tp, (_GenericAlias, GenericAlias)):
+ res = tp.__args__
+ if tp.__origin__ is collections.abc.Callable and res[0] is not Ellipsis:
+ res = (list(res[:-1]), res[-1])
+ return res
+ return ()
+
+
def no_type_check(arg):
"""Decorator to indicate that annotations are not type hints.
@@ -1642,30 +1642,30 @@ def overload(func):
return _overload_dummy
-def final(f):
- """A decorator to indicate final methods and final classes.
+def final(f):
+ """A decorator to indicate final methods and final classes.
- Use this decorator to indicate to type checkers that the decorated
- method cannot be overridden, and decorated class cannot be subclassed.
- For example:
+ Use this decorator to indicate to type checkers that the decorated
+ method cannot be overridden, and decorated class cannot be subclassed.
+ For example:
- class Base:
- @final
- def done(self) -> None:
- ...
- class Sub(Base):
- def done(self) -> None: # Error reported by type checker
- ...
+ class Base:
+ @final
+ def done(self) -> None:
+ ...
+ class Sub(Base):
+ def done(self) -> None: # Error reported by type checker
+ ...
- @final
- class Leaf:
- ...
- class Other(Leaf): # Error reported by type checker
- ...
+ @final
+ class Leaf:
+ ...
+ class Other(Leaf): # Error reported by type checker
+ ...
- There is no runtime checking of these properties.
+ There is no runtime checking of these properties.
"""
- return f
+ return f
# Some unconstrained type variables. These are used by the container types.
@@ -1686,20 +1686,20 @@ AnyStr = TypeVar('AnyStr', bytes, str)
# Various ABCs mimicking those in collections.abc.
-_alias = _SpecialGenericAlias
-
-Hashable = _alias(collections.abc.Hashable, 0) # Not generic.
-Awaitable = _alias(collections.abc.Awaitable, 1)
-Coroutine = _alias(collections.abc.Coroutine, 3)
-AsyncIterable = _alias(collections.abc.AsyncIterable, 1)
-AsyncIterator = _alias(collections.abc.AsyncIterator, 1)
-Iterable = _alias(collections.abc.Iterable, 1)
-Iterator = _alias(collections.abc.Iterator, 1)
-Reversible = _alias(collections.abc.Reversible, 1)
-Sized = _alias(collections.abc.Sized, 0) # Not generic.
-Container = _alias(collections.abc.Container, 1)
-Collection = _alias(collections.abc.Collection, 1)
-Callable = _CallableType(collections.abc.Callable, 2)
+_alias = _SpecialGenericAlias
+
+Hashable = _alias(collections.abc.Hashable, 0) # Not generic.
+Awaitable = _alias(collections.abc.Awaitable, 1)
+Coroutine = _alias(collections.abc.Coroutine, 3)
+AsyncIterable = _alias(collections.abc.AsyncIterable, 1)
+AsyncIterator = _alias(collections.abc.AsyncIterator, 1)
+Iterable = _alias(collections.abc.Iterable, 1)
+Iterator = _alias(collections.abc.Iterator, 1)
+Reversible = _alias(collections.abc.Reversible, 1)
+Sized = _alias(collections.abc.Sized, 0) # Not generic.
+Container = _alias(collections.abc.Container, 1)
+Collection = _alias(collections.abc.Collection, 1)
+Callable = _CallableType(collections.abc.Callable, 2)
Callable.__doc__ = \
"""Callable type; Callable[[int], str] is a function of (int) -> str.
@@ -1710,16 +1710,16 @@ Callable.__doc__ = \
There is no syntax to indicate optional or keyword arguments,
such function types are rarely used as callback types.
"""
-AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet')
-MutableSet = _alias(collections.abc.MutableSet, 1)
+AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet')
+MutableSet = _alias(collections.abc.MutableSet, 1)
# NOTE: Mapping is only covariant in the value type.
-Mapping = _alias(collections.abc.Mapping, 2)
-MutableMapping = _alias(collections.abc.MutableMapping, 2)
-Sequence = _alias(collections.abc.Sequence, 1)
-MutableSequence = _alias(collections.abc.MutableSequence, 1)
-ByteString = _alias(collections.abc.ByteString, 0) # Not generic
-# Tuple accepts variable number of parameters.
-Tuple = _TupleType(tuple, -1, inst=False, name='Tuple')
+Mapping = _alias(collections.abc.Mapping, 2)
+MutableMapping = _alias(collections.abc.MutableMapping, 2)
+Sequence = _alias(collections.abc.Sequence, 1)
+MutableSequence = _alias(collections.abc.MutableSequence, 1)
+ByteString = _alias(collections.abc.ByteString, 0) # Not generic
+# Tuple accepts variable number of parameters.
+Tuple = _TupleType(tuple, -1, inst=False, name='Tuple')
Tuple.__doc__ = \
"""Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
@@ -1729,24 +1729,24 @@ Tuple.__doc__ = \
To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
"""
-List = _alias(list, 1, inst=False, name='List')
-Deque = _alias(collections.deque, 1, name='Deque')
-Set = _alias(set, 1, inst=False, name='Set')
-FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet')
-MappingView = _alias(collections.abc.MappingView, 1)
-KeysView = _alias(collections.abc.KeysView, 1)
-ItemsView = _alias(collections.abc.ItemsView, 2)
-ValuesView = _alias(collections.abc.ValuesView, 1)
-ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager')
-AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager')
-Dict = _alias(dict, 2, inst=False, name='Dict')
-DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict')
-OrderedDict = _alias(collections.OrderedDict, 2)
-Counter = _alias(collections.Counter, 1)
-ChainMap = _alias(collections.ChainMap, 2)
-Generator = _alias(collections.abc.Generator, 3)
-AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2)
-Type = _alias(type, 1, inst=False, name='Type')
+List = _alias(list, 1, inst=False, name='List')
+Deque = _alias(collections.deque, 1, name='Deque')
+Set = _alias(set, 1, inst=False, name='Set')
+FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet')
+MappingView = _alias(collections.abc.MappingView, 1)
+KeysView = _alias(collections.abc.KeysView, 1)
+ItemsView = _alias(collections.abc.ItemsView, 2)
+ValuesView = _alias(collections.abc.ValuesView, 1)
+ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager')
+AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager')
+Dict = _alias(dict, 2, inst=False, name='Dict')
+DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict')
+OrderedDict = _alias(collections.OrderedDict, 2)
+Counter = _alias(collections.Counter, 1)
+ChainMap = _alias(collections.ChainMap, 2)
+Generator = _alias(collections.abc.Generator, 3)
+AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2)
+Type = _alias(type, 1, inst=False, name='Type')
Type.__doc__ = \
"""A special construct usable to annotate class objects.
@@ -1772,9 +1772,9 @@ Type.__doc__ = \
"""
-@runtime_checkable
-class SupportsInt(Protocol):
- """An ABC with one abstract method __int__."""
+@runtime_checkable
+class SupportsInt(Protocol):
+ """An ABC with one abstract method __int__."""
__slots__ = ()
@abstractmethod
@@ -1782,9 +1782,9 @@ class SupportsInt(Protocol):
pass
-@runtime_checkable
-class SupportsFloat(Protocol):
- """An ABC with one abstract method __float__."""
+@runtime_checkable
+class SupportsFloat(Protocol):
+ """An ABC with one abstract method __float__."""
__slots__ = ()
@abstractmethod
@@ -1792,9 +1792,9 @@ class SupportsFloat(Protocol):
pass
-@runtime_checkable
-class SupportsComplex(Protocol):
- """An ABC with one abstract method __complex__."""
+@runtime_checkable
+class SupportsComplex(Protocol):
+ """An ABC with one abstract method __complex__."""
__slots__ = ()
@abstractmethod
@@ -1802,9 +1802,9 @@ class SupportsComplex(Protocol):
pass
-@runtime_checkable
-class SupportsBytes(Protocol):
- """An ABC with one abstract method __bytes__."""
+@runtime_checkable
+class SupportsBytes(Protocol):
+ """An ABC with one abstract method __bytes__."""
__slots__ = ()
@abstractmethod
@@ -1812,19 +1812,19 @@ class SupportsBytes(Protocol):
pass
-@runtime_checkable
-class SupportsIndex(Protocol):
- """An ABC with one abstract method __index__."""
- __slots__ = ()
-
- @abstractmethod
- def __index__(self) -> int:
- pass
-
-
-@runtime_checkable
-class SupportsAbs(Protocol[T_co]):
- """An ABC with one abstract method __abs__ that is covariant in its return type."""
+@runtime_checkable
+class SupportsIndex(Protocol):
+ """An ABC with one abstract method __index__."""
+ __slots__ = ()
+
+ @abstractmethod
+ def __index__(self) -> int:
+ pass
+
+
+@runtime_checkable
+class SupportsAbs(Protocol[T_co]):
+ """An ABC with one abstract method __abs__ that is covariant in its return type."""
__slots__ = ()
@abstractmethod
@@ -1832,9 +1832,9 @@ class SupportsAbs(Protocol[T_co]):
pass
-@runtime_checkable
-class SupportsRound(Protocol[T_co]):
- """An ABC with one abstract method __round__ that is covariant in its return type."""
+@runtime_checkable
+class SupportsRound(Protocol[T_co]):
+ """An ABC with one abstract method __round__ that is covariant in its return type."""
__slots__ = ()
@abstractmethod
@@ -1842,41 +1842,41 @@ class SupportsRound(Protocol[T_co]):
pass
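
Illustrative sketch (hypothetical `OneBased` class): the Supports* classes are runtime-checkable
protocols, so isinstance() only tests for the presence of the single special method::

    from typing import SupportsAbs, SupportsIndex

    class OneBased:
        def __index__(self) -> int:
            return 1

    assert isinstance(OneBased(), SupportsIndex)
    assert isinstance(3, SupportsIndex) and not isinstance(3.5, SupportsIndex)
    assert isinstance(-2, SupportsAbs)
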
-def _make_nmtuple(name, types, module, defaults = ()):
- fields = [n for n, t in types]
- types = {n: _type_check(t, f"field {n} annotation must be a type")
- for n, t in types}
- nm_tpl = collections.namedtuple(name, fields,
- defaults=defaults, module=module)
- nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = types
+def _make_nmtuple(name, types, module, defaults = ()):
+ fields = [n for n, t in types]
+ types = {n: _type_check(t, f"field {n} annotation must be a type")
+ for n, t in types}
+ nm_tpl = collections.namedtuple(name, fields,
+ defaults=defaults, module=module)
+ nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = types
return nm_tpl
# attributes prohibited to set in NamedTuple class syntax
-_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__',
- '_fields', '_field_defaults',
- '_make', '_replace', '_asdict', '_source'})
+_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__',
+ '_fields', '_field_defaults',
+ '_make', '_replace', '_asdict', '_source'})
-_special = frozenset({'__module__', '__name__', '__annotations__'})
+_special = frozenset({'__module__', '__name__', '__annotations__'})
class NamedTupleMeta(type):
def __new__(cls, typename, bases, ns):
- assert bases[0] is _NamedTuple
+ assert bases[0] is _NamedTuple
types = ns.get('__annotations__', {})
- default_names = []
+ default_names = []
for field_name in types:
if field_name in ns:
- default_names.append(field_name)
- elif default_names:
- raise TypeError(f"Non-default namedtuple field {field_name} "
- f"cannot follow default field"
- f"{'s' if len(default_names) > 1 else ''} "
- f"{', '.join(default_names)}")
- nm_tpl = _make_nmtuple(typename, types.items(),
- defaults=[ns[n] for n in default_names],
- module=ns['__module__'])
+ default_names.append(field_name)
+ elif default_names:
+ raise TypeError(f"Non-default namedtuple field {field_name} "
+ f"cannot follow default field"
+ f"{'s' if len(default_names) > 1 else ''} "
+ f"{', '.join(default_names)}")
+ nm_tpl = _make_nmtuple(typename, types.items(),
+ defaults=[ns[n] for n in default_names],
+ module=ns['__module__'])
# update from user namespace without overriding special namedtuple attributes
for key in ns:
if key in _prohibited:
@@ -1886,7 +1886,7 @@ class NamedTupleMeta(type):
return nm_tpl
-def NamedTuple(typename, fields=None, /, **kwargs):
+def NamedTuple(typename, fields=None, /, **kwargs):
"""Typed version of namedtuple.
Usage in Python versions >= 3.6::
@@ -1899,10 +1899,10 @@ def NamedTuple(typename, fields=None, /, **kwargs):
Employee = collections.namedtuple('Employee', ['name', 'id'])
- The resulting class has an extra __annotations__ attribute, giving a
- dict that maps field names to types. (The field names are also in
- the _fields attribute, which is part of the namedtuple API.)
- Alternative equivalent keyword syntax is also accepted::
+ The resulting class has an extra __annotations__ attribute, giving a
+ dict that maps field names to types. (The field names are also in
+ the _fields attribute, which is part of the namedtuple API.)
+ Alternative equivalent keyword syntax is also accepted::
Employee = NamedTuple('Employee', name=str, id=int)
@@ -1910,142 +1910,142 @@ def NamedTuple(typename, fields=None, /, **kwargs):
Employee = NamedTuple('Employee', [('name', str), ('id', int)])
"""
- if fields is None:
- fields = kwargs.items()
- elif kwargs:
- raise TypeError("Either list of fields or keywords"
- " can be provided to NamedTuple, not both")
- try:
- module = sys._getframe(1).f_globals.get('__name__', '__main__')
- except (AttributeError, ValueError):
- module = None
- return _make_nmtuple(typename, fields, module=module)
-
-_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
-
-def _namedtuple_mro_entries(bases):
- if len(bases) > 1:
- raise TypeError("Multiple inheritance with NamedTuple is not supported")
- assert bases[0] is NamedTuple
- return (_NamedTuple,)
-
-NamedTuple.__mro_entries__ = _namedtuple_mro_entries
-
-
-class _TypedDictMeta(type):
- def __new__(cls, name, bases, ns, total=True):
- """Create new typed dict class object.
-
- This method is called when TypedDict is subclassed,
- or when TypedDict is instantiated. This way
- TypedDict supports all three syntax forms described in its docstring.
- Subclasses and instances of TypedDict return actual dictionaries.
- """
- for base in bases:
- if type(base) is not _TypedDictMeta:
- raise TypeError('cannot inherit from both a TypedDict type '
- 'and a non-TypedDict base class')
- tp_dict = type.__new__(_TypedDictMeta, name, (dict,), ns)
-
- annotations = {}
- own_annotations = ns.get('__annotations__', {})
- own_annotation_keys = set(own_annotations.keys())
- msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
- own_annotations = {
- n: _type_check(tp, msg, module=tp_dict.__module__)
- for n, tp in own_annotations.items()
- }
- required_keys = set()
- optional_keys = set()
-
- for base in bases:
- annotations.update(base.__dict__.get('__annotations__', {}))
- required_keys.update(base.__dict__.get('__required_keys__', ()))
- optional_keys.update(base.__dict__.get('__optional_keys__', ()))
-
- annotations.update(own_annotations)
- if total:
- required_keys.update(own_annotation_keys)
- else:
- optional_keys.update(own_annotation_keys)
-
- tp_dict.__annotations__ = annotations
- tp_dict.__required_keys__ = frozenset(required_keys)
- tp_dict.__optional_keys__ = frozenset(optional_keys)
- if not hasattr(tp_dict, '__total__'):
- tp_dict.__total__ = total
- return tp_dict
-
- __call__ = dict # static method
-
- def __subclasscheck__(cls, other):
- # Typed dicts are only for static structural subtyping.
- raise TypeError('TypedDict does not support instance and class checks')
-
- __instancecheck__ = __subclasscheck__
-
-
-def TypedDict(typename, fields=None, /, *, total=True, **kwargs):
- """A simple typed namespace. At runtime it is equivalent to a plain dict.
-
- TypedDict creates a dictionary type that expects all of its
- instances to have a certain set of keys, where each key is
- associated with a value of a consistent type. This expectation
- is not checked at runtime but is only enforced by type checkers.
- Usage::
-
- class Point2D(TypedDict):
- x: int
- y: int
- label: str
-
- a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
- b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
-
- assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
-
- The type info can be accessed via the Point2D.__annotations__ dict, and
- the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
- TypedDict supports two additional equivalent forms::
-
- Point2D = TypedDict('Point2D', x=int, y=int, label=str)
- Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
-
- By default, all keys must be present in a TypedDict. It is possible
- to override this by specifying totality.
- Usage::
-
- class point2D(TypedDict, total=False):
- x: int
- y: int
-
- This means that a point2D TypedDict can have any of the keys omitted. A type
- checker is only expected to support a literal False or True as the value of
- the total argument. True is the default, and makes all items defined in the
- class body required.
-
- The class syntax is only supported in Python 3.6+, while the other two
- syntax forms work for Python 2.7 and 3.2+.
- """
- if fields is None:
- fields = kwargs
- elif kwargs:
- raise TypeError("TypedDict takes either a dict or keyword arguments,"
- " but not both")
-
- ns = {'__annotations__': dict(fields)}
- try:
- # Setting correct module is necessary to make typed dict classes pickleable.
- ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
- except (AttributeError, ValueError):
- pass
-
- return _TypedDictMeta(typename, (), ns, total=total)
-
-_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
-TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
-
-
+ if fields is None:
+ fields = kwargs.items()
+ elif kwargs:
+ raise TypeError("Either list of fields or keywords"
+ " can be provided to NamedTuple, not both")
+ try:
+ module = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ module = None
+ return _make_nmtuple(typename, fields, module=module)
+
+_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
+
+def _namedtuple_mro_entries(bases):
+ if len(bases) > 1:
+ raise TypeError("Multiple inheritance with NamedTuple is not supported")
+ assert bases[0] is NamedTuple
+ return (_NamedTuple,)
+
+NamedTuple.__mro_entries__ = _namedtuple_mro_entries
+
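Illustrative sketch (hypothetical `Employee` class) of the class syntax handled by NamedTupleMeta
above: annotations become the field types and class-level values become defaults::

    from typing import NamedTuple

    class Employee(NamedTuple):
        name: str
        id: int = 0

    assert Employee._fields == ('name', 'id')
    assert Employee.__annotations__ == {'name': str, 'id': int}
    assert Employee('guido') == ('guido', 0)     # default applied, plain tuple equality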
+
+class _TypedDictMeta(type):
+ def __new__(cls, name, bases, ns, total=True):
+ """Create new typed dict class object.
+
+ This method is called when TypedDict is subclassed,
+ or when TypedDict is instantiated. This way
+ TypedDict supports all three syntax forms described in its docstring.
+ Subclasses and instances of TypedDict return actual dictionaries.
+ """
+ for base in bases:
+ if type(base) is not _TypedDictMeta:
+ raise TypeError('cannot inherit from both a TypedDict type '
+ 'and a non-TypedDict base class')
+ tp_dict = type.__new__(_TypedDictMeta, name, (dict,), ns)
+
+ annotations = {}
+ own_annotations = ns.get('__annotations__', {})
+ own_annotation_keys = set(own_annotations.keys())
+ msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+ own_annotations = {
+ n: _type_check(tp, msg, module=tp_dict.__module__)
+ for n, tp in own_annotations.items()
+ }
+ required_keys = set()
+ optional_keys = set()
+
+ for base in bases:
+ annotations.update(base.__dict__.get('__annotations__', {}))
+ required_keys.update(base.__dict__.get('__required_keys__', ()))
+ optional_keys.update(base.__dict__.get('__optional_keys__', ()))
+
+ annotations.update(own_annotations)
+ if total:
+ required_keys.update(own_annotation_keys)
+ else:
+ optional_keys.update(own_annotation_keys)
+
+ tp_dict.__annotations__ = annotations
+ tp_dict.__required_keys__ = frozenset(required_keys)
+ tp_dict.__optional_keys__ = frozenset(optional_keys)
+ if not hasattr(tp_dict, '__total__'):
+ tp_dict.__total__ = total
+ return tp_dict
+
+ __call__ = dict # static method
+
+ def __subclasscheck__(cls, other):
+ # Typed dicts are only for static structural subtyping.
+ raise TypeError('TypedDict does not support instance and class checks')
+
+ __instancecheck__ = __subclasscheck__
+
+
+def TypedDict(typename, fields=None, /, *, total=True, **kwargs):
+ """A simple typed namespace. At runtime it is equivalent to a plain dict.
+
+ TypedDict creates a dictionary type that expects all of its
+ instances to have a certain set of keys, where each key is
+ associated with a value of a consistent type. This expectation
+ is not checked at runtime but is only enforced by type checkers.
+ Usage::
+
+ class Point2D(TypedDict):
+ x: int
+ y: int
+ label: str
+
+ a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
+ b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
+
+ assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+ The type info can be accessed via the Point2D.__annotations__ dict, and
+ the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+ TypedDict supports two additional equivalent forms::
+
+ Point2D = TypedDict('Point2D', x=int, y=int, label=str)
+ Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+ By default, all keys must be present in a TypedDict. It is possible
+ to override this by specifying totality.
+ Usage::
+
+ class point2D(TypedDict, total=False):
+ x: int
+ y: int
+
+ This means that a point2D TypedDict can have any of the keys omitted. A type
+ checker is only expected to support a literal False or True as the value of
+ the total argument. True is the default, and makes all items defined in the
+ class body required.
+
+ The class syntax is only supported in Python 3.6+, while the other two
+ syntax forms work for Python 2.7 and 3.2+.
+ """
+ if fields is None:
+ fields = kwargs
+ elif kwargs:
+ raise TypeError("TypedDict takes either a dict or keyword arguments,"
+ " but not both")
+
+ ns = {'__annotations__': dict(fields)}
+ try:
+ # Setting correct module is necessary to make typed dict classes pickleable.
+ ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ pass
+
+ return _TypedDictMeta(typename, (), ns, total=total)
+
+_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
+
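Illustrative sketch (hypothetical `Point2D`/`LabeledPoint` classes) of how _TypedDictMeta merges
inherited annotations and splits them into required and optional keys, while calls simply build
plain dicts::

    from typing import TypedDict

    class Point2D(TypedDict, total=False):
        x: int
        y: int

    class LabeledPoint(Point2D):                 # total=True again for this class body
        label: str

    assert LabeledPoint.__required_keys__ == frozenset({'label'})
    assert LabeledPoint.__optional_keys__ == frozenset({'x', 'y'})
    assert LabeledPoint(x=1, label='origin') == {'x': 1, 'label': 'origin'}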
+
def NewType(name, tp):
"""NewType creates simple unique types with almost zero
runtime overhead. NewType(name, tp) is considered a subtype of tp
@@ -2096,13 +2096,13 @@ class IO(Generic[AnyStr]):
__slots__ = ()
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def mode(self) -> str:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def name(self) -> str:
pass
@@ -2110,8 +2110,8 @@ class IO(Generic[AnyStr]):
def close(self) -> None:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def closed(self) -> bool:
pass
@@ -2199,28 +2199,28 @@ class TextIO(IO[str]):
__slots__ = ()
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def buffer(self) -> BinaryIO:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def encoding(self) -> str:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def errors(self) -> Optional[str]:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def line_buffering(self) -> bool:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def newlines(self) -> Any:
pass
@@ -2241,8 +2241,8 @@ class io:
io.__name__ = __name__ + '.io'
sys.modules[io.__name__] = io
-Pattern = _alias(stdlib_re.Pattern, 1)
-Match = _alias(stdlib_re.Match, 1)
+Pattern = _alias(stdlib_re.Pattern, 1)
+Match = _alias(stdlib_re.Match, 1)
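
Illustrative sketch (hypothetical `WORD` pattern) of the Pattern and Match aliases used as
annotations for compiled regular expressions::

    import re
    from typing import Match, Optional, Pattern

    WORD: Pattern[str] = re.compile(r"\w+")

    def first_word(text: str) -> Optional[Match[str]]:
        return WORD.search(text)

    assert first_word("hello world").group() == "hello"
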
class re:
"""Wrapper namespace for re type aliases."""