| author | dvshkurko <dvshkurko@yandex-team.ru> | 2022-02-10 16:45:52 +0300 |
| --- | --- | --- |
| committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:45:52 +0300 |
| commit | c768a99151e47c3a4bb7b92c514d256abd301c4d (patch) | |
| tree | 1a2c5ffcf89eb53ecd79dbc9bc0a195c27404d0c /contrib/libs/grpc/src/python | |
| parent | 321ee9bce31ec6e238be26dbcbe539cffa2c3309 (diff) | |
| download | ydb-c768a99151e47c3a4bb7b92c514d256abd301c4d.tar.gz | |
Restoring authorship annotation for <dvshkurko@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/libs/grpc/src/python')
207 files changed, 19084 insertions, 19084 deletions
diff --git a/contrib/libs/grpc/src/python/grpcio/README.rst b/contrib/libs/grpc/src/python/grpcio/README.rst index 9bfec34f6d..fe34ad5adf 100644 --- a/contrib/libs/grpc/src/python/grpcio/README.rst +++ b/contrib/libs/grpc/src/python/grpcio/README.rst @@ -1,24 +1,24 @@ gRPC Python =========== -|compat_check_pypi| - +|compat_check_pypi| + Package for gRPC Python. -.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=grpcio - :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=grpcio - -Supported Python Versions -------------------------- -Python >= 3.5 - +.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=grpcio + :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=grpcio + +Supported Python Versions +------------------------- +Python >= 3.5 + Installation ------------ gRPC Python is available for Linux, macOS, and Windows. -Installing From PyPI -~~~~~~~~~~~~~~~~~~~~ +Installing From PyPI +~~~~~~~~~~~~~~~~~~~~ If you are installing locally... @@ -46,8 +46,8 @@ n.b. On Windows and on Mac OS X one *must* have a recent release of :code:`pip` to retrieve the proper wheel from PyPI. Be sure to upgrade to the latest version! -Installing From Source -~~~~~~~~~~~~~~~~~~~~~~ +Installing From Source +~~~~~~~~~~~~~~~~~~~~~~ Building from source requires that you have the Python headers (usually a package named :code:`python-dev`). diff --git a/contrib/libs/grpc/src/python/grpcio/commands.py b/contrib/libs/grpc/src/python/grpcio/commands.py index dcb2ce5a53..8240beb295 100644 --- a/contrib/libs/grpc/src/python/grpcio/commands.py +++ b/contrib/libs/grpc/src/python/grpcio/commands.py @@ -212,43 +212,43 @@ class BuildExt(build_ext.build_ext): LINK_OPTIONS = {} def build_extensions(self): - - def compiler_ok_with_extra_std(): - """Test if default compiler is okay with specifying c++ version - when invoked in C mode. GCC is okay with this, while clang is not. - """ + + def compiler_ok_with_extra_std(): + """Test if default compiler is okay with specifying c++ version + when invoked in C mode. GCC is okay with this, while clang is not. + """ if platform.system() != 'Windows': return False - # TODO(lidiz) Remove the generated a.out for success tests. + # TODO(lidiz) Remove the generated a.out for success tests. cc_test = subprocess.Popen(['cc', '-x', 'c', '-std=c++11', '-'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - _, cc_err = cc_test.communicate(input=b'int main(){return 0;}') - return not 'invalid argument' in str(cc_err) - + _, cc_err = cc_test.communicate(input=b'int main(){return 0;}') + return not 'invalid argument' in str(cc_err) + # This special conditioning is here due to difference of compiler # behavior in gcc and clang. The clang doesn't take --stdc++11 # flags but gcc does. Since the setuptools of Python only support # all C or all C++ compilation, the mix of C and C++ will crash. - # *By default*, macOS and FreBSD use clang and Linux use gcc - # - # If we are not using a permissive compiler that's OK with being - # passed wrong std flags, swap out compile function by adding a filter - # for it. 
- if not compiler_ok_with_extra_std(): - old_compile = self.compiler._compile - - def new_compile(obj, src, ext, cc_args, extra_postargs, pp_opts): - if src[-2:] == '.c': - extra_postargs = [ - arg for arg in extra_postargs if not '-std=c++' in arg - ] - return old_compile(obj, src, ext, cc_args, extra_postargs, - pp_opts) - - self.compiler._compile = new_compile - + # *By default*, macOS and FreBSD use clang and Linux use gcc + # + # If we are not using a permissive compiler that's OK with being + # passed wrong std flags, swap out compile function by adding a filter + # for it. + if not compiler_ok_with_extra_std(): + old_compile = self.compiler._compile + + def new_compile(obj, src, ext, cc_args, extra_postargs, pp_opts): + if src[-2:] == '.c': + extra_postargs = [ + arg for arg in extra_postargs if not '-std=c++' in arg + ] + return old_compile(obj, src, ext, cc_args, extra_postargs, + pp_opts) + + self.compiler._compile = new_compile + compiler = self.compiler.compiler_type if compiler in BuildExt.C_OPTIONS: for extension in self.extensions: diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/__init__.py index 62205cd0ad..abe87458c4 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio/grpc/__init__.py @@ -14,14 +14,14 @@ """gRPC's Python API.""" import abc -import contextlib +import contextlib import enum import logging import sys import six from grpc._cython import cygrpc as _cygrpc -from grpc import _compression +from grpc import _compression logging.getLogger(__name__).addHandler(logging.NullHandler()) @@ -192,9 +192,9 @@ class Future(six.with_metaclass(abc.ABCMeta)): If the computation has already completed, the callback will be called immediately. - Exceptions raised in the callback will be logged at ERROR level, but - will not terminate any threads of execution. - + Exceptions raised in the callback will be logged at ERROR level, but + will not terminate any threads of execution. + Args: fn: A callable taking this Future object as its single parameter. """ @@ -286,7 +286,7 @@ class Status(six.with_metaclass(abc.ABCMeta)): Attributes: code: A StatusCode object to be sent to the client. - details: A UTF-8-encodable string to be sent to the client upon + details: A UTF-8-encodable string to be sent to the client upon termination of the RPC. trailing_metadata: The trailing :term:`metadata` in the RPC. """ @@ -342,7 +342,7 @@ class RpcContext(six.with_metaclass(abc.ABCMeta)): callback: A no-parameter callable to be called on RPC termination. Returns: - True if the callback was added and will be called later; False if + True if the callback was added and will be called later; False if the callback was not added and will not be called (because the RPC already terminated or some other reason). """ @@ -416,8 +416,8 @@ class ClientCallDetails(six.with_metaclass(abc.ABCMeta)): credentials: An optional CallCredentials for the RPC. wait_for_ready: This is an EXPERIMENTAL argument. An optional flag to enable :term:`wait_for_ready` mechanism. - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. This is an EXPERIMENTAL option. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This is an EXPERIMENTAL option. """ @@ -586,9 +586,9 @@ class ChannelCredentials(object): class CallCredentials(object): """An encapsulation of the data required to assert an identity over a call. 
- A CallCredentials has to be used with secure Channel, otherwise the - metadata will not be transmitted to the server. - + A CallCredentials has to be used with secure Channel, otherwise the + metadata will not be transmitted to the server. + A CallCredentials may be composed with ChannelCredentials to always assert identity for every call over that Channel. @@ -677,8 +677,8 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): """Synchronously invokes the underlying RPC. Args: @@ -687,12 +687,12 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): for the RPC. metadata: Optional :term:`metadata` to be transmitted to the service-side of the RPC. - credentials: An optional CallCredentials for the RPC. Only valid for - secure Channel. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. wait_for_ready: This is an EXPERIMENTAL argument. An optional flag to enable :term:`wait_for_ready` mechanism. - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. This is an EXPERIMENTAL option. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This is an EXPERIMENTAL option. Returns: The response value for the RPC. @@ -710,8 +710,8 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): """Synchronously invokes the underlying RPC. Args: @@ -720,12 +720,12 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): the RPC. metadata: Optional :term:`metadata` to be transmitted to the service-side of the RPC. - credentials: An optional CallCredentials for the RPC. Only valid for - secure Channel. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. wait_for_ready: This is an EXPERIMENTAL argument. An optional flag to enable :term:`wait_for_ready` mechanism. - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. This is an EXPERIMENTAL option. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This is an EXPERIMENTAL option. Returns: The response value for the RPC and a Call value for the RPC. @@ -743,8 +743,8 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): """Asynchronously invokes the underlying RPC. Args: @@ -753,12 +753,12 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): the RPC. metadata: Optional :term:`metadata` to be transmitted to the service-side of the RPC. - credentials: An optional CallCredentials for the RPC. Only valid for - secure Channel. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. wait_for_ready: This is an EXPERIMENTAL argument. An optional flag to enable :term:`wait_for_ready` mechanism. - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. This is an EXPERIMENTAL option. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This is an EXPERIMENTAL option. Returns: An object that is both a Call for the RPC and a Future. 
@@ -779,8 +779,8 @@ class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): """Invokes the underlying RPC. Args: @@ -789,12 +789,12 @@ class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)): the RPC. If None, the timeout is considered infinite. metadata: An optional :term:`metadata` to be transmitted to the service-side of the RPC. - credentials: An optional CallCredentials for the RPC. Only valid for - secure Channel. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. wait_for_ready: This is an EXPERIMENTAL argument. An optional flag to enable :term:`wait_for_ready` mechanism. - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. This is an EXPERIMENTAL option. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This is an EXPERIMENTAL option. Returns: An object that is both a Call for the RPC and an iterator of @@ -814,8 +814,8 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): """Synchronously invokes the underlying RPC. Args: @@ -825,12 +825,12 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): the RPC. If None, the timeout is considered infinite. metadata: Optional :term:`metadata` to be transmitted to the service-side of the RPC. - credentials: An optional CallCredentials for the RPC. Only valid for - secure Channel. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. wait_for_ready: This is an EXPERIMENTAL argument. An optional flag to enable :term:`wait_for_ready` mechanism. - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. This is an EXPERIMENTAL option. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This is an EXPERIMENTAL option. Returns: The response value for the RPC. @@ -848,8 +848,8 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): """Synchronously invokes the underlying RPC on the client. Args: @@ -859,12 +859,12 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): the RPC. If None, the timeout is considered infinite. metadata: Optional :term:`metadata` to be transmitted to the service-side of the RPC. - credentials: An optional CallCredentials for the RPC. Only valid for - secure Channel. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. wait_for_ready: This is an EXPERIMENTAL argument. An optional flag to enable :term:`wait_for_ready` mechanism. - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. This is an EXPERIMENTAL option. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This is an EXPERIMENTAL option. Returns: The response value for the RPC and a Call object for the RPC. @@ -882,8 +882,8 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): """Asynchronously invokes the underlying RPC on the client. 
Args: @@ -892,12 +892,12 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)): the RPC. If None, the timeout is considered infinite. metadata: Optional :term:`metadata` to be transmitted to the service-side of the RPC. - credentials: An optional CallCredentials for the RPC. Only valid for - secure Channel. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. wait_for_ready: This is an EXPERIMENTAL argument. An optional flag to enable :term:`wait_for_ready` mechanism. - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. This is an EXPERIMENTAL option. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This is an EXPERIMENTAL option. Returns: An object that is both a Call for the RPC and a Future. @@ -918,8 +918,8 @@ class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): """Invokes the underlying RPC on the client. Args: @@ -928,12 +928,12 @@ class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)): the RPC. If not specified, the timeout is considered infinite. metadata: Optional :term:`metadata` to be transmitted to the service-side of the RPC. - credentials: An optional CallCredentials for the RPC. Only valid for - secure Channel. + credentials: An optional CallCredentials for the RPC. Only valid for + secure Channel. wait_for_ready: This is an EXPERIMENTAL argument. An optional flag to enable :term:`wait_for_ready` mechanism. - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. This is an EXPERIMENTAL option. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This is an EXPERIMENTAL option. Returns: An object that is both a Call for the RPC and an iterator of @@ -1145,17 +1145,17 @@ class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)): """ raise NotImplementedError() - def set_compression(self, compression): - """Set the compression algorithm to be used for the entire call. - - This is an EXPERIMENTAL method. - - Args: - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. - """ - raise NotImplementedError() - + def set_compression(self, compression): + """Set the compression algorithm to be used for the entire call. + + This is an EXPERIMENTAL method. + + Args: + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. + """ + raise NotImplementedError() + @abc.abstractmethod def send_initial_metadata(self, initial_metadata): """Sends the initial metadata value to the client. @@ -1196,7 +1196,7 @@ class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)): Args: code: A StatusCode object to be sent to the client. It must not be StatusCode.OK. - details: A UTF-8-encodable string to be sent to the client upon + details: A UTF-8-encodable string to be sent to the client upon termination of the RPC. Raises: @@ -1244,22 +1244,22 @@ class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)): no details to transmit. Args: - details: A UTF-8-encodable string to be sent to the client upon + details: A UTF-8-encodable string to be sent to the client upon termination of the RPC. """ raise NotImplementedError() - def disable_next_message_compression(self): - """Disables compression for the next response message. - - This is an EXPERIMENTAL method. 
- - This method will override any compression configuration set during - server creation or set on the call. - """ - raise NotImplementedError() - - + def disable_next_message_compression(self): + """Disables compression for the next response message. + + This is an EXPERIMENTAL method. + + This method will override any compression configuration set during + server creation or set on the call. + """ + raise NotImplementedError() + + ##################### Service-Side Handler Interfaces ######################## @@ -1301,7 +1301,7 @@ class RpcMethodHandler(six.with_metaclass(abc.ABCMeta)): class HandlerCallDetails(six.with_metaclass(abc.ABCMeta)): """Describes an RPC that has just arrived for service. - + Attributes: method: The method name of the RPC. invocation_metadata: The :term:`metadata` sent by the client. @@ -1398,8 +1398,8 @@ class Server(six.with_metaclass(abc.ABCMeta)): This method may only be called before starting the server. Args: - address: The address for which to open a port. If the port is 0, - or not specified in the address, then gRPC runtime will choose a port. + address: The address for which to open a port. If the port is 0, + or not specified in the address, then gRPC runtime will choose a port. Returns: An integer port on which server will accept RPC requests. @@ -1461,30 +1461,30 @@ class Server(six.with_metaclass(abc.ABCMeta)): """ raise NotImplementedError() - def wait_for_termination(self, timeout=None): - """Block current thread until the server stops. - - This is an EXPERIMENTAL API. - - The wait will not consume computational resources during blocking, and - it will block until one of the two following conditions are met: - - 1) The server is stopped or terminated; - 2) A timeout occurs if timeout is not `None`. - - The timeout argument works in the same way as `threading.Event.wait()`. + def wait_for_termination(self, timeout=None): + """Block current thread until the server stops. + + This is an EXPERIMENTAL API. + + The wait will not consume computational resources during blocking, and + it will block until one of the two following conditions are met: + + 1) The server is stopped or terminated; + 2) A timeout occurs if timeout is not `None`. + + The timeout argument works in the same way as `threading.Event.wait()`. https://docs.python.org/3/library/threading.html#threading.Event.wait - - Args: - timeout: A floating point number specifying a timeout for the - operation in seconds. - - Returns: - A bool indicates if the operation times out. - """ - raise NotImplementedError() - - + + Args: + timeout: A floating point number specifying a timeout for the + operation in seconds. + + Returns: + A bool indicates if the operation times out. + """ + raise NotImplementedError() + + ################################# Functions ################################ @@ -1761,78 +1761,78 @@ def dynamic_ssl_server_credentials(initial_certificate_configuration, certificate_configuration_fetcher, require_client_authentication)) -@enum.unique -class LocalConnectionType(enum.Enum): - """Types of local connection for local credential creation. - - Attributes: - UDS: Unix domain socket connections - LOCAL_TCP: Local TCP connections. - """ - UDS = _cygrpc.LocalConnectionType.uds - LOCAL_TCP = _cygrpc.LocalConnectionType.local_tcp - - -def local_channel_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP): - """Creates a local ChannelCredentials used for local connections. - - This is an EXPERIMENTAL API. - - Local credentials are used by local TCP endpoints (e.g. 
localhost:10000) - also UDS connections. - - The connections created by local channel credentials are not - encrypted, but will be checked if they are local or not. - The UDS connections are considered secure by providing peer authentication - and data confidentiality while TCP connections are considered insecure. - - It is allowed to transmit call credentials over connections created by - local channel credentials. - - Local channel credentials are useful for 1) eliminating insecure_channel usage; - 2) enable unit testing for call credentials without setting up secrets. - - Args: - local_connect_type: Local connection type (either - grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP) - - Returns: - A ChannelCredentials for use with a local Channel - """ - return ChannelCredentials( - _cygrpc.channel_credentials_local(local_connect_type.value)) - - -def local_server_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP): - """Creates a local ServerCredentials used for local connections. - - This is an EXPERIMENTAL API. - - Local credentials are used by local TCP endpoints (e.g. localhost:10000) - also UDS connections. - - The connections created by local server credentials are not - encrypted, but will be checked if they are local or not. - The UDS connections are considered secure by providing peer authentication - and data confidentiality while TCP connections are considered insecure. - - It is allowed to transmit call credentials over connections created by local - server credentials. - - Local server credentials are useful for 1) eliminating insecure_channel usage; - 2) enable unit testing for call credentials without setting up secrets. - - Args: - local_connect_type: Local connection type (either - grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP) - - Returns: - A ServerCredentials for use with a local Server - """ - return ServerCredentials( - _cygrpc.server_credentials_local(local_connect_type.value)) - - +@enum.unique +class LocalConnectionType(enum.Enum): + """Types of local connection for local credential creation. + + Attributes: + UDS: Unix domain socket connections + LOCAL_TCP: Local TCP connections. + """ + UDS = _cygrpc.LocalConnectionType.uds + LOCAL_TCP = _cygrpc.LocalConnectionType.local_tcp + + +def local_channel_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP): + """Creates a local ChannelCredentials used for local connections. + + This is an EXPERIMENTAL API. + + Local credentials are used by local TCP endpoints (e.g. localhost:10000) + also UDS connections. + + The connections created by local channel credentials are not + encrypted, but will be checked if they are local or not. + The UDS connections are considered secure by providing peer authentication + and data confidentiality while TCP connections are considered insecure. + + It is allowed to transmit call credentials over connections created by + local channel credentials. + + Local channel credentials are useful for 1) eliminating insecure_channel usage; + 2) enable unit testing for call credentials without setting up secrets. 
+ + Args: + local_connect_type: Local connection type (either + grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP) + + Returns: + A ChannelCredentials for use with a local Channel + """ + return ChannelCredentials( + _cygrpc.channel_credentials_local(local_connect_type.value)) + + +def local_server_credentials(local_connect_type=LocalConnectionType.LOCAL_TCP): + """Creates a local ServerCredentials used for local connections. + + This is an EXPERIMENTAL API. + + Local credentials are used by local TCP endpoints (e.g. localhost:10000) + also UDS connections. + + The connections created by local server credentials are not + encrypted, but will be checked if they are local or not. + The UDS connections are considered secure by providing peer authentication + and data confidentiality while TCP connections are considered insecure. + + It is allowed to transmit call credentials over connections created by local + server credentials. + + Local server credentials are useful for 1) eliminating insecure_channel usage; + 2) enable unit testing for call credentials without setting up secrets. + + Args: + local_connect_type: Local connection type (either + grpc.LocalConnectionType.UDS or grpc.LocalConnectionType.LOCAL_TCP) + + Returns: + A ServerCredentials for use with a local Server + """ + return ServerCredentials( + _cygrpc.server_credentials_local(local_connect_type.value)) + + def alts_channel_credentials(service_accounts=None): """Creates a ChannelCredentials for use with an ALTS-enabled Channel. @@ -1902,7 +1902,7 @@ def channel_ready_future(channel): return _utilities.channel_ready_future(channel) -def insecure_channel(target, options=None, compression=None): +def insecure_channel(target, options=None, compression=None): """Creates an insecure Channel to a server. The returned Channel is thread-safe. @@ -1911,8 +1911,8 @@ def insecure_channel(target, options=None, compression=None): target: The server address options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core runtime) to configure the channel. - compression: An optional value indicating the compression method to be - used over the lifetime of the channel. This is an EXPERIMENTAL option. + compression: An optional value indicating the compression method to be + used over the lifetime of the channel. This is an EXPERIMENTAL option. Returns: A Channel. @@ -1922,7 +1922,7 @@ def insecure_channel(target, options=None, compression=None): compression) -def secure_channel(target, credentials, options=None, compression=None): +def secure_channel(target, credentials, options=None, compression=None): """Creates a secure Channel to a server. The returned Channel is thread-safe. @@ -1932,8 +1932,8 @@ def secure_channel(target, credentials, options=None, compression=None): credentials: A ChannelCredentials instance. options: An optional list of key-value pairs (:term:`channel_arguments` in gRPC Core runtime) to configure the channel. - compression: An optional value indicating the compression method to be - used over the lifetime of the channel. This is an EXPERIMENTAL option. + compression: An optional value indicating the compression method to be + used over the lifetime of the channel. This is an EXPERIMENTAL option. Returns: A Channel. @@ -1945,7 +1945,7 @@ def secure_channel(target, credentials, options=None, compression=None): "secure_channel cannot be called with insecure credentials." 
+ " Call insecure_channel instead.") return _channel.Channel(target, () if options is None else options, - credentials._credentials, compression) + credentials._credentials, compression) def intercept_channel(channel, *interceptors): @@ -1980,8 +1980,8 @@ def server(thread_pool, handlers=None, interceptors=None, options=None, - maximum_concurrent_rpcs=None, - compression=None): + maximum_concurrent_rpcs=None, + compression=None): """Creates a Server with which RPCs can be serviced. Args: @@ -1999,9 +1999,9 @@ def server(thread_pool, maximum_concurrent_rpcs: The maximum number of concurrent RPCs this server will service before returning RESOURCE_EXHAUSTED status, or None to indicate no limit. - compression: An element of grpc.compression, e.g. - grpc.compression.Gzip. This compression algorithm will be used for the - lifetime of the server unless overridden. This is an EXPERIMENTAL option. + compression: An element of grpc.compression, e.g. + grpc.compression.Gzip. This compression algorithm will be used for the + lifetime of the server unless overridden. This is an EXPERIMENTAL option. Returns: A Server object. @@ -2011,33 +2011,33 @@ def server(thread_pool, () if handlers is None else handlers, () if interceptors is None else interceptors, () if options is None else options, - maximum_concurrent_rpcs, compression) - - -@contextlib.contextmanager -def _create_servicer_context(rpc_event, state, request_deserializer): - from grpc import _server # pylint: disable=cyclic-import - context = _server._Context(rpc_event, state, request_deserializer) - yield context - context._finalize_state() # pylint: disable=protected-access - - -@enum.unique -class Compression(enum.IntEnum): - """Indicates the compression method to be used for an RPC. - - This enumeration is part of an EXPERIMENTAL API. - - Attributes: - NoCompression: Do not use compression algorithm. - Deflate: Use "Deflate" compression algorithm. - Gzip: Use "Gzip" compression algorithm. - """ - NoCompression = _compression.NoCompression - Deflate = _compression.Deflate - Gzip = _compression.Gzip - - + maximum_concurrent_rpcs, compression) + + +@contextlib.contextmanager +def _create_servicer_context(rpc_event, state, request_deserializer): + from grpc import _server # pylint: disable=cyclic-import + context = _server._Context(rpc_event, state, request_deserializer) + yield context + context._finalize_state() # pylint: disable=protected-access + + +@enum.unique +class Compression(enum.IntEnum): + """Indicates the compression method to be used for an RPC. + + This enumeration is part of an EXPERIMENTAL API. + + Attributes: + NoCompression: Do not use compression algorithm. + Deflate: Use "Deflate" compression algorithm. + Gzip: Use "Gzip" compression algorithm. 
+ """ + NoCompression = _compression.NoCompression + Deflate = _compression.Deflate + Gzip = _compression.Gzip + + from grpc._runtime_protos import protos, services, protos_and_services # pylint: disable=wrong-import-position ################################### __all__ ################################# @@ -2057,11 +2057,11 @@ __all__ = ( 'AuthMetadataContext', 'AuthMetadataPluginCallback', 'AuthMetadataPlugin', - 'Compression', + 'Compression', 'ClientCallDetails', 'ServerCertificateConfiguration', 'ServerCredentials', - 'LocalConnectionType', + 'LocalConnectionType', 'UnaryUnaryMultiCallable', 'UnaryStreamMultiCallable', 'StreamUnaryMultiCallable', @@ -2088,8 +2088,8 @@ __all__ = ( 'access_token_call_credentials', 'composite_call_credentials', 'composite_channel_credentials', - 'local_channel_credentials', - 'local_server_credentials', + 'local_channel_credentials', + 'local_server_credentials', 'alts_channel_credentials', 'alts_server_credentials', 'ssl_server_credentials', diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_channel.py b/contrib/libs/grpc/src/python/grpcio/grpc/_channel.py index f8ffb7eb2a..11921d7883 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_channel.py +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_channel.py @@ -13,17 +13,17 @@ # limitations under the License. """Invocation-side implementation of gRPC Python.""" -import copy -import functools +import copy +import functools import logging -import os +import os import sys import threading import time import grpc -import grpc.experimental -from grpc import _compression +import grpc.experimental +from grpc import _compression from grpc import _common from grpc import _grpcio_metadata from grpc._cython import cygrpc @@ -34,11 +34,11 @@ _USER_AGENT = 'grpc-python/{}'.format(_grpcio_metadata.__version__) _EMPTY_FLAGS = 0 -# NOTE(rbellevi): No guarantees are given about the maintenance of this -# environment variable. -_DEFAULT_SINGLE_THREADED_UNARY_STREAM = os.getenv( - "GRPC_SINGLE_THREADED_UNARY_STREAM") is not None - +# NOTE(rbellevi): No guarantees are given about the maintenance of this +# environment variable. +_DEFAULT_SINGLE_THREADED_UNARY_STREAM = os.getenv( + "GRPC_SINGLE_THREADED_UNARY_STREAM") is not None + _UNARY_UNARY_INITIAL_DUE = ( cygrpc.OperationType.send_initial_metadata, cygrpc.OperationType.send_message, @@ -69,12 +69,12 @@ _STREAM_STREAM_INITIAL_DUE = ( _CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = ( 'Exception calling channel subscription callback!') -_OK_RENDEZVOUS_REPR_FORMAT = ('<{} of RPC that terminated with:\n' +_OK_RENDEZVOUS_REPR_FORMAT = ('<{} of RPC that terminated with:\n' '\tstatus = {}\n' '\tdetails = "{}"\n' '>') -_NON_OK_RENDEZVOUS_REPR_FORMAT = ('<{} of RPC that terminated with:\n' +_NON_OK_RENDEZVOUS_REPR_FORMAT = ('<{} of RPC that terminated with:\n' '\tstatus = {}\n' '\tdetails = "{}"\n' '\tdebug_error_string = "{}"\n' @@ -167,11 +167,11 @@ def _event_handler(state, response_deserializer): state.condition.notify_all() done = not state.due for callback in callbacks: - try: - callback() - except Exception as e: # pylint: disable=broad-except - # NOTE(rbellevi): We suppress but log errors here so as not to - # kill the channel spin thread. + try: + callback() + except Exception as e: # pylint: disable=broad-except + # NOTE(rbellevi): We suppress but log errors here so as not to + # kill the channel spin thread. 
logging.error('Exception in callback %s: %s', repr(callback.func), repr(e)) return done and state.fork_epoch >= cygrpc.get_fork_epoch() @@ -182,11 +182,11 @@ def _event_handler(state, response_deserializer): #pylint: disable=too-many-statements def _consume_request_iterator(request_iterator, state, call, request_serializer, event_handler): - """Consume a request iterator supplied by the user.""" + """Consume a request iterator supplied by the user.""" def consume_request_iterator(): # pylint: disable=too-many-branches - # Iterate over the request iterator until it is exhausted or an error - # condition is encountered. + # Iterate over the request iterator until it is exhausted or an error + # condition is encountered. while True: return_from_user_request_generator_invoked = False try: @@ -227,19 +227,19 @@ def _consume_request_iterator(request_iterator, state, call, request_serializer, state.due.add(cygrpc.OperationType.send_message) else: return - - def _done(): - return (state.code is not None or - cygrpc.OperationType.send_message not in - state.due) - + + def _done(): + return (state.code is not None or + cygrpc.OperationType.send_message not in + state.due) + _common.wait(state.condition.wait, _done, spin_cb=functools.partial( cygrpc.block_if_fork_in_progress, state)) - if state.code is not None: - return + if state.code is not None: + return else: return with state.condition: @@ -256,112 +256,112 @@ def _consume_request_iterator(request_iterator, state, call, request_serializer, consumption_thread.start() -def _rpc_state_string(class_name, rpc_state): - """Calculates error string for RPC.""" - with rpc_state.condition: - if rpc_state.code is None: - return '<{} object>'.format(class_name) - elif rpc_state.code is grpc.StatusCode.OK: - return _OK_RENDEZVOUS_REPR_FORMAT.format(class_name, rpc_state.code, - rpc_state.details) - else: - return _NON_OK_RENDEZVOUS_REPR_FORMAT.format( - class_name, rpc_state.code, rpc_state.details, - rpc_state.debug_error_string) - - -class _InactiveRpcError(grpc.RpcError, grpc.Call, grpc.Future): - """An RPC error not tied to the execution of a particular RPC. - - The RPC represented by the state object must not be in-progress or - cancelled. - - Attributes: - _state: An instance of _RPCState. 
- """ - - def __init__(self, state): - with state.condition: - self._state = _RPCState((), copy.deepcopy(state.initial_metadata), - copy.deepcopy(state.trailing_metadata), - state.code, copy.deepcopy(state.details)) - self._state.response = copy.copy(state.response) - self._state.debug_error_string = copy.copy(state.debug_error_string) - - def initial_metadata(self): - return self._state.initial_metadata - - def trailing_metadata(self): - return self._state.trailing_metadata - - def code(self): - return self._state.code - - def details(self): - return _common.decode(self._state.details) - - def debug_error_string(self): - return _common.decode(self._state.debug_error_string) - - def _repr(self): - return _rpc_state_string(self.__class__.__name__, self._state) - - def __repr__(self): - return self._repr() - - def __str__(self): - return self._repr() - - def cancel(self): - """See grpc.Future.cancel.""" - return False - - def cancelled(self): - """See grpc.Future.cancelled.""" - return False - - def running(self): - """See grpc.Future.running.""" - return False - - def done(self): - """See grpc.Future.done.""" - return True - - def result(self, timeout=None): # pylint: disable=unused-argument - """See grpc.Future.result.""" - raise self - - def exception(self, timeout=None): # pylint: disable=unused-argument - """See grpc.Future.exception.""" - return self - - def traceback(self, timeout=None): # pylint: disable=unused-argument - """See grpc.Future.traceback.""" - try: - raise self - except grpc.RpcError: - return sys.exc_info()[2] - - def add_done_callback(self, fn, timeout=None): # pylint: disable=unused-argument - """See grpc.Future.add_done_callback.""" - fn(self) - - -class _Rendezvous(grpc.RpcError, grpc.RpcContext): - """An RPC iterator. - - Attributes: - _state: An instance of _RPCState. - _call: An instance of SegregatedCall or IntegratedCall. - In either case, the _call object is expected to have operate, cancel, - and next_event methods. - _response_deserializer: A callable taking bytes and return a Python - object. - _deadline: A float representing the deadline of the RPC in seconds. Or - possibly None, to represent an RPC with no deadline at all. - """ - +def _rpc_state_string(class_name, rpc_state): + """Calculates error string for RPC.""" + with rpc_state.condition: + if rpc_state.code is None: + return '<{} object>'.format(class_name) + elif rpc_state.code is grpc.StatusCode.OK: + return _OK_RENDEZVOUS_REPR_FORMAT.format(class_name, rpc_state.code, + rpc_state.details) + else: + return _NON_OK_RENDEZVOUS_REPR_FORMAT.format( + class_name, rpc_state.code, rpc_state.details, + rpc_state.debug_error_string) + + +class _InactiveRpcError(grpc.RpcError, grpc.Call, grpc.Future): + """An RPC error not tied to the execution of a particular RPC. + + The RPC represented by the state object must not be in-progress or + cancelled. + + Attributes: + _state: An instance of _RPCState. 
+ """ + + def __init__(self, state): + with state.condition: + self._state = _RPCState((), copy.deepcopy(state.initial_metadata), + copy.deepcopy(state.trailing_metadata), + state.code, copy.deepcopy(state.details)) + self._state.response = copy.copy(state.response) + self._state.debug_error_string = copy.copy(state.debug_error_string) + + def initial_metadata(self): + return self._state.initial_metadata + + def trailing_metadata(self): + return self._state.trailing_metadata + + def code(self): + return self._state.code + + def details(self): + return _common.decode(self._state.details) + + def debug_error_string(self): + return _common.decode(self._state.debug_error_string) + + def _repr(self): + return _rpc_state_string(self.__class__.__name__, self._state) + + def __repr__(self): + return self._repr() + + def __str__(self): + return self._repr() + + def cancel(self): + """See grpc.Future.cancel.""" + return False + + def cancelled(self): + """See grpc.Future.cancelled.""" + return False + + def running(self): + """See grpc.Future.running.""" + return False + + def done(self): + """See grpc.Future.done.""" + return True + + def result(self, timeout=None): # pylint: disable=unused-argument + """See grpc.Future.result.""" + raise self + + def exception(self, timeout=None): # pylint: disable=unused-argument + """See grpc.Future.exception.""" + return self + + def traceback(self, timeout=None): # pylint: disable=unused-argument + """See grpc.Future.traceback.""" + try: + raise self + except grpc.RpcError: + return sys.exc_info()[2] + + def add_done_callback(self, fn, timeout=None): # pylint: disable=unused-argument + """See grpc.Future.add_done_callback.""" + fn(self) + + +class _Rendezvous(grpc.RpcError, grpc.RpcContext): + """An RPC iterator. + + Attributes: + _state: An instance of _RPCState. + _call: An instance of SegregatedCall or IntegratedCall. + In either case, the _call object is expected to have operate, cancel, + and next_event methods. + _response_deserializer: A callable taking bytes and return a Python + object. + _deadline: A float representing the deadline of the RPC in seconds. Or + possibly None, to represent an RPC with no deadline at all. 
+ """ + def __init__(self, state, call, response_deserializer, deadline): super(_Rendezvous, self).__init__() self._state = state @@ -369,21 +369,21 @@ class _Rendezvous(grpc.RpcError, grpc.RpcContext): self._response_deserializer = response_deserializer self._deadline = deadline - def is_active(self): - """See grpc.RpcContext.is_active""" - with self._state.condition: - return self._state.code is None - - def time_remaining(self): - """See grpc.RpcContext.time_remaining""" - with self._state.condition: - if self._deadline is None: - return None - else: - return max(self._deadline - time.time(), 0) - + def is_active(self): + """See grpc.RpcContext.is_active""" + with self._state.condition: + return self._state.code is None + + def time_remaining(self): + """See grpc.RpcContext.time_remaining""" + with self._state.condition: + if self._deadline is None: + return None + else: + return max(self._deadline - time.time(), 0) + def cancel(self): - """See grpc.RpcContext.cancel""" + """See grpc.RpcContext.cancel""" with self._state.condition: if self._state.code is None: code = grpc.StatusCode.CANCELLED @@ -393,69 +393,69 @@ class _Rendezvous(grpc.RpcError, grpc.RpcContext): self._state.cancelled = True _abort(self._state, code, details) self._state.condition.notify_all() - return True - else: - return False - - def add_callback(self, callback): - """See grpc.RpcContext.add_callback""" - with self._state.condition: - if self._state.callbacks is None: - return False - else: - self._state.callbacks.append(callback) - return True - - def __iter__(self): - return self - - def next(self): - return self._next() - - def __next__(self): - return self._next() - - def _next(self): - raise NotImplementedError() - - def debug_error_string(self): - raise NotImplementedError() - - def _repr(self): - return _rpc_state_string(self.__class__.__name__, self._state) - - def __repr__(self): - return self._repr() - - def __str__(self): - return self._repr() - - def __del__(self): - with self._state.condition: - if self._state.code is None: - self._state.code = grpc.StatusCode.CANCELLED - self._state.details = 'Cancelled upon garbage collection!' - self._state.cancelled = True - self._call.cancel( - _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[self._state.code], - self._state.details) - self._state.condition.notify_all() - - + return True + else: + return False + + def add_callback(self, callback): + """See grpc.RpcContext.add_callback""" + with self._state.condition: + if self._state.callbacks is None: + return False + else: + self._state.callbacks.append(callback) + return True + + def __iter__(self): + return self + + def next(self): + return self._next() + + def __next__(self): + return self._next() + + def _next(self): + raise NotImplementedError() + + def debug_error_string(self): + raise NotImplementedError() + + def _repr(self): + return _rpc_state_string(self.__class__.__name__, self._state) + + def __repr__(self): + return self._repr() + + def __str__(self): + return self._repr() + + def __del__(self): + with self._state.condition: + if self._state.code is None: + self._state.code = grpc.StatusCode.CANCELLED + self._state.details = 'Cancelled upon garbage collection!' + self._state.cancelled = True + self._call.cancel( + _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[self._state.code], + self._state.details) + self._state.condition.notify_all() + + class _SingleThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint: disable=too-many-ancestors - """An RPC iterator operating entirely on a single thread. 
- - The __next__ method of _SingleThreadedRendezvous does not depend on the - existence of any other thread, including the "channel spin thread". - However, this means that its interface is entirely synchronous. So this + """An RPC iterator operating entirely on a single thread. + + The __next__ method of _SingleThreadedRendezvous does not depend on the + existence of any other thread, including the "channel spin thread". + However, this means that its interface is entirely synchronous. So this class cannot completely fulfill the grpc.Future interface. The result, exception, and traceback methods will never block and will instead raise an exception if calling the method would result in blocking. This means that these methods are safe to call from add_done_callback handlers. - """ - + """ + def _is_complete(self): return self._state.code is not None @@ -548,145 +548,145 @@ class _SingleThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint: fn(self) - def initial_metadata(self): - """See grpc.Call.initial_metadata""" - with self._state.condition: - # NOTE(gnossen): Based on our initial call batch, we are guaranteed - # to receive initial metadata before any messages. - while self._state.initial_metadata is None: - self._consume_next_event() - return self._state.initial_metadata - - def trailing_metadata(self): - """See grpc.Call.trailing_metadata""" - with self._state.condition: - if self._state.trailing_metadata is None: - raise grpc.experimental.UsageError( - "Cannot get trailing metadata until RPC is completed.") - return self._state.trailing_metadata - - def code(self): - """See grpc.Call.code""" - with self._state.condition: - if self._state.code is None: - raise grpc.experimental.UsageError( - "Cannot get code until RPC is completed.") - return self._state.code - - def details(self): - """See grpc.Call.details""" - with self._state.condition: - if self._state.details is None: - raise grpc.experimental.UsageError( - "Cannot get details until RPC is completed.") - return _common.decode(self._state.details) - - def _consume_next_event(self): - event = self._call.next_event() - with self._state.condition: - callbacks = _handle_event(event, self._state, - self._response_deserializer) - for callback in callbacks: - # NOTE(gnossen): We intentionally allow exceptions to bubble up - # to the user when running on a single thread. 
- callback() - return event - - def _next_response(self): - while True: - self._consume_next_event() - with self._state.condition: - if self._state.response is not None: - response = self._state.response - self._state.response = None - return response - elif cygrpc.OperationType.receive_message not in self._state.due: - if self._state.code is grpc.StatusCode.OK: - raise StopIteration() - elif self._state.code is not None: - raise self - - def _next(self): - with self._state.condition: - if self._state.code is None: - operating = self._call.operate( - (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), None) - if operating: - self._state.due.add(cygrpc.OperationType.receive_message) - elif self._state.code is grpc.StatusCode.OK: - raise StopIteration() - else: - raise self - return self._next_response() - - def debug_error_string(self): - with self._state.condition: - if self._state.debug_error_string is None: - raise grpc.experimental.UsageError( - "Cannot get debug error string until RPC is completed.") - return _common.decode(self._state.debug_error_string) - - -class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint: disable=too-many-ancestors - """An RPC iterator that depends on a channel spin thread. - - This iterator relies upon a per-channel thread running in the background, - dequeueing events from the completion queue, and notifying threads waiting - on the threading.Condition object in the _RPCState object. - - This extra thread allows _MultiThreadedRendezvous to fulfill the grpc.Future interface - and to mediate a bidirection streaming RPC. - """ - - def initial_metadata(self): - """See grpc.Call.initial_metadata""" - with self._state.condition: - - def _done(): - return self._state.initial_metadata is not None - - _common.wait(self._state.condition.wait, _done) - return self._state.initial_metadata - - def trailing_metadata(self): - """See grpc.Call.trailing_metadata""" - with self._state.condition: - - def _done(): - return self._state.trailing_metadata is not None - - _common.wait(self._state.condition.wait, _done) - return self._state.trailing_metadata - - def code(self): - """See grpc.Call.code""" - with self._state.condition: - - def _done(): - return self._state.code is not None - - _common.wait(self._state.condition.wait, _done) - return self._state.code - - def details(self): - """See grpc.Call.details""" - with self._state.condition: - - def _done(): - return self._state.details is not None - - _common.wait(self._state.condition.wait, _done) - return _common.decode(self._state.details) - - def debug_error_string(self): - with self._state.condition: - - def _done(): - return self._state.debug_error_string is not None - - _common.wait(self._state.condition.wait, _done) - return _common.decode(self._state.debug_error_string) - + def initial_metadata(self): + """See grpc.Call.initial_metadata""" + with self._state.condition: + # NOTE(gnossen): Based on our initial call batch, we are guaranteed + # to receive initial metadata before any messages. 
+ while self._state.initial_metadata is None: + self._consume_next_event() + return self._state.initial_metadata + + def trailing_metadata(self): + """See grpc.Call.trailing_metadata""" + with self._state.condition: + if self._state.trailing_metadata is None: + raise grpc.experimental.UsageError( + "Cannot get trailing metadata until RPC is completed.") + return self._state.trailing_metadata + + def code(self): + """See grpc.Call.code""" + with self._state.condition: + if self._state.code is None: + raise grpc.experimental.UsageError( + "Cannot get code until RPC is completed.") + return self._state.code + + def details(self): + """See grpc.Call.details""" + with self._state.condition: + if self._state.details is None: + raise grpc.experimental.UsageError( + "Cannot get details until RPC is completed.") + return _common.decode(self._state.details) + + def _consume_next_event(self): + event = self._call.next_event() + with self._state.condition: + callbacks = _handle_event(event, self._state, + self._response_deserializer) + for callback in callbacks: + # NOTE(gnossen): We intentionally allow exceptions to bubble up + # to the user when running on a single thread. + callback() + return event + + def _next_response(self): + while True: + self._consume_next_event() + with self._state.condition: + if self._state.response is not None: + response = self._state.response + self._state.response = None + return response + elif cygrpc.OperationType.receive_message not in self._state.due: + if self._state.code is grpc.StatusCode.OK: + raise StopIteration() + elif self._state.code is not None: + raise self + + def _next(self): + with self._state.condition: + if self._state.code is None: + operating = self._call.operate( + (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), None) + if operating: + self._state.due.add(cygrpc.OperationType.receive_message) + elif self._state.code is grpc.StatusCode.OK: + raise StopIteration() + else: + raise self + return self._next_response() + + def debug_error_string(self): + with self._state.condition: + if self._state.debug_error_string is None: + raise grpc.experimental.UsageError( + "Cannot get debug error string until RPC is completed.") + return _common.decode(self._state.debug_error_string) + + +class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint: disable=too-many-ancestors + """An RPC iterator that depends on a channel spin thread. + + This iterator relies upon a per-channel thread running in the background, + dequeueing events from the completion queue, and notifying threads waiting + on the threading.Condition object in the _RPCState object. + + This extra thread allows _MultiThreadedRendezvous to fulfill the grpc.Future interface + and to mediate a bidirection streaming RPC. 
+ """ + + def initial_metadata(self): + """See grpc.Call.initial_metadata""" + with self._state.condition: + + def _done(): + return self._state.initial_metadata is not None + + _common.wait(self._state.condition.wait, _done) + return self._state.initial_metadata + + def trailing_metadata(self): + """See grpc.Call.trailing_metadata""" + with self._state.condition: + + def _done(): + return self._state.trailing_metadata is not None + + _common.wait(self._state.condition.wait, _done) + return self._state.trailing_metadata + + def code(self): + """See grpc.Call.code""" + with self._state.condition: + + def _done(): + return self._state.code is not None + + _common.wait(self._state.condition.wait, _done) + return self._state.code + + def details(self): + """See grpc.Call.details""" + with self._state.condition: + + def _done(): + return self._state.details is not None + + _common.wait(self._state.condition.wait, _done) + return _common.decode(self._state.details) + + def debug_error_string(self): + with self._state.condition: + + def _done(): + return self._state.debug_error_string is not None + + _common.wait(self._state.condition.wait, _done) + return _common.decode(self._state.debug_error_string) + def cancelled(self): with self._state.condition: return self._state.cancelled @@ -699,22 +699,22 @@ class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint: with self._state.condition: return self._state.code is not None - def _is_complete(self): - return self._state.code is not None - + def _is_complete(self): + return self._state.code is not None + def result(self, timeout=None): - """Returns the result of the computation or raises its exception. - - See grpc.Future.result for the full API contract. - """ + """Returns the result of the computation or raises its exception. + + See grpc.Future.result for the full API contract. + """ with self._state.condition: timed_out = _common.wait(self._state.condition.wait, self._is_complete, timeout=timeout) - if timed_out: - raise grpc.FutureTimeoutError() - else: - if self._state.code is grpc.StatusCode.OK: + if timed_out: + raise grpc.FutureTimeoutError() + else: + if self._state.code is grpc.StatusCode.OK: return self._state.response elif self._state.cancelled: raise grpc.FutureCancelledError() @@ -722,18 +722,18 @@ class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint: raise self def exception(self, timeout=None): - """Return the exception raised by the computation. - - See grpc.Future.exception for the full API contract. - """ + """Return the exception raised by the computation. + + See grpc.Future.exception for the full API contract. + """ with self._state.condition: timed_out = _common.wait(self._state.condition.wait, self._is_complete, timeout=timeout) - if timed_out: - raise grpc.FutureTimeoutError() - else: - if self._state.code is grpc.StatusCode.OK: + if timed_out: + raise grpc.FutureTimeoutError() + else: + if self._state.code is grpc.StatusCode.OK: return None elif self._state.cancelled: raise grpc.FutureCancelledError() @@ -741,18 +741,18 @@ class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint: return self def traceback(self, timeout=None): - """Access the traceback of the exception raised by the computation. - - See grpc.future.traceback for the full API contract. - """ + """Access the traceback of the exception raised by the computation. + + See grpc.future.traceback for the full API contract. 
+ """ with self._state.condition: timed_out = _common.wait(self._state.condition.wait, self._is_complete, timeout=timeout) - if timed_out: - raise grpc.FutureTimeoutError() - else: - if self._state.code is grpc.StatusCode.OK: + if timed_out: + raise grpc.FutureTimeoutError() + else: + if self._state.code is grpc.StatusCode.OK: return None elif self._state.cancelled: raise grpc.FutureCancelledError() @@ -765,7 +765,7 @@ class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint: def add_done_callback(self, fn): with self._state.condition: if self._state.code is None: - self._state.callbacks.append(functools.partial(fn, self)) + self._state.callbacks.append(functools.partial(fn, self)) return fn(self) @@ -785,22 +785,22 @@ class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint: else: raise self - def _response_ready(): - return ( - self._state.response is not None or - (cygrpc.OperationType.receive_message not in self._state.due - and self._state.code is not None)) - - _common.wait(self._state.condition.wait, _response_ready) - if self._state.response is not None: - response = self._state.response - self._state.response = None - return response - elif cygrpc.OperationType.receive_message not in self._state.due: - if self._state.code is grpc.StatusCode.OK: - raise StopIteration() - elif self._state.code is not None: - raise self + def _response_ready(): + return ( + self._state.response is not None or + (cygrpc.OperationType.receive_message not in self._state.due + and self._state.code is not None)) + + _common.wait(self._state.condition.wait, _response_ready) + if self._state.response is not None: + response = self._state.response + self._state.response = None + return response + elif cygrpc.OperationType.receive_message not in self._state.due: + if self._state.code is grpc.StatusCode.OK: + raise StopIteration() + elif self._state.code is not None: + raise self def _start_unary_request(request, timeout, request_serializer): @@ -809,8 +809,8 @@ def _start_unary_request(request, timeout, request_serializer): if serialized_request is None: state = _RPCState((), (), (), grpc.StatusCode.INTERNAL, 'Exception serializing request!') - error = _InactiveRpcError(state) - return deadline, None, error + error = _InactiveRpcError(state) + return deadline, None, error else: return deadline, serialized_request, None @@ -818,12 +818,12 @@ def _start_unary_request(request, timeout, request_serializer): def _end_unary_response_blocking(state, call, with_call, deadline): if state.code is grpc.StatusCode.OK: if with_call: - rendezvous = _MultiThreadedRendezvous(state, call, None, deadline) + rendezvous = _MultiThreadedRendezvous(state, call, None, deadline) return state.response, rendezvous else: return state.response else: - raise _InactiveRpcError(state) + raise _InactiveRpcError(state) def _stream_unary_invocation_operationses(metadata, initial_metadata_flags): @@ -847,18 +847,18 @@ def _stream_unary_invocation_operationses_and_tags(metadata, metadata, initial_metadata_flags)) -def _determine_deadline(user_deadline): - parent_deadline = cygrpc.get_deadline_from_context() - if parent_deadline is None and user_deadline is None: - return None - elif parent_deadline is not None and user_deadline is None: - return parent_deadline - elif user_deadline is not None and parent_deadline is None: - return user_deadline - else: - return min(parent_deadline, user_deadline) - - +def _determine_deadline(user_deadline): + parent_deadline = cygrpc.get_deadline_from_context() + if 
parent_deadline is None and user_deadline is None: + return None + elif parent_deadline is not None and user_deadline is None: + return parent_deadline + elif user_deadline is not None and parent_deadline is None: + return user_deadline + else: + return min(parent_deadline, user_deadline) + + class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): # pylint: disable=too-many-arguments @@ -871,19 +871,19 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): self._response_deserializer = response_deserializer self._context = cygrpc.build_census_context() - def _prepare(self, request, timeout, metadata, wait_for_ready, compression): + def _prepare(self, request, timeout, metadata, wait_for_ready, compression): deadline, serialized_request, rendezvous = _start_unary_request( request, timeout, self._request_serializer) initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( wait_for_ready) - augmented_metadata = _compression.augment_metadata( - metadata, compression) + augmented_metadata = _compression.augment_metadata( + metadata, compression) if serialized_request is None: return None, None, None, rendezvous else: state = _RPCState(_UNARY_UNARY_INITIAL_DUE, None, None, None, None) operations = ( - cygrpc.SendInitialMetadataOperation(augmented_metadata, + cygrpc.SendInitialMetadataOperation(augmented_metadata, initial_metadata_flags), cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS), cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), @@ -893,17 +893,17 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): ) return state, operations, deadline, None - def _blocking(self, request, timeout, metadata, credentials, wait_for_ready, - compression): + def _blocking(self, request, timeout, metadata, credentials, wait_for_ready, + compression): state, operations, deadline, rendezvous = self._prepare( - request, timeout, metadata, wait_for_ready, compression) + request, timeout, metadata, wait_for_ready, compression) if state is None: raise rendezvous # pylint: disable-msg=raising-bad-type else: call = self._channel.segregated_call( cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, - self._method, None, _determine_deadline(deadline), metadata, - None if credentials is None else credentials._credentials, (( + self._method, None, _determine_deadline(deadline), metadata, + None if credentials is None else credentials._credentials, (( operations, None, ),), self._context) @@ -916,10 +916,10 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): state, call, = self._blocking(request, timeout, metadata, credentials, - wait_for_ready, compression) + wait_for_ready, compression) return _end_unary_response_blocking(state, call, False, None) def with_call(self, @@ -927,10 +927,10 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): state, call, = self._blocking(request, timeout, metadata, credentials, - wait_for_ready, compression) + wait_for_ready, compression) return _end_unary_response_blocking(state, call, True, None) def future(self, @@ -938,10 +938,10 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): state, operations, 
deadline, rendezvous = self._prepare( - request, timeout, metadata, wait_for_ready, compression) + request, timeout, metadata, wait_for_ready, compression) if state is None: raise rendezvous # pylint: disable-msg=raising-bad-type else: @@ -956,56 +956,56 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): deadline) -class _SingleThreadedUnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): - - # pylint: disable=too-many-arguments - def __init__(self, channel, method, request_serializer, - response_deserializer): - self._channel = channel - self._method = method - self._request_serializer = request_serializer - self._response_deserializer = response_deserializer - self._context = cygrpc.build_census_context() - - def __call__( # pylint: disable=too-many-locals - self, - request, - timeout=None, - metadata=None, - credentials=None, - wait_for_ready=None, - compression=None): - deadline = _deadline(timeout) - serialized_request = _common.serialize(request, - self._request_serializer) - if serialized_request is None: - state = _RPCState((), (), (), grpc.StatusCode.INTERNAL, - 'Exception serializing request!') - raise _InactiveRpcError(state) - - state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None) - call_credentials = None if credentials is None else credentials._credentials - initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( - wait_for_ready) - augmented_metadata = _compression.augment_metadata( - metadata, compression) - operations = ( - (cygrpc.SendInitialMetadataOperation(augmented_metadata, - initial_metadata_flags), - cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS), - cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS)), - (cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),), - (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),), - ) - operations_and_tags = tuple((ops, None) for ops in operations) - call = self._channel.segregated_call( - cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, self._method, - None, _determine_deadline(deadline), metadata, call_credentials, - operations_and_tags, self._context) - return _SingleThreadedRendezvous(state, call, - self._response_deserializer, deadline) - - +class _SingleThreadedUnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): + + # pylint: disable=too-many-arguments + def __init__(self, channel, method, request_serializer, + response_deserializer): + self._channel = channel + self._method = method + self._request_serializer = request_serializer + self._response_deserializer = response_deserializer + self._context = cygrpc.build_census_context() + + def __call__( # pylint: disable=too-many-locals + self, + request, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None): + deadline = _deadline(timeout) + serialized_request = _common.serialize(request, + self._request_serializer) + if serialized_request is None: + state = _RPCState((), (), (), grpc.StatusCode.INTERNAL, + 'Exception serializing request!') + raise _InactiveRpcError(state) + + state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None) + call_credentials = None if credentials is None else credentials._credentials + initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( + wait_for_ready) + augmented_metadata = _compression.augment_metadata( + metadata, compression) + operations = ( + (cygrpc.SendInitialMetadataOperation(augmented_metadata, + initial_metadata_flags), + cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS), + 
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS)), + (cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),), + (cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),), + ) + operations_and_tags = tuple((ops, None) for ops in operations) + call = self._channel.segregated_call( + cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, self._method, + None, _determine_deadline(deadline), metadata, call_credentials, + operations_and_tags, self._context) + return _SingleThreadedRendezvous(state, call, + self._response_deserializer, deadline) + + class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): # pylint: disable=too-many-arguments @@ -1018,14 +1018,14 @@ class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): self._response_deserializer = response_deserializer self._context = cygrpc.build_census_context() - def __call__( # pylint: disable=too-many-locals - self, - request, - timeout=None, - metadata=None, - credentials=None, - wait_for_ready=None, - compression=None): + def __call__( # pylint: disable=too-many-locals + self, + request, + timeout=None, + metadata=None, + credentials=None, + wait_for_ready=None, + compression=None): deadline, serialized_request, rendezvous = _start_unary_request( request, timeout, self._request_serializer) initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( @@ -1033,12 +1033,12 @@ class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): if serialized_request is None: raise rendezvous # pylint: disable-msg=raising-bad-type else: - augmented_metadata = _compression.augment_metadata( - metadata, compression) + augmented_metadata = _compression.augment_metadata( + metadata, compression) state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None) operationses = ( ( - cygrpc.SendInitialMetadataOperation(augmented_metadata, + cygrpc.SendInitialMetadataOperation(augmented_metadata, initial_metadata_flags), cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS), @@ -1049,7 +1049,7 @@ class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): ) call = self._managed_call( cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, - self._method, None, _determine_deadline(deadline), metadata, + self._method, None, _determine_deadline(deadline), metadata, None if credentials is None else credentials._credentials, operationses, _event_handler(state, self._response_deserializer), @@ -1072,19 +1072,19 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): self._context = cygrpc.build_census_context() def _blocking(self, request_iterator, timeout, metadata, credentials, - wait_for_ready, compression): + wait_for_ready, compression): deadline = _deadline(timeout) state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None) initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( wait_for_ready) - augmented_metadata = _compression.augment_metadata( - metadata, compression) + augmented_metadata = _compression.augment_metadata( + metadata, compression) call = self._channel.segregated_call( cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, self._method, None, _determine_deadline(deadline), augmented_metadata, None if credentials is None else credentials._credentials, _stream_unary_invocation_operationses_and_tags( - augmented_metadata, initial_metadata_flags), self._context) + augmented_metadata, initial_metadata_flags), self._context) _consume_request_iterator(request_iterator, state, call, self._request_serializer, None) while True: @@ -1101,10 +1101,10 @@ class 
_StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): state, call, = self._blocking(request_iterator, timeout, metadata, - credentials, wait_for_ready, compression) + credentials, wait_for_ready, compression) return _end_unary_response_blocking(state, call, False, None) def with_call(self, @@ -1112,10 +1112,10 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): state, call, = self._blocking(request_iterator, timeout, metadata, - credentials, wait_for_ready, compression) + credentials, wait_for_ready, compression) return _end_unary_response_blocking(state, call, True, None) def future(self, @@ -1123,15 +1123,15 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): deadline = _deadline(timeout) state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None) event_handler = _event_handler(state, self._response_deserializer) initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( wait_for_ready) - augmented_metadata = _compression.augment_metadata( - metadata, compression) + augmented_metadata = _compression.augment_metadata( + metadata, compression) call = self._managed_call( cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS, self._method, None, deadline, augmented_metadata, @@ -1141,8 +1141,8 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): event_handler, self._context) _consume_request_iterator(request_iterator, state, call, self._request_serializer, event_handler) - return _MultiThreadedRendezvous(state, call, - self._response_deserializer, deadline) + return _MultiThreadedRendezvous(state, call, + self._response_deserializer, deadline) class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable): @@ -1162,17 +1162,17 @@ class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): deadline = _deadline(timeout) state = _RPCState(_STREAM_STREAM_INITIAL_DUE, None, None, None, None) initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready( wait_for_ready) - augmented_metadata = _compression.augment_metadata( - metadata, compression) + augmented_metadata = _compression.augment_metadata( + metadata, compression) operationses = ( ( - cygrpc.SendInitialMetadataOperation(augmented_metadata, + cygrpc.SendInitialMetadataOperation(augmented_metadata, initial_metadata_flags), cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), ), @@ -1186,8 +1186,8 @@ class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable): operationses, event_handler, self._context) _consume_request_iterator(request_iterator, state, call, self._request_serializer, event_handler) - return _MultiThreadedRendezvous(state, call, - self._response_deserializer, deadline) + return _MultiThreadedRendezvous(state, call, + self._response_deserializer, deadline) class _InitialMetadataFlags(int): @@ -1416,55 +1416,55 @@ def _unsubscribe(state, callback): break -def _augment_options(base_options, compression): - compression_option = _compression.create_channel_option(compression) - return tuple(base_options) + 
compression_option + (( - cygrpc.ChannelArgKey.primary_user_agent_string, - _USER_AGENT, - ),) - - -def _separate_channel_options(options): - """Separates core channel options from Python channel options.""" - core_options = [] - python_options = [] - for pair in options: - if pair[0] == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream: - python_options.append(pair) - else: - core_options.append(pair) - return python_options, core_options - - +def _augment_options(base_options, compression): + compression_option = _compression.create_channel_option(compression) + return tuple(base_options) + compression_option + (( + cygrpc.ChannelArgKey.primary_user_agent_string, + _USER_AGENT, + ),) + + +def _separate_channel_options(options): + """Separates core channel options from Python channel options.""" + core_options = [] + python_options = [] + for pair in options: + if pair[0] == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream: + python_options.append(pair) + else: + core_options.append(pair) + return python_options, core_options + + class Channel(grpc.Channel): """A cygrpc.Channel-backed implementation of grpc.Channel.""" - def __init__(self, target, options, credentials, compression): + def __init__(self, target, options, credentials, compression): """Constructor. Args: target: The target to which to connect. options: Configuration options for the channel. credentials: A cygrpc.ChannelCredentials or None. - compression: An optional value indicating the compression method to be - used over the lifetime of the channel. + compression: An optional value indicating the compression method to be + used over the lifetime of the channel. """ - python_options, core_options = _separate_channel_options(options) - self._single_threaded_unary_stream = _DEFAULT_SINGLE_THREADED_UNARY_STREAM - self._process_python_options(python_options) + python_options, core_options = _separate_channel_options(options) + self._single_threaded_unary_stream = _DEFAULT_SINGLE_THREADED_UNARY_STREAM + self._process_python_options(python_options) self._channel = cygrpc.Channel( - _common.encode(target), _augment_options(core_options, compression), - credentials) + _common.encode(target), _augment_options(core_options, compression), + credentials) self._call_state = _ChannelCallState(self._channel) self._connectivity_state = _ChannelConnectivityState(self._channel) cygrpc.fork_register_channel(self) - def _process_python_options(self, python_options): - """Sets channel attributes according to python-only channel options.""" - for pair in python_options: - if pair[0] == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream: - self._single_threaded_unary_stream = True - + def _process_python_options(self, python_options): + """Sets channel attributes according to python-only channel options.""" + for pair in python_options: + if pair[0] == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream: + self._single_threaded_unary_stream = True + def subscribe(self, callback, try_to_connect=None): _subscribe(self._connectivity_state, callback, try_to_connect) @@ -1483,15 +1483,15 @@ class Channel(grpc.Channel): method, request_serializer=None, response_deserializer=None): - # NOTE(rbellevi): Benchmarks have shown that running a unary-stream RPC - # on a single Python thread results in an appreciable speed-up. However, - # due to slight differences in capability, the multi-threaded variant - # remains the default. 
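# Illustrative sketch, not part of this diff: the python-only channel option
# referenced in the NOTE above is consumed by _separate_channel_options() and
# _process_python_options(); only the option key is inspected, the value is
# ignored. Assuming the public grpc API surface (the target below is a
# placeholder), opting in looks like:
#
#   import grpc
#   import grpc.experimental  # provides ChannelOptions
#   opts = ((grpc.experimental.ChannelOptions.SingleThreadedUnaryStream, 1),)
#   channel = grpc.insecure_channel('localhost:50051', options=opts)
#
# Unary-stream calls on such a channel take the
# _SingleThreadedUnaryStreamMultiCallable path instead of the default one.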
- if self._single_threaded_unary_stream: - return _SingleThreadedUnaryStreamMultiCallable( - self._channel, _common.encode(method), request_serializer, - response_deserializer) - else: + # NOTE(rbellevi): Benchmarks have shown that running a unary-stream RPC + # on a single Python thread results in an appreciable speed-up. However, + # due to slight differences in capability, the multi-threaded variant + # remains the default. + if self._single_threaded_unary_stream: + return _SingleThreadedUnaryStreamMultiCallable( + self._channel, _common.encode(method), request_serializer, + response_deserializer) + else: return _UnaryStreamMultiCallable( self._channel, _channel_managed_call_management(self._call_state), @@ -1514,19 +1514,19 @@ class Channel(grpc.Channel): self._channel, _channel_managed_call_management(self._call_state), _common.encode(method), request_serializer, response_deserializer) - def _unsubscribe_all(self): - state = self._connectivity_state - if state: - with state.lock: - del state.callbacks_and_connectivities[:] - + def _unsubscribe_all(self): + state = self._connectivity_state + if state: + with state.lock: + del state.callbacks_and_connectivities[:] + def _close(self): - self._unsubscribe_all() + self._unsubscribe_all() self._channel.close(cygrpc.StatusCode.cancelled, 'Channel closed!') - cygrpc.fork_unregister_channel(self) + cygrpc.fork_unregister_channel(self) def _close_on_fork(self): - self._unsubscribe_all() + self._unsubscribe_all() self._channel.close_on_fork(cygrpc.StatusCode.cancelled, 'Channel closed due to fork') @@ -1550,9 +1550,9 @@ class Channel(grpc.Channel): # for as long as they are in use and to close them after using them, # then deletion of this grpc._channel.Channel instance can be made to # effect closure of the underlying cygrpc.Channel instance. - try: - self._unsubscribe_all() - except: # pylint: disable=bare-except - # Exceptions in __del__ are ignored by Python anyway, but they can - # keep spamming logs. Just silence them. - pass + try: + self._unsubscribe_all() + except: # pylint: disable=bare-except + # Exceptions in __del__ are ignored by Python anyway, but they can + # keep spamming logs. Just silence them. + pass diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_common.py b/contrib/libs/grpc/src/python/grpcio/grpc/_common.py index c18d2fca18..128124c325 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_common.py +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_common.py @@ -14,7 +14,7 @@ """Shared implementation.""" import logging -import time +import time import six import grpc @@ -59,11 +59,11 @@ STATUS_CODE_TO_CYGRPC_STATUS_CODE = { CYGRPC_STATUS_CODE_TO_STATUS_CODE) } -MAXIMUM_WAIT_TIMEOUT = 0.1 +MAXIMUM_WAIT_TIMEOUT = 0.1 _ERROR_MESSAGE_PORT_BINDING_FAILED = 'Failed to bind to address %s; set ' \ 'GRPC_VERBOSITY=debug environment variable to see detailed error message.' - + def encode(s): if isinstance(s, bytes): @@ -100,53 +100,53 @@ def deserialize(serialized_message, deserializer): def fully_qualified_method(group, method): return '/{}/{}'.format(group, method) - - -def _wait_once(wait_fn, timeout, spin_cb): - wait_fn(timeout=timeout) - if spin_cb is not None: - spin_cb() - - -def wait(wait_fn, wait_complete_fn, timeout=None, spin_cb=None): - """Blocks waiting for an event without blocking the thread indefinitely. - - See https://github.com/grpc/grpc/issues/19464 for full context. 
CPython's - `threading.Event.wait` and `threading.Condition.wait` methods, if invoked - without a timeout kwarg, may block the calling thread indefinitely. If the - call is made from the main thread, this means that signal handlers may not - run for an arbitrarily long period of time. - - This wrapper calls the supplied wait function with an arbitrary short - timeout to ensure that no signal handler has to wait longer than - MAXIMUM_WAIT_TIMEOUT before executing. - - Args: - wait_fn: A callable acceptable a single float-valued kwarg named - `timeout`. This function is expected to be one of `threading.Event.wait` - or `threading.Condition.wait`. - wait_complete_fn: A callable taking no arguments and returning a bool. - When this function returns true, it indicates that waiting should cease. - timeout: An optional float-valued number of seconds after which the wait - should cease. - spin_cb: An optional Callable taking no arguments and returning nothing. - This callback will be called on each iteration of the spin. This may be - used for, e.g. work related to forking. - - Returns: - True if a timeout was supplied and it was reached. False otherwise. - """ - if timeout is None: - while not wait_complete_fn(): - _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) - else: - end = time.time() + timeout - while not wait_complete_fn(): - remaining = min(end - time.time(), MAXIMUM_WAIT_TIMEOUT) - if remaining < 0: - return True - _wait_once(wait_fn, remaining, spin_cb) - return False + + +def _wait_once(wait_fn, timeout, spin_cb): + wait_fn(timeout=timeout) + if spin_cb is not None: + spin_cb() + + +def wait(wait_fn, wait_complete_fn, timeout=None, spin_cb=None): + """Blocks waiting for an event without blocking the thread indefinitely. + + See https://github.com/grpc/grpc/issues/19464 for full context. CPython's + `threading.Event.wait` and `threading.Condition.wait` methods, if invoked + without a timeout kwarg, may block the calling thread indefinitely. If the + call is made from the main thread, this means that signal handlers may not + run for an arbitrarily long period of time. + + This wrapper calls the supplied wait function with an arbitrary short + timeout to ensure that no signal handler has to wait longer than + MAXIMUM_WAIT_TIMEOUT before executing. + + Args: + wait_fn: A callable acceptable a single float-valued kwarg named + `timeout`. This function is expected to be one of `threading.Event.wait` + or `threading.Condition.wait`. + wait_complete_fn: A callable taking no arguments and returning a bool. + When this function returns true, it indicates that waiting should cease. + timeout: An optional float-valued number of seconds after which the wait + should cease. + spin_cb: An optional Callable taking no arguments and returning nothing. + This callback will be called on each iteration of the spin. This may be + used for, e.g. work related to forking. + + Returns: + True if a timeout was supplied and it was reached. False otherwise. 
+ """ + if timeout is None: + while not wait_complete_fn(): + _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) + else: + end = time.time() + timeout + while not wait_complete_fn(): + remaining = min(end - time.time(), MAXIMUM_WAIT_TIMEOUT) + if remaining < 0: + return True + _wait_once(wait_fn, remaining, spin_cb) + return False def validate_port_binding_result(address, port): diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_compression.py b/contrib/libs/grpc/src/python/grpcio/grpc/_compression.py index 18da583271..45339c3afe 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_compression.py +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_compression.py @@ -1,55 +1,55 @@ -# Copyright 2019 The gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from grpc._cython import cygrpc - -NoCompression = cygrpc.CompressionAlgorithm.none -Deflate = cygrpc.CompressionAlgorithm.deflate -Gzip = cygrpc.CompressionAlgorithm.gzip - -_METADATA_STRING_MAPPING = { - NoCompression: 'identity', - Deflate: 'deflate', - Gzip: 'gzip', -} - - -def _compression_algorithm_to_metadata_value(compression): - return _METADATA_STRING_MAPPING[compression] - - -def compression_algorithm_to_metadata(compression): - return (cygrpc.GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY, - _compression_algorithm_to_metadata_value(compression)) - - -def create_channel_option(compression): - return ((cygrpc.GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM, - int(compression)),) if compression else () - - -def augment_metadata(metadata, compression): - if not metadata and not compression: - return None - base_metadata = tuple(metadata) if metadata else () - compression_metadata = ( - compression_algorithm_to_metadata(compression),) if compression else () - return base_metadata + compression_metadata - - -__all__ = ( - "NoCompression", - "Deflate", - "Gzip", -) +# Copyright 2019 The gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
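# Illustrative sketch, not part of this diff: the helpers in this module turn
# a compression setting into either a channel argument (create_channel_option)
# or per-call request metadata (augment_metadata). Assuming the public
# grpc.Compression enum of matching grpc releases (the target below is a
# placeholder):
#
#   import grpc
#   channel = grpc.insecure_channel('localhost:50051',
#                                   compression=grpc.Compression.Gzip)
#   # Individual calls may override the channel default, e.g.
#   # stub.Method(request, compression=grpc.Compression.NoCompression)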
+ +from grpc._cython import cygrpc + +NoCompression = cygrpc.CompressionAlgorithm.none +Deflate = cygrpc.CompressionAlgorithm.deflate +Gzip = cygrpc.CompressionAlgorithm.gzip + +_METADATA_STRING_MAPPING = { + NoCompression: 'identity', + Deflate: 'deflate', + Gzip: 'gzip', +} + + +def _compression_algorithm_to_metadata_value(compression): + return _METADATA_STRING_MAPPING[compression] + + +def compression_algorithm_to_metadata(compression): + return (cygrpc.GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY, + _compression_algorithm_to_metadata_value(compression)) + + +def create_channel_option(compression): + return ((cygrpc.GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM, + int(compression)),) if compression else () + + +def augment_metadata(metadata, compression): + if not metadata and not compression: + return None + base_metadata = tuple(metadata) if metadata else () + compression_metadata = ( + compression_algorithm_to_metadata(compression),) if compression else () + return base_metadata + compression_metadata + + +__all__ = ( + "NoCompression", + "Deflate", + "Gzip", +) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/_hooks.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/_hooks.pyx.pxi index da5317830a..de4d71b819 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/_hooks.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/_hooks.pyx.pxi @@ -16,7 +16,7 @@ cdef object _custom_op_on_c_call(int op, grpc_call *call): raise NotImplementedError("No custom hooks are implemented") -def install_context_from_request_call_event(RequestCallEvent event): +def install_context_from_request_call_event(RequestCallEvent event): pass def uninstall_context(): @@ -30,6 +30,6 @@ cdef class CensusContext: def set_census_context_on_call(_CallState call_state, CensusContext census_ctx): pass - -def get_deadline_from_context(): - return None + +def get_deadline_from_context(): + return None diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/call.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/call.pxd.pxi index 7e7f554672..867245a694 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/call.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/call.pxd.pxi @@ -1,29 +1,29 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + cdef class _AioCall(GrpcCallWrapper): - cdef: + cdef: readonly AioChannel _channel - list _references + list _references object _deadline list _done_callbacks # Caches the picked event loop, so we can avoid the 30ns overhead each # time we need access to the event loop. object _loop - + # Flag indicates whether cancel being called or not. Cancellation from # Core or peer works perfectly fine with normal procedure. However, we # need this flag to clean up resources for cancellation from the diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/call.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/call.pyx.pxi index 5b6edf35cf..10c024e1b3 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/call.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/call.pyx.pxi @@ -1,34 +1,34 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -_EMPTY_FLAGS = 0 -_EMPTY_MASK = 0 +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +_EMPTY_FLAGS = 0 +_EMPTY_MASK = 0 _IMMUTABLE_EMPTY_METADATA = tuple() - + _UNKNOWN_CANCELLATION_DETAILS = 'RPC cancelled for unknown reason.' _OK_CALL_REPRESENTATION = ('<{} of RPC that terminated with:\n' '\tstatus = {}\n' '\tdetails = "{}"\n' '>') - + _NON_OK_CALL_REPRESENTATION = ('<{} of RPC that terminated with:\n' '\tstatus = {}\n' '\tdetails = "{}"\n' '\tdebug_error_string = "{}"\n' '>') - + cdef int _get_send_initial_metadata_flags(object wait_for_ready) except *: cdef int flags = 0 @@ -48,9 +48,9 @@ cdef class _AioCall(GrpcCallWrapper): bytes method, CallCredentials call_credentials, object wait_for_ready): init_grpc_aio() self.call = NULL - self._channel = channel + self._channel = channel self._loop = channel.loop - self._references = [] + self._references = [] self._status = None self._initial_metadata = None self._waiters_status = [] @@ -60,7 +60,7 @@ cdef class _AioCall(GrpcCallWrapper): self._deadline = deadline self._send_initial_metadata_flags = _get_send_initial_metadata_flags(wait_for_ready) self._create_grpc_call(deadline, method, call_credentials) - + def __dealloc__(self): if self.call: grpc_call_unref(self.call) @@ -71,7 +71,7 @@ cdef class _AioCall(GrpcCallWrapper): # This needs to be loaded at run time once everything # has been loaded. 
from grpc import _common - + if not self.done(): return '<{} object>'.format(self.__class__.__name__) @@ -97,40 +97,40 @@ cdef class _AioCall(GrpcCallWrapper): object deadline, bytes method, CallCredentials credentials) except *: - """Creates the corresponding Core object for this RPC. - + """Creates the corresponding Core object for this RPC. + For unary calls, the grpc_call lives shortly and can be destroyed after - invoke start_batch. However, if either side is streaming, the grpc_call - life span will be longer than one function. So, it would better save it - as an instance variable than a stack variable, which reflects its - nature in Core. - """ - cdef grpc_slice method_slice + invoke start_batch. However, if either side is streaming, the grpc_call + life span will be longer than one function. So, it would better save it + as an instance variable than a stack variable, which reflects its + nature in Core. + """ + cdef grpc_slice method_slice cdef gpr_timespec c_deadline = _timespec_from_time(deadline) cdef grpc_call_error set_credentials_error - - method_slice = grpc_slice_from_copied_buffer( - <const char *> method, - <size_t> len(method) - ) + + method_slice = grpc_slice_from_copied_buffer( + <const char *> method, + <size_t> len(method) + ) self.call = grpc_channel_create_call( - self._channel.channel, - NULL, - _EMPTY_MASK, + self._channel.channel, + NULL, + _EMPTY_MASK, global_completion_queue(), - method_slice, - NULL, + method_slice, + NULL, c_deadline, - NULL - ) + NULL + ) if credentials is not None: set_credentials_error = grpc_call_set_credentials(self.call, credentials.c()) if set_credentials_error != GRPC_CALL_OK: raise InternalError("Credentials couldn't have been set: {0}".format(set_credentials_error)) - grpc_slice_unref(method_slice) - + grpc_slice_unref(method_slice) + cdef void _set_status(self, AioRpcStatus status) except *: cdef list waiters @@ -177,7 +177,7 @@ cdef class _AioCall(GrpcCallWrapper): return None else: return max(0, self._deadline - time.time()) - + def cancel(self, str details): """Cancels the RPC in Core with given RPC status. @@ -185,11 +185,11 @@ cdef class _AioCall(GrpcCallWrapper): proper state. """ self._is_locally_cancelled = True - + cdef object details_bytes cdef char *c_details cdef grpc_call_error error - + self._set_status(AioRpcStatus( StatusCode.cancelled, details, @@ -294,7 +294,7 @@ cdef class _AioCall(GrpcCallWrapper): outbound_initial_metadata: optional outbound metadata. """ cdef tuple ops - + cdef SendInitialMetadataOperation initial_metadata_op = SendInitialMetadataOperation( outbound_initial_metadata, self._send_initial_metadata_flags) @@ -303,19 +303,19 @@ cdef class _AioCall(GrpcCallWrapper): cdef ReceiveInitialMetadataOperation receive_initial_metadata_op = ReceiveInitialMetadataOperation(_EMPTY_FLAGS) cdef ReceiveMessageOperation receive_message_op = ReceiveMessageOperation(_EMPTY_FLAGS) cdef ReceiveStatusOnClientOperation receive_status_on_client_op = ReceiveStatusOnClientOperation(_EMPTY_FLAGS) - + ops = (initial_metadata_op, send_message_op, send_close_op, receive_initial_metadata_op, receive_message_op, receive_status_on_client_op) - + # Executes all operations in one batch. # Might raise CancelledError, handling it in Python UnaryUnaryCall. 
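# Annotation, not part of this diff: the six operations assembled above cover
# the whole unary-unary exchange (send initial metadata, send the single
# request, half-close, then receive initial metadata, the single response and
# the final status), so one grpc_call_start_batch, driven by the awaited
# execute_batch() below, is enough to run the entire RPC.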
await execute_batch(self, ops, self._loop) - + self._set_initial_metadata(receive_initial_metadata_op.initial_metadata()) - + cdef grpc_status_code code code = receive_status_on_client_op.code() @@ -325,12 +325,12 @@ cdef class _AioCall(GrpcCallWrapper): receive_status_on_client_op.trailing_metadata(), receive_status_on_client_op.error_string(), )) - + if code == StatusCode.ok: return receive_message_op.message() else: return None - + async def _handle_status_once_received(self): """Handles the status sent by peer once received.""" cdef ReceiveStatusOnClientOperation op = ReceiveStatusOnClientOperation(_EMPTY_FLAGS) @@ -347,11 +347,11 @@ cdef class _AioCall(GrpcCallWrapper): op.trailing_metadata(), op.error_string(), )) - + async def receive_serialized_message(self): """Receives one single raw message in bytes.""" cdef bytes received_message - + # Receives a message. Returns None when failed: # * EOF, no more messages to read; # * The client application cancels; @@ -364,7 +364,7 @@ cdef class _AioCall(GrpcCallWrapper): return received_message else: return EOF - + async def send_serialized_message(self, bytes message): """Sends one single raw message in bytes.""" await _send_message(self, @@ -372,7 +372,7 @@ cdef class _AioCall(GrpcCallWrapper): None, False, self._loop) - + async def send_receive_close(self): """Half close the RPC on the client-side.""" cdef SendCloseFromClientOperation op = SendCloseFromClientOperation(_EMPTY_FLAGS) @@ -401,7 +401,7 @@ cdef class _AioCall(GrpcCallWrapper): initial_metadata_op, send_message_op, send_close_op, - ) + ) try: # Sends out the request message. diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/callback_common.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/callback_common.pxd.pxi index 44276bbad3..e5620cd166 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/callback_common.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/callback_common.pxd.pxi @@ -1,57 +1,57 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -cdef class CallbackFailureHandler: - cdef str _core_function_name - cdef object _error_details - cdef object _exception_type - - cdef handle(self, object future) - - -cdef struct CallbackContext: - # C struct to store callback context in the form of pointers. - # - # Attributes: - # functor: A grpc_experimental_completion_queue_functor represents the +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +cdef class CallbackFailureHandler: + cdef str _core_function_name + cdef object _error_details + cdef object _exception_type + + cdef handle(self, object future) + + +cdef struct CallbackContext: + # C struct to store callback context in the form of pointers. + # + # Attributes: + # functor: A grpc_experimental_completion_queue_functor represents the # callback function in the only way Core understands. - # waiter: An asyncio.Future object that fulfills when the callback is + # waiter: An asyncio.Future object that fulfills when the callback is # invoked by Core. # failure_handler: A CallbackFailureHandler object that called when Core - # returns 'success == 0' state. + # returns 'success == 0' state. # wrapper: A self-reference to the CallbackWrapper to help life cycle # management. - grpc_experimental_completion_queue_functor functor - cpython.PyObject *waiter + grpc_experimental_completion_queue_functor functor + cpython.PyObject *waiter cpython.PyObject *loop - cpython.PyObject *failure_handler + cpython.PyObject *failure_handler cpython.PyObject *callback_wrapper - - -cdef class CallbackWrapper: - cdef CallbackContext context - cdef object _reference_of_future - cdef object _reference_of_failure_handler - - @staticmethod - cdef void functor_run( - grpc_experimental_completion_queue_functor* functor, - int succeed) - - cdef grpc_experimental_completion_queue_functor *c_functor(self) - - -cdef class GrpcCallWrapper: - cdef grpc_call* call + + +cdef class CallbackWrapper: + cdef CallbackContext context + cdef object _reference_of_future + cdef object _reference_of_failure_handler + + @staticmethod + cdef void functor_run( + grpc_experimental_completion_queue_functor* functor, + int succeed) + + cdef grpc_experimental_completion_queue_functor *c_functor(self) + + +cdef class GrpcCallWrapper: + cdef grpc_call* call diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/callback_common.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/callback_common.pyx.pxi index 00a05448ab..86fc91e76a 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/callback_common.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/callback_common.pyx.pxi @@ -1,57 +1,57 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -cdef class CallbackFailureHandler: - - def __cinit__(self, - str core_function_name, - object error_details, - object exception_type): - """Handles failure by raising exception.""" - self._core_function_name = core_function_name - self._error_details = error_details - self._exception_type = exception_type - - cdef handle(self, object future): - future.set_exception(self._exception_type( - 'Failed "%s": %s' % (self._core_function_name, self._error_details) - )) - - -cdef class CallbackWrapper: - +# Copyright 2019 gRPC authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +cdef class CallbackFailureHandler: + + def __cinit__(self, + str core_function_name, + object error_details, + object exception_type): + """Handles failure by raising exception.""" + self._core_function_name = core_function_name + self._error_details = error_details + self._exception_type = exception_type + + cdef handle(self, object future): + future.set_exception(self._exception_type( + 'Failed "%s": %s' % (self._core_function_name, self._error_details) + )) + + +cdef class CallbackWrapper: + def __cinit__(self, object future, object loop, CallbackFailureHandler failure_handler): - self.context.functor.functor_run = self.functor_run - self.context.waiter = <cpython.PyObject*>future + self.context.functor.functor_run = self.functor_run + self.context.waiter = <cpython.PyObject*>future self.context.loop = <cpython.PyObject*>loop - self.context.failure_handler = <cpython.PyObject*>failure_handler + self.context.failure_handler = <cpython.PyObject*>failure_handler self.context.callback_wrapper = <cpython.PyObject*>self - # NOTE(lidiz) Not using a list here, because this class is critical in - # data path. We should make it as efficient as possible. - self._reference_of_future = future - self._reference_of_failure_handler = failure_handler + # NOTE(lidiz) Not using a list here, because this class is critical in + # data path. We should make it as efficient as possible. + self._reference_of_future = future + self._reference_of_failure_handler = failure_handler # NOTE(lidiz) We need to ensure when Core invokes our callback, the # callback function itself is not deallocated. Othersise, we will get # a segfault. We can view this as Core holding a ref. 
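# Annotation, not part of this diff: the Py_INCREF below is balanced by the
# Py_DECREF(<object>context.callback_wrapper) in functor_run(); Core is
# treated as holding a reference to this wrapper from batch start until its
# completion callback fires, which keeps the functor alive across that gap.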
cpython.Py_INCREF(self) - - @staticmethod - cdef void functor_run( - grpc_experimental_completion_queue_functor* functor, - int success): - cdef CallbackContext *context = <CallbackContext *>functor + + @staticmethod + cdef void functor_run( + grpc_experimental_completion_queue_functor* functor, + int success): + cdef CallbackContext *context = <CallbackContext *>functor cdef object waiter = <object>context.waiter if not waiter.cancelled(): if success == 0: @@ -59,47 +59,47 @@ cdef class CallbackWrapper: else: waiter.set_result(None) cpython.Py_DECREF(<object>context.callback_wrapper) - - cdef grpc_experimental_completion_queue_functor *c_functor(self): - return &self.context.functor - - -cdef CallbackFailureHandler CQ_SHUTDOWN_FAILURE_HANDLER = CallbackFailureHandler( - 'grpc_completion_queue_shutdown', - 'Unknown', + + cdef grpc_experimental_completion_queue_functor *c_functor(self): + return &self.context.functor + + +cdef CallbackFailureHandler CQ_SHUTDOWN_FAILURE_HANDLER = CallbackFailureHandler( + 'grpc_completion_queue_shutdown', + 'Unknown', InternalError) - - + + class ExecuteBatchError(InternalError): """Raised when execute batch returns a failure from Core.""" - - + + async def execute_batch(GrpcCallWrapper grpc_call_wrapper, - tuple operations, - object loop): - """The callback version of start batch operations.""" - cdef _BatchOperationTag batch_operation_tag = _BatchOperationTag(None, operations, None) - batch_operation_tag.prepare() - - cdef object future = loop.create_future() - cdef CallbackWrapper wrapper = CallbackWrapper( - future, + tuple operations, + object loop): + """The callback version of start batch operations.""" + cdef _BatchOperationTag batch_operation_tag = _BatchOperationTag(None, operations, None) + batch_operation_tag.prepare() + + cdef object future = loop.create_future() + cdef CallbackWrapper wrapper = CallbackWrapper( + future, loop, CallbackFailureHandler('execute_batch', operations, ExecuteBatchError)) - cdef grpc_call_error error = grpc_call_start_batch( - grpc_call_wrapper.call, - batch_operation_tag.c_ops, - batch_operation_tag.c_nops, - wrapper.c_functor(), NULL) - - if error != GRPC_CALL_OK: + cdef grpc_call_error error = grpc_call_start_batch( + grpc_call_wrapper.call, + batch_operation_tag.c_ops, + batch_operation_tag.c_nops, + wrapper.c_functor(), NULL) + + if error != GRPC_CALL_OK: raise ExecuteBatchError("Failed grpc_call_start_batch: {}".format(error)) - - await future - cdef grpc_event c_event - # Tag.event must be called, otherwise messages won't be parsed from C - batch_operation_tag.event(c_event) + await future + + cdef grpc_event c_event + # Tag.event must be called, otherwise messages won't be parsed from C + batch_operation_tag.event(c_event) cdef prepend_send_initial_metadata_op(tuple ops, tuple metadata): diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/channel.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/channel.pxd.pxi index d25de9a741..03b4990e48 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/channel.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/channel.pxd.pxi @@ -1,27 +1,27 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + cdef enum AioChannelStatus: AIO_CHANNEL_STATUS_UNKNOWN AIO_CHANNEL_STATUS_READY AIO_CHANNEL_STATUS_CLOSING AIO_CHANNEL_STATUS_DESTROYED -cdef class AioChannel: - cdef: - grpc_channel * channel +cdef class AioChannel: + cdef: + grpc_channel * channel object loop - bytes _target + bytes _target AioChannelStatus _status bint _is_secure diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/channel.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/channel.pyx.pxi index 45cdc5171c..beadce67b4 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/channel.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/channel.pyx.pxi @@ -1,18 +1,18 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2019 gRPC authors. # - +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + class _WatchConnectivityFailed(Exception): """Dedicated exception class for watch connectivity failed. 
@@ -25,7 +25,7 @@ cdef CallbackFailureHandler _WATCH_CONNECTIVITY_FAILURE_HANDLER = CallbackFailur _WatchConnectivityFailed) -cdef class AioChannel: +cdef class AioChannel: def __cinit__(self, bytes target, tuple options, ChannelCredentials credentials, object loop): init_grpc_aio() if options is None: @@ -34,7 +34,7 @@ cdef class AioChannel: self._target = target self.loop = loop self._status = AIO_CHANNEL_STATUS_READY - + if credentials is None: self._is_secure = False self.channel = grpc_insecure_channel_create( @@ -52,11 +52,11 @@ cdef class AioChannel: def __dealloc__(self): shutdown_grpc_aio() - def __repr__(self): - class_name = self.__class__.__name__ - id_ = id(self) - return f"<{class_name} {id_}>" - + def __repr__(self): + class_name = self.__class__.__name__ + id_ = id(self) + return f"<{class_name} {id_}>" + def check_connectivity_state(self, bint try_to_connect): """A Cython wrapper for Core's check connectivity state API.""" if self._status == AIO_CHANNEL_STATUS_DESTROYED: @@ -102,10 +102,10 @@ cdef class AioChannel: def closing(self): self._status = AIO_CHANNEL_STATUS_CLOSING - def close(self): + def close(self): self._status = AIO_CHANNEL_STATUS_DESTROYED - grpc_channel_destroy(self.channel) - + grpc_channel_destroy(self.channel) + def closed(self): return self._status in (AIO_CHANNEL_STATUS_CLOSING, AIO_CHANNEL_STATUS_DESTROYED) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/grpc_aio.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/grpc_aio.pxd.pxi index d2941b6044..ebf0660174 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/grpc_aio.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/grpc_aio.pxd.pxi @@ -1,18 +1,18 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# distutils: language=c++ - +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# distutils: language=c++ + cdef class _AioState: cdef object lock # threading.RLock cdef int refcount @@ -29,15 +29,15 @@ cpdef init_grpc_aio() cpdef shutdown_grpc_aio() -cdef extern from "src/core/lib/iomgr/timer_manager.h": +cdef extern from "src/core/lib/iomgr/timer_manager.h": void grpc_timer_manager_set_threading(bint enabled) - -cdef extern from "src/core/lib/iomgr/iomgr_internal.h": + +cdef extern from "src/core/lib/iomgr/iomgr_internal.h": void grpc_set_default_iomgr_platform() - -cdef extern from "src/core/lib/iomgr/executor.h" namespace "grpc_core": - cdef cppclass Executor: - @staticmethod + +cdef extern from "src/core/lib/iomgr/executor.h" namespace "grpc_core": + cdef cppclass Executor: + @staticmethod void SetThreadingAll(bint enable) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/grpc_aio.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/grpc_aio.pyx.pxi index ea9b7b9a0e..06c92cac58 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/grpc_aio.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/grpc_aio.pyx.pxi @@ -1,28 +1,28 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import enum - + cdef str _GRPC_ASYNCIO_ENGINE = os.environ.get('GRPC_ASYNCIO_ENGINE', 'poller').upper() cdef _AioState _global_aio_state = _AioState() - - + + class AsyncIOEngine(enum.Enum): CUSTOM_IO_MANAGER = 'custom_io_manager' POLLER = 'poller' - - + + cdef _default_asyncio_engine(): return AsyncIOEngine.POLLER @@ -45,20 +45,20 @@ cdef _initialize_custom_io_manager(): # NOTE(lidiz) Custom IO manager must be activated before the first # `grpc_init()`. Otherwise, some special configurations in Core won't # pick up the change, and resulted in SEGFAULT or ABORT. - install_asyncio_iomgr() + install_asyncio_iomgr() # Initializes gRPC Core, must be called before other Core API - grpc_init() - - # Timers are triggered by the Asyncio loop. We disable - # the background thread that is being used by the native - # gRPC iomgr. + grpc_init() + + # Timers are triggered by the Asyncio loop. We disable + # the background thread that is being used by the native + # gRPC iomgr. grpc_timer_manager_set_threading(False) - - # gRPC callbaks are executed within the same thread used by the Asyncio - # event loop, as it is being done by the other Asyncio callbacks. 
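# Annotation, not part of this diff: together with
# grpc_timer_manager_set_threading(False) above, SetThreadingAll(False) keeps
# Core from spawning its own timer and executor threads, so, per the comments
# above, timers and callbacks are driven from the single thread that runs the
# asyncio event loop.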
+ + # gRPC callbaks are executed within the same thread used by the Asyncio + # event loop, as it is being done by the other Asyncio callbacks. Executor.SetThreadingAll(False) - + # Creates the only completion queue _global_aio_state.cq = CallbackCompletionQueue() diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/iomgr.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/iomgr.pyx.pxi index c3b1e2fe75..917ae24e11 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/iomgr.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/iomgr.pyx.pxi @@ -1,217 +1,217 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
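Note on the hunk above: _initialize_custom_io_manager is order-sensitive. The asyncio I/O manager has to be installed before the first grpc_init(), and only then are the native timer-manager thread and the Core executor threads switched off, so timers and callbacks run on the asyncio event-loop thread instead of background threads. A runnable Python sketch of that ordering, using no-op stand-ins for the Cython-level calls (the stand-in names are illustrative only, not a public API):

    calls = []

    # Stand-ins for the Cython-level calls in the hunk above; each one only
    # records that it ran, so the required ordering is visible when executed.
    def install_asyncio_iomgr(): calls.append('install_asyncio_iomgr')
    def grpc_init(): calls.append('grpc_init')
    def grpc_timer_manager_set_threading(on): calls.append(('timer_threading', on))
    def executor_set_threading_all(on): calls.append(('executor_threading', on))

    def initialize_custom_io_manager():
        install_asyncio_iomgr()                   # 1. swap in the asyncio iomgr before Core starts
        grpc_init()                               # 2. initialize gRPC Core
        grpc_timer_manager_set_threading(False)   # 3. timers are driven by the event loop
        executor_set_threading_all(False)         # 4. callbacks run inline on the loop thread

    initialize_custom_io_manager()
    print(calls)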
+ + import platform -from cpython cimport Py_INCREF, Py_DECREF -from libc cimport string - -import socket as native_socket -try: - import ipaddress # CPython 3.3 and above -except ImportError: - pass - -cdef grpc_socket_vtable asyncio_socket_vtable -cdef grpc_custom_resolver_vtable asyncio_resolver_vtable -cdef grpc_custom_timer_vtable asyncio_timer_vtable -cdef grpc_custom_poller_vtable asyncio_pollset_vtable +from cpython cimport Py_INCREF, Py_DECREF +from libc cimport string + +import socket as native_socket +try: + import ipaddress # CPython 3.3 and above +except ImportError: + pass + +cdef grpc_socket_vtable asyncio_socket_vtable +cdef grpc_custom_resolver_vtable asyncio_resolver_vtable +cdef grpc_custom_timer_vtable asyncio_timer_vtable +cdef grpc_custom_poller_vtable asyncio_pollset_vtable cdef bint so_reuse_port - - -cdef grpc_error* asyncio_socket_init( - grpc_custom_socket* grpc_socket, - int domain) with gil: - socket = _AsyncioSocket.create(grpc_socket, None, None) - Py_INCREF(socket) - grpc_socket.impl = <void*>socket - return <grpc_error*>0 - - -cdef void asyncio_socket_destroy(grpc_custom_socket* grpc_socket) with gil: - Py_DECREF(<_AsyncioSocket>grpc_socket.impl) - - -cdef void asyncio_socket_connect( - grpc_custom_socket* grpc_socket, - const grpc_sockaddr* addr, - size_t addr_len, - grpc_custom_connect_callback connect_cb) with gil: - host, port = sockaddr_to_tuple(addr, addr_len) - socket = <_AsyncioSocket>grpc_socket.impl - socket.connect(host, port, connect_cb) - - -cdef void asyncio_socket_close( - grpc_custom_socket* grpc_socket, - grpc_custom_close_callback close_cb) with gil: - socket = (<_AsyncioSocket>grpc_socket.impl) - socket.close() - close_cb(grpc_socket) - - -cdef void asyncio_socket_shutdown(grpc_custom_socket* grpc_socket) with gil: - socket = (<_AsyncioSocket>grpc_socket.impl) - socket.close() - - -cdef void asyncio_socket_write( - grpc_custom_socket* grpc_socket, - grpc_slice_buffer* slice_buffer, - grpc_custom_write_callback write_cb) with gil: - socket = (<_AsyncioSocket>grpc_socket.impl) - socket.write(slice_buffer, write_cb) - - -cdef void asyncio_socket_read( - grpc_custom_socket* grpc_socket, - char* buffer_, - size_t length, - grpc_custom_read_callback read_cb) with gil: - socket = (<_AsyncioSocket>grpc_socket.impl) - socket.read(buffer_, length, read_cb) - - -cdef grpc_error* asyncio_socket_getpeername( - grpc_custom_socket* grpc_socket, - const grpc_sockaddr* addr, - int* length) with gil: - peer = (<_AsyncioSocket>grpc_socket.impl).peername() - - cdef grpc_resolved_address c_addr - hostname = str_to_bytes(peer[0]) - grpc_string_to_sockaddr(&c_addr, hostname, peer[1]) - # TODO(https://github.com/grpc/grpc/issues/20684) Remove the memcpy - string.memcpy(<void*>addr, <void*>c_addr.addr, c_addr.len) - length[0] = c_addr.len - return grpc_error_none() - - -cdef grpc_error* asyncio_socket_getsockname( - grpc_custom_socket* grpc_socket, - const grpc_sockaddr* addr, - int* length) with gil: - """Supplies sock_addr in add_socket_to_server.""" - cdef grpc_resolved_address c_addr - socket = (<_AsyncioSocket>grpc_socket.impl) - if socket is None: - peer = ('0.0.0.0', 0) - else: - peer = socket.sockname() - hostname = str_to_bytes(peer[0]) - grpc_string_to_sockaddr(&c_addr, hostname, peer[1]) - # TODO(https://github.com/grpc/grpc/issues/20684) Remove the memcpy - string.memcpy(<void*>addr, <void*>c_addr.addr, c_addr.len) - length[0] = c_addr.len - return grpc_error_none() - - -cdef grpc_error* asyncio_socket_listen(grpc_custom_socket* grpc_socket) with gil: 
- (<_AsyncioSocket>grpc_socket.impl).listen() - return grpc_error_none() - - + + +cdef grpc_error* asyncio_socket_init( + grpc_custom_socket* grpc_socket, + int domain) with gil: + socket = _AsyncioSocket.create(grpc_socket, None, None) + Py_INCREF(socket) + grpc_socket.impl = <void*>socket + return <grpc_error*>0 + + +cdef void asyncio_socket_destroy(grpc_custom_socket* grpc_socket) with gil: + Py_DECREF(<_AsyncioSocket>grpc_socket.impl) + + +cdef void asyncio_socket_connect( + grpc_custom_socket* grpc_socket, + const grpc_sockaddr* addr, + size_t addr_len, + grpc_custom_connect_callback connect_cb) with gil: + host, port = sockaddr_to_tuple(addr, addr_len) + socket = <_AsyncioSocket>grpc_socket.impl + socket.connect(host, port, connect_cb) + + +cdef void asyncio_socket_close( + grpc_custom_socket* grpc_socket, + grpc_custom_close_callback close_cb) with gil: + socket = (<_AsyncioSocket>grpc_socket.impl) + socket.close() + close_cb(grpc_socket) + + +cdef void asyncio_socket_shutdown(grpc_custom_socket* grpc_socket) with gil: + socket = (<_AsyncioSocket>grpc_socket.impl) + socket.close() + + +cdef void asyncio_socket_write( + grpc_custom_socket* grpc_socket, + grpc_slice_buffer* slice_buffer, + grpc_custom_write_callback write_cb) with gil: + socket = (<_AsyncioSocket>grpc_socket.impl) + socket.write(slice_buffer, write_cb) + + +cdef void asyncio_socket_read( + grpc_custom_socket* grpc_socket, + char* buffer_, + size_t length, + grpc_custom_read_callback read_cb) with gil: + socket = (<_AsyncioSocket>grpc_socket.impl) + socket.read(buffer_, length, read_cb) + + +cdef grpc_error* asyncio_socket_getpeername( + grpc_custom_socket* grpc_socket, + const grpc_sockaddr* addr, + int* length) with gil: + peer = (<_AsyncioSocket>grpc_socket.impl).peername() + + cdef grpc_resolved_address c_addr + hostname = str_to_bytes(peer[0]) + grpc_string_to_sockaddr(&c_addr, hostname, peer[1]) + # TODO(https://github.com/grpc/grpc/issues/20684) Remove the memcpy + string.memcpy(<void*>addr, <void*>c_addr.addr, c_addr.len) + length[0] = c_addr.len + return grpc_error_none() + + +cdef grpc_error* asyncio_socket_getsockname( + grpc_custom_socket* grpc_socket, + const grpc_sockaddr* addr, + int* length) with gil: + """Supplies sock_addr in add_socket_to_server.""" + cdef grpc_resolved_address c_addr + socket = (<_AsyncioSocket>grpc_socket.impl) + if socket is None: + peer = ('0.0.0.0', 0) + else: + peer = socket.sockname() + hostname = str_to_bytes(peer[0]) + grpc_string_to_sockaddr(&c_addr, hostname, peer[1]) + # TODO(https://github.com/grpc/grpc/issues/20684) Remove the memcpy + string.memcpy(<void*>addr, <void*>c_addr.addr, c_addr.len) + length[0] = c_addr.len + return grpc_error_none() + + +cdef grpc_error* asyncio_socket_listen(grpc_custom_socket* grpc_socket) with gil: + (<_AsyncioSocket>grpc_socket.impl).listen() + return grpc_error_none() + + def _asyncio_apply_socket_options(object s, int flags): # Turn SO_REUSEADDR on for TCP sockets; if we want to support UDS, we will # need to update this function. - s.setsockopt(native_socket.SOL_SOCKET, native_socket.SO_REUSEADDR, 1) + s.setsockopt(native_socket.SOL_SOCKET, native_socket.SO_REUSEADDR, 1) # SO_REUSEPORT only available in POSIX systems. 
if platform.system() != 'Windows': if GRPC_CUSTOM_SOCKET_OPT_SO_REUSEPORT & flags: s.setsockopt(native_socket.SOL_SOCKET, native_socket.SO_REUSEPORT, 1) - s.setsockopt(native_socket.IPPROTO_TCP, native_socket.TCP_NODELAY, True) - - -cdef grpc_error* asyncio_socket_bind( - grpc_custom_socket* grpc_socket, - const grpc_sockaddr* addr, - size_t len, int flags) with gil: - host, port = sockaddr_to_tuple(addr, len) - try: - ip = ipaddress.ip_address(host) - if isinstance(ip, ipaddress.IPv6Address): - family = native_socket.AF_INET6 - else: - family = native_socket.AF_INET - - socket = native_socket.socket(family=family) + s.setsockopt(native_socket.IPPROTO_TCP, native_socket.TCP_NODELAY, True) + + +cdef grpc_error* asyncio_socket_bind( + grpc_custom_socket* grpc_socket, + const grpc_sockaddr* addr, + size_t len, int flags) with gil: + host, port = sockaddr_to_tuple(addr, len) + try: + ip = ipaddress.ip_address(host) + if isinstance(ip, ipaddress.IPv6Address): + family = native_socket.AF_INET6 + else: + family = native_socket.AF_INET + + socket = native_socket.socket(family=family) _asyncio_apply_socket_options(socket, flags) - socket.bind((host, port)) - except IOError as io_error: + socket.bind((host, port)) + except IOError as io_error: socket.close() - return socket_error("bind", str(io_error)) - else: - aio_socket = _AsyncioSocket.create_with_py_socket(grpc_socket, socket) - cpython.Py_INCREF(aio_socket) # Py_DECREF in asyncio_socket_destroy - grpc_socket.impl = <void*>aio_socket - return grpc_error_none() - - -cdef void asyncio_socket_accept( - grpc_custom_socket* grpc_socket, - grpc_custom_socket* grpc_socket_client, - grpc_custom_accept_callback accept_cb) with gil: - (<_AsyncioSocket>grpc_socket.impl).accept(grpc_socket_client, accept_cb) - - -cdef grpc_error* asyncio_resolve( + return socket_error("bind", str(io_error)) + else: + aio_socket = _AsyncioSocket.create_with_py_socket(grpc_socket, socket) + cpython.Py_INCREF(aio_socket) # Py_DECREF in asyncio_socket_destroy + grpc_socket.impl = <void*>aio_socket + return grpc_error_none() + + +cdef void asyncio_socket_accept( + grpc_custom_socket* grpc_socket, + grpc_custom_socket* grpc_socket_client, + grpc_custom_accept_callback accept_cb) with gil: + (<_AsyncioSocket>grpc_socket.impl).accept(grpc_socket_client, accept_cb) + + +cdef grpc_error* asyncio_resolve( const char* host, const char* port, - grpc_resolved_addresses** res) with gil: - result = native_socket.getaddrinfo(host, port) - res[0] = tuples_to_resolvaddr(result) - - -cdef void asyncio_resolve_async( - grpc_custom_resolver* grpc_resolver, + grpc_resolved_addresses** res) with gil: + result = native_socket.getaddrinfo(host, port) + res[0] = tuples_to_resolvaddr(result) + + +cdef void asyncio_resolve_async( + grpc_custom_resolver* grpc_resolver, const char* host, const char* port) with gil: - resolver = _AsyncioResolver.create(grpc_resolver) - resolver.resolve(host, port) - - -cdef void asyncio_timer_start(grpc_custom_timer* grpc_timer) with gil: - timer = _AsyncioTimer.create(grpc_timer, grpc_timer.timeout_ms / 1000.0) - grpc_timer.timer = <void*>timer - - -cdef void asyncio_timer_stop(grpc_custom_timer* grpc_timer) with gil: + resolver = _AsyncioResolver.create(grpc_resolver) + resolver.resolve(host, port) + + +cdef void asyncio_timer_start(grpc_custom_timer* grpc_timer) with gil: + timer = _AsyncioTimer.create(grpc_timer, grpc_timer.timeout_ms / 1000.0) + grpc_timer.timer = <void*>timer + + +cdef void asyncio_timer_stop(grpc_custom_timer* grpc_timer) with gil: # 
TODO(https://github.com/grpc/grpc/issues/22278) remove this if condition if grpc_timer.timer == NULL: return else: timer = <_AsyncioTimer>grpc_timer.timer timer.stop() - - -cdef void asyncio_init_loop() with gil: - pass - - -cdef void asyncio_destroy_loop() with gil: - pass - - -cdef void asyncio_kick_loop() with gil: - pass - - -cdef void asyncio_run_loop(size_t timeout_ms) with gil: - pass - - + + +cdef void asyncio_init_loop() with gil: + pass + + +cdef void asyncio_destroy_loop() with gil: + pass + + +cdef void asyncio_kick_loop() with gil: + pass + + +cdef void asyncio_run_loop(size_t timeout_ms) with gil: + pass + + def _auth_plugin_callback_wrapper(object cb, str service_url, str method_name, @@ -219,38 +219,38 @@ def _auth_plugin_callback_wrapper(object cb, get_working_loop().call_soon(cb, service_url, method_name, callback) -def install_asyncio_iomgr(): +def install_asyncio_iomgr(): # Auth plugins invoke user provided logic in another thread by default. We # need to override that behavior by registering the call to the event loop. set_async_callback_func(_auth_plugin_callback_wrapper) - asyncio_resolver_vtable.resolve = asyncio_resolve - asyncio_resolver_vtable.resolve_async = asyncio_resolve_async - - asyncio_socket_vtable.init = asyncio_socket_init - asyncio_socket_vtable.connect = asyncio_socket_connect - asyncio_socket_vtable.destroy = asyncio_socket_destroy - asyncio_socket_vtable.shutdown = asyncio_socket_shutdown - asyncio_socket_vtable.close = asyncio_socket_close - asyncio_socket_vtable.write = asyncio_socket_write - asyncio_socket_vtable.read = asyncio_socket_read - asyncio_socket_vtable.getpeername = asyncio_socket_getpeername - asyncio_socket_vtable.getsockname = asyncio_socket_getsockname - asyncio_socket_vtable.bind = asyncio_socket_bind - asyncio_socket_vtable.listen = asyncio_socket_listen - asyncio_socket_vtable.accept = asyncio_socket_accept - - asyncio_timer_vtable.start = asyncio_timer_start - asyncio_timer_vtable.stop = asyncio_timer_stop - - asyncio_pollset_vtable.init = asyncio_init_loop - asyncio_pollset_vtable.poll = asyncio_run_loop - asyncio_pollset_vtable.kick = asyncio_kick_loop - asyncio_pollset_vtable.shutdown = asyncio_destroy_loop - - grpc_custom_iomgr_init( - &asyncio_socket_vtable, - &asyncio_resolver_vtable, - &asyncio_timer_vtable, - &asyncio_pollset_vtable - ) + asyncio_resolver_vtable.resolve = asyncio_resolve + asyncio_resolver_vtable.resolve_async = asyncio_resolve_async + + asyncio_socket_vtable.init = asyncio_socket_init + asyncio_socket_vtable.connect = asyncio_socket_connect + asyncio_socket_vtable.destroy = asyncio_socket_destroy + asyncio_socket_vtable.shutdown = asyncio_socket_shutdown + asyncio_socket_vtable.close = asyncio_socket_close + asyncio_socket_vtable.write = asyncio_socket_write + asyncio_socket_vtable.read = asyncio_socket_read + asyncio_socket_vtable.getpeername = asyncio_socket_getpeername + asyncio_socket_vtable.getsockname = asyncio_socket_getsockname + asyncio_socket_vtable.bind = asyncio_socket_bind + asyncio_socket_vtable.listen = asyncio_socket_listen + asyncio_socket_vtable.accept = asyncio_socket_accept + + asyncio_timer_vtable.start = asyncio_timer_start + asyncio_timer_vtable.stop = asyncio_timer_stop + + asyncio_pollset_vtable.init = asyncio_init_loop + asyncio_pollset_vtable.poll = asyncio_run_loop + asyncio_pollset_vtable.kick = asyncio_kick_loop + asyncio_pollset_vtable.shutdown = asyncio_destroy_loop + + grpc_custom_iomgr_init( + &asyncio_socket_vtable, + &asyncio_resolver_vtable, + &asyncio_timer_vtable, 
+ &asyncio_pollset_vtable + ) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/resolver.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/resolver.pxd.pxi index 61449745d0..51730c1597 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/resolver.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/resolver.pxd.pxi @@ -1,24 +1,24 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -cdef class _AsyncioResolver: - cdef: +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +cdef class _AsyncioResolver: + cdef: object _loop - grpc_custom_resolver* _grpc_resolver - object _task_resolve - - @staticmethod - cdef _AsyncioResolver create(grpc_custom_resolver* grpc_resolver) - + grpc_custom_resolver* _grpc_resolver + object _task_resolve + + @staticmethod + cdef _AsyncioResolver create(grpc_custom_resolver* grpc_resolver) + cdef void resolve(self, const char* host, const char* port) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/resolver.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/resolver.pyx.pxi index 70f6a86026..1a2e244ff9 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/resolver.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/resolver.pyx.pxi @@ -1,56 +1,56 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -cdef class _AsyncioResolver: - def __cinit__(self): +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +cdef class _AsyncioResolver: + def __cinit__(self): self._loop = get_working_loop() - self._grpc_resolver = NULL - self._task_resolve = None - - @staticmethod - cdef _AsyncioResolver create(grpc_custom_resolver* grpc_resolver): - resolver = _AsyncioResolver() - resolver._grpc_resolver = grpc_resolver - return resolver - - def __repr__(self): - class_name = self.__class__.__name__ - id_ = id(self) - return f"<{class_name} {id_}>" - + self._grpc_resolver = NULL + self._task_resolve = None + + @staticmethod + cdef _AsyncioResolver create(grpc_custom_resolver* grpc_resolver): + resolver = _AsyncioResolver() + resolver._grpc_resolver = grpc_resolver + return resolver + + def __repr__(self): + class_name = self.__class__.__name__ + id_ = id(self) + return f"<{class_name} {id_}>" + async def _async_resolve(self, bytes host, bytes port): self._task_resolve = None - try: + try: resolved = await self._loop.getaddrinfo(host, port) - except Exception as e: - grpc_custom_resolve_callback( - <grpc_custom_resolver*>self._grpc_resolver, + except Exception as e: + grpc_custom_resolve_callback( + <grpc_custom_resolver*>self._grpc_resolver, NULL, grpc_socket_error("Resolve address [{}:{}] failed: {}: {}".format( host, port, type(e), str(e)).encode()) - ) - else: - grpc_custom_resolve_callback( - <grpc_custom_resolver*>self._grpc_resolver, + ) + else: + grpc_custom_resolve_callback( + <grpc_custom_resolver*>self._grpc_resolver, tuples_to_resolvaddr(resolved), <grpc_error*>0 - ) - + ) + cdef void resolve(self, const char* host, const char* port): - assert not self._task_resolve - + assert not self._task_resolve + self._task_resolve = self._loop.create_task( self._async_resolve(host, port) - ) + ) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/socket.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/socket.pxd.pxi index c08f7ace31..cfab5549b2 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/socket.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/socket.pxd.pxi @@ -1,63 +1,63 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -cdef class _AsyncioSocket: - cdef: - # Common attributes - grpc_custom_socket * _grpc_socket - grpc_custom_read_callback _grpc_read_cb +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +cdef class _AsyncioSocket: + cdef: + # Common attributes + grpc_custom_socket * _grpc_socket + grpc_custom_read_callback _grpc_read_cb grpc_custom_write_callback _grpc_write_cb - object _reader - object _writer - object _task_read + object _reader + object _writer + object _task_read object _task_write - object _task_connect + object _task_connect object _task_listen - char * _read_buffer + char * _read_buffer # Caches the picked event loop, so we can avoid the 30ns overhead each # time we need access to the event loop. object _loop # TODO(lidiz) Drop after 3.6 deprecation. Python 3.7 introduces methods # like `is_closing()` to help graceful shutdown. bint _closed - - # Client-side attributes - grpc_custom_connect_callback _grpc_connect_cb - - # Server-side attributes - grpc_custom_accept_callback _grpc_accept_cb - grpc_custom_socket * _grpc_client_socket - object _server - object _py_socket - object _peername - - @staticmethod - cdef _AsyncioSocket create( - grpc_custom_socket * grpc_socket, - object reader, - object writer) - @staticmethod - cdef _AsyncioSocket create_with_py_socket(grpc_custom_socket * grpc_socket, object py_socket) - - cdef void connect(self, object host, object port, grpc_custom_connect_callback grpc_connect_cb) - cdef void write(self, grpc_slice_buffer * g_slice_buffer, grpc_custom_write_callback grpc_write_cb) - cdef void read(self, char * buffer_, size_t length, grpc_custom_read_callback grpc_read_cb) - cdef bint is_connected(self) - cdef void close(self) - - cdef accept(self, grpc_custom_socket* grpc_socket_client, grpc_custom_accept_callback grpc_accept_cb) - cdef listen(self) - cdef tuple peername(self) - cdef tuple sockname(self) + + # Client-side attributes + grpc_custom_connect_callback _grpc_connect_cb + + # Server-side attributes + grpc_custom_accept_callback _grpc_accept_cb + grpc_custom_socket * _grpc_client_socket + object _server + object _py_socket + object _peername + + @staticmethod + cdef _AsyncioSocket create( + grpc_custom_socket * grpc_socket, + object reader, + object writer) + @staticmethod + cdef _AsyncioSocket create_with_py_socket(grpc_custom_socket * grpc_socket, object py_socket) + + cdef void connect(self, object host, object port, grpc_custom_connect_callback grpc_connect_cb) + cdef void write(self, grpc_slice_buffer * g_slice_buffer, grpc_custom_write_callback grpc_write_cb) + cdef void read(self, char * buffer_, size_t length, grpc_custom_read_callback grpc_read_cb) + cdef bint is_connected(self) + cdef void close(self) + + cdef accept(self, grpc_custom_socket* grpc_socket_client, grpc_custom_accept_callback grpc_accept_cb) + cdef listen(self) + cdef tuple peername(self) + cdef tuple sockname(self) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/socket.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/socket.pyx.pxi index 68d3d55d92..eecef17d98 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/socket.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/socket.pyx.pxi @@ -1,74 +1,74 @@ -# Copyright 2019 gRPC 
authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import socket as native_socket - -from libc cimport string - +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import socket as native_socket + +from libc cimport string + cdef int _ASYNCIO_STREAM_DEFAULT_SOCKET_BACKLOG = 100 # TODO(https://github.com/grpc/grpc/issues/21348) Better flow control needed. -cdef class _AsyncioSocket: - def __cinit__(self): - self._grpc_socket = NULL - self._grpc_connect_cb = NULL - self._grpc_read_cb = NULL +cdef class _AsyncioSocket: + def __cinit__(self): + self._grpc_socket = NULL + self._grpc_connect_cb = NULL + self._grpc_read_cb = NULL self._grpc_write_cb = NULL - self._reader = None - self._writer = None - self._task_connect = None - self._task_read = None + self._reader = None + self._writer = None + self._task_connect = None + self._task_read = None self._task_write = None self._task_listen = None - self._read_buffer = NULL - self._server = None - self._py_socket = None - self._peername = None + self._read_buffer = NULL + self._server = None + self._py_socket = None + self._peername = None self._closed = False self._loop = get_working_loop() - - @staticmethod - cdef _AsyncioSocket create(grpc_custom_socket * grpc_socket, - object reader, - object writer): - socket = _AsyncioSocket() - socket._grpc_socket = grpc_socket - socket._reader = reader - socket._writer = writer - if writer is not None: - socket._peername = writer.get_extra_info('peername') - return socket - - @staticmethod - cdef _AsyncioSocket create_with_py_socket(grpc_custom_socket * grpc_socket, object py_socket): - socket = _AsyncioSocket() - socket._grpc_socket = grpc_socket - socket._py_socket = py_socket - return socket - - def __repr__(self): - class_name = self.__class__.__name__ - id_ = id(self) - connected = self.is_connected() - return f"<{class_name} {id_} connected={connected}>" - + + @staticmethod + cdef _AsyncioSocket create(grpc_custom_socket * grpc_socket, + object reader, + object writer): + socket = _AsyncioSocket() + socket._grpc_socket = grpc_socket + socket._reader = reader + socket._writer = writer + if writer is not None: + socket._peername = writer.get_extra_info('peername') + return socket + + @staticmethod + cdef _AsyncioSocket create_with_py_socket(grpc_custom_socket * grpc_socket, object py_socket): + socket = _AsyncioSocket() + socket._grpc_socket = grpc_socket + socket._py_socket = py_socket + return socket + + def __repr__(self): + class_name = self.__class__.__name__ + id_ = 
id(self) + connected = self.is_connected() + return f"<{class_name} {id_} connected={connected}>" + async def _async_connect(self, object host, object port,): self._task_connect = None - try: + try: self._reader, self._writer = await asyncio.open_connection(host, port) - except Exception as e: + except Exception as e: self._grpc_connect_cb( <grpc_custom_socket*>self._grpc_socket, grpc_socket_error("Socket connect failed: {}: {}".format(type(e), str(e)).encode()) @@ -78,12 +78,12 @@ cdef class _AsyncioSocket: # algorithm. sock = self._writer.transport.get_extra_info('socket') sock.setsockopt(native_socket.IPPROTO_TCP, native_socket.TCP_NODELAY, True) - + self._grpc_connect_cb( <grpc_custom_socket*>self._grpc_socket, <grpc_error*>0 ) - + cdef void connect(self, object host, object port, @@ -95,10 +95,10 @@ cdef class _AsyncioSocket: self._async_connect(host, port) ) self._grpc_connect_cb = grpc_connect_cb - + async def _async_read(self, size_t length): self._task_read = None - try: + try: inbound_buffer = await self._reader.read(n=length) except ConnectionError as e: self._grpc_read_cb( @@ -107,22 +107,22 @@ cdef class _AsyncioSocket: grpc_socket_error("Read failed: {}".format(e).encode()) ) else: - string.memcpy( - <void*>self._read_buffer, + string.memcpy( + <void*>self._read_buffer, <char*>inbound_buffer, len(inbound_buffer) - ) - self._grpc_read_cb( - <grpc_custom_socket*>self._grpc_socket, + ) + self._grpc_read_cb( + <grpc_custom_socket*>self._grpc_socket, len(inbound_buffer), - <grpc_error*>0 - ) - - cdef void read(self, char * buffer_, size_t length, grpc_custom_read_callback grpc_read_cb): - assert not self._task_read - - self._grpc_read_cb = grpc_read_cb - self._read_buffer = buffer_ + <grpc_error*>0 + ) + + cdef void read(self, char * buffer_, size_t length, grpc_custom_read_callback grpc_read_cb): + assert not self._task_read + + self._grpc_read_cb = grpc_read_cb + self._read_buffer = buffer_ self._task_read = self._loop.create_task(self._async_read(length)) async def _async_write(self, bytearray outbound_buffer): @@ -140,7 +140,7 @@ cdef class _AsyncioSocket: grpc_socket_error("Socket write failed: {}".format(connection_error).encode()), ) - cdef void write(self, grpc_slice_buffer * g_slice_buffer, grpc_custom_write_callback grpc_write_cb): + cdef void write(self, grpc_slice_buffer * g_slice_buffer, grpc_custom_write_callback grpc_write_cb): """Performs write to network socket in AsyncIO. For each socket, Core guarantees there'll be only one ongoing write. @@ -148,37 +148,37 @@ cdef class _AsyncioSocket: Core that the work is done. 
""" assert not self._task_write - cdef char* start + cdef char* start cdef bytearray outbound_buffer = bytearray() - for i in range(g_slice_buffer.count): - start = grpc_slice_buffer_start(g_slice_buffer, i) - length = grpc_slice_buffer_length(g_slice_buffer, i) + for i in range(g_slice_buffer.count): + start = grpc_slice_buffer_start(g_slice_buffer, i) + length = grpc_slice_buffer_length(g_slice_buffer, i) outbound_buffer.extend(<bytes>start[:length]) - + self._grpc_write_cb = grpc_write_cb self._task_write = self._loop.create_task(self._async_write(outbound_buffer)) - - cdef bint is_connected(self): - return self._reader and not self._reader._transport.is_closing() - - cdef void close(self): + + cdef bint is_connected(self): + return self._reader and not self._reader._transport.is_closing() + + cdef void close(self): if self._closed: return else: self._closed = True - if self.is_connected(): - self._writer.close() + if self.is_connected(): + self._writer.close() if self._task_listen and not self._task_listen.done(): self._task_listen.close() - if self._server: - self._server.close() - # NOTE(lidiz) If the asyncio.Server is created from a Python socket, - # the server.close() won't release the fd until the close() is called - # for the Python socket. - if self._py_socket: - self._py_socket.close() - - def _new_connection_callback(self, object reader, object writer): + if self._server: + self._server.close() + # NOTE(lidiz) If the asyncio.Server is created from a Python socket, + # the server.close() won't release the fd until the close() is called + # for the Python socket. + if self._py_socket: + self._py_socket.close() + + def _new_connection_callback(self, object reader, object writer): # If the socket is closed, stop. if self._closed: return @@ -188,38 +188,38 @@ cdef class _AsyncioSocket: writer.close() return - client_socket = _AsyncioSocket.create( - self._grpc_client_socket, - reader, - writer, - ) - - self._grpc_client_socket.impl = <void*>client_socket - cpython.Py_INCREF(client_socket) # Py_DECREF in asyncio_socket_destroy - # Accept callback expects to be called with: + client_socket = _AsyncioSocket.create( + self._grpc_client_socket, + reader, + writer, + ) + + self._grpc_client_socket.impl = <void*>client_socket + cpython.Py_INCREF(client_socket) # Py_DECREF in asyncio_socket_destroy + # Accept callback expects to be called with: # * grpc_custom_socket: A grpc custom socket for server # * grpc_custom_socket: A grpc custom socket for client (with new Socket instance) # * grpc_error: An error object - self._grpc_accept_cb(self._grpc_socket, self._grpc_client_socket, grpc_error_none()) - - cdef listen(self): + self._grpc_accept_cb(self._grpc_socket, self._grpc_client_socket, grpc_error_none()) + + cdef listen(self): self._py_socket.listen(_ASYNCIO_STREAM_DEFAULT_SOCKET_BACKLOG) - async def create_asyncio_server(): - self._server = await asyncio.start_server( - self._new_connection_callback, - sock=self._py_socket, - ) - + async def create_asyncio_server(): + self._server = await asyncio.start_server( + self._new_connection_callback, + sock=self._py_socket, + ) + self._task_listen = self._loop.create_task(create_asyncio_server()) - - cdef accept(self, - grpc_custom_socket* grpc_socket_client, - grpc_custom_accept_callback grpc_accept_cb): - self._grpc_client_socket = grpc_socket_client - self._grpc_accept_cb = grpc_accept_cb - - cdef tuple peername(self): - return self._peername - - cdef tuple sockname(self): - return self._py_socket.getsockname() + + cdef accept(self, + 
grpc_custom_socket* grpc_socket_client, + grpc_custom_accept_callback grpc_accept_cb): + self._grpc_client_socket = grpc_socket_client + self._grpc_accept_cb = grpc_accept_cb + + cdef tuple peername(self): + return self._peername + + cdef tuple sockname(self): + return self._py_socket.getsockname() diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/timer.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/timer.pxd.pxi index 1079411402..76c3be0c57 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/timer.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/timer.pxd.pxi @@ -1,25 +1,25 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -cdef class _AsyncioTimer: - cdef: - grpc_custom_timer * _grpc_timer +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +cdef class _AsyncioTimer: + cdef: + grpc_custom_timer * _grpc_timer object _timer_future bint _active object _loop - - @staticmethod + + @staticmethod cdef _AsyncioTimer create(grpc_custom_timer * grpc_timer, float timeout) - - cdef stop(self) + + cdef stop(self) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/timer.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/timer.pyx.pxi index bace27429f..c1508373a4 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/timer.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/iomgr/timer.pyx.pxi @@ -1,48 +1,48 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -cdef class _AsyncioTimer: - def __cinit__(self): - self._grpc_timer = NULL +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
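The socket wrapper in the hunks above maps Core's custom-socket operations onto asyncio streams: connect() becomes asyncio.open_connection followed by a TCP_NODELAY setsockopt on the transport's underlying socket, and listen() hands an already-bound Python socket to asyncio.start_server. A self-contained demo of those two stream APIs (it binds its own loopback port instead of wrapping a pre-bound socket; the names here are demo-only, not the gRPC surface):

    import asyncio
    import socket

    async def main():
        async def on_client(reader, writer):
            # Server side: echo five bytes back, upper-cased.
            data = await reader.readexactly(5)
            writer.write(data.upper())
            await writer.drain()
            writer.close()

        # listen(): the patch wraps a pre-bound socket via start_server(sock=...);
        # here we simply bind an ephemeral loopback port for the demo.
        server = await asyncio.start_server(on_client, host='127.0.0.1', port=0)
        port = server.sockets[0].getsockname()[1]

        # connect(): open a stream pair, then disable Nagle's algorithm on the
        # underlying socket, as _async_connect does above.
        reader, writer = await asyncio.open_connection('127.0.0.1', port)
        sock = writer.transport.get_extra_info('socket')
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)

        writer.write(b'hello')
        await writer.drain()
        print(await reader.readexactly(5))   # b'HELLO'

        writer.close()
        server.close()
        await server.wait_closed()

    asyncio.run(main())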
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +cdef class _AsyncioTimer: + def __cinit__(self): + self._grpc_timer = NULL self._timer_future = None self._active = False self._loop = get_working_loop() cpython.Py_INCREF(self) - - @staticmethod + + @staticmethod cdef _AsyncioTimer create(grpc_custom_timer * grpc_timer, float timeout): - timer = _AsyncioTimer() - timer._grpc_timer = grpc_timer + timer = _AsyncioTimer() + timer._grpc_timer = grpc_timer timer._timer_future = timer._loop.call_later(timeout, timer.on_time_up) timer._active = True - return timer - + return timer + def on_time_up(self): self._active = False - grpc_custom_timer_callback(self._grpc_timer, <grpc_error*>0) + grpc_custom_timer_callback(self._grpc_timer, <grpc_error*>0) cpython.Py_DECREF(self) - - def __repr__(self): - class_name = self.__class__.__name__ - id_ = id(self) - return f"<{class_name} {id_} deadline={self._deadline} active={self._active}>" - - cdef stop(self): + + def __repr__(self): + class_name = self.__class__.__name__ + id_ = id(self) + return f"<{class_name} {id_} deadline={self._deadline} active={self._active}>" + + cdef stop(self): if not self._active: - return - + return + self._timer_future.cancel() self._active = False cpython.Py_DECREF(self) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pxd.pxi index 4623c2340c..46a47bd1ba 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pxd.pxi @@ -1,25 +1,25 @@ -# Copyright 2019 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -cdef class _HandlerCallDetails: - cdef readonly str method - cdef readonly tuple invocation_metadata - - -cdef class RPCState(GrpcCallWrapper): - cdef grpc_call_details details - cdef grpc_metadata_array request_metadata +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
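_AsyncioTimer above is essentially a thin wrapper over loop.call_later: creating the timer schedules the callback, stop() cancels the returned handle, and the Py_INCREF/Py_DECREF pair keeps the wrapper alive while the timer is pending. The same pattern in plain asyncio (TimerDemo is an illustrative stand-in, not part of the patch):

    import asyncio

    class TimerDemo:
        """Plain-asyncio analogue of the _AsyncioTimer pattern above."""

        def __init__(self, loop, timeout, on_time_up):
            self._on_time_up = on_time_up
            self._active = True
            # call_later returns a TimerHandle; keeping it allows cancellation.
            self._handle = loop.call_later(timeout, self._fire)

        def _fire(self):
            self._active = False
            self._on_time_up()

        def stop(self):
            # Mirrors _AsyncioTimer.stop(): a no-op once the timer already fired.
            if self._active:
                self._handle.cancel()
                self._active = False

    async def main():
        loop = asyncio.get_running_loop()
        fired = TimerDemo(loop, 0.01, lambda: print('time up'))
        pending = TimerDemo(loop, 10.0, lambda: print('never printed'))
        await asyncio.sleep(0.05)    # the first timer fires here
        fired.stop()                 # no-op, it already fired
        pending.stop()               # cancels the still-pending 10 s timer

    asyncio.run(main())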
+ +cdef class _HandlerCallDetails: + cdef readonly str method + cdef readonly tuple invocation_metadata + + +cdef class RPCState(GrpcCallWrapper): + cdef grpc_call_details details + cdef grpc_metadata_array request_metadata cdef AioServer server # NOTE(lidiz) Under certain corner case, receiving the client close # operation won't immediately fail ongoing RECV_MESSAGE operations. Here I @@ -33,14 +33,14 @@ cdef class RPCState(GrpcCallWrapper): cdef tuple trailing_metadata cdef object compression_algorithm cdef bint disable_next_compression - - cdef bytes method(self) + + cdef bytes method(self) cdef tuple invocation_metadata(self) cdef void raise_for_termination(self) except * cdef int get_write_flag(self) cdef Operation create_send_initial_metadata_op_if_not_sent(self) - - + + cdef class _ServicerContext: cdef RPCState _rpc_state cdef object _loop # asyncio.AbstractEventLoop @@ -59,24 +59,24 @@ cdef class _MessageReceiver: cdef object _agen -cdef enum AioServerStatus: - AIO_SERVER_STATUS_UNKNOWN - AIO_SERVER_STATUS_READY - AIO_SERVER_STATUS_RUNNING - AIO_SERVER_STATUS_STOPPED - AIO_SERVER_STATUS_STOPPING - - -cdef class AioServer: - cdef Server _server - cdef list _generic_handlers - cdef AioServerStatus _status - cdef object _loop # asyncio.EventLoop - cdef object _serving_task # asyncio.Task - cdef object _shutdown_lock # asyncio.Lock - cdef object _shutdown_completed # asyncio.Future - cdef CallbackWrapper _shutdown_callback_wrapper - cdef object _crash_exception # Exception +cdef enum AioServerStatus: + AIO_SERVER_STATUS_UNKNOWN + AIO_SERVER_STATUS_READY + AIO_SERVER_STATUS_RUNNING + AIO_SERVER_STATUS_STOPPED + AIO_SERVER_STATUS_STOPPING + + +cdef class AioServer: + cdef Server _server + cdef list _generic_handlers + cdef AioServerStatus _status + cdef object _loop # asyncio.EventLoop + cdef object _serving_task # asyncio.Task + cdef object _shutdown_lock # asyncio.Lock + cdef object _shutdown_completed # asyncio.Future + cdef CallbackWrapper _shutdown_callback_wrapper + cdef object _crash_exception # Exception cdef tuple _interceptors cdef object _thread_pool # concurrent.futures.ThreadPoolExecutor diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pyx.pxi index 74a61f2a7a..a630ed8811 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/aio/server.pyx.pxi @@ -1,27 +1,27 @@ -# Copyright 2019 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import inspect import traceback import functools -cdef int _EMPTY_FLAG = 0 +cdef int _EMPTY_FLAG = 0 cdef str _RPC_FINISHED_DETAILS = 'RPC already finished.' cdef str _SERVER_STOPPED_DETAILS = 'Server already stopped.' - + cdef _augment_metadata(tuple metadata, object compression): if compression is None: return metadata @@ -32,24 +32,24 @@ cdef _augment_metadata(tuple metadata, object compression): ),) + metadata -cdef class _HandlerCallDetails: - def __cinit__(self, str method, tuple invocation_metadata): - self.method = method - self.invocation_metadata = invocation_metadata - - +cdef class _HandlerCallDetails: + def __cinit__(self, str method, tuple invocation_metadata): + self.method = method + self.invocation_metadata = invocation_metadata + + class _ServerStoppedError(BaseError): """Raised if the server is stopped.""" -cdef class RPCState: - +cdef class RPCState: + def __cinit__(self, AioServer server): init_grpc_aio() self.call = NULL self.server = server - grpc_metadata_array_init(&self.request_metadata) - grpc_call_details_init(&self.details) + grpc_metadata_array_init(&self.request_metadata) + grpc_call_details_init(&self.details) self.client_closed = False self.abort_exception = None self.metadata_sent = False @@ -59,10 +59,10 @@ cdef class RPCState: self.trailing_metadata = _IMMUTABLE_EMPTY_METADATA self.compression_algorithm = None self.disable_next_compression = False - - cdef bytes method(self): + + cdef bytes method(self): return _slice_bytes(self.details.method) - + cdef tuple invocation_metadata(self): return _metadata(&self.request_metadata) @@ -101,15 +101,15 @@ cdef class RPCState: ) return op - def __dealloc__(self): - """Cleans the Core objects.""" - grpc_call_details_destroy(&self.details) - grpc_metadata_array_destroy(&self.request_metadata) - if self.call: - grpc_call_unref(self.call) + def __dealloc__(self): + """Cleans the Core objects.""" + grpc_call_details_destroy(&self.details) + grpc_metadata_array_destroy(&self.request_metadata) + if self.call: + grpc_call_unref(self.call) shutdown_grpc_aio() - - + + cdef class _ServicerContext: def __cinit__(self, @@ -346,7 +346,7 @@ async def _find_method_handler(str method, tuple metadata, list generic_handlers else: return query_handlers(handler_call_details) - + async def _finish_handler_with_unary_response(RPCState rpc_state, object unary_handler, object request, @@ -484,21 +484,21 @@ async def _finish_handler_with_stream_responses(RPCState rpc_state, await execute_batch(rpc_state, finish_ops, loop) -async def _handle_unary_unary_rpc(object method_handler, - RPCState rpc_state, - object loop): - # Receives request message +async def _handle_unary_unary_rpc(object method_handler, + RPCState rpc_state, + object loop): + # Receives request message cdef bytes request_raw = await _receive_message(rpc_state, loop) if request_raw is None: # The RPC was cancelled immediately after start on client side. 
return - - # Deserializes the request message + + # Deserializes the request message cdef object request_message = deserialize( method_handler.request_deserializer, request_raw, ) - + # Creates a dedecated ServicerContext cdef _ServicerContext servicer_context = _ServicerContext( rpc_state, @@ -506,7 +506,7 @@ async def _handle_unary_unary_rpc(object method_handler, None, loop, ) - + # Finishes the application handler await _finish_handler_with_unary_response( rpc_state, @@ -516,7 +516,7 @@ async def _handle_unary_unary_rpc(object method_handler, method_handler.response_serializer, loop ) - + async def _handle_unary_stream_rpc(object method_handler, RPCState rpc_state, @@ -530,8 +530,8 @@ async def _handle_unary_stream_rpc(object method_handler, cdef object request_message = deserialize( method_handler.request_deserializer, request_raw, - ) - + ) + # Creates a dedecated ServicerContext cdef _ServicerContext servicer_context = _ServicerContext( rpc_state, @@ -539,7 +539,7 @@ async def _handle_unary_stream_rpc(object method_handler, method_handler.response_serializer, loop, ) - + # Finishes the application handler await _finish_handler_with_stream_responses( rpc_state, @@ -547,16 +547,16 @@ async def _handle_unary_stream_rpc(object method_handler, request_message, servicer_context, loop, - ) - - + ) + + cdef class _MessageReceiver: """Bridge between the async generator API and the reader-writer API.""" - + def __cinit__(self, _ServicerContext servicer_context): self._servicer_context = servicer_context self._agen = None - + async def _async_message_receiver(self): """An async generator that receives messages.""" cdef object message @@ -587,7 +587,7 @@ async def _handle_stream_unary_rpc(object method_handler, None, loop, ) - + # Prepares the request generator cdef object request_iterator if _is_async_handler(method_handler.stream_unary): @@ -597,7 +597,7 @@ async def _handle_stream_unary_rpc(object method_handler, _MessageReceiver(servicer_context), loop ) - + # Finishes the application handler await _finish_handler_with_unary_response( rpc_state, @@ -611,15 +611,15 @@ async def _handle_stream_unary_rpc(object method_handler, async def _handle_stream_stream_rpc(object method_handler, RPCState rpc_state, - object loop): + object loop): # Creates a dedecated ServicerContext cdef _ServicerContext servicer_context = _ServicerContext( rpc_state, method_handler.request_deserializer, method_handler.response_serializer, loop, - ) - + ) + # Prepares the request generator cdef object request_iterator if _is_async_handler(method_handler.stream_stream): @@ -629,7 +629,7 @@ async def _handle_stream_stream_rpc(object method_handler, _MessageReceiver(servicer_context), loop ) - + # Finishes the application handler await _finish_handler_with_stream_responses( rpc_state, @@ -638,7 +638,7 @@ async def _handle_stream_stream_rpc(object method_handler, servicer_context, loop, ) - + async def _handle_exceptions(RPCState rpc_state, object rpc_coro, object loop): try: @@ -689,11 +689,11 @@ async def _handle_exceptions(RPCState rpc_state, object rpc_coro, object loop): ) -async def _handle_cancellation_from_core(object rpc_task, +async def _handle_cancellation_from_core(object rpc_task, RPCState rpc_state, object loop): - cdef ReceiveCloseOnServerOperation op = ReceiveCloseOnServerOperation(_EMPTY_FLAG) - cdef tuple ops = (op,) + cdef ReceiveCloseOnServerOperation op = ReceiveCloseOnServerOperation(_EMPTY_FLAG) + cdef tuple ops = (op,) # Awaits cancellation from peer. 
await execute_batch(rpc_state, ops, loop) @@ -703,9 +703,9 @@ async def _handle_cancellation_from_core(object rpc_task, # log that an RPC is both aborted and cancelled. if op.cancelled() and not rpc_task.done() and not rpc_state.status_sent: # Injects `CancelledError` to halt the RPC coroutine - rpc_task.cancel() - - + rpc_task.cancel() + + async def _schedule_rpc_coro(object rpc_coro, RPCState rpc_state, object loop): @@ -775,60 +775,60 @@ cdef CallbackFailureHandler REQUEST_CALL_FAILURE_HANDLER = CallbackFailureHandle 'grpc_server_request_call', None, _RequestCallError) -cdef CallbackFailureHandler SERVER_SHUTDOWN_FAILURE_HANDLER = CallbackFailureHandler( - 'grpc_server_shutdown_and_notify', +cdef CallbackFailureHandler SERVER_SHUTDOWN_FAILURE_HANDLER = CallbackFailureHandler( + 'grpc_server_shutdown_and_notify', None, InternalError) - - -cdef class AioServer: - - def __init__(self, loop, thread_pool, generic_handlers, interceptors, + + +cdef class AioServer: + + def __init__(self, loop, thread_pool, generic_handlers, interceptors, options, maximum_concurrent_rpcs): init_grpc_aio() - # NOTE(lidiz) Core objects won't be deallocated automatically. - # If AioServer.shutdown is not called, those objects will leak. - self._server = Server(options) - grpc_server_register_completion_queue( - self._server.c_server, + # NOTE(lidiz) Core objects won't be deallocated automatically. + # If AioServer.shutdown is not called, those objects will leak. + self._server = Server(options) + grpc_server_register_completion_queue( + self._server.c_server, global_completion_queue(), - NULL - ) - - self._loop = loop - self._status = AIO_SERVER_STATUS_READY - self._generic_handlers = [] - self.add_generic_rpc_handlers(generic_handlers) - self._serving_task = None - - self._shutdown_lock = asyncio.Lock(loop=self._loop) - self._shutdown_completed = self._loop.create_future() - self._shutdown_callback_wrapper = CallbackWrapper( - self._shutdown_completed, + NULL + ) + + self._loop = loop + self._status = AIO_SERVER_STATUS_READY + self._generic_handlers = [] + self.add_generic_rpc_handlers(generic_handlers) + self._serving_task = None + + self._shutdown_lock = asyncio.Lock(loop=self._loop) + self._shutdown_completed = self._loop.create_future() + self._shutdown_callback_wrapper = CallbackWrapper( + self._shutdown_completed, self._loop, - SERVER_SHUTDOWN_FAILURE_HANDLER) - self._crash_exception = None - - if interceptors: + SERVER_SHUTDOWN_FAILURE_HANDLER) + self._crash_exception = None + + if interceptors: self._interceptors = interceptors else: self._interceptors = () self._thread_pool = thread_pool - if maximum_concurrent_rpcs: - raise NotImplementedError() - + if maximum_concurrent_rpcs: + raise NotImplementedError() + def add_generic_rpc_handlers(self, object generic_rpc_handlers): self._generic_handlers.extend(generic_rpc_handlers) - - def add_insecure_port(self, address): - return self._server.add_http2_port(address) - - def add_secure_port(self, address, server_credentials): - return self._server.add_http2_port(address, + + def add_insecure_port(self, address): + return self._server.add_http2_port(address) + + def add_secure_port(self, address, server_credentials): + return self._server.add_http2_port(address, server_credentials._credentials) - + async def _request_call(self): cdef grpc_call_error error cdef RPCState rpc_state = RPCState(self) @@ -849,20 +849,20 @@ cdef class AioServer: await future return rpc_state - async def _server_main_loop(self, - object server_started): + async def 
_server_main_loop(self, + object server_started): self._server.start(backup_queue=False) - cdef RPCState rpc_state - server_started.set_result(True) - - while True: - # When shutdown begins, no more new connections. - if self._status != AIO_SERVER_STATUS_RUNNING: - break - + cdef RPCState rpc_state + server_started.set_result(True) + + while True: + # When shutdown begins, no more new connections. + if self._status != AIO_SERVER_STATUS_RUNNING: + break + # Accepts new request from Core rpc_state = await self._request_call() - + # Creates the dedicated RPC coroutine. If we schedule it right now, # there is no guarantee if the cancellation listening coroutine is # ready or not. So, we should control the ordering by scheduling @@ -874,124 +874,124 @@ cdef class AioServer: self._loop) # Fires off a task that listens on the cancellation from client. - self._loop.create_task( + self._loop.create_task( _schedule_rpc_coro( rpc_coro, - rpc_state, - self._loop - ) - ) - - def _serving_task_crash_handler(self, object task): - """Shutdown the server immediately if unexpectedly exited.""" - if task.exception() is None: - return - if self._status != AIO_SERVER_STATUS_STOPPING: - self._crash_exception = task.exception() - _LOGGER.exception(self._crash_exception) - self._loop.create_task(self.shutdown(None)) - - async def start(self): - if self._status == AIO_SERVER_STATUS_RUNNING: - return - elif self._status != AIO_SERVER_STATUS_READY: + rpc_state, + self._loop + ) + ) + + def _serving_task_crash_handler(self, object task): + """Shutdown the server immediately if unexpectedly exited.""" + if task.exception() is None: + return + if self._status != AIO_SERVER_STATUS_STOPPING: + self._crash_exception = task.exception() + _LOGGER.exception(self._crash_exception) + self._loop.create_task(self.shutdown(None)) + + async def start(self): + if self._status == AIO_SERVER_STATUS_RUNNING: + return + elif self._status != AIO_SERVER_STATUS_READY: raise UsageError('Server not in ready state') - - self._status = AIO_SERVER_STATUS_RUNNING - cdef object server_started = self._loop.create_future() - self._serving_task = self._loop.create_task(self._server_main_loop(server_started)) - self._serving_task.add_done_callback(self._serving_task_crash_handler) - # Needs to explicitly wait for the server to start up. - # Otherwise, the actual start time of the server is un-controllable. - await server_started - - async def _start_shutting_down(self): - """Prepares the server to shutting down. - - This coroutine function is NOT coroutine-safe. - """ - # The shutdown callback won't be called until there is no live RPC. - grpc_server_shutdown_and_notify( - self._server.c_server, + + self._status = AIO_SERVER_STATUS_RUNNING + cdef object server_started = self._loop.create_future() + self._serving_task = self._loop.create_task(self._server_main_loop(server_started)) + self._serving_task.add_done_callback(self._serving_task_crash_handler) + # Needs to explicitly wait for the server to start up. + # Otherwise, the actual start time of the server is un-controllable. + await server_started + + async def _start_shutting_down(self): + """Prepares the server to shutting down. + + This coroutine function is NOT coroutine-safe. + """ + # The shutdown callback won't be called until there is no live RPC. + grpc_server_shutdown_and_notify( + self._server.c_server, global_completion_queue(), - self._shutdown_callback_wrapper.c_functor()) - - # Ensures the serving task (coroutine) exits. 
- try: - await self._serving_task - except _RequestCallError: - pass - - async def shutdown(self, grace): + self._shutdown_callback_wrapper.c_functor()) + + # Ensures the serving task (coroutine) exits. + try: + await self._serving_task + except _RequestCallError: + pass + + async def shutdown(self, grace): """Gracefully shutdown the Core server. - - Application should only call shutdown once. - - Args: - grace: An optional float indicating the length of grace period in - seconds. - """ - if self._status == AIO_SERVER_STATUS_READY or self._status == AIO_SERVER_STATUS_STOPPED: - return - - async with self._shutdown_lock: - if self._status == AIO_SERVER_STATUS_RUNNING: - self._server.is_shutting_down = True - self._status = AIO_SERVER_STATUS_STOPPING - await self._start_shutting_down() - - if grace is None: - # Directly cancels all calls - grpc_server_cancel_all_calls(self._server.c_server) - await self._shutdown_completed - else: - try: - await asyncio.wait_for( - asyncio.shield( - self._shutdown_completed, - loop=self._loop - ), - grace, - loop=self._loop, - ) - except asyncio.TimeoutError: - # Cancels all ongoing calls by the end of grace period. - grpc_server_cancel_all_calls(self._server.c_server) - await self._shutdown_completed - - async with self._shutdown_lock: - if self._status == AIO_SERVER_STATUS_STOPPING: - grpc_server_destroy(self._server.c_server) - self._server.c_server = NULL - self._server.is_shutdown = True - self._status = AIO_SERVER_STATUS_STOPPED - + + Application should only call shutdown once. + + Args: + grace: An optional float indicating the length of grace period in + seconds. + """ + if self._status == AIO_SERVER_STATUS_READY or self._status == AIO_SERVER_STATUS_STOPPED: + return + + async with self._shutdown_lock: + if self._status == AIO_SERVER_STATUS_RUNNING: + self._server.is_shutting_down = True + self._status = AIO_SERVER_STATUS_STOPPING + await self._start_shutting_down() + + if grace is None: + # Directly cancels all calls + grpc_server_cancel_all_calls(self._server.c_server) + await self._shutdown_completed + else: + try: + await asyncio.wait_for( + asyncio.shield( + self._shutdown_completed, + loop=self._loop + ), + grace, + loop=self._loop, + ) + except asyncio.TimeoutError: + # Cancels all ongoing calls by the end of grace period. 
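# The grace handling above waits on one shared "shutdown completed" future: it is
# wrapped in asyncio.shield() so a timed-out waiter cannot cancel it for everyone,
# and bounded by asyncio.wait_for(); only when the grace period expires are the
# remaining calls force-cancelled. A minimal sketch of that pattern in plain asyncio
# (the loop= arguments used above are omitted, and force_cancel_all() is a
# hypothetical stand-in for grpc_server_cancel_all_calls):
import asyncio

def force_cancel_all():
    # Placeholder for the C-core call that aborts all in-flight RPCs.
    pass

async def shutdown(shutdown_completed, grace):
    if grace is None:
        force_cancel_all()                   # no grace period: cancel immediately
        await shutdown_completed
        return
    try:
        # shield() keeps the timeout from cancelling the shared future itself.
        await asyncio.wait_for(asyncio.shield(shutdown_completed), grace)
    except asyncio.TimeoutError:
        force_cancel_all()                   # grace period over: cancel ongoing calls
        await shutdown_completed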
+ grpc_server_cancel_all_calls(self._server.c_server) + await self._shutdown_completed + + async with self._shutdown_lock: + if self._status == AIO_SERVER_STATUS_STOPPING: + grpc_server_destroy(self._server.c_server) + self._server.c_server = NULL + self._server.is_shutdown = True + self._status = AIO_SERVER_STATUS_STOPPED + async def wait_for_termination(self, object timeout): - if timeout is None: - await self._shutdown_completed - else: - try: - await asyncio.wait_for( - asyncio.shield( - self._shutdown_completed, - loop=self._loop, - ), - timeout, - loop=self._loop, - ) - except asyncio.TimeoutError: - if self._crash_exception is not None: - raise self._crash_exception - return False - if self._crash_exception is not None: - raise self._crash_exception - return True - - def __dealloc__(self): + if timeout is None: + await self._shutdown_completed + else: + try: + await asyncio.wait_for( + asyncio.shield( + self._shutdown_completed, + loop=self._loop, + ), + timeout, + loop=self._loop, + ) + except asyncio.TimeoutError: + if self._crash_exception is not None: + raise self._crash_exception + return False + if self._crash_exception is not None: + raise self._crash_exception + return True + + def __dealloc__(self): """Deallocation of Core objects are ensured by Python layer.""" # TODO(lidiz) if users create server, and then dealloc it immediately. # There is a potential memory leak of created Core server. - if self._status != AIO_SERVER_STATUS_STOPPED: + if self._status != AIO_SERVER_STATUS_STOPPED: _LOGGER.debug( '__dealloc__ called on running server %s with status %d', self, diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/arguments.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/arguments.pxd.pxi index 24c22e6771..251efe15b3 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/arguments.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/arguments.pxd.pxi @@ -23,7 +23,7 @@ cdef class _ChannelArg: cdef grpc_arg c_argument - cdef void c(self, argument, references) except * + cdef void c(self, argument, references) except * cdef class _ChannelArgs: diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/arguments.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/arguments.pyx.pxi index c0c8d59b33..9df308cdbc 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/arguments.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/arguments.pyx.pxi @@ -31,7 +31,7 @@ cdef grpc_arg _unwrap_grpc_arg(tuple wrapped_arg): cdef class _ChannelArg: - cdef void c(self, argument, references) except *: + cdef void c(self, argument, references) except *: key, value = argument cdef bytes encoded_key = _encode(key) if encoded_key is not key: @@ -54,7 +54,7 @@ cdef class _ChannelArg: # lifecycle of the pointer is fixed to the lifecycle of the # python object wrapping it. 
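# _ChannelArg.c above dispatches on the Python value's type: string values become
# string channel args, ints become integer args, and anything else is passed as a
# pointer arg backed by default_vtable. At the public API level these are the
# (key, value) pairs handed to a channel or server as `options`. A small, hedged
# usage sketch (the target address is illustrative; supported option keys depend on
# the gRPC build):
import grpc

options = [
    ('grpc.max_receive_message_length', 16 * 1024 * 1024),   # int -> integer arg
    ('grpc.primary_user_agent', 'example-client/0.1'),       # str -> string arg
]
channel = grpc.insecure_channel('localhost:50051', options=options)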
self.c_argument.type = GRPC_ARG_POINTER - self.c_argument.value.pointer.vtable = &default_vtable + self.c_argument.value.pointer.vtable = &default_vtable self.c_argument.value.pointer.address = <void*>(<intptr_t>int(value)) else: raise TypeError( @@ -73,7 +73,7 @@ cdef class _ChannelArgs: self._c_arguments.arguments_length * sizeof(grpc_arg)) for index, argument in enumerate(self._arguments): channel_arg = _ChannelArg() - channel_arg.c(argument, self._references) + channel_arg.c(argument, self._references) self._c_arguments.arguments[index] = channel_arg.c_argument self._channel_args.append(channel_arg) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pyx.pxi index b97dc738f8..bdd155bea9 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pyx.pxi @@ -83,10 +83,10 @@ cdef class Call: return result def __dealloc__(self): - with nogil: - if self.c_call != NULL: - grpc_call_unref(self.c_call) - grpc_shutdown_blocking() + with nogil: + if self.c_call != NULL: + grpc_call_unref(self.c_call) + grpc_shutdown_blocking() # The object *should* always be valid from Python. Used for debugging. @property diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pyx.pxi index 9699aa1bc6..74c7f6c140 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pyx.pxi @@ -142,38 +142,38 @@ cdef _cancel( cdef _next_call_event( _ChannelState channel_state, grpc_completion_queue *c_completion_queue, - on_success, on_failure, deadline): - """Block on the next event out of the completion queue. - - On success, `on_success` will be invoked with the tag taken from the CQ. - In the case of a failure due to an exception raised in a signal handler, - `on_failure` will be invoked with no arguments. Note that this situation - can only occur on the main thread. - - Args: - channel_state: The state for the channel on which the RPC is running. - c_completion_queue: The CQ which will be polled. - on_success: A callable object to be invoked upon successful receipt of a - tag from the CQ. - on_failure: A callable object to be invoked in case a Python exception is - raised from a signal handler during polling. - deadline: The point after which the RPC will time out. - """ - try: - tag, event = _latent_event(c_completion_queue, deadline) - # NOTE(rbellevi): This broad except enables us to clean up resources before - # propagating any exceptions raised by signal handlers to the application. - except: - if on_failure is not None: - on_failure() - raise - else: - with channel_state.condition: - on_success(tag) - channel_state.condition.notify_all() - return event - - + on_success, on_failure, deadline): + """Block on the next event out of the completion queue. + + On success, `on_success` will be invoked with the tag taken from the CQ. + In the case of a failure due to an exception raised in a signal handler, + `on_failure` will be invoked with no arguments. Note that this situation + can only occur on the main thread. + + Args: + channel_state: The state for the channel on which the RPC is running. + c_completion_queue: The CQ which will be polled. + on_success: A callable object to be invoked upon successful receipt of a + tag from the CQ. 
+ on_failure: A callable object to be invoked in case a Python exception is + raised from a signal handler during polling. + deadline: The point after which the RPC will time out. + """ + try: + tag, event = _latent_event(c_completion_queue, deadline) + # NOTE(rbellevi): This broad except enables us to clean up resources before + # propagating any exceptions raised by signal handlers to the application. + except: + if on_failure is not None: + on_failure() + raise + else: + with channel_state.condition: + on_success(tag) + channel_state.condition.notify_all() + return event + + # TODO(https://github.com/grpc/grpc/issues/14569): This could be a lot simpler. cdef void _call( _ChannelState channel_state, _CallState call_state, @@ -329,14 +329,14 @@ cdef class SegregatedCall: def on_success(tag): _process_segregated_call_tag( self._channel_state, self._call_state, self._c_completion_queue, tag) - def on_failure(): - self._call_state.due.clear() - grpc_call_unref(self._call_state.c_call) - self._call_state.c_call = NULL - self._channel_state.segregated_call_states.remove(self._call_state) - _destroy_c_completion_queue(self._c_completion_queue) + def on_failure(): + self._call_state.due.clear() + grpc_call_unref(self._call_state.c_call) + self._call_state.c_call = NULL + self._channel_state.segregated_call_states.remove(self._call_state) + _destroy_c_completion_queue(self._c_completion_queue) return _next_call_event( - self._channel_state, self._c_completion_queue, on_success, on_failure, None) + self._channel_state, self._c_completion_queue, on_success, on_failure, None) cdef SegregatedCall _segregated_call( @@ -425,7 +425,7 @@ cdef _close(Channel channel, grpc_status_code code, object details, _destroy_c_completion_queue(state.c_connectivity_completion_queue) grpc_channel_destroy(state.c_channel) state.c_channel = NULL - grpc_shutdown_blocking() + grpc_shutdown_blocking() state.condition.notify_all() else: # Another call to close already completed in the past or is currently @@ -446,12 +446,12 @@ cdef class Channel: arguments = () if arguments is None else tuple(arguments) fork_handlers_and_grpc_init() self._state = _ChannelState() - self._state.c_call_completion_queue = ( - grpc_completion_queue_create_for_next(NULL)) - self._state.c_connectivity_completion_queue = ( - grpc_completion_queue_create_for_next(NULL)) - self._arguments = arguments - cdef _ChannelArgs channel_args = _ChannelArgs(arguments) + self._state.c_call_completion_queue = ( + grpc_completion_queue_create_for_next(NULL)) + self._state.c_connectivity_completion_queue = ( + grpc_completion_queue_create_for_next(NULL)) + self._arguments = arguments + cdef _ChannelArgs channel_args = _ChannelArgs(arguments) if channel_credentials is None: self._state.c_channel = grpc_insecure_channel_create( <char *>target, channel_args.c_args(), NULL) @@ -485,11 +485,11 @@ cdef class Channel: queue_deadline = time.time() + 1.0 else: queue_deadline = None - # NOTE(gnossen): It is acceptable for on_failure to be None here because - # failure conditions can only ever happen on the main thread and this - # method is only ever invoked on the channel spin thread. + # NOTE(gnossen): It is acceptable for on_failure to be None here because + # failure conditions can only ever happen on the main thread and this + # method is only ever invoked on the channel spin thread. 
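# _next_call_event above wraps the blocking completion-queue poll in try/except/else:
# if an exception escapes (e.g. KeyboardInterrupt raised from a signal handler during
# polling), on_failure runs first so resources are released before the exception
# propagates; otherwise on_success runs with the tag while the channel condition is
# held and waiters are notified. A simplified pure-Python sketch of that control flow
# (poll_next_event is a hypothetical stand-in for _latent_event; condition is expected
# to be a threading.Condition):
def next_call_event(condition, poll_next_event, on_success, on_failure):
    try:
        tag, event = poll_next_event()
    except BaseException:
        # Clean up first, then let the exception keep propagating.
        if on_failure is not None:
            on_failure()
        raise
    else:
        with condition:
            on_success(tag)
            condition.notify_all()
        return event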
return _next_call_event(self._state, self._state.c_call_completion_queue, - on_success, None, queue_deadline) + on_success, None, queue_deadline) def segregated_call( self, int flags, method, host, object deadline, object metadata, diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pyx.pxi index a5b2c6eaed..a47403ac51 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pyx.pxi @@ -26,20 +26,20 @@ cdef grpc_event _next(grpc_completion_queue *c_completion_queue, deadline) excep else: c_deadline = _timespec_from_time(deadline) - while True: - with nogil: + while True: + with nogil: c_timeout = gpr_time_add(gpr_now(GPR_CLOCK_REALTIME), c_increment) if gpr_time_cmp(c_timeout, c_deadline) > 0: c_timeout = c_deadline - + c_event = grpc_completion_queue_next(c_completion_queue, c_timeout, NULL) - + if (c_event.type != GRPC_QUEUE_TIMEOUT or gpr_time_cmp(c_timeout, c_deadline) == 0): break - # Handle any signals - cpython.PyErr_CheckSignals() + # Handle any signals + cpython.PyErr_CheckSignals() return c_event @@ -115,4 +115,4 @@ cdef class CompletionQueue: self.c_completion_queue, c_deadline, NULL) self._interpret_event(event) grpc_completion_queue_destroy(self.c_completion_queue) - grpc_shutdown_blocking() + grpc_shutdown_blocking() diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pxd.pxi index 91d2f36195..ddaedb30bd 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pxd.pxi @@ -26,9 +26,9 @@ cdef int _get_metadata( grpc_credentials_plugin_metadata_cb cb, void *user_data, grpc_metadata creds_md[GRPC_METADATA_CREDENTIALS_PLUGIN_SYNC_MAX], size_t *num_creds_md, grpc_status_code *status, - const char **error_details) except * with gil + const char **error_details) except * with gil -cdef void _destroy(void *state) except * with gil +cdef void _destroy(void *state) except * with gil cdef class MetadataPluginCallCredentials(CallCredentials): @@ -97,11 +97,11 @@ cdef class ServerCredentials: cdef object cert_config_fetcher # whether C-core has asked for the initial_cert_config cdef bint initial_cert_config_fetched - - -cdef class LocalChannelCredentials(ChannelCredentials): - - cdef grpc_local_connect_type _local_connect_type + + +cdef class LocalChannelCredentials(ChannelCredentials): + + cdef grpc_local_connect_type _local_connect_type cdef class ALTSChannelCredentials(ChannelCredentials): diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pyx.pxi index 7408f2960d..c75579cc04 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pyx.pxi @@ -56,9 +56,9 @@ cdef int _get_metadata(void *state, return 0 # Asynchronous return -cdef void _destroy(void *state) except * with gil: +cdef void _destroy(void *state) except * with gil: cpython.Py_DECREF(<object>state) - grpc_shutdown_blocking() + grpc_shutdown_blocking() cdef class MetadataPluginCallCredentials(CallCredentials): @@ -124,7 +124,7 @@ cdef class SSLSessionCacheLRU: def 
__dealloc__(self): if self._cache != NULL: grpc_ssl_session_cache_destroy(self._cache) - grpc_shutdown_blocking() + grpc_shutdown_blocking() cdef class SSLChannelCredentials(ChannelCredentials): @@ -190,7 +190,7 @@ cdef class ServerCertificateConfig: def __dealloc__(self): grpc_ssl_server_certificate_config_destroy(self.c_cert_config) gpr_free(self.c_ssl_pem_key_cert_pairs) - grpc_shutdown_blocking() + grpc_shutdown_blocking() cdef class ServerCredentials: @@ -206,7 +206,7 @@ cdef class ServerCredentials: def __dealloc__(self): if self.c_credentials != NULL: grpc_server_credentials_release(self.c_credentials) - grpc_shutdown_blocking() + grpc_shutdown_blocking() cdef const char* _get_c_pem_root_certs(pem_root_certs): if pem_root_certs is None: @@ -329,28 +329,28 @@ cdef grpc_ssl_certificate_config_reload_status _server_cert_config_fetcher_wrapp cert_config.c_ssl_pem_key_cert_pairs_count) return GRPC_SSL_CERTIFICATE_CONFIG_RELOAD_NEW - -class LocalConnectionType: - uds = UDS - local_tcp = LOCAL_TCP - -cdef class LocalChannelCredentials(ChannelCredentials): - - def __cinit__(self, grpc_local_connect_type local_connect_type): - self._local_connect_type = local_connect_type - - cdef grpc_channel_credentials *c(self) except *: - cdef grpc_local_connect_type local_connect_type - local_connect_type = self._local_connect_type - return grpc_local_credentials_create(local_connect_type) - -def channel_credentials_local(grpc_local_connect_type local_connect_type): - return LocalChannelCredentials(local_connect_type) - -def server_credentials_local(grpc_local_connect_type local_connect_type): - cdef ServerCredentials credentials = ServerCredentials() - credentials.c_credentials = grpc_local_server_credentials_create(local_connect_type) - return credentials + +class LocalConnectionType: + uds = UDS + local_tcp = LOCAL_TCP + +cdef class LocalChannelCredentials(ChannelCredentials): + + def __cinit__(self, grpc_local_connect_type local_connect_type): + self._local_connect_type = local_connect_type + + cdef grpc_channel_credentials *c(self) except *: + cdef grpc_local_connect_type local_connect_type + local_connect_type = self._local_connect_type + return grpc_local_credentials_create(local_connect_type) + +def channel_credentials_local(grpc_local_connect_type local_connect_type): + return LocalChannelCredentials(local_connect_type) + +def server_credentials_local(grpc_local_connect_type local_connect_type): + cdef ServerCredentials credentials = ServerCredentials() + credentials.c_credentials = grpc_local_server_credentials_create(local_connect_type) + return credentials cdef class ALTSChannelCredentials(ChannelCredentials): diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/fork_posix.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/fork_posix.pyx.pxi index e430ec5309..53657e8b1a 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/fork_posix.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/fork_posix.pyx.pxi @@ -32,12 +32,12 @@ _GRPC_ENABLE_FORK_SUPPORT = ( os.environ.get('GRPC_ENABLE_FORK_SUPPORT', '0') .lower() in _TRUE_VALUES) -_fork_handler_failed = False - +_fork_handler_failed = False + cdef void __prefork() nogil: with gil: - global _fork_handler_failed - _fork_handler_failed = False + global _fork_handler_failed + _fork_handler_failed = False with _fork_state.fork_in_progress_condition: _fork_state.fork_in_progress = True if not _fork_state.active_thread_count.await_zero_threads( @@ -45,7 +45,7 @@ cdef void __prefork() 
nogil: _LOGGER.error( 'Failed to shutdown gRPC Python threads prior to fork. ' 'Behavior after fork will be undefined.') - _fork_handler_failed = True + _fork_handler_failed = True cdef void __postfork_parent() nogil: @@ -57,28 +57,28 @@ cdef void __postfork_parent() nogil: cdef void __postfork_child() nogil: with gil: - try: - if _fork_handler_failed: - return - # Thread could be holding the fork_in_progress_condition inside of - # block_if_fork_in_progress() when fork occurs. Reset the lock here. - _fork_state.fork_in_progress_condition = threading.Condition() - # A thread in return_from_user_request_generator() may hold this lock - # when fork occurs. - _fork_state.active_thread_count = _ActiveThreadCount() - for state_to_reset in _fork_state.postfork_states_to_reset: - state_to_reset.reset_postfork_child() - _fork_state.postfork_states_to_reset = [] - _fork_state.fork_epoch += 1 - for channel in _fork_state.channels: - channel._close_on_fork() - with _fork_state.fork_in_progress_condition: - _fork_state.fork_in_progress = False - except: - _LOGGER.error('Exiting child due to raised exception') - _LOGGER.error(sys.exc_info()[0]) - os._exit(os.EX_USAGE) - + try: + if _fork_handler_failed: + return + # Thread could be holding the fork_in_progress_condition inside of + # block_if_fork_in_progress() when fork occurs. Reset the lock here. + _fork_state.fork_in_progress_condition = threading.Condition() + # A thread in return_from_user_request_generator() may hold this lock + # when fork occurs. + _fork_state.active_thread_count = _ActiveThreadCount() + for state_to_reset in _fork_state.postfork_states_to_reset: + state_to_reset.reset_postfork_child() + _fork_state.postfork_states_to_reset = [] + _fork_state.fork_epoch += 1 + for channel in _fork_state.channels: + channel._close_on_fork() + with _fork_state.fork_in_progress_condition: + _fork_state.fork_in_progress = False + except: + _LOGGER.error('Exiting child due to raised exception') + _LOGGER.error(sys.exc_info()[0]) + os._exit(os.EX_USAGE) + if grpc_is_initialized() > 0: with gil: _LOGGER.error('Failed to shutdown gRPC Core after fork()') @@ -158,7 +158,7 @@ def fork_register_channel(channel): def fork_unregister_channel(channel): if _GRPC_ENABLE_FORK_SUPPORT: - _fork_state.channels.discard(channel) + _fork_state.channels.discard(channel) class _ActiveThreadCount(object): diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc.pxi index 1ffd1fcbd6..54eb7fdffc 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc.pxi @@ -14,16 +14,16 @@ cimport libc.time -ctypedef ssize_t intptr_t -ctypedef size_t uintptr_t -ctypedef signed char int8_t -ctypedef signed short int16_t -ctypedef signed int int32_t -ctypedef signed long long int64_t -ctypedef unsigned char uint8_t -ctypedef unsigned short uint16_t -ctypedef unsigned int uint32_t -ctypedef unsigned long long uint64_t +ctypedef ssize_t intptr_t +ctypedef size_t uintptr_t +ctypedef signed char int8_t +ctypedef signed short int16_t +ctypedef signed int int32_t +ctypedef signed long long int64_t +ctypedef unsigned char uint8_t +ctypedef unsigned short uint16_t +ctypedef unsigned int uint32_t +ctypedef unsigned long long uint64_t cdef extern from "grpc/support/alloc.h": @@ -41,11 +41,11 @@ cdef extern from "grpc/byte_buffer_reader.h": pass -cdef extern from "grpc/impl/codegen/grpc_types.h": - ctypedef struct 
grpc_experimental_completion_queue_functor: - void (*functor_run)(grpc_experimental_completion_queue_functor*, int); - - +cdef extern from "grpc/impl/codegen/grpc_types.h": + ctypedef struct grpc_experimental_completion_queue_functor: + void (*functor_run)(grpc_experimental_completion_queue_functor*, int); + + cdef extern from "grpc/grpc.h": ctypedef struct grpc_slice: @@ -68,10 +68,10 @@ cdef extern from "grpc/grpc.h": void *grpc_slice_start_ptr "GRPC_SLICE_START_PTR" (grpc_slice s) nogil size_t grpc_slice_length "GRPC_SLICE_LENGTH" (grpc_slice s) nogil - const int GPR_MS_PER_SEC - const int GPR_US_PER_SEC - const int GPR_NS_PER_SEC - + const int GPR_MS_PER_SEC + const int GPR_US_PER_SEC + const int GPR_NS_PER_SEC + ctypedef enum gpr_clock_type: GPR_CLOCK_MONOTONIC GPR_CLOCK_REALTIME @@ -93,8 +93,8 @@ cdef extern from "grpc/grpc.h": gpr_clock_type target_clock) nogil gpr_timespec gpr_time_from_millis(int64_t ms, gpr_clock_type type) nogil - gpr_timespec gpr_time_from_nanos(int64_t ns, gpr_clock_type type) nogil - double gpr_timespec_to_micros(gpr_timespec t) nogil + gpr_timespec gpr_time_from_nanos(int64_t ns, gpr_clock_type type) nogil + double gpr_timespec_to_micros(gpr_timespec t) nogil gpr_timespec gpr_time_add(gpr_timespec a, gpr_timespec b) nogil @@ -145,8 +145,8 @@ cdef extern from "grpc/grpc.h": const char *GRPC_ARG_SECONDARY_USER_AGENT_STRING const char *GRPC_SSL_TARGET_NAME_OVERRIDE_ARG const char *GRPC_SSL_SESSION_CACHE_ARG - const char *_GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM \ - "GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM" + const char *_GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM \ + "GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM" const char *GRPC_COMPRESSION_CHANNEL_DEFAULT_LEVEL const char *GRPC_COMPRESSION_CHANNEL_ENABLED_ALGORITHMS_BITSET @@ -330,12 +330,12 @@ cdef extern from "grpc/grpc.h": ctypedef struct grpc_op: grpc_op_type type "op" uint32_t flags - void * reserved + void * reserved grpc_op_data data void grpc_dont_init_openssl() nogil void grpc_init() nogil - void grpc_shutdown_blocking() nogil + void grpc_shutdown_blocking() nogil int grpc_is_initialized() nogil ctypedef struct grpc_completion_queue_factory: @@ -357,10 +357,10 @@ cdef extern from "grpc/grpc.h": void grpc_completion_queue_shutdown(grpc_completion_queue *cq) nogil void grpc_completion_queue_destroy(grpc_completion_queue *cq) nogil - grpc_completion_queue *grpc_completion_queue_create_for_callback( - grpc_experimental_completion_queue_functor* shutdown_callback, - void *reserved) nogil - + grpc_completion_queue *grpc_completion_queue_create_for_callback( + grpc_experimental_completion_queue_functor* shutdown_callback, + void *reserved) nogil + grpc_call_error grpc_call_start_batch( grpc_call *call, const grpc_op *ops, size_t nops, void *tag, void *reserved) nogil @@ -564,8 +564,8 @@ cdef extern from "grpc/grpc_security.h": grpc_credentials_plugin_metadata_cb cb, void *user_data, grpc_metadata creds_md[GRPC_METADATA_CREDENTIALS_PLUGIN_SYNC_MAX], size_t *num_creds_md, grpc_status_code *status, - const char **error_details) except * - void (*destroy)(void *state) except * + const char **error_details) except * + void (*destroy)(void *state) except * void *state const char *type @@ -602,11 +602,11 @@ cdef extern from "grpc/grpc_security.h": void grpc_auth_context_release(grpc_auth_context *context) - grpc_channel_credentials *grpc_local_credentials_create( - grpc_local_connect_type type) - grpc_server_credentials *grpc_local_server_credentials_create( - grpc_local_connect_type type) - + 
grpc_channel_credentials *grpc_local_credentials_create( + grpc_local_connect_type type) + grpc_server_credentials *grpc_local_server_credentials_create( + grpc_local_connect_type type) + ctypedef struct grpc_alts_credentials_options: # We don't care about the internals (and in fact don't know them) pass @@ -615,7 +615,7 @@ cdef extern from "grpc/grpc_security.h": const grpc_alts_credentials_options *options) grpc_server_credentials *grpc_alts_server_credentials_create( const grpc_alts_credentials_options *options) - + grpc_alts_credentials_options* grpc_alts_credentials_client_options_create() grpc_alts_credentials_options* grpc_alts_credentials_server_options_create() void grpc_alts_credentials_options_destroy(grpc_alts_credentials_options *options) @@ -658,14 +658,14 @@ cdef extern from "grpc/compression.h": int grpc_compression_options_is_algorithm_enabled( const grpc_compression_options *opts, grpc_compression_algorithm algorithm) nogil - -cdef extern from "grpc/impl/codegen/compression_types.h": - - const char *_GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY \ - "GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY" - - -cdef extern from "grpc/grpc_security_constants.h": - ctypedef enum grpc_local_connect_type: - UDS - LOCAL_TCP + +cdef extern from "grpc/impl/codegen/compression_types.h": + + const char *_GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY \ + "GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY" + + +cdef extern from "grpc/grpc_security_constants.h": + ctypedef enum grpc_local_connect_type: + UDS + LOCAL_TCP diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_gevent.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_gevent.pyx.pxi index 6888af71c3..0f693ec691 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_gevent.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_gevent.pyx.pxi @@ -31,16 +31,16 @@ def _spawn_greenlet(*args): cdef class SocketWrapper: def __cinit__(self): - fork_handlers_and_grpc_init() + fork_handlers_and_grpc_init() self.sockopts = [] self.socket = None self.c_socket = NULL self.c_buffer = NULL self.len = 0 - def __dealloc__(self): - grpc_shutdown_blocking() - + def __dealloc__(self): + grpc_shutdown_blocking() + cdef grpc_error* socket_init(grpc_custom_socket* socket, int domain) with gil: sw = SocketWrapper() sw.c_socket = socket @@ -261,14 +261,14 @@ cdef void socket_accept(grpc_custom_socket* socket, grpc_custom_socket* client, cdef class ResolveWrapper: def __cinit__(self): - fork_handlers_and_grpc_init() + fork_handlers_and_grpc_init() self.c_resolver = NULL self.c_host = NULL self.c_port = NULL - def __dealloc__(self): - grpc_shutdown_blocking() - + def __dealloc__(self): + grpc_shutdown_blocking() + cdef socket_resolve_async_cython(ResolveWrapper resolve_wrapper): try: res = gevent_socket.getaddrinfo(resolve_wrapper.c_host, resolve_wrapper.c_port) @@ -305,7 +305,7 @@ cdef grpc_error* socket_resolve(const char* host, const char* port, cdef class TimerWrapper: def __cinit__(self, deadline): - fork_handlers_and_grpc_init() + fork_handlers_and_grpc_init() self.timer = gevent_hub.get_hub().loop.timer(deadline) self.event = None @@ -322,9 +322,9 @@ cdef class TimerWrapper: self.event.set() self.timer.stop() - def __dealloc__(self): - grpc_shutdown_blocking() - + def __dealloc__(self): + grpc_shutdown_blocking() + cdef void timer_start(grpc_custom_timer* t) with gil: timer = TimerWrapper(t.timeout_ms / 1000.0) timer.c_timer = t diff --git 
a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi index 3bdc172585..5c1e0679a9 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi @@ -13,7 +13,7 @@ # limitations under the License. -# This function will ascii encode unicode string inputs if necessary. +# This function will ascii encode unicode string inputs if necessary. # In Python3, unicode strings are the default str type. cdef bytes str_to_bytes(object s): if s is None or isinstance(s, bytes): diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/iomgr.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/iomgr.pxd.pxi index 59abfda2cb..0c5a4e5763 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/iomgr.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/iomgr.pxd.pxi @@ -1,129 +1,129 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# distutils: language=c++ - +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# distutils: language=c++ + from libcpp cimport bool as bool_t from libcpp.string cimport string as cppstring -cdef extern from "grpc/impl/codegen/slice.h": - struct grpc_slice_buffer: - int count - -cdef extern from "src/core/lib/iomgr/error.h": - struct grpc_error: - pass - -# TODO(https://github.com/grpc/grpc/issues/20135) Change the filename -# for something more meaningful. 
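# str_to_bytes in grpc_string.pyx.pxi above passes None and bytes through unchanged
# and, per its comment, ascii-encodes str inputs. A plain-Python sketch of that
# behaviour (the real helper is Cython and its error handling may differ):
def str_to_bytes(s):
    if s is None or isinstance(s, bytes):
        return s
    return s.encode('ascii')

assert str_to_bytes(None) is None
assert str_to_bytes(b'grpc') == b'grpc'
assert str_to_bytes('grpc') == b'grpc'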
-cdef extern from "src/core/lib/iomgr/python_util.h": - grpc_error* grpc_socket_error(char* error) - char* grpc_slice_buffer_start(grpc_slice_buffer* buffer, int i) - int grpc_slice_buffer_length(grpc_slice_buffer* buffer, int i) - -cdef extern from "src/core/lib/iomgr/sockaddr.h": - ctypedef struct grpc_sockaddr: - pass - -cdef extern from "src/core/lib/iomgr/resolve_address.h": - ctypedef struct grpc_resolved_addresses: - size_t naddrs - grpc_resolved_address* addrs - - ctypedef struct grpc_resolved_address: - char[128] addr - size_t len - -cdef extern from "src/core/lib/iomgr/resolve_address_custom.h": - struct grpc_custom_resolver: - pass - - struct grpc_custom_resolver_vtable: +cdef extern from "grpc/impl/codegen/slice.h": + struct grpc_slice_buffer: + int count + +cdef extern from "src/core/lib/iomgr/error.h": + struct grpc_error: + pass + +# TODO(https://github.com/grpc/grpc/issues/20135) Change the filename +# for something more meaningful. +cdef extern from "src/core/lib/iomgr/python_util.h": + grpc_error* grpc_socket_error(char* error) + char* grpc_slice_buffer_start(grpc_slice_buffer* buffer, int i) + int grpc_slice_buffer_length(grpc_slice_buffer* buffer, int i) + +cdef extern from "src/core/lib/iomgr/sockaddr.h": + ctypedef struct grpc_sockaddr: + pass + +cdef extern from "src/core/lib/iomgr/resolve_address.h": + ctypedef struct grpc_resolved_addresses: + size_t naddrs + grpc_resolved_address* addrs + + ctypedef struct grpc_resolved_address: + char[128] addr + size_t len + +cdef extern from "src/core/lib/iomgr/resolve_address_custom.h": + struct grpc_custom_resolver: + pass + + struct grpc_custom_resolver_vtable: grpc_error* (*resolve)(const char* host, const char* port, grpc_resolved_addresses** res); void (*resolve_async)(grpc_custom_resolver* resolver, const char* host, const char* port); - - void grpc_custom_resolve_callback(grpc_custom_resolver* resolver, - grpc_resolved_addresses* result, - grpc_error* error); - -cdef extern from "src/core/lib/iomgr/tcp_custom.h": + + void grpc_custom_resolve_callback(grpc_custom_resolver* resolver, + grpc_resolved_addresses* result, + grpc_error* error); + +cdef extern from "src/core/lib/iomgr/tcp_custom.h": cdef int GRPC_CUSTOM_SOCKET_OPT_SO_REUSEPORT - struct grpc_custom_socket: - void* impl - # We don't care about the rest of the fields - ctypedef void (*grpc_custom_connect_callback)(grpc_custom_socket* socket, - grpc_error* error) - ctypedef void (*grpc_custom_write_callback)(grpc_custom_socket* socket, - grpc_error* error) - ctypedef void (*grpc_custom_read_callback)(grpc_custom_socket* socket, - size_t nread, grpc_error* error) - ctypedef void (*grpc_custom_accept_callback)(grpc_custom_socket* socket, - grpc_custom_socket* client, - grpc_error* error) - ctypedef void (*grpc_custom_close_callback)(grpc_custom_socket* socket) - - struct grpc_socket_vtable: - grpc_error* (*init)(grpc_custom_socket* socket, int domain); - void (*connect)(grpc_custom_socket* socket, const grpc_sockaddr* addr, - size_t len, grpc_custom_connect_callback cb); - void (*destroy)(grpc_custom_socket* socket); - void (*shutdown)(grpc_custom_socket* socket); - void (*close)(grpc_custom_socket* socket, grpc_custom_close_callback cb); - void (*write)(grpc_custom_socket* socket, grpc_slice_buffer* slices, - grpc_custom_write_callback cb); - void (*read)(grpc_custom_socket* socket, char* buffer, size_t length, - grpc_custom_read_callback cb); - grpc_error* (*getpeername)(grpc_custom_socket* socket, - const grpc_sockaddr* addr, int* len); - grpc_error* 
(*getsockname)(grpc_custom_socket* socket, - const grpc_sockaddr* addr, int* len); - grpc_error* (*bind)(grpc_custom_socket* socket, const grpc_sockaddr* addr, - size_t len, int flags); - grpc_error* (*listen)(grpc_custom_socket* socket); - void (*accept)(grpc_custom_socket* socket, grpc_custom_socket* client, - grpc_custom_accept_callback cb); - -cdef extern from "src/core/lib/iomgr/timer_custom.h": - struct grpc_custom_timer: - void* timer - int timeout_ms - # We don't care about the rest of the fields - - struct grpc_custom_timer_vtable: - void (*start)(grpc_custom_timer* t); - void (*stop)(grpc_custom_timer* t); - - void grpc_custom_timer_callback(grpc_custom_timer* t, grpc_error* error); - -cdef extern from "src/core/lib/iomgr/pollset_custom.h": - struct grpc_custom_poller_vtable: - void (*init)() - void (*poll)(size_t timeout_ms) - void (*kick)() - void (*shutdown)() - -cdef extern from "src/core/lib/iomgr/iomgr_custom.h": - void grpc_custom_iomgr_init(grpc_socket_vtable* socket, - grpc_custom_resolver_vtable* resolver, - grpc_custom_timer_vtable* timer, - grpc_custom_poller_vtable* poller); - -cdef extern from "src/core/lib/iomgr/sockaddr_utils.h": - int grpc_sockaddr_get_port(const grpc_resolved_address *addr); + struct grpc_custom_socket: + void* impl + # We don't care about the rest of the fields + ctypedef void (*grpc_custom_connect_callback)(grpc_custom_socket* socket, + grpc_error* error) + ctypedef void (*grpc_custom_write_callback)(grpc_custom_socket* socket, + grpc_error* error) + ctypedef void (*grpc_custom_read_callback)(grpc_custom_socket* socket, + size_t nread, grpc_error* error) + ctypedef void (*grpc_custom_accept_callback)(grpc_custom_socket* socket, + grpc_custom_socket* client, + grpc_error* error) + ctypedef void (*grpc_custom_close_callback)(grpc_custom_socket* socket) + + struct grpc_socket_vtable: + grpc_error* (*init)(grpc_custom_socket* socket, int domain); + void (*connect)(grpc_custom_socket* socket, const grpc_sockaddr* addr, + size_t len, grpc_custom_connect_callback cb); + void (*destroy)(grpc_custom_socket* socket); + void (*shutdown)(grpc_custom_socket* socket); + void (*close)(grpc_custom_socket* socket, grpc_custom_close_callback cb); + void (*write)(grpc_custom_socket* socket, grpc_slice_buffer* slices, + grpc_custom_write_callback cb); + void (*read)(grpc_custom_socket* socket, char* buffer, size_t length, + grpc_custom_read_callback cb); + grpc_error* (*getpeername)(grpc_custom_socket* socket, + const grpc_sockaddr* addr, int* len); + grpc_error* (*getsockname)(grpc_custom_socket* socket, + const grpc_sockaddr* addr, int* len); + grpc_error* (*bind)(grpc_custom_socket* socket, const grpc_sockaddr* addr, + size_t len, int flags); + grpc_error* (*listen)(grpc_custom_socket* socket); + void (*accept)(grpc_custom_socket* socket, grpc_custom_socket* client, + grpc_custom_accept_callback cb); + +cdef extern from "src/core/lib/iomgr/timer_custom.h": + struct grpc_custom_timer: + void* timer + int timeout_ms + # We don't care about the rest of the fields + + struct grpc_custom_timer_vtable: + void (*start)(grpc_custom_timer* t); + void (*stop)(grpc_custom_timer* t); + + void grpc_custom_timer_callback(grpc_custom_timer* t, grpc_error* error); + +cdef extern from "src/core/lib/iomgr/pollset_custom.h": + struct grpc_custom_poller_vtable: + void (*init)() + void (*poll)(size_t timeout_ms) + void (*kick)() + void (*shutdown)() + +cdef extern from "src/core/lib/iomgr/iomgr_custom.h": + void grpc_custom_iomgr_init(grpc_socket_vtable* socket, + 
grpc_custom_resolver_vtable* resolver, + grpc_custom_timer_vtable* timer, + grpc_custom_poller_vtable* poller); + +cdef extern from "src/core/lib/iomgr/sockaddr_utils.h": + int grpc_sockaddr_get_port(const grpc_resolved_address *addr); cppstring grpc_sockaddr_to_string(const grpc_resolved_address *addr, bool_t normalize); - void grpc_string_to_sockaddr(grpc_resolved_address *out, char* addr, int port); - int grpc_sockaddr_set_port(const grpc_resolved_address *resolved_addr, - int port) - const char* grpc_sockaddr_get_uri_scheme(const grpc_resolved_address* resolved_addr) + void grpc_string_to_sockaddr(grpc_resolved_address *out, char* addr, int port); + int grpc_sockaddr_set_port(const grpc_resolved_address *resolved_addr, + int port) + const char* grpc_sockaddr_get_uri_scheme(const grpc_resolved_address* resolved_addr) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/iomgr.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/iomgr.pyx.pxi index 65d61cce38..3d6bb24f9a 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/iomgr.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/iomgr.pyx.pxi @@ -1,63 +1,63 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# distutils: language=c++ - -from libc cimport string -from libc.stdlib cimport malloc +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# distutils: language=c++ + +from libc cimport string +from libc.stdlib cimport malloc from libcpp.string cimport string as cppstring - -cdef grpc_error* grpc_error_none(): - return <grpc_error*>0 - -cdef grpc_error* socket_error(str syscall, str err): - error_str = "{} failed: {}".format(syscall, err) - error_bytes = str_to_bytes(error_str) - return grpc_socket_error(error_bytes) - -cdef resolved_addr_to_tuple(grpc_resolved_address* address): + +cdef grpc_error* grpc_error_none(): + return <grpc_error*>0 + +cdef grpc_error* socket_error(str syscall, str err): + error_str = "{} failed: {}".format(syscall, err) + error_bytes = str_to_bytes(error_str) + return grpc_socket_error(error_bytes) + +cdef resolved_addr_to_tuple(grpc_resolved_address* address): cdef cppstring res_str - port = grpc_sockaddr_get_port(address) + port = grpc_sockaddr_get_port(address) res_str = grpc_sockaddr_to_string(address, False) byte_str = _decode(res_str) - if byte_str.endswith(':' + str(port)): - byte_str = byte_str[:(0 - len(str(port)) - 1)] - byte_str = byte_str.lstrip('[') - byte_str = byte_str.rstrip(']') - byte_str = '{}'.format(byte_str) - return byte_str, port - -cdef sockaddr_to_tuple(const grpc_sockaddr* address, size_t length): - cdef grpc_resolved_address c_addr - string.memcpy(<void*>c_addr.addr, <void*> address, length) - c_addr.len = length - return resolved_addr_to_tuple(&c_addr) - -cdef sockaddr_is_ipv4(const grpc_sockaddr* address, size_t length): - cdef grpc_resolved_address c_addr - string.memcpy(<void*>c_addr.addr, <void*> address, length) - c_addr.len = length - return grpc_sockaddr_get_uri_scheme(&c_addr) == b'ipv4' - -cdef grpc_resolved_addresses* tuples_to_resolvaddr(tups): - cdef grpc_resolved_addresses* addresses - tups_set = set((tup[4][0], tup[4][1]) for tup in tups) - addresses = <grpc_resolved_addresses*> malloc(sizeof(grpc_resolved_addresses)) - addresses.naddrs = len(tups_set) - addresses.addrs = <grpc_resolved_address*> malloc(sizeof(grpc_resolved_address) * len(tups_set)) - i = 0 - for tup in set(tups_set): - hostname = str_to_bytes(tup[0]) - grpc_string_to_sockaddr(&addresses.addrs[i], hostname, tup[1]) - i += 1 - return addresses + if byte_str.endswith(':' + str(port)): + byte_str = byte_str[:(0 - len(str(port)) - 1)] + byte_str = byte_str.lstrip('[') + byte_str = byte_str.rstrip(']') + byte_str = '{}'.format(byte_str) + return byte_str, port + +cdef sockaddr_to_tuple(const grpc_sockaddr* address, size_t length): + cdef grpc_resolved_address c_addr + string.memcpy(<void*>c_addr.addr, <void*> address, length) + c_addr.len = length + return resolved_addr_to_tuple(&c_addr) + +cdef sockaddr_is_ipv4(const grpc_sockaddr* address, size_t length): + cdef grpc_resolved_address c_addr + string.memcpy(<void*>c_addr.addr, <void*> address, length) + c_addr.len = length + return grpc_sockaddr_get_uri_scheme(&c_addr) == b'ipv4' + +cdef grpc_resolved_addresses* tuples_to_resolvaddr(tups): + cdef grpc_resolved_addresses* addresses + tups_set = set((tup[4][0], tup[4][1]) for tup in tups) + addresses = <grpc_resolved_addresses*> malloc(sizeof(grpc_resolved_addresses)) + addresses.naddrs = len(tups_set) + addresses.addrs = <grpc_resolved_address*> malloc(sizeof(grpc_resolved_address) * len(tups_set)) + i = 0 + for tup in set(tups_set): + hostname = str_to_bytes(tup[0]) + grpc_string_to_sockaddr(&addresses.addrs[i], hostname, tup[1]) + i += 1 + return addresses diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pyx.pxi 
b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pyx.pxi index 9e1e6b3ba4..308d677695 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pyx.pxi @@ -108,12 +108,12 @@ class OperationType: receive_status_on_client = GRPC_OP_RECV_STATUS_ON_CLIENT receive_close_on_server = GRPC_OP_RECV_CLOSE_ON_SERVER -GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM= ( - _GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM) +GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM= ( + _GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM) + +GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY = ( + _GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY) -GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY = ( - _GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY) - class CompressionAlgorithm: none = GRPC_COMPRESS_NONE deflate = GRPC_COMPRESS_DEFLATE @@ -137,7 +137,7 @@ cdef class CallDetails: def __dealloc__(self): with nogil: grpc_call_details_destroy(&self.c_details) - grpc_shutdown_blocking() + grpc_shutdown_blocking() @property def method(self): diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pxd.pxi index 8c777169cb..b89ed99d97 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pxd.pxi @@ -15,7 +15,7 @@ cdef class Server: cdef grpc_server *c_server - + cdef bint is_started # start has been called cdef bint is_shutting_down # shutdown has been called cdef bint is_shutdown # notification of complete shutdown received diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pyx.pxi index 306b779601..eff95c4f29 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pyx.pxi @@ -22,10 +22,10 @@ cdef class Server: self.is_started = False self.is_shutting_down = False self.is_shutdown = False - self.c_server = NULL - cdef _ChannelArgs channel_args = _ChannelArgs(arguments) - self.c_server = grpc_server_create(channel_args.c_args(), NULL) - self.references.append(arguments) + self.c_server = NULL + cdef _ChannelArgs channel_args = _ChannelArgs(arguments) + self.c_server = grpc_server_create(channel_args.c_args(), NULL) + self.references.append(arguments) def request_call( self, CompletionQueue call_queue not None, @@ -53,25 +53,25 @@ cdef class Server: self.c_server, queue.c_completion_queue, NULL) self.registered_completion_queues.append(queue) - def start(self, backup_queue=True): - """Start the Cython gRPC Server. - - Args: - backup_queue: a bool indicates whether to spawn a backup completion - queue. In the case that no CQ is bound to the server, and the shutdown - of server becomes un-observable. - """ + def start(self, backup_queue=True): + """Start the Cython gRPC Server. + + Args: + backup_queue: a bool indicates whether to spawn a backup completion + queue. In the case that no CQ is bound to the server, and the shutdown + of server becomes un-observable. 
+ """ if self.is_started: raise ValueError("the server has already started") - if backup_queue: - self.backup_shutdown_queue = CompletionQueue(shutdown_cq=True) - self.register_completion_queue(self.backup_shutdown_queue) + if backup_queue: + self.backup_shutdown_queue = CompletionQueue(shutdown_cq=True) + self.register_completion_queue(self.backup_shutdown_queue) self.is_started = True with nogil: grpc_server_start(self.c_server) - if backup_queue: - # Ensure the core has gotten a chance to do the start-up work - self.backup_shutdown_queue.poll(deadline=time.time()) + if backup_queue: + # Ensure the core has gotten a chance to do the start-up work + self.backup_shutdown_queue.poll(deadline=time.time()) def add_http2_port(self, bytes address, ServerCredentials server_credentials=None): @@ -135,14 +135,14 @@ cdef class Server: elif self.is_shutdown: pass elif not self.is_shutting_down: - if self.backup_shutdown_queue is None: + if self.backup_shutdown_queue is None: raise InternalError('Server shutdown failed: no completion queue.') - else: - # the user didn't call shutdown - use our backup queue - self._c_shutdown(self.backup_shutdown_queue, None) - # and now we wait - while not self.is_shutdown: - self.backup_shutdown_queue.poll() + else: + # the user didn't call shutdown - use our backup queue + self._c_shutdown(self.backup_shutdown_queue, None) + # and now we wait + while not self.is_shutdown: + self.backup_shutdown_queue.poll() else: # We're in the process of shutting down, but have not shutdown; can't do # much but repeatedly release the GIL and wait @@ -154,4 +154,4 @@ cdef class Server: def __dealloc__(self): if self.c_server == NULL: - grpc_shutdown_blocking() + grpc_shutdown_blocking() diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/tag.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/tag.pyx.pxi index e3cc059e65..e80dc88767 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/tag.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/tag.pyx.pxi @@ -56,19 +56,19 @@ cdef class _BatchOperationTag: self._retained_call = call cdef void prepare(self) except *: - cdef Operation operation + cdef Operation operation self.c_nops = 0 if self._operations is None else len(self._operations) if 0 < self.c_nops: self.c_ops = <grpc_op *>gpr_malloc(sizeof(grpc_op) * self.c_nops) for index, operation in enumerate(self._operations): - operation.c() - self.c_ops[index] = operation.c_op + operation.c() + self.c_ops[index] = operation.c_op cdef BatchOperationEvent event(self, grpc_event c_event): - cdef Operation operation + cdef Operation operation if 0 < self.c_nops: - for operation in self._operations: - operation.un_c() + for operation in self._operations: + operation.un_c() gpr_free(self.c_ops) return BatchOperationEvent( c_event.type, c_event.success, self._user_tag, self._operations) @@ -85,4 +85,4 @@ cdef class _ServerShutdownTag(_Tag): cdef ServerShutdownEvent event(self, grpc_event c_event): self._shutting_down_server.notify_shutdown_complete() - return ServerShutdownEvent(c_event.type, c_event.success, self._user_tag) + return ServerShutdownEvent(c_event.type, c_event.success, self._user_tag) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/time.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/time.pxd.pxi index f81ac229c3..c46e8a98b0 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/time.pxd.pxi +++ 
b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/time.pxd.pxi @@ -13,7 +13,7 @@ # limitations under the License. -cdef gpr_timespec _timespec_from_time(object time) except * +cdef gpr_timespec _timespec_from_time(object time) except * cdef double _time_from_timespec(gpr_timespec timespec) except * diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/time.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/time.pyx.pxi index c8d1b6307e..6d181bb1d6 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/time.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/time.pyx.pxi @@ -13,17 +13,17 @@ # limitations under the License. -cdef gpr_timespec _timespec_from_time(object time) except *: +cdef gpr_timespec _timespec_from_time(object time) except *: if time is None: return gpr_inf_future(GPR_CLOCK_REALTIME) else: - return gpr_time_from_nanos( - <int64_t>(<double>time * GPR_NS_PER_SEC), - GPR_CLOCK_REALTIME, - ) + return gpr_time_from_nanos( + <int64_t>(<double>time * GPR_NS_PER_SEC), + GPR_CLOCK_REALTIME, + ) cdef double _time_from_timespec(gpr_timespec timespec) except *: cdef gpr_timespec real_timespec = gpr_convert_clock_type( timespec, GPR_CLOCK_REALTIME) - return gpr_timespec_to_micros(real_timespec) / GPR_US_PER_SEC + return gpr_timespec_to_micros(real_timespec) / GPR_US_PER_SEC diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/vtable.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/vtable.pxd.pxi index 8d1f75064e..c96e5cb669 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/vtable.pxd.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/vtable.pxd.pxi @@ -1,23 +1,23 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -cdef void* _copy_pointer(void* pointer) - -cdef void _destroy_pointer(void* pointer) - -cdef int _compare_pointer(void* first_pointer, void* second_pointer) - - -cdef grpc_arg_pointer_vtable default_vtable +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
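# _timespec_from_time and _time_from_timespec above convert between Python's float
# seconds and gpr_timespec by going through integer nanoseconds on the way in and
# microseconds on the way out (GPR_NS_PER_SEC and GPR_US_PER_SEC are 1_000_000_000
# and 1_000_000 in the C headers). A pure-Python sketch of the same arithmetic:
GPR_NS_PER_SEC = 1_000_000_000
GPR_US_PER_SEC = 1_000_000

def seconds_to_nanos(t):
    # Mirrors _timespec_from_time: scale float seconds to integer nanoseconds.
    return int(t * GPR_NS_PER_SEC)

def micros_to_seconds(us):
    # Mirrors _time_from_timespec: gpr_timespec_to_micros(...) / GPR_US_PER_SEC.
    return us / GPR_US_PER_SEC

# Round trip: 1.5 s -> 1_500_000_000 ns -> 1_500_000 us -> 1.5 s.
assert micros_to_seconds(seconds_to_nanos(1.5) / 1_000) == 1.5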
+ + +cdef void* _copy_pointer(void* pointer) + +cdef void _destroy_pointer(void* pointer) + +cdef int _compare_pointer(void* first_pointer, void* second_pointer) + + +cdef grpc_arg_pointer_vtable default_vtable diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/vtable.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/vtable.pyx.pxi index 71f927dcff..da4b81bd97 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/vtable.pyx.pxi +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/vtable.pyx.pxi @@ -1,36 +1,36 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# TODO(https://github.com/grpc/grpc/issues/15662): Reform this. -cdef void* _copy_pointer(void* pointer): - return pointer - - -# TODO(https://github.com/grpc/grpc/issues/15662): Reform this. -cdef void _destroy_pointer(void* pointer): - pass - - -cdef int _compare_pointer(void* first_pointer, void* second_pointer): - if first_pointer < second_pointer: - return -1 - elif first_pointer > second_pointer: - return 1 - else: - return 0 - -cdef grpc_arg_pointer_vtable default_vtable -default_vtable.copy = &_copy_pointer -default_vtable.destroy = &_destroy_pointer -default_vtable.cmp = &_compare_pointer +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# TODO(https://github.com/grpc/grpc/issues/15662): Reform this. +cdef void* _copy_pointer(void* pointer): + return pointer + + +# TODO(https://github.com/grpc/grpc/issues/15662): Reform this. 
+cdef void _destroy_pointer(void* pointer): + pass + + +cdef int _compare_pointer(void* first_pointer, void* second_pointer): + if first_pointer < second_pointer: + return -1 + elif first_pointer > second_pointer: + return 1 + else: + return 0 + +cdef grpc_arg_pointer_vtable default_vtable +default_vtable.copy = &_copy_pointer +default_vtable.destroy = &_destroy_pointer +default_vtable.cmp = &_compare_pointer diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pxd b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pxd index a20909b5ae..166be37022 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pxd +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pxd @@ -25,30 +25,30 @@ include "_cygrpc/completion_queue.pxd.pxi" include "_cygrpc/event.pxd.pxi" include "_cygrpc/metadata.pxd.pxi" include "_cygrpc/operation.pxd.pxi" -include "_cygrpc/propagation_bits.pxd.pxi" +include "_cygrpc/propagation_bits.pxd.pxi" include "_cygrpc/records.pxd.pxi" include "_cygrpc/security.pxd.pxi" include "_cygrpc/server.pxd.pxi" include "_cygrpc/tag.pxd.pxi" include "_cygrpc/time.pxd.pxi" -include "_cygrpc/vtable.pxd.pxi" +include "_cygrpc/vtable.pxd.pxi" include "_cygrpc/_hooks.pxd.pxi" -include "_cygrpc/iomgr.pxd.pxi" - +include "_cygrpc/iomgr.pxd.pxi" + include "_cygrpc/grpc_gevent.pxd.pxi" IF UNAME_SYSNAME != "Windows": include "_cygrpc/fork_posix.pxd.pxi" - -# Following pxi files are part of the Aio module -include "_cygrpc/aio/iomgr/socket.pxd.pxi" -include "_cygrpc/aio/iomgr/timer.pxd.pxi" -include "_cygrpc/aio/iomgr/resolver.pxd.pxi" + +# Following pxi files are part of the Aio module +include "_cygrpc/aio/iomgr/socket.pxd.pxi" +include "_cygrpc/aio/iomgr/timer.pxd.pxi" +include "_cygrpc/aio/iomgr/resolver.pxd.pxi" include "_cygrpc/aio/completion_queue.pxd.pxi" include "_cygrpc/aio/rpc_status.pxd.pxi" -include "_cygrpc/aio/grpc_aio.pxd.pxi" -include "_cygrpc/aio/callback_common.pxd.pxi" -include "_cygrpc/aio/call.pxd.pxi" -include "_cygrpc/aio/channel.pxd.pxi" -include "_cygrpc/aio/server.pxd.pxi" +include "_cygrpc/aio/grpc_aio.pxd.pxi" +include "_cygrpc/aio/callback_common.pxd.pxi" +include "_cygrpc/aio/call.pxd.pxi" +include "_cygrpc/aio/channel.pxd.pxi" +include "_cygrpc/aio/server.pxd.pxi" diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pyx b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pyx index 702a583461..8d355c6fbb 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pyx +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pyx @@ -23,13 +23,13 @@ import time import grpc -try: - import asyncio -except ImportError: - # TODO(https://github.com/grpc/grpc/issues/19728) Improve how Aio Cython is - # distributed without breaking none compatible Python versions. For now, if - # Asyncio package is not available we just skip it. - pass +try: + import asyncio +except ImportError: + # TODO(https://github.com/grpc/grpc/issues/19728) Improve how Aio Cython is + # distributed without breaking none compatible Python versions. For now, if + # Asyncio package is not available we just skip it. 
+ pass # The only copy of Python logger for the Cython extension _LOGGER = logging.getLogger(__name__) @@ -40,23 +40,23 @@ include "_cygrpc/grpc_string.pyx.pxi" include "_cygrpc/arguments.pyx.pxi" include "_cygrpc/call.pyx.pxi" include "_cygrpc/channel.pyx.pxi" -include "_cygrpc/channelz.pyx.pxi" +include "_cygrpc/channelz.pyx.pxi" include "_cygrpc/credentials.pyx.pxi" include "_cygrpc/completion_queue.pyx.pxi" include "_cygrpc/event.pyx.pxi" include "_cygrpc/metadata.pyx.pxi" include "_cygrpc/operation.pyx.pxi" -include "_cygrpc/propagation_bits.pyx.pxi" +include "_cygrpc/propagation_bits.pyx.pxi" include "_cygrpc/records.pyx.pxi" include "_cygrpc/security.pyx.pxi" include "_cygrpc/server.pyx.pxi" include "_cygrpc/tag.pyx.pxi" include "_cygrpc/time.pyx.pxi" -include "_cygrpc/vtable.pyx.pxi" +include "_cygrpc/vtable.pyx.pxi" include "_cygrpc/_hooks.pyx.pxi" -include "_cygrpc/iomgr.pyx.pxi" - +include "_cygrpc/iomgr.pyx.pxi" + include "_cygrpc/grpc_gevent.pyx.pxi" include "_cygrpc/thread.pyx.pxi" @@ -66,21 +66,21 @@ IF UNAME_SYSNAME == "Windows": ELSE: include "_cygrpc/fork_posix.pyx.pxi" -# Following pxi files are part of the Aio module -include "_cygrpc/aio/iomgr/iomgr.pyx.pxi" -include "_cygrpc/aio/iomgr/socket.pyx.pxi" -include "_cygrpc/aio/iomgr/timer.pyx.pxi" -include "_cygrpc/aio/iomgr/resolver.pyx.pxi" +# Following pxi files are part of the Aio module +include "_cygrpc/aio/iomgr/iomgr.pyx.pxi" +include "_cygrpc/aio/iomgr/socket.pyx.pxi" +include "_cygrpc/aio/iomgr/timer.pyx.pxi" +include "_cygrpc/aio/iomgr/resolver.pyx.pxi" include "_cygrpc/aio/common.pyx.pxi" include "_cygrpc/aio/rpc_status.pyx.pxi" include "_cygrpc/aio/completion_queue.pyx.pxi" include "_cygrpc/aio/callback_common.pyx.pxi" -include "_cygrpc/aio/grpc_aio.pyx.pxi" -include "_cygrpc/aio/call.pyx.pxi" -include "_cygrpc/aio/channel.pyx.pxi" -include "_cygrpc/aio/server.pyx.pxi" - - +include "_cygrpc/aio/grpc_aio.pyx.pxi" +include "_cygrpc/aio/call.pyx.pxi" +include "_cygrpc/aio/channel.pyx.pxi" +include "_cygrpc/aio/server.pyx.pxi" + + # # initialize gRPC # diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_interceptor.py b/contrib/libs/grpc/src/python/grpcio/grpc/_interceptor.py index 4219be1548..ee63cb3145 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_interceptor.py +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_interceptor.py @@ -44,9 +44,9 @@ def service_pipeline(interceptors): class _ClientCallDetails( - collections.namedtuple('_ClientCallDetails', - ('method', 'timeout', 'metadata', 'credentials', - 'wait_for_ready', 'compression')), + collections.namedtuple('_ClientCallDetails', + ('method', 'timeout', 'metadata', 'credentials', + 'wait_for_ready', 'compression')), grpc.ClientCallDetails): pass @@ -77,16 +77,16 @@ def _unwrap_client_call_details(call_details, default_details): except AttributeError: wait_for_ready = default_details.wait_for_ready - try: - compression = call_details.compression - except AttributeError: - compression = default_details.compression + try: + compression = call_details.compression + except AttributeError: + compression = default_details.compression + + return method, timeout, metadata, credentials, wait_for_ready, compression - return method, timeout, metadata, credentials, wait_for_ready, compression +class _FailureOutcome(grpc.RpcError, grpc.Future, grpc.Call): # pylint: disable=too-many-ancestors -class _FailureOutcome(grpc.RpcError, grpc.Future, grpc.Call): # pylint: disable=too-many-ancestors - def __init__(self, exception, traceback): super(_FailureOutcome, 
self).__init__() self._exception = exception @@ -131,7 +131,7 @@ class _FailureOutcome(grpc.RpcError, grpc.Future, grpc.Call): # pylint: disable def traceback(self, ignored_timeout=None): return self._traceback - def add_callback(self, unused_callback): + def add_callback(self, unused_callback): return False def add_done_callback(self, fn): @@ -211,8 +211,8 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): response, ignored_call = self._with_call(request, timeout=timeout, metadata=metadata, @@ -226,25 +226,25 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): - client_call_details = _ClientCallDetails(self._method, timeout, - metadata, credentials, - wait_for_ready, compression) + wait_for_ready=None, + compression=None): + client_call_details = _ClientCallDetails(self._method, timeout, + metadata, credentials, + wait_for_ready, compression) def continuation(new_details, request): - (new_method, new_timeout, new_metadata, new_credentials, - new_wait_for_ready, - new_compression) = (_unwrap_client_call_details( - new_details, client_call_details)) + (new_method, new_timeout, new_metadata, new_credentials, + new_wait_for_ready, + new_compression) = (_unwrap_client_call_details( + new_details, client_call_details)) try: response, call = self._thunk(new_method).with_call( request, timeout=new_timeout, metadata=new_metadata, credentials=new_credentials, - wait_for_ready=new_wait_for_ready, - compression=new_compression) + wait_for_ready=new_wait_for_ready, + compression=new_compression) return _UnaryOutcome(response, call) except grpc.RpcError as rpc_error: return rpc_error @@ -261,8 +261,8 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): return self._with_call(request, timeout=timeout, metadata=metadata, @@ -275,24 +275,24 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): - client_call_details = _ClientCallDetails(self._method, timeout, - metadata, credentials, - wait_for_ready, compression) + wait_for_ready=None, + compression=None): + client_call_details = _ClientCallDetails(self._method, timeout, + metadata, credentials, + wait_for_ready, compression) def continuation(new_details, request): - (new_method, new_timeout, new_metadata, new_credentials, - new_wait_for_ready, - new_compression) = (_unwrap_client_call_details( - new_details, client_call_details)) + (new_method, new_timeout, new_metadata, new_credentials, + new_wait_for_ready, + new_compression) = (_unwrap_client_call_details( + new_details, client_call_details)) return self._thunk(new_method).future( request, timeout=new_timeout, metadata=new_metadata, credentials=new_credentials, - wait_for_ready=new_wait_for_ready, - compression=new_compression) + wait_for_ready=new_wait_for_ready, + compression=new_compression) try: return self._interceptor.intercept_unary_unary( @@ -313,17 +313,17 @@ class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): - client_call_details = _ClientCallDetails(self._method, timeout, - metadata, credentials, 
- wait_for_ready, compression) + wait_for_ready=None, + compression=None): + client_call_details = _ClientCallDetails(self._method, timeout, + metadata, credentials, + wait_for_ready, compression) def continuation(new_details, request): - (new_method, new_timeout, new_metadata, new_credentials, - new_wait_for_ready, - new_compression) = (_unwrap_client_call_details( - new_details, client_call_details)) + (new_method, new_timeout, new_metadata, new_credentials, + new_wait_for_ready, + new_compression) = (_unwrap_client_call_details( + new_details, client_call_details)) return self._thunk(new_method)(request, timeout=new_timeout, metadata=new_metadata, @@ -350,8 +350,8 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): response, ignored_call = self._with_call(request_iterator, timeout=timeout, metadata=metadata, @@ -365,25 +365,25 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): - client_call_details = _ClientCallDetails(self._method, timeout, - metadata, credentials, - wait_for_ready, compression) + wait_for_ready=None, + compression=None): + client_call_details = _ClientCallDetails(self._method, timeout, + metadata, credentials, + wait_for_ready, compression) def continuation(new_details, request_iterator): - (new_method, new_timeout, new_metadata, new_credentials, - new_wait_for_ready, - new_compression) = (_unwrap_client_call_details( - new_details, client_call_details)) + (new_method, new_timeout, new_metadata, new_credentials, + new_wait_for_ready, + new_compression) = (_unwrap_client_call_details( + new_details, client_call_details)) try: response, call = self._thunk(new_method).with_call( request_iterator, timeout=new_timeout, metadata=new_metadata, credentials=new_credentials, - wait_for_ready=new_wait_for_ready, - compression=new_compression) + wait_for_ready=new_wait_for_ready, + compression=new_compression) return _UnaryOutcome(response, call) except grpc.RpcError as rpc_error: return rpc_error @@ -400,8 +400,8 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): + wait_for_ready=None, + compression=None): return self._with_call(request_iterator, timeout=timeout, metadata=metadata, @@ -414,24 +414,24 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): - client_call_details = _ClientCallDetails(self._method, timeout, - metadata, credentials, - wait_for_ready, compression) + wait_for_ready=None, + compression=None): + client_call_details = _ClientCallDetails(self._method, timeout, + metadata, credentials, + wait_for_ready, compression) def continuation(new_details, request_iterator): - (new_method, new_timeout, new_metadata, new_credentials, - new_wait_for_ready, - new_compression) = (_unwrap_client_call_details( - new_details, client_call_details)) + (new_method, new_timeout, new_metadata, new_credentials, + new_wait_for_ready, + new_compression) = (_unwrap_client_call_details( + new_details, client_call_details)) return self._thunk(new_method).future( request_iterator, timeout=new_timeout, metadata=new_metadata, credentials=new_credentials, - wait_for_ready=new_wait_for_ready, - compression=new_compression) + 
wait_for_ready=new_wait_for_ready, + compression=new_compression) try: return self._interceptor.intercept_stream_unary( @@ -452,17 +452,17 @@ class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable): timeout=None, metadata=None, credentials=None, - wait_for_ready=None, - compression=None): - client_call_details = _ClientCallDetails(self._method, timeout, - metadata, credentials, - wait_for_ready, compression) + wait_for_ready=None, + compression=None): + client_call_details = _ClientCallDetails(self._method, timeout, + metadata, credentials, + wait_for_ready, compression) def continuation(new_details, request_iterator): - (new_method, new_timeout, new_metadata, new_credentials, - new_wait_for_ready, - new_compression) = (_unwrap_client_call_details( - new_details, client_call_details)) + (new_method, new_timeout, new_metadata, new_credentials, + new_wait_for_ready, + new_compression) = (_unwrap_client_call_details( + new_details, client_call_details)) return self._thunk(new_method)(request_iterator, timeout=new_timeout, metadata=new_metadata, diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_server.py b/contrib/libs/grpc/src/python/grpcio/grpc/_server.py index d77bbf755d..48ff743995 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/_server.py +++ b/contrib/libs/grpc/src/python/grpcio/grpc/_server.py @@ -19,12 +19,12 @@ import logging import threading import time -from concurrent import futures +from concurrent import futures import six import grpc from grpc import _common -from grpc import _compression +from grpc import _compression from grpc import _interceptor from grpc._cython import cygrpc @@ -50,7 +50,7 @@ _CANCELLED = 'cancelled' _EMPTY_FLAGS = 0 _DEALLOCATED_SERVER_CHECK_PERIOD_S = 1.0 -_INF_TIMEOUT = 1e9 +_INF_TIMEOUT = 1e9 def _serialized_request(request_event): @@ -96,7 +96,7 @@ class _RPCState(object): self.request = None self.client = _OPEN self.initial_metadata_allowed = True - self.compression_algorithm = None + self.compression_algorithm = None self.disable_next_compression = False self.trailing_metadata = None self.code = None @@ -115,7 +115,7 @@ def _raise_rpc_error(state): def _possibly_finish_call(state, token): state.due.remove(token) - if not _is_rpc_state_active(state) and not state.due: + if not _is_rpc_state_active(state) and not state.due: callbacks = state.callbacks state.callbacks = None return state, callbacks @@ -132,33 +132,33 @@ def _send_status_from_server(state, token): return send_status_from_server -def _get_initial_metadata(state, metadata): - with state.condition: - if state.compression_algorithm: - compression_metadata = ( - _compression.compression_algorithm_to_metadata( - state.compression_algorithm),) - if metadata is None: - return compression_metadata - else: - return compression_metadata + tuple(metadata) - else: - return metadata - - -def _get_initial_metadata_operation(state, metadata): - operation = cygrpc.SendInitialMetadataOperation( - _get_initial_metadata(state, metadata), _EMPTY_FLAGS) - return operation - - +def _get_initial_metadata(state, metadata): + with state.condition: + if state.compression_algorithm: + compression_metadata = ( + _compression.compression_algorithm_to_metadata( + state.compression_algorithm),) + if metadata is None: + return compression_metadata + else: + return compression_metadata + tuple(metadata) + else: + return metadata + + +def _get_initial_metadata_operation(state, metadata): + operation = cygrpc.SendInitialMetadataOperation( + _get_initial_metadata(state, metadata), _EMPTY_FLAGS) + return 
operation + + def _abort(state, call, code, details): if state.client is not _CANCELLED: effective_code = _abortion_code(state, code) effective_details = details if state.details is None else state.details if state.initial_metadata_allowed: operations = ( - _get_initial_metadata_operation(state, None), + _get_initial_metadata_operation(state, None), cygrpc.SendStatusFromServerOperation(state.trailing_metadata, effective_code, effective_details, @@ -243,7 +243,7 @@ class _Context(grpc.ServicerContext): def is_active(self): with self._state.condition: - return _is_rpc_state_active(self._state) + return _is_rpc_state_active(self._state) def time_remaining(self): return max(self._rpc_event.call_details.deadline - time.time(), 0) @@ -282,18 +282,18 @@ class _Context(grpc.ServicerContext): cygrpc.auth_context(self._rpc_event.call)) } - def set_compression(self, compression): - with self._state.condition: - self._state.compression_algorithm = compression - + def set_compression(self, compression): + with self._state.condition: + self._state.compression_algorithm = compression + def send_initial_metadata(self, initial_metadata): with self._state.condition: if self._state.client is _CANCELLED: _raise_rpc_error(self._state) else: if self._state.initial_metadata_allowed: - operation = _get_initial_metadata_operation( - self._state, initial_metadata) + operation = _get_initial_metadata_operation( + self._state, initial_metadata) self._rpc_event.call.start_server_batch( (operation,), _send_initial_metadata(self._state)) self._state.initial_metadata_allowed = False @@ -330,10 +330,10 @@ class _Context(grpc.ServicerContext): with self._state.condition: self._state.details = _common.encode(details) - def _finalize_state(self): - pass + def _finalize_state(self): + pass + - class _RequestIterator(object): def __init__(self, state, call, request_deserializer): @@ -344,7 +344,7 @@ class _RequestIterator(object): def _raise_or_start_receive_message(self): if self._state.client is _CANCELLED: _raise_rpc_error(self._state) - elif not _is_rpc_state_active(self._state): + elif not _is_rpc_state_active(self._state): raise StopIteration() else: self._call.start_server_batch( @@ -389,7 +389,7 @@ def _unary_request(rpc_event, state, request_deserializer): def unary_request(): with state.condition: - if not _is_rpc_state_active(state): + if not _is_rpc_state_active(state): return None else: rpc_event.call.start_server_batch( @@ -417,35 +417,35 @@ def _unary_request(rpc_event, state, request_deserializer): return unary_request -def _call_behavior(rpc_event, - state, - behavior, - argument, - request_deserializer, - send_response_callback=None): - from grpc import _create_servicer_context - with _create_servicer_context(rpc_event, state, - request_deserializer) as context: - try: - response_or_iterator = None - if send_response_callback is not None: - response_or_iterator = behavior(argument, context, - send_response_callback) - else: - response_or_iterator = behavior(argument, context) - return response_or_iterator, True - except Exception as exception: # pylint: disable=broad-except - with state.condition: - if state.aborted: - _abort(state, rpc_event.call, cygrpc.StatusCode.unknown, - b'RPC Aborted') - elif exception not in state.rpc_errors: - details = 'Exception calling application: {}'.format( - exception) - _LOGGER.exception(details) - _abort(state, rpc_event.call, cygrpc.StatusCode.unknown, - _common.encode(details)) - return None, False +def _call_behavior(rpc_event, + state, + behavior, + argument, + 
request_deserializer, + send_response_callback=None): + from grpc import _create_servicer_context + with _create_servicer_context(rpc_event, state, + request_deserializer) as context: + try: + response_or_iterator = None + if send_response_callback is not None: + response_or_iterator = behavior(argument, context, + send_response_callback) + else: + response_or_iterator = behavior(argument, context) + return response_or_iterator, True + except Exception as exception: # pylint: disable=broad-except + with state.condition: + if state.aborted: + _abort(state, rpc_event.call, cygrpc.StatusCode.unknown, + b'RPC Aborted') + elif exception not in state.rpc_errors: + details = 'Exception calling application: {}'.format( + exception) + _LOGGER.exception(details) + _abort(state, rpc_event.call, cygrpc.StatusCode.unknown, + _common.encode(details)) + return None, False def _take_response_from_response_iterator(rpc_event, state, response_iterator): @@ -477,45 +477,45 @@ def _serialize_response(rpc_event, state, response, response_serializer): return serialized_response -def _get_send_message_op_flags_from_state(state): - if state.disable_next_compression: - return cygrpc.WriteFlag.no_compress - else: - return _EMPTY_FLAGS - - -def _reset_per_message_state(state): - with state.condition: - state.disable_next_compression = False - - +def _get_send_message_op_flags_from_state(state): + if state.disable_next_compression: + return cygrpc.WriteFlag.no_compress + else: + return _EMPTY_FLAGS + + +def _reset_per_message_state(state): + with state.condition: + state.disable_next_compression = False + + def _send_response(rpc_event, state, serialized_response): with state.condition: - if not _is_rpc_state_active(state): + if not _is_rpc_state_active(state): return False else: if state.initial_metadata_allowed: operations = ( - _get_initial_metadata_operation(state, None), - cygrpc.SendMessageOperation( - serialized_response, - _get_send_message_op_flags_from_state(state)), + _get_initial_metadata_operation(state, None), + cygrpc.SendMessageOperation( + serialized_response, + _get_send_message_op_flags_from_state(state)), ) state.initial_metadata_allowed = False token = _SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN else: operations = (cygrpc.SendMessageOperation( - serialized_response, - _get_send_message_op_flags_from_state(state)),) + serialized_response, + _get_send_message_op_flags_from_state(state)),) token = _SEND_MESSAGE_TOKEN rpc_event.call.start_server_batch(operations, _send_message(state, token)) state.due.add(token) - _reset_per_message_state(state) + _reset_per_message_state(state) while True: state.condition.wait() if token not in state.due: - return _is_rpc_state_active(state) + return _is_rpc_state_active(state) def _status(rpc_event, state, serialized_response): @@ -529,23 +529,23 @@ def _status(rpc_event, state, serialized_response): _EMPTY_FLAGS), ] if state.initial_metadata_allowed: - operations.append(_get_initial_metadata_operation(state, None)) + operations.append(_get_initial_metadata_operation(state, None)) if serialized_response is not None: operations.append( - cygrpc.SendMessageOperation( - serialized_response, - _get_send_message_op_flags_from_state(state))) + cygrpc.SendMessageOperation( + serialized_response, + _get_send_message_op_flags_from_state(state))) rpc_event.call.start_server_batch( operations, _send_status_from_server(state, _SEND_STATUS_FROM_SERVER_TOKEN)) state.statused = True - _reset_per_message_state(state) + _reset_per_message_state(state) 
state.due.add(_SEND_STATUS_FROM_SERVER_TOKEN) def _unary_response_in_pool(rpc_event, state, behavior, argument_thunk, request_deserializer, response_serializer): - cygrpc.install_context_from_request_call_event(rpc_event) + cygrpc.install_context_from_request_call_event(rpc_event) try: argument = argument_thunk() if argument is not None: @@ -562,92 +562,92 @@ def _unary_response_in_pool(rpc_event, state, behavior, argument_thunk, def _stream_response_in_pool(rpc_event, state, behavior, argument_thunk, request_deserializer, response_serializer): - cygrpc.install_context_from_request_call_event(rpc_event) - - def send_response(response): - if response is None: - _status(rpc_event, state, None) - else: + cygrpc.install_context_from_request_call_event(rpc_event) + + def send_response(response): + if response is None: + _status(rpc_event, state, None) + else: serialized_response = _serialize_response(rpc_event, state, response, response_serializer) - if serialized_response is not None: - _send_response(rpc_event, state, serialized_response) - + if serialized_response is not None: + _send_response(rpc_event, state, serialized_response) + try: argument = argument_thunk() if argument is not None: - if hasattr(behavior, 'experimental_non_blocking' - ) and behavior.experimental_non_blocking: + if hasattr(behavior, 'experimental_non_blocking' + ) and behavior.experimental_non_blocking: _call_behavior(rpc_event, state, behavior, argument, request_deserializer, send_response_callback=send_response) - else: - response_iterator, proceed = _call_behavior( - rpc_event, state, behavior, argument, request_deserializer) - if proceed: - _send_message_callback_to_blocking_iterator_adapter( - rpc_event, state, send_response, response_iterator) + else: + response_iterator, proceed = _call_behavior( + rpc_event, state, behavior, argument, request_deserializer) + if proceed: + _send_message_callback_to_blocking_iterator_adapter( + rpc_event, state, send_response, response_iterator) finally: cygrpc.uninstall_context() -def _is_rpc_state_active(state): - return state.client is not _CANCELLED and not state.statused - - +def _is_rpc_state_active(state): + return state.client is not _CANCELLED and not state.statused + + def _send_message_callback_to_blocking_iterator_adapter(rpc_event, state, send_response_callback, response_iterator): - while True: - response, proceed = _take_response_from_response_iterator( - rpc_event, state, response_iterator) - if proceed: - send_response_callback(response) - if not _is_rpc_state_active(state): - break - else: - break - - -def _select_thread_pool_for_behavior(behavior, default_thread_pool): - if hasattr(behavior, 'experimental_thread_pool') and isinstance( - behavior.experimental_thread_pool, futures.ThreadPoolExecutor): - return behavior.experimental_thread_pool - else: - return default_thread_pool - - -def _handle_unary_unary(rpc_event, state, method_handler, default_thread_pool): + while True: + response, proceed = _take_response_from_response_iterator( + rpc_event, state, response_iterator) + if proceed: + send_response_callback(response) + if not _is_rpc_state_active(state): + break + else: + break + + +def _select_thread_pool_for_behavior(behavior, default_thread_pool): + if hasattr(behavior, 'experimental_thread_pool') and isinstance( + behavior.experimental_thread_pool, futures.ThreadPoolExecutor): + return behavior.experimental_thread_pool + else: + return default_thread_pool + + +def _handle_unary_unary(rpc_event, state, method_handler, default_thread_pool): unary_request 
= _unary_request(rpc_event, state, method_handler.request_deserializer) - thread_pool = _select_thread_pool_for_behavior(method_handler.unary_unary, - default_thread_pool) + thread_pool = _select_thread_pool_for_behavior(method_handler.unary_unary, + default_thread_pool) return thread_pool.submit(_unary_response_in_pool, rpc_event, state, method_handler.unary_unary, unary_request, method_handler.request_deserializer, method_handler.response_serializer) -def _handle_unary_stream(rpc_event, state, method_handler, default_thread_pool): +def _handle_unary_stream(rpc_event, state, method_handler, default_thread_pool): unary_request = _unary_request(rpc_event, state, method_handler.request_deserializer) - thread_pool = _select_thread_pool_for_behavior(method_handler.unary_stream, - default_thread_pool) + thread_pool = _select_thread_pool_for_behavior(method_handler.unary_stream, + default_thread_pool) return thread_pool.submit(_stream_response_in_pool, rpc_event, state, method_handler.unary_stream, unary_request, method_handler.request_deserializer, method_handler.response_serializer) -def _handle_stream_unary(rpc_event, state, method_handler, default_thread_pool): +def _handle_stream_unary(rpc_event, state, method_handler, default_thread_pool): request_iterator = _RequestIterator(state, rpc_event.call, method_handler.request_deserializer) - thread_pool = _select_thread_pool_for_behavior(method_handler.stream_unary, - default_thread_pool) + thread_pool = _select_thread_pool_for_behavior(method_handler.stream_unary, + default_thread_pool) return thread_pool.submit(_unary_response_in_pool, rpc_event, state, method_handler.stream_unary, lambda: request_iterator, @@ -655,12 +655,12 @@ def _handle_stream_unary(rpc_event, state, method_handler, default_thread_pool): method_handler.response_serializer) -def _handle_stream_stream(rpc_event, state, method_handler, - default_thread_pool): +def _handle_stream_stream(rpc_event, state, method_handler, + default_thread_pool): request_iterator = _RequestIterator(state, rpc_event.call, method_handler.request_deserializer) - thread_pool = _select_thread_pool_for_behavior(method_handler.stream_stream, - default_thread_pool) + thread_pool = _select_thread_pool_for_behavior(method_handler.stream_stream, + default_thread_pool) return thread_pool.submit(_stream_response_in_pool, rpc_event, state, method_handler.stream_stream, lambda: request_iterator, @@ -689,9 +689,9 @@ def _find_method_handler(rpc_event, generic_handlers, interceptor_pipeline): def _reject_rpc(rpc_event, status, details): - rpc_state = _RPCState() + rpc_state = _RPCState() operations = ( - _get_initial_metadata_operation(rpc_state, None), + _get_initial_metadata_operation(rpc_state, None), cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), cygrpc.SendStatusFromServerOperation(None, status, details, _EMPTY_FLAGS), @@ -771,8 +771,8 @@ class _ServerState(object): self.interceptor_pipeline = interceptor_pipeline self.thread_pool = thread_pool self.stage = _ServerStage.STOPPED - self.termination_event = threading.Event() - self.shutdown_events = [self.termination_event] + self.termination_event = threading.Event() + self.shutdown_events = [self.termination_event] self.maximum_concurrent_rpcs = maximum_concurrent_rpcs self.active_rpc_count = 0 @@ -936,18 +936,18 @@ def _validate_generic_rpc_handlers(generic_rpc_handlers): 'not have "service" method!'.format(generic_rpc_handler)) -def _augment_options(base_options, compression): - compression_option = _compression.create_channel_option(compression) - 
return tuple(base_options) + compression_option - - +def _augment_options(base_options, compression): + compression_option = _compression.create_channel_option(compression) + return tuple(base_options) + compression_option + + class _Server(grpc.Server): # pylint: disable=too-many-arguments def __init__(self, thread_pool, generic_handlers, interceptors, options, - maximum_concurrent_rpcs, compression): + maximum_concurrent_rpcs, compression): completion_queue = cygrpc.CompletionQueue() - server = cygrpc.Server(_augment_options(options, compression)) + server = cygrpc.Server(_augment_options(options, compression)) server.register_completion_queue(completion_queue) self._state = _ServerState(completion_queue, server, generic_handlers, _interceptor.service_pipeline(interceptors), @@ -970,14 +970,14 @@ class _Server(grpc.Server): def start(self): _start(self._state) - def wait_for_termination(self, timeout=None): - # NOTE(https://bugs.python.org/issue35935) - # Remove this workaround once threading.Event.wait() is working with - # CTRL+C across platforms. + def wait_for_termination(self, timeout=None): + # NOTE(https://bugs.python.org/issue35935) + # Remove this workaround once threading.Event.wait() is working with + # CTRL+C across platforms. return _common.wait(self._state.termination_event.wait, self._state.termination_event.is_set, timeout=timeout) - + def stop(self, grace): return _stop(self._state, grace) @@ -989,7 +989,7 @@ class _Server(grpc.Server): def create_server(thread_pool, generic_rpc_handlers, interceptors, options, - maximum_concurrent_rpcs, compression): + maximum_concurrent_rpcs, compression): _validate_generic_rpc_handlers(generic_rpc_handlers) return _Server(thread_pool, generic_rpc_handlers, interceptors, options, - maximum_concurrent_rpcs, compression) + maximum_concurrent_rpcs, compression) diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/experimental/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/experimental/__init__.py index 7814f49a1a..a4e2660fb4 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/experimental/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio/grpc/experimental/__init__.py @@ -15,30 +15,30 @@ These APIs are subject to be removed during any minor version release. """ - + import copy import functools import sys import warnings - + import grpc _EXPERIMENTAL_APIS_USED = set() -class ChannelOptions(object): - """Indicates a channel option unique to gRPC Python. - - This enumeration is part of an EXPERIMENTAL API. - - Attributes: - SingleThreadedUnaryStream: Perform unary-stream RPCs on a single thread. - """ - SingleThreadedUnaryStream = "SingleThreadedUnaryStream" - - -class UsageError(Exception): - """Raised by the gRPC library to indicate usage not allowed by the API.""" +class ChannelOptions(object): + """Indicates a channel option unique to gRPC Python. + + This enumeration is part of an EXPERIMENTAL API. + + Attributes: + SingleThreadedUnaryStream: Perform unary-stream RPCs on a single thread. 
+ """ + SingleThreadedUnaryStream = "SingleThreadedUnaryStream" + + +class UsageError(Exception): + """Raised by the gRPC library to indicate usage not allowed by the API.""" _insecure_channel_credentials_sentinel = object() diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/experimental/aio/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/experimental/aio/__init__.py index bdfcd76b2f..576cb8dcde 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc/experimental/aio/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio/grpc/experimental/aio/__init__.py @@ -1,16 +1,16 @@ # Copyright 2020 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. """Alias of grpc.aio to keep backward compatibility.""" - + from grpc.aio import * diff --git a/contrib/libs/grpc/src/python/grpcio/grpc_core_dependencies.py b/contrib/libs/grpc/src/python/grpcio/grpc_core_dependencies.py index 424a6751d2..67f985e1f4 100644 --- a/contrib/libs/grpc/src/python/grpcio/grpc_core_dependencies.py +++ b/contrib/libs/grpc/src/python/grpcio/grpc_core_dependencies.py @@ -15,112 +15,112 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_core_dependencies.py.template`!!! 
CORE_SOURCE_FILES = [ - 'src/core/ext/filters/census/grpc_context.cc', - 'src/core/ext/filters/client_channel/backend_metric.cc', - 'src/core/ext/filters/client_channel/backup_poller.cc', - 'src/core/ext/filters/client_channel/channel_connectivity.cc', - 'src/core/ext/filters/client_channel/client_channel.cc', - 'src/core/ext/filters/client_channel/client_channel_channelz.cc', - 'src/core/ext/filters/client_channel/client_channel_factory.cc', - 'src/core/ext/filters/client_channel/client_channel_plugin.cc', + 'src/core/ext/filters/census/grpc_context.cc', + 'src/core/ext/filters/client_channel/backend_metric.cc', + 'src/core/ext/filters/client_channel/backup_poller.cc', + 'src/core/ext/filters/client_channel/channel_connectivity.cc', + 'src/core/ext/filters/client_channel/client_channel.cc', + 'src/core/ext/filters/client_channel/client_channel_channelz.cc', + 'src/core/ext/filters/client_channel/client_channel_factory.cc', + 'src/core/ext/filters/client_channel/client_channel_plugin.cc', 'src/core/ext/filters/client_channel/config_selector.cc', - 'src/core/ext/filters/client_channel/global_subchannel_pool.cc', - 'src/core/ext/filters/client_channel/health/health_check_client.cc', - 'src/core/ext/filters/client_channel/http_connect_handshaker.cc', - 'src/core/ext/filters/client_channel/http_proxy.cc', - 'src/core/ext/filters/client_channel/lb_policy.cc', + 'src/core/ext/filters/client_channel/global_subchannel_pool.cc', + 'src/core/ext/filters/client_channel/health/health_check_client.cc', + 'src/core/ext/filters/client_channel/http_connect_handshaker.cc', + 'src/core/ext/filters/client_channel/http_proxy.cc', + 'src/core/ext/filters/client_channel/lb_policy.cc', 'src/core/ext/filters/client_channel/lb_policy/address_filtering.cc', 'src/core/ext/filters/client_channel/lb_policy/child_policy_handler.cc', - 'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc', - 'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc', + 'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc', + 'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc', 'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_balancer_addresses.cc', - 'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel_secure.cc', - 'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc', - 'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc', - 'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc', + 'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel_secure.cc', + 'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc', + 'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc', + 'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc', 'src/core/ext/filters/client_channel/lb_policy/priority/priority.cc', - 'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc', + 'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc', 'src/core/ext/filters/client_channel/lb_policy/weighted_target/weighted_target.cc', - 'src/core/ext/filters/client_channel/lb_policy/xds/cds.cc', + 'src/core/ext/filters/client_channel/lb_policy/xds/cds.cc', 'src/core/ext/filters/client_channel/lb_policy/xds/eds.cc', 'src/core/ext/filters/client_channel/lb_policy/xds/eds_drop.cc', 'src/core/ext/filters/client_channel/lb_policy/xds/xds_cluster_manager.cc', - 
'src/core/ext/filters/client_channel/lb_policy_registry.cc', - 'src/core/ext/filters/client_channel/local_subchannel_pool.cc', - 'src/core/ext/filters/client_channel/proxy_mapper_registry.cc', - 'src/core/ext/filters/client_channel/resolver.cc', - 'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc', - 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.cc', - 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc', - 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc', - 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc', - 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc', - 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_fallback.cc', - 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc', - 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc', - 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc', - 'src/core/ext/filters/client_channel/resolver/dns/dns_resolver_selection.cc', - 'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc', - 'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc', - 'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc', - 'src/core/ext/filters/client_channel/resolver/xds/xds_resolver.cc', - 'src/core/ext/filters/client_channel/resolver_registry.cc', - 'src/core/ext/filters/client_channel/resolver_result_parsing.cc', - 'src/core/ext/filters/client_channel/resolving_lb_policy.cc', - 'src/core/ext/filters/client_channel/retry_throttle.cc', - 'src/core/ext/filters/client_channel/server_address.cc', - 'src/core/ext/filters/client_channel/service_config.cc', + 'src/core/ext/filters/client_channel/lb_policy_registry.cc', + 'src/core/ext/filters/client_channel/local_subchannel_pool.cc', + 'src/core/ext/filters/client_channel/proxy_mapper_registry.cc', + 'src/core/ext/filters/client_channel/resolver.cc', + 'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc', + 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.cc', + 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc', + 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc', + 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc', + 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc', + 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_fallback.cc', + 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc', + 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc', + 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc', + 'src/core/ext/filters/client_channel/resolver/dns/dns_resolver_selection.cc', + 'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc', + 'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc', + 'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc', + 'src/core/ext/filters/client_channel/resolver/xds/xds_resolver.cc', + 'src/core/ext/filters/client_channel/resolver_registry.cc', + 'src/core/ext/filters/client_channel/resolver_result_parsing.cc', + 
'src/core/ext/filters/client_channel/resolving_lb_policy.cc', + 'src/core/ext/filters/client_channel/retry_throttle.cc', + 'src/core/ext/filters/client_channel/server_address.cc', + 'src/core/ext/filters/client_channel/service_config.cc', 'src/core/ext/filters/client_channel/service_config_channel_arg_filter.cc', 'src/core/ext/filters/client_channel/service_config_parser.cc', - 'src/core/ext/filters/client_channel/subchannel.cc', - 'src/core/ext/filters/client_channel/subchannel_pool_interface.cc', - 'src/core/ext/filters/client_idle/client_idle_filter.cc', - 'src/core/ext/filters/deadline/deadline_filter.cc', - 'src/core/ext/filters/http/client/http_client_filter.cc', - 'src/core/ext/filters/http/client_authority_filter.cc', - 'src/core/ext/filters/http/http_filters_plugin.cc', - 'src/core/ext/filters/http/message_compress/message_compress_filter.cc', + 'src/core/ext/filters/client_channel/subchannel.cc', + 'src/core/ext/filters/client_channel/subchannel_pool_interface.cc', + 'src/core/ext/filters/client_idle/client_idle_filter.cc', + 'src/core/ext/filters/deadline/deadline_filter.cc', + 'src/core/ext/filters/http/client/http_client_filter.cc', + 'src/core/ext/filters/http/client_authority_filter.cc', + 'src/core/ext/filters/http/http_filters_plugin.cc', + 'src/core/ext/filters/http/message_compress/message_compress_filter.cc', 'src/core/ext/filters/http/message_compress/message_decompress_filter.cc', - 'src/core/ext/filters/http/server/http_server_filter.cc', - 'src/core/ext/filters/max_age/max_age_filter.cc', - 'src/core/ext/filters/message_size/message_size_filter.cc', - 'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc', - 'src/core/ext/filters/workarounds/workaround_utils.cc', - 'src/core/ext/transport/chttp2/alpn/alpn.cc', - 'src/core/ext/transport/chttp2/client/authority.cc', - 'src/core/ext/transport/chttp2/client/chttp2_connector.cc', - 'src/core/ext/transport/chttp2/client/insecure/channel_create.cc', - 'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc', - 'src/core/ext/transport/chttp2/client/secure/secure_channel_create.cc', - 'src/core/ext/transport/chttp2/server/chttp2_server.cc', - 'src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc', - 'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc', - 'src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.cc', - 'src/core/ext/transport/chttp2/transport/bin_decoder.cc', - 'src/core/ext/transport/chttp2/transport/bin_encoder.cc', - 'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc', - 'src/core/ext/transport/chttp2/transport/chttp2_transport.cc', - 'src/core/ext/transport/chttp2/transport/context_list.cc', - 'src/core/ext/transport/chttp2/transport/flow_control.cc', - 'src/core/ext/transport/chttp2/transport/frame_data.cc', - 'src/core/ext/transport/chttp2/transport/frame_goaway.cc', - 'src/core/ext/transport/chttp2/transport/frame_ping.cc', - 'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc', - 'src/core/ext/transport/chttp2/transport/frame_settings.cc', - 'src/core/ext/transport/chttp2/transport/frame_window_update.cc', - 'src/core/ext/transport/chttp2/transport/hpack_encoder.cc', - 'src/core/ext/transport/chttp2/transport/hpack_parser.cc', - 'src/core/ext/transport/chttp2/transport/hpack_table.cc', - 'src/core/ext/transport/chttp2/transport/http2_settings.cc', - 'src/core/ext/transport/chttp2/transport/huffsyms.cc', - 'src/core/ext/transport/chttp2/transport/incoming_metadata.cc', - 
'src/core/ext/transport/chttp2/transport/parsing.cc', - 'src/core/ext/transport/chttp2/transport/stream_lists.cc', - 'src/core/ext/transport/chttp2/transport/stream_map.cc', - 'src/core/ext/transport/chttp2/transport/varint.cc', - 'src/core/ext/transport/chttp2/transport/writing.cc', - 'src/core/ext/transport/inproc/inproc_plugin.cc', - 'src/core/ext/transport/inproc/inproc_transport.cc', + 'src/core/ext/filters/http/server/http_server_filter.cc', + 'src/core/ext/filters/max_age/max_age_filter.cc', + 'src/core/ext/filters/message_size/message_size_filter.cc', + 'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc', + 'src/core/ext/filters/workarounds/workaround_utils.cc', + 'src/core/ext/transport/chttp2/alpn/alpn.cc', + 'src/core/ext/transport/chttp2/client/authority.cc', + 'src/core/ext/transport/chttp2/client/chttp2_connector.cc', + 'src/core/ext/transport/chttp2/client/insecure/channel_create.cc', + 'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc', + 'src/core/ext/transport/chttp2/client/secure/secure_channel_create.cc', + 'src/core/ext/transport/chttp2/server/chttp2_server.cc', + 'src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc', + 'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc', + 'src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.cc', + 'src/core/ext/transport/chttp2/transport/bin_decoder.cc', + 'src/core/ext/transport/chttp2/transport/bin_encoder.cc', + 'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc', + 'src/core/ext/transport/chttp2/transport/chttp2_transport.cc', + 'src/core/ext/transport/chttp2/transport/context_list.cc', + 'src/core/ext/transport/chttp2/transport/flow_control.cc', + 'src/core/ext/transport/chttp2/transport/frame_data.cc', + 'src/core/ext/transport/chttp2/transport/frame_goaway.cc', + 'src/core/ext/transport/chttp2/transport/frame_ping.cc', + 'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc', + 'src/core/ext/transport/chttp2/transport/frame_settings.cc', + 'src/core/ext/transport/chttp2/transport/frame_window_update.cc', + 'src/core/ext/transport/chttp2/transport/hpack_encoder.cc', + 'src/core/ext/transport/chttp2/transport/hpack_parser.cc', + 'src/core/ext/transport/chttp2/transport/hpack_table.cc', + 'src/core/ext/transport/chttp2/transport/http2_settings.cc', + 'src/core/ext/transport/chttp2/transport/huffsyms.cc', + 'src/core/ext/transport/chttp2/transport/incoming_metadata.cc', + 'src/core/ext/transport/chttp2/transport/parsing.cc', + 'src/core/ext/transport/chttp2/transport/stream_lists.cc', + 'src/core/ext/transport/chttp2/transport/stream_map.cc', + 'src/core/ext/transport/chttp2/transport/varint.cc', + 'src/core/ext/transport/chttp2/transport/writing.cc', + 'src/core/ext/transport/inproc/inproc_plugin.cc', + 'src/core/ext/transport/inproc/inproc_transport.cc', 'src/core/ext/upb-generated/envoy/annotations/deprecation.upb.c', 'src/core/ext/upb-generated/envoy/annotations/resource.upb.c', 'src/core/ext/upb-generated/envoy/config/accesslog/v3/accesslog.upb.c', @@ -178,23 +178,23 @@ CORE_SOURCE_FILES = [ 'src/core/ext/upb-generated/envoy/type/v3/percent.upb.c', 'src/core/ext/upb-generated/envoy/type/v3/range.upb.c', 'src/core/ext/upb-generated/envoy/type/v3/semantic_version.upb.c', - 'src/core/ext/upb-generated/google/api/annotations.upb.c', + 'src/core/ext/upb-generated/google/api/annotations.upb.c', 'src/core/ext/upb-generated/google/api/expr/v1alpha1/checked.upb.c', 'src/core/ext/upb-generated/google/api/expr/v1alpha1/syntax.upb.c', - 
'src/core/ext/upb-generated/google/api/http.upb.c', - 'src/core/ext/upb-generated/google/protobuf/any.upb.c', - 'src/core/ext/upb-generated/google/protobuf/descriptor.upb.c', - 'src/core/ext/upb-generated/google/protobuf/duration.upb.c', - 'src/core/ext/upb-generated/google/protobuf/empty.upb.c', - 'src/core/ext/upb-generated/google/protobuf/struct.upb.c', - 'src/core/ext/upb-generated/google/protobuf/timestamp.upb.c', - 'src/core/ext/upb-generated/google/protobuf/wrappers.upb.c', - 'src/core/ext/upb-generated/google/rpc/status.upb.c', - 'src/core/ext/upb-generated/src/proto/grpc/gcp/altscontext.upb.c', - 'src/core/ext/upb-generated/src/proto/grpc/gcp/handshaker.upb.c', - 'src/core/ext/upb-generated/src/proto/grpc/gcp/transport_security_common.upb.c', - 'src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c', - 'src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c', + 'src/core/ext/upb-generated/google/api/http.upb.c', + 'src/core/ext/upb-generated/google/protobuf/any.upb.c', + 'src/core/ext/upb-generated/google/protobuf/descriptor.upb.c', + 'src/core/ext/upb-generated/google/protobuf/duration.upb.c', + 'src/core/ext/upb-generated/google/protobuf/empty.upb.c', + 'src/core/ext/upb-generated/google/protobuf/struct.upb.c', + 'src/core/ext/upb-generated/google/protobuf/timestamp.upb.c', + 'src/core/ext/upb-generated/google/protobuf/wrappers.upb.c', + 'src/core/ext/upb-generated/google/rpc/status.upb.c', + 'src/core/ext/upb-generated/src/proto/grpc/gcp/altscontext.upb.c', + 'src/core/ext/upb-generated/src/proto/grpc/gcp/handshaker.upb.c', + 'src/core/ext/upb-generated/src/proto/grpc/gcp/transport_security_common.upb.c', + 'src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c', + 'src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c', 'src/core/ext/upb-generated/udpa/annotations/migrate.upb.c', 'src/core/ext/upb-generated/udpa/annotations/security.upb.c', 'src/core/ext/upb-generated/udpa/annotations/sensitive.upb.c', @@ -206,36 +206,36 @@ CORE_SOURCE_FILES = [ 'src/core/ext/upb-generated/udpa/core/v1/resource.upb.c', 'src/core/ext/upb-generated/udpa/core/v1/resource_locator.upb.c', 'src/core/ext/upb-generated/udpa/core/v1/resource_name.upb.c', - 'src/core/ext/upb-generated/udpa/data/orca/v1/orca_load_report.upb.c', - 'src/core/ext/upb-generated/validate/validate.upb.c', + 'src/core/ext/upb-generated/udpa/data/orca/v1/orca_load_report.upb.c', + 'src/core/ext/upb-generated/validate/validate.upb.c', 'src/core/ext/xds/certificate_provider_registry.cc', 'src/core/ext/xds/google_mesh_ca_certificate_provider_factory.cc', 'src/core/ext/xds/xds_api.cc', 'src/core/ext/xds/xds_bootstrap.cc', 'src/core/ext/xds/xds_client.cc', 'src/core/ext/xds/xds_client_stats.cc', - 'src/core/lib/avl/avl.cc', - 'src/core/lib/backoff/backoff.cc', - 'src/core/lib/channel/channel_args.cc', - 'src/core/lib/channel/channel_stack.cc', - 'src/core/lib/channel/channel_stack_builder.cc', - 'src/core/lib/channel/channel_trace.cc', - 'src/core/lib/channel/channelz.cc', - 'src/core/lib/channel/channelz_registry.cc', - 'src/core/lib/channel/connected_channel.cc', - 'src/core/lib/channel/handshaker.cc', - 'src/core/lib/channel/handshaker_registry.cc', - 'src/core/lib/channel/status_util.cc', - 'src/core/lib/compression/compression.cc', - 'src/core/lib/compression/compression_args.cc', - 'src/core/lib/compression/compression_internal.cc', - 'src/core/lib/compression/message_compress.cc', - 'src/core/lib/compression/stream_compression.cc', - 
'src/core/lib/compression/stream_compression_gzip.cc', - 'src/core/lib/compression/stream_compression_identity.cc', - 'src/core/lib/debug/stats.cc', - 'src/core/lib/debug/stats_data.cc', - 'src/core/lib/debug/trace.cc', + 'src/core/lib/avl/avl.cc', + 'src/core/lib/backoff/backoff.cc', + 'src/core/lib/channel/channel_args.cc', + 'src/core/lib/channel/channel_stack.cc', + 'src/core/lib/channel/channel_stack_builder.cc', + 'src/core/lib/channel/channel_trace.cc', + 'src/core/lib/channel/channelz.cc', + 'src/core/lib/channel/channelz_registry.cc', + 'src/core/lib/channel/connected_channel.cc', + 'src/core/lib/channel/handshaker.cc', + 'src/core/lib/channel/handshaker_registry.cc', + 'src/core/lib/channel/status_util.cc', + 'src/core/lib/compression/compression.cc', + 'src/core/lib/compression/compression_args.cc', + 'src/core/lib/compression/compression_internal.cc', + 'src/core/lib/compression/message_compress.cc', + 'src/core/lib/compression/stream_compression.cc', + 'src/core/lib/compression/stream_compression_gzip.cc', + 'src/core/lib/compression/stream_compression_identity.cc', + 'src/core/lib/debug/stats.cc', + 'src/core/lib/debug/stats_data.cc', + 'src/core/lib/debug/trace.cc', 'src/core/lib/gpr/alloc.cc', 'src/core/lib/gpr/atm.cc', 'src/core/lib/gpr/cpu_iphone.cc', @@ -268,29 +268,29 @@ CORE_SOURCE_FILES = [ 'src/core/lib/gpr/tmpfile_posix.cc', 'src/core/lib/gpr/tmpfile_windows.cc', 'src/core/lib/gpr/wrap_memcpy.cc', - 'src/core/lib/gprpp/arena.cc', + 'src/core/lib/gprpp/arena.cc', 'src/core/lib/gprpp/fork.cc', - 'src/core/lib/gprpp/global_config_env.cc', - 'src/core/lib/gprpp/host_port.cc', - 'src/core/lib/gprpp/mpscq.cc', + 'src/core/lib/gprpp/global_config_env.cc', + 'src/core/lib/gprpp/host_port.cc', + 'src/core/lib/gprpp/mpscq.cc', 'src/core/lib/gprpp/thd_posix.cc', 'src/core/lib/gprpp/thd_windows.cc', 'src/core/lib/http/format_request.cc', 'src/core/lib/http/httpcli.cc', - 'src/core/lib/http/httpcli_security_connector.cc', + 'src/core/lib/http/httpcli_security_connector.cc', 'src/core/lib/http/parser.cc', 'src/core/lib/iomgr/buffer_list.cc', 'src/core/lib/iomgr/call_combiner.cc', - 'src/core/lib/iomgr/cfstream_handle.cc', + 'src/core/lib/iomgr/cfstream_handle.cc', 'src/core/lib/iomgr/combiner.cc', 'src/core/lib/iomgr/dualstack_socket_posix.cc', 'src/core/lib/iomgr/endpoint.cc', - 'src/core/lib/iomgr/endpoint_cfstream.cc', + 'src/core/lib/iomgr/endpoint_cfstream.cc', 'src/core/lib/iomgr/endpoint_pair_posix.cc', 'src/core/lib/iomgr/endpoint_pair_uv.cc', 'src/core/lib/iomgr/endpoint_pair_windows.cc', 'src/core/lib/iomgr/error.cc', - 'src/core/lib/iomgr/error_cfstream.cc', + 'src/core/lib/iomgr/error_cfstream.cc', 'src/core/lib/iomgr/ev_apple.cc', 'src/core/lib/iomgr/ev_epoll1_linux.cc', 'src/core/lib/iomgr/ev_epollex_linux.cc', @@ -299,8 +299,8 @@ CORE_SOURCE_FILES = [ 'src/core/lib/iomgr/ev_windows.cc', 'src/core/lib/iomgr/exec_ctx.cc', 'src/core/lib/iomgr/executor.cc', - 'src/core/lib/iomgr/executor/mpmcqueue.cc', - 'src/core/lib/iomgr/executor/threadpool.cc', + 'src/core/lib/iomgr/executor/mpmcqueue.cc', + 'src/core/lib/iomgr/executor/threadpool.cc', 'src/core/lib/iomgr/fork_posix.cc', 'src/core/lib/iomgr/fork_windows.cc', 'src/core/lib/iomgr/gethostname_fallback.cc', @@ -314,7 +314,7 @@ CORE_SOURCE_FILES = [ 'src/core/lib/iomgr/iomgr_custom.cc', 'src/core/lib/iomgr/iomgr_internal.cc', 'src/core/lib/iomgr/iomgr_posix.cc', - 'src/core/lib/iomgr/iomgr_posix_cfstream.cc', + 'src/core/lib/iomgr/iomgr_posix_cfstream.cc', 'src/core/lib/iomgr/iomgr_uv.cc', 
'src/core/lib/iomgr/iomgr_windows.cc', 'src/core/lib/iomgr/is_epollexclusive_available.cc', @@ -345,7 +345,7 @@ CORE_SOURCE_FILES = [ 'src/core/lib/iomgr/socket_utils_windows.cc', 'src/core/lib/iomgr/socket_windows.cc', 'src/core/lib/iomgr/tcp_client.cc', - 'src/core/lib/iomgr/tcp_client_cfstream.cc', + 'src/core/lib/iomgr/tcp_client_cfstream.cc', 'src/core/lib/iomgr/tcp_client_custom.cc', 'src/core/lib/iomgr/tcp_client_posix.cc', 'src/core/lib/iomgr/tcp_client_windows.cc', @@ -378,53 +378,53 @@ CORE_SOURCE_FILES = [ 'src/core/lib/json/json_reader.cc', 'src/core/lib/json/json_util.cc', 'src/core/lib/json/json_writer.cc', - 'src/core/lib/profiling/basic_timers.cc', - 'src/core/lib/profiling/stap_timers.cc', + 'src/core/lib/profiling/basic_timers.cc', + 'src/core/lib/profiling/stap_timers.cc', 'src/core/lib/security/authorization/authorization_engine.cc', 'src/core/lib/security/authorization/evaluate_args.cc', - 'src/core/lib/security/context/security_context.cc', - 'src/core/lib/security/credentials/alts/alts_credentials.cc', - 'src/core/lib/security/credentials/alts/check_gcp_environment.cc', - 'src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc', - 'src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc', - 'src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc', - 'src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc', - 'src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc', - 'src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc', - 'src/core/lib/security/credentials/composite/composite_credentials.cc', - 'src/core/lib/security/credentials/credentials.cc', - 'src/core/lib/security/credentials/credentials_metadata.cc', - 'src/core/lib/security/credentials/fake/fake_credentials.cc', - 'src/core/lib/security/credentials/google_default/credentials_generic.cc', - 'src/core/lib/security/credentials/google_default/google_default_credentials.cc', - 'src/core/lib/security/credentials/iam/iam_credentials.cc', - 'src/core/lib/security/credentials/jwt/json_token.cc', - 'src/core/lib/security/credentials/jwt/jwt_credentials.cc', - 'src/core/lib/security/credentials/jwt/jwt_verifier.cc', - 'src/core/lib/security/credentials/local/local_credentials.cc', - 'src/core/lib/security/credentials/oauth2/oauth2_credentials.cc', - 'src/core/lib/security/credentials/plugin/plugin_credentials.cc', - 'src/core/lib/security/credentials/ssl/ssl_credentials.cc', + 'src/core/lib/security/context/security_context.cc', + 'src/core/lib/security/credentials/alts/alts_credentials.cc', + 'src/core/lib/security/credentials/alts/check_gcp_environment.cc', + 'src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc', + 'src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc', + 'src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc', + 'src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc', + 'src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc', + 'src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc', + 'src/core/lib/security/credentials/composite/composite_credentials.cc', + 'src/core/lib/security/credentials/credentials.cc', + 'src/core/lib/security/credentials/credentials_metadata.cc', + 'src/core/lib/security/credentials/fake/fake_credentials.cc', + 'src/core/lib/security/credentials/google_default/credentials_generic.cc', + 
'src/core/lib/security/credentials/google_default/google_default_credentials.cc', + 'src/core/lib/security/credentials/iam/iam_credentials.cc', + 'src/core/lib/security/credentials/jwt/json_token.cc', + 'src/core/lib/security/credentials/jwt/jwt_credentials.cc', + 'src/core/lib/security/credentials/jwt/jwt_verifier.cc', + 'src/core/lib/security/credentials/local/local_credentials.cc', + 'src/core/lib/security/credentials/oauth2/oauth2_credentials.cc', + 'src/core/lib/security/credentials/plugin/plugin_credentials.cc', + 'src/core/lib/security/credentials/ssl/ssl_credentials.cc', 'src/core/lib/security/credentials/tls/grpc_tls_certificate_distributor.cc', - 'src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc', + 'src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc', 'src/core/lib/security/credentials/tls/tls_credentials.cc', 'src/core/lib/security/credentials/xds/xds_credentials.cc', - 'src/core/lib/security/security_connector/alts/alts_security_connector.cc', - 'src/core/lib/security/security_connector/fake/fake_security_connector.cc', - 'src/core/lib/security/security_connector/load_system_roots_fallback.cc', - 'src/core/lib/security/security_connector/load_system_roots_linux.cc', - 'src/core/lib/security/security_connector/local/local_security_connector.cc', - 'src/core/lib/security/security_connector/security_connector.cc', - 'src/core/lib/security/security_connector/ssl/ssl_security_connector.cc', - 'src/core/lib/security/security_connector/ssl_utils.cc', - 'src/core/lib/security/security_connector/ssl_utils_config.cc', + 'src/core/lib/security/security_connector/alts/alts_security_connector.cc', + 'src/core/lib/security/security_connector/fake/fake_security_connector.cc', + 'src/core/lib/security/security_connector/load_system_roots_fallback.cc', + 'src/core/lib/security/security_connector/load_system_roots_linux.cc', + 'src/core/lib/security/security_connector/local/local_security_connector.cc', + 'src/core/lib/security/security_connector/security_connector.cc', + 'src/core/lib/security/security_connector/ssl/ssl_security_connector.cc', + 'src/core/lib/security/security_connector/ssl_utils.cc', + 'src/core/lib/security/security_connector/ssl_utils_config.cc', 'src/core/lib/security/security_connector/tls/tls_security_connector.cc', - 'src/core/lib/security/transport/client_auth_filter.cc', - 'src/core/lib/security/transport/secure_endpoint.cc', - 'src/core/lib/security/transport/security_handshaker.cc', - 'src/core/lib/security/transport/server_auth_filter.cc', - 'src/core/lib/security/transport/tsi_error.cc', - 'src/core/lib/security/util/json_util.cc', + 'src/core/lib/security/transport/client_auth_filter.cc', + 'src/core/lib/security/transport/secure_endpoint.cc', + 'src/core/lib/security/transport/security_handshaker.cc', + 'src/core/lib/security/transport/server_auth_filter.cc', + 'src/core/lib/security/transport/tsi_error.cc', + 'src/core/lib/security/util/json_util.cc', 'src/core/lib/slice/b64.cc', 'src/core/lib/slice/percent_encoding.cc', 'src/core/lib/slice/slice.cc', @@ -444,8 +444,8 @@ CORE_SOURCE_FILES = [ 'src/core/lib/surface/completion_queue.cc', 'src/core/lib/surface/completion_queue_factory.cc', 'src/core/lib/surface/event_string.cc', - 'src/core/lib/surface/init.cc', - 'src/core/lib/surface/init_secure.cc', + 'src/core/lib/surface/init.cc', + 'src/core/lib/surface/init_secure.cc', 'src/core/lib/surface/lame_client.cc', 'src/core/lib/surface/metadata_array.cc', 'src/core/lib/surface/server.cc', @@ -466,7 +466,7 @@ 
CORE_SOURCE_FILES = [ 'src/core/lib/transport/transport.cc', 'src/core/lib/transport/transport_op_string.cc', 'src/core/lib/uri/uri_parser.cc', - 'src/core/plugin_registry/grpc_plugin_registry.cc', + 'src/core/plugin_registry/grpc_plugin_registry.cc', 'src/core/tsi/alts/crypt/aes_gcm.cc', 'src/core/tsi/alts/crypt/gsec.cc', 'src/core/tsi/alts/frame_protector/alts_counter.cc', @@ -479,8 +479,8 @@ CORE_SOURCE_FILES = [ 'src/core/tsi/alts/handshaker/alts_handshaker_client.cc', 'src/core/tsi/alts/handshaker/alts_shared_resource.cc', 'src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc', - 'src/core/tsi/alts/handshaker/alts_tsi_utils.cc', - 'src/core/tsi/alts/handshaker/transport_security_common_api.cc', + 'src/core/tsi/alts/handshaker/alts_tsi_utils.cc', + 'src/core/tsi/alts/handshaker/transport_security_common_api.cc', 'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.cc', 'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.cc', 'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.cc', @@ -492,7 +492,7 @@ CORE_SOURCE_FILES = [ 'src/core/tsi/ssl/session_cache/ssl_session_cache.cc', 'src/core/tsi/ssl/session_cache/ssl_session_openssl.cc', 'src/core/tsi/ssl_transport_security.cc', - 'src/core/tsi/transport_security.cc', + 'src/core/tsi/transport_security.cc', 'src/core/tsi/transport_security_grpc.cc', 'third_party/abseil-cpp/y_absl/base/dynamic_annotations.cc', 'third_party/abseil-cpp/y_absl/base/internal/cycleclock.cc', @@ -568,9 +568,9 @@ CORE_SOURCE_FILES = [ 'third_party/abseil-cpp/y_absl/time/time.cc', 'third_party/abseil-cpp/y_absl/types/bad_optional_access.cc', 'third_party/abseil-cpp/y_absl/types/bad_variant_access.cc', - 'third_party/address_sorting/address_sorting.c', - 'third_party/address_sorting/address_sorting_posix.c', - 'third_party/address_sorting/address_sorting_windows.c', + 'third_party/address_sorting/address_sorting.c', + 'third_party/address_sorting/address_sorting_posix.c', + 'third_party/address_sorting/address_sorting_windows.c', 'third_party/boringssl-with-bazel/err_data.c', 'third_party/boringssl-with-bazel/src/crypto/asn1/a_bitstr.c', 'third_party/boringssl-with-bazel/src/crypto/asn1/a_bool.c', @@ -879,7 +879,7 @@ CORE_SOURCE_FILES = [ 'third_party/cares/cares/ares_strcasecmp.c', 'third_party/cares/cares/ares_strdup.c', 'third_party/cares/cares/ares_strerror.c', - 'third_party/cares/cares/ares_strsplit.c', + 'third_party/cares/cares/ares_strsplit.c', 'third_party/cares/cares/ares_timeout.c', 'third_party/cares/cares/ares_version.c', 'third_party/cares/cares/ares_writev.c', @@ -910,27 +910,27 @@ CORE_SOURCE_FILES = [ 'third_party/re2/util/pcre.cc', 'third_party/re2/util/rune.cc', 'third_party/re2/util/strutil.cc', - 'third_party/upb/upb/decode.c', - 'third_party/upb/upb/encode.c', - 'third_party/upb/upb/msg.c', - 'third_party/upb/upb/port.c', - 'third_party/upb/upb/table.c', - 'third_party/upb/upb/upb.c', - 'third_party/zlib/adler32.c', - 'third_party/zlib/compress.c', - 'third_party/zlib/crc32.c', - 'third_party/zlib/deflate.c', - 'third_party/zlib/gzclose.c', - 'third_party/zlib/gzlib.c', - 'third_party/zlib/gzread.c', - 'third_party/zlib/gzwrite.c', - 'third_party/zlib/infback.c', - 'third_party/zlib/inffast.c', - 'third_party/zlib/inflate.c', - 'third_party/zlib/inftrees.c', - 'third_party/zlib/trees.c', - 'third_party/zlib/uncompr.c', - 'third_party/zlib/zutil.c', + 'third_party/upb/upb/decode.c', + 'third_party/upb/upb/encode.c', + 'third_party/upb/upb/msg.c', + 
'third_party/upb/upb/port.c', + 'third_party/upb/upb/table.c', + 'third_party/upb/upb/upb.c', + 'third_party/zlib/adler32.c', + 'third_party/zlib/compress.c', + 'third_party/zlib/crc32.c', + 'third_party/zlib/deflate.c', + 'third_party/zlib/gzclose.c', + 'third_party/zlib/gzlib.c', + 'third_party/zlib/gzread.c', + 'third_party/zlib/gzwrite.c', + 'third_party/zlib/infback.c', + 'third_party/zlib/inffast.c', + 'third_party/zlib/inflate.c', + 'third_party/zlib/inftrees.c', + 'third_party/zlib/trees.c', + 'third_party/zlib/uncompr.c', + 'third_party/zlib/zutil.c', ] ASM_SOURCE_FILES = { diff --git a/contrib/libs/grpc/src/python/grpcio/ya.make b/contrib/libs/grpc/src/python/grpcio/ya.make index 5de0a4d591..5cdb0230c4 100644 --- a/contrib/libs/grpc/src/python/grpcio/ya.make +++ b/contrib/libs/grpc/src/python/grpcio/ya.make @@ -5,13 +5,13 @@ LICENSE(Apache-2.0) LICENSE_TEXTS(.yandex_meta/licenses.list.txt) OWNER( - akastornov - g:contrib - g:cpp-contrib + akastornov + g:contrib + g:cpp-contrib ) PEERDIR( - contrib/libs/grpc/grpc + contrib/libs/grpc/grpc contrib/python/six ) @@ -31,10 +31,10 @@ ADDINCL( IF (SANITIZER_TYPE == undefined) # https://github.com/grpc/grpc/blob/v1.15.1/tools/bazel.rc#L43 CXXFLAGS(-fno-sanitize=function) -ENDIF() +ENDIF() NO_LINT() - + NO_COMPILER_WARNINGS() PY_SRCS( @@ -43,7 +43,7 @@ PY_SRCS( grpc/_auth.py grpc/_channel.py grpc/_common.py - grpc/_compression.py + grpc/_compression.py grpc/_cython/__init__.py grpc/_cython/_cygrpc/__init__.py grpc/_cython/cygrpc.pyx @@ -83,9 +83,9 @@ PY_SRCS( grpc/framework/interfaces/face/utilities.py ) -IF (PYTHON3) - PY_SRCS( - TOP_LEVEL +IF (PYTHON3) + PY_SRCS( + TOP_LEVEL grpc/_simple_stubs.py grpc/aio/_base_call.py grpc/aio/_base_channel.py @@ -98,8 +98,8 @@ IF (PYTHON3) grpc/aio/_server.py grpc/aio/_typing.py grpc/aio/_utils.py - grpc/experimental/aio/__init__.py - ) -ENDIF() - + grpc/experimental/aio/__init__.py + ) +ENDIF() + END() diff --git a/contrib/libs/grpc/src/python/grpcio_channelz/README.rst b/contrib/libs/grpc/src/python/grpcio_channelz/README.rst index 7a6a4845cd..d66d0c4f92 100644 --- a/contrib/libs/grpc/src/python/grpcio_channelz/README.rst +++ b/contrib/libs/grpc/src/python/grpcio_channelz/README.rst @@ -1,17 +1,17 @@ -gRPC Python Channelz package -============================== - -Channelz is a live debug tool in gRPC Python. - -Supported Python Versions -------------------------- -Python >= 3.5 - -Deprecated Python Versions --------------------------- -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - -Dependencies ------------- - -Depends on the `grpcio` package, available from PyPI via `pip install grpcio`. +gRPC Python Channelz package +============================== + +Channelz is a live debug tool in gRPC Python. + +Supported Python Versions +------------------------- +Python >= 3.5 + +Deprecated Python Versions +-------------------------- +Python == 2.7. Python 2.7 support will be removed on January 1, 2020. + +Dependencies +------------ + +Depends on the `grpcio` package, available from PyPI via `pip install grpcio`. 
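Aside (not part of this diff): the grpcio_channelz README above only says that Channelz is a live debug tool. A minimal usage sketch, assuming the public ``grpc_channelz.v1.channelz.add_channelz_servicer`` helper from the grpcio-channelz package and an illustrative listen address, registers the servicer on an ordinary gRPC server::

    # Sketch: expose the Channelz v1 debug service on a gRPC Python server.
    # Assumes grpcio and grpcio-channelz are installed; the port is illustrative.
    from concurrent import futures

    import grpc
    from grpc_channelz.v1 import channelz

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    channelz.add_channelz_servicer(server)  # register Channelz alongside application servicers
    server.add_insecure_port('[::]:50051')
    server.start()
    server.wait_for_termination()

Once running, channel and subchannel state can be queried over the Channelz RPCs for live debugging.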
diff --git a/contrib/libs/grpc/src/python/grpcio_channelz/setup.py b/contrib/libs/grpc/src/python/grpcio_channelz/setup.py index a6526b400d..678b5ef13f 100644 --- a/contrib/libs/grpc/src/python/grpcio_channelz/setup.py +++ b/contrib/libs/grpc/src/python/grpcio_channelz/setup.py @@ -18,9 +18,9 @@ import sys import setuptools -_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__)) -_README_PATH = os.path.join(_PACKAGE_PATH, 'README.rst') - +_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__)) +_README_PATH = os.path.join(_PACKAGE_PATH, 'README.rst') + # Ensure we're in the proper directory whether or not we're being used by pip. os.chdir(os.path.dirname(os.path.abspath(__file__))) @@ -53,8 +53,8 @@ CLASSIFIERS = [ 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'License :: OSI Approved :: Apache Software License', ] @@ -91,7 +91,7 @@ setuptools.setup( version=grpc_version.VERSION, license='Apache License 2.0', description='Channel Level Live Debug Information Service for gRPC', - long_description=open(_README_PATH, 'r').read(), + long_description=open(_README_PATH, 'r').read(), author='The gRPC Authors', author_email='grpc-io@googlegroups.com', classifiers=CLASSIFIERS, diff --git a/contrib/libs/grpc/src/python/grpcio_channelz/ya.make b/contrib/libs/grpc/src/python/grpcio_channelz/ya.make index 7c3506de6f..2703d70ab0 100644 --- a/contrib/libs/grpc/src/python/grpcio_channelz/ya.make +++ b/contrib/libs/grpc/src/python/grpcio_channelz/ya.make @@ -1,51 +1,51 @@ -PY23_LIBRARY() - +PY23_LIBRARY() + LICENSE(Apache-2.0) LICENSE_TEXTS(.yandex_meta/licenses.list.txt) -OWNER( - akastornov - dvshkurko - g:contrib - g:cpp-contrib -) - -PEERDIR( - contrib/libs/grpc/grpc - contrib/python/six -) - -IF (PYTHON2) - PEERDIR( - contrib/python/enum34 - contrib/python/futures - ) -ENDIF() - -ADDINCL( +OWNER( + akastornov + dvshkurko + g:contrib + g:cpp-contrib +) + +PEERDIR( + contrib/libs/grpc/grpc + contrib/python/six +) + +IF (PYTHON2) + PEERDIR( + contrib/python/enum34 + contrib/python/futures + ) +ENDIF() + +ADDINCL( ${ARCADIA_BUILD_ROOT}/contrib/libs/grpc - contrib/libs/grpc - contrib/libs/grpc/include -) - + contrib/libs/grpc + contrib/libs/grpc/include +) + IF (SANITIZER_TYPE == undefined) - # https://github.com/grpc/grpc/blob/v1.15.1/tools/bazel.rc#L43 - CXXFLAGS(-fno-sanitize=function) -ENDIF() - -NO_LINT() - -NO_COMPILER_WARNINGS() - -PY_SRCS( - TOP_LEVEL - grpc_channelz/__init__.py - grpc_channelz/v1/__init__.py + # https://github.com/grpc/grpc/blob/v1.15.1/tools/bazel.rc#L43 + CXXFLAGS(-fno-sanitize=function) +ENDIF() + +NO_LINT() + +NO_COMPILER_WARNINGS() + +PY_SRCS( + TOP_LEVEL + grpc_channelz/__init__.py + grpc_channelz/v1/__init__.py grpc_channelz/v1/_servicer.py - grpc_channelz/v1/channelz.py -) - + grpc_channelz/v1/channelz.py +) + IF (PYTHON3) PY_SRCS( TOP_LEVEL @@ -53,4 +53,4 @@ IF (PYTHON3) ) ENDIF() -END() +END() diff --git a/contrib/libs/grpc/src/python/grpcio_health_checking/README.rst b/contrib/libs/grpc/src/python/grpcio_health_checking/README.rst index 0b43dac9b3..044377a582 100644 --- a/contrib/libs/grpc/src/python/grpcio_health_checking/README.rst +++ b/contrib/libs/grpc/src/python/grpcio_health_checking/README.rst @@ -3,14 +3,14 @@ gRPC Python Health Checking Reference package for GRPC 
Python health checking. -Supported Python Versions -------------------------- -Python >= 3.5 - -Deprecated Python Versions --------------------------- -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - +Supported Python Versions +------------------------- +Python >= 3.5 + +Deprecated Python Versions +-------------------------- +Python == 2.7. Python 2.7 support will be removed on January 1, 2020. + Dependencies ------------ diff --git a/contrib/libs/grpc/src/python/grpcio_health_checking/grpc_health/v1/health.py b/contrib/libs/grpc/src/python/grpcio_health_checking/grpc_health/v1/health.py index 50f125b851..f7ee639ec8 100644 --- a/contrib/libs/grpc/src/python/grpcio_health_checking/grpc_health/v1/health.py +++ b/contrib/libs/grpc/src/python/grpcio_health_checking/grpc_health/v1/health.py @@ -13,7 +13,7 @@ # limitations under the License. """Reference implementation for health checking in gRPC Python.""" -import collections +import collections import threading import sys import grpc @@ -35,7 +35,7 @@ class _Watcher(): def __init__(self): self._condition = threading.Condition() - self._responses = collections.deque() + self._responses = collections.deque() self._open = True def __iter__(self): @@ -46,7 +46,7 @@ class _Watcher(): while not self._responses and self._open: self._condition.wait() if self._responses: - return self._responses.popleft() + return self._responses.popleft() else: raise StopIteration() @@ -67,37 +67,37 @@ class _Watcher(): self._condition.notify() -def _watcher_to_send_response_callback_adapter(watcher): - - def send_response_callback(response): - if response is None: - watcher.close() - else: - watcher.add(response) - - return send_response_callback - - +def _watcher_to_send_response_callback_adapter(watcher): + + def send_response_callback(response): + if response is None: + watcher.close() + else: + watcher.add(response) + + return send_response_callback + + class HealthServicer(_health_pb2_grpc.HealthServicer): """Servicer handling RPCs for service statuses.""" - def __init__(self, - experimental_non_blocking=True, - experimental_thread_pool=None): + def __init__(self, + experimental_non_blocking=True, + experimental_thread_pool=None): self._lock = threading.RLock() self._server_status = {"": _health_pb2.HealthCheckResponse.SERVING} - self._send_response_callbacks = {} - self.Watch.__func__.experimental_non_blocking = experimental_non_blocking - self.Watch.__func__.experimental_thread_pool = experimental_thread_pool - self._gracefully_shutting_down = False + self._send_response_callbacks = {} + self.Watch.__func__.experimental_non_blocking = experimental_non_blocking + self.Watch.__func__.experimental_thread_pool = experimental_thread_pool + self._gracefully_shutting_down = False - def _on_close_callback(self, send_response_callback, service): + def _on_close_callback(self, send_response_callback, service): def callback(): with self._lock: - self._send_response_callbacks[service].remove( - send_response_callback) - send_response_callback(None) + self._send_response_callbacks[service].remove( + send_response_callback) + send_response_callback(None) return callback @@ -110,29 +110,29 @@ class HealthServicer(_health_pb2_grpc.HealthServicer): else: return _health_pb2.HealthCheckResponse(status=status) - # pylint: disable=arguments-differ - def Watch(self, request, context, send_response_callback=None): - blocking_watcher = None - if send_response_callback is None: - # The server does not support the experimental_non_blocking - # parameter. 
For backwards compatibility, return a blocking response - # generator. - blocking_watcher = _Watcher() - send_response_callback = _watcher_to_send_response_callback_adapter( - blocking_watcher) + # pylint: disable=arguments-differ + def Watch(self, request, context, send_response_callback=None): + blocking_watcher = None + if send_response_callback is None: + # The server does not support the experimental_non_blocking + # parameter. For backwards compatibility, return a blocking response + # generator. + blocking_watcher = _Watcher() + send_response_callback = _watcher_to_send_response_callback_adapter( + blocking_watcher) service = request.service with self._lock: status = self._server_status.get(service) if status is None: status = _health_pb2.HealthCheckResponse.SERVICE_UNKNOWN # pylint: disable=no-member - send_response_callback( - _health_pb2.HealthCheckResponse(status=status)) - if service not in self._send_response_callbacks: - self._send_response_callbacks[service] = set() - self._send_response_callbacks[service].add(send_response_callback) - context.add_callback( - self._on_close_callback(send_response_callback, service)) - return blocking_watcher + send_response_callback( + _health_pb2.HealthCheckResponse(status=status)) + if service not in self._send_response_callbacks: + self._send_response_callbacks[service] = set() + self._send_response_callbacks[service].add(send_response_callback) + context.add_callback( + self._on_close_callback(send_response_callback, service)) + return blocking_watcher def set(self, service, status): """Sets the status of a service. @@ -143,30 +143,30 @@ class HealthServicer(_health_pb2_grpc.HealthServicer): the service """ with self._lock: - if self._gracefully_shutting_down: - return - else: - self._server_status[service] = status - if service in self._send_response_callbacks: - for send_response_callback in self._send_response_callbacks[ - service]: - send_response_callback( - _health_pb2.HealthCheckResponse(status=status)) - - def enter_graceful_shutdown(self): - """Permanently sets the status of all services to NOT_SERVING. - - This should be invoked when the server is entering a graceful shutdown - period. After this method is invoked, future attempts to set the status - of a service will be ignored. - - This is an EXPERIMENTAL API. - """ - with self._lock: - if self._gracefully_shutting_down: - return - else: - for service in self._server_status: - self.set(service, - _health_pb2.HealthCheckResponse.NOT_SERVING) # pylint: disable=no-member - self._gracefully_shutting_down = True + if self._gracefully_shutting_down: + return + else: + self._server_status[service] = status + if service in self._send_response_callbacks: + for send_response_callback in self._send_response_callbacks[ + service]: + send_response_callback( + _health_pb2.HealthCheckResponse(status=status)) + + def enter_graceful_shutdown(self): + """Permanently sets the status of all services to NOT_SERVING. + + This should be invoked when the server is entering a graceful shutdown + period. After this method is invoked, future attempts to set the status + of a service will be ignored. + + This is an EXPERIMENTAL API. 
+ """ + with self._lock: + if self._gracefully_shutting_down: + return + else: + for service in self._server_status: + self.set(service, + _health_pb2.HealthCheckResponse.NOT_SERVING) # pylint: disable=no-member + self._gracefully_shutting_down = True diff --git a/contrib/libs/grpc/src/python/grpcio_health_checking/setup.py b/contrib/libs/grpc/src/python/grpcio_health_checking/setup.py index c7259d1843..fa1fbbd55a 100644 --- a/contrib/libs/grpc/src/python/grpcio_health_checking/setup.py +++ b/contrib/libs/grpc/src/python/grpcio_health_checking/setup.py @@ -17,9 +17,9 @@ import os import setuptools -_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__)) -_README_PATH = os.path.join(_PACKAGE_PATH, 'README.rst') - +_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__)) +_README_PATH = os.path.join(_PACKAGE_PATH, 'README.rst') + # Ensure we're in the proper directory whether or not we're being used by pip. os.chdir(os.path.dirname(os.path.abspath(__file__))) @@ -52,8 +52,8 @@ CLASSIFIERS = [ 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'License :: OSI Approved :: Apache Software License', ] diff --git a/contrib/libs/grpc/src/python/grpcio_health_checking/ya.make b/contrib/libs/grpc/src/python/grpcio_health_checking/ya.make index 696f0008b0..4bab1758a5 100644 --- a/contrib/libs/grpc/src/python/grpcio_health_checking/ya.make +++ b/contrib/libs/grpc/src/python/grpcio_health_checking/ya.make @@ -1,48 +1,48 @@ -PY23_LIBRARY() - +PY23_LIBRARY() + LICENSE(Apache-2.0) LICENSE_TEXTS(.yandex_meta/licenses.list.txt) -OWNER( - akastornov - dvshkurko - g:contrib - g:cpp-contrib -) - -PEERDIR( - contrib/libs/grpc/grpc - contrib/python/six -) - -IF (PYTHON2) - PEERDIR( - contrib/python/enum34 - contrib/python/futures - ) -ENDIF() - -ADDINCL( +OWNER( + akastornov + dvshkurko + g:contrib + g:cpp-contrib +) + +PEERDIR( + contrib/libs/grpc/grpc + contrib/python/six +) + +IF (PYTHON2) + PEERDIR( + contrib/python/enum34 + contrib/python/futures + ) +ENDIF() + +ADDINCL( ${ARCADIA_BUILD_ROOT}/contrib/libs/grpc - contrib/libs/grpc - contrib/libs/grpc/include -) - + contrib/libs/grpc + contrib/libs/grpc/include +) + IF (SANITIZER_TYPE == undefined) - # https://github.com/grpc/grpc/blob/v1.15.1/tools/bazel.rc#L43 - CXXFLAGS(-fno-sanitize=function) -ENDIF() - -NO_LINT() - -NO_COMPILER_WARNINGS() - -PY_SRCS( - TOP_LEVEL - grpc_health/__init__.py - grpc_health/v1/__init__.py - grpc_health/v1/health.py -) - -END() + # https://github.com/grpc/grpc/blob/v1.15.1/tools/bazel.rc#L43 + CXXFLAGS(-fno-sanitize=function) +ENDIF() + +NO_LINT() + +NO_COMPILER_WARNINGS() + +PY_SRCS( + TOP_LEVEL + grpc_health/__init__.py + grpc_health/v1/__init__.py + grpc_health/v1/health.py +) + +END() diff --git a/contrib/libs/grpc/src/python/grpcio_reflection/README.rst b/contrib/libs/grpc/src/python/grpcio_reflection/README.rst index d3a7633599..56f9953373 100644 --- a/contrib/libs/grpc/src/python/grpcio_reflection/README.rst +++ b/contrib/libs/grpc/src/python/grpcio_reflection/README.rst @@ -3,14 +3,14 @@ gRPC Python Reflection package Reference package for reflection in GRPC Python. -Supported Python Versions -------------------------- -Python >= 3.5 - -Deprecated Python Versions --------------------------- -Python == 2.7. 
Python 2.7 support will be removed on January 1, 2020. - +Supported Python Versions +------------------------- +Python >= 3.5 + +Deprecated Python Versions +-------------------------- +Python == 2.7. Python 2.7 support will be removed on January 1, 2020. + Dependencies ------------ diff --git a/contrib/libs/grpc/src/python/grpcio_reflection/setup.py b/contrib/libs/grpc/src/python/grpcio_reflection/setup.py index b44ea9206c..2d0a3fcdaa 100644 --- a/contrib/libs/grpc/src/python/grpcio_reflection/setup.py +++ b/contrib/libs/grpc/src/python/grpcio_reflection/setup.py @@ -18,9 +18,9 @@ import sys import setuptools -_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__)) -_README_PATH = os.path.join(_PACKAGE_PATH, 'README.rst') - +_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__)) +_README_PATH = os.path.join(_PACKAGE_PATH, 'README.rst') + # Ensure we're in the proper directory whether or not we're being used by pip. os.chdir(os.path.dirname(os.path.abspath(__file__))) @@ -53,8 +53,8 @@ CLASSIFIERS = [ 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'License :: OSI Approved :: Apache Software License', ] diff --git a/contrib/libs/grpc/src/python/grpcio_reflection/ya.make b/contrib/libs/grpc/src/python/grpcio_reflection/ya.make index 8a15784d91..c0f7d61dcc 100644 --- a/contrib/libs/grpc/src/python/grpcio_reflection/ya.make +++ b/contrib/libs/grpc/src/python/grpcio_reflection/ya.make @@ -1,52 +1,52 @@ -PY23_LIBRARY() - +PY23_LIBRARY() + LICENSE(Apache-2.0) LICENSE_TEXTS(.yandex_meta/licenses.list.txt) -OWNER( - akastornov - dvshkurko - g:contrib - g:cpp-contrib -) - -PEERDIR( - contrib/libs/grpc/grpc - contrib/python/six +OWNER( + akastornov + dvshkurko + g:contrib + g:cpp-contrib +) + +PEERDIR( + contrib/libs/grpc/grpc + contrib/python/six contrib/libs/grpc/src/proto/grpc/reflection/v1alpha -) - -IF (PYTHON2) - PEERDIR( - contrib/python/enum34 - contrib/python/futures - ) -ENDIF() - -ADDINCL( +) + +IF (PYTHON2) + PEERDIR( + contrib/python/enum34 + contrib/python/futures + ) +ENDIF() + +ADDINCL( ${ARCADIA_BUILD_ROOT}/contrib/libs/grpc - contrib/libs/grpc - contrib/libs/grpc/include -) - + contrib/libs/grpc + contrib/libs/grpc/include +) + IF (SANITIZER_TYPE == undefined) - # https://github.com/grpc/grpc/blob/v1.15.1/tools/bazel.rc#L43 - CXXFLAGS(-fno-sanitize=function) -ENDIF() - -NO_LINT() - -NO_COMPILER_WARNINGS() - -PY_SRCS( - TOP_LEVEL - grpc_reflection/__init__.py - grpc_reflection/v1alpha/__init__.py + # https://github.com/grpc/grpc/blob/v1.15.1/tools/bazel.rc#L43 + CXXFLAGS(-fno-sanitize=function) +ENDIF() + +NO_LINT() + +NO_COMPILER_WARNINGS() + +PY_SRCS( + TOP_LEVEL + grpc_reflection/__init__.py + grpc_reflection/v1alpha/__init__.py grpc_reflection/v1alpha/_base.py - grpc_reflection/v1alpha/reflection.py -) - + grpc_reflection/v1alpha/reflection.py +) + IF (PYTHON3) PY_SRCS( TOP_LEVEL @@ -54,4 +54,4 @@ IF (PYTHON3) ) ENDIF() -END() +END() diff --git a/contrib/libs/grpc/src/python/grpcio_status/README.rst b/contrib/libs/grpc/src/python/grpcio_status/README.rst index 5774c5f696..16c59387a6 100644 --- a/contrib/libs/grpc/src/python/grpcio_status/README.rst +++ b/contrib/libs/grpc/src/python/grpcio_status/README.rst @@ -1,17 +1,17 @@ -gRPC Python Status Proto -=========================== - 
-Reference package for GRPC Python status proto mapping. - -Supported Python Versions -------------------------- -Python >= 3.5 - -Deprecated Python Versions --------------------------- -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - -Dependencies ------------- - -Depends on the `grpcio` package, available from PyPI via `pip install grpcio`. +gRPC Python Status Proto +=========================== + +Reference package for GRPC Python status proto mapping. + +Supported Python Versions +------------------------- +Python >= 3.5 + +Deprecated Python Versions +-------------------------- +Python == 2.7. Python 2.7 support will be removed on January 1, 2020. + +Dependencies +------------ + +Depends on the `grpcio` package, available from PyPI via `pip install grpcio`. diff --git a/contrib/libs/grpc/src/python/grpcio_status/grpc_status/rpc_status.py b/contrib/libs/grpc/src/python/grpcio_status/grpc_status/rpc_status.py index e6d3a98775..d0ec08e3a5 100644 --- a/contrib/libs/grpc/src/python/grpcio_status/grpc_status/rpc_status.py +++ b/contrib/libs/grpc/src/python/grpcio_status/grpc_status/rpc_status.py @@ -44,8 +44,8 @@ def from_call(call): ValueError: If the gRPC call's code or details are inconsistent with the status code and message inside of the google.rpc.status.Status. """ - if call.trailing_metadata() is None: - return None + if call.trailing_metadata() is None: + return None for key, value in call.trailing_metadata(): if key == GRPC_DETAILS_METADATA_KEY: rich_status = status_pb2.Status.FromString(value) diff --git a/contrib/libs/grpc/src/python/grpcio_status/setup.py b/contrib/libs/grpc/src/python/grpcio_status/setup.py index 2262e2ae28..eb49069c34 100644 --- a/contrib/libs/grpc/src/python/grpcio_status/setup.py +++ b/contrib/libs/grpc/src/python/grpcio_status/setup.py @@ -17,9 +17,9 @@ import os import setuptools -_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__)) -_README_PATH = os.path.join(_PACKAGE_PATH, 'README.rst') - +_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__)) +_README_PATH = os.path.join(_PACKAGE_PATH, 'README.rst') + # Ensure we're in the proper directory whether or not we're being used by pip. 
os.chdir(os.path.dirname(os.path.abspath(__file__))) @@ -53,7 +53,7 @@ CLASSIFIERS = [ 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'License :: OSI Approved :: Apache Software License', ] diff --git a/contrib/libs/grpc/src/python/grpcio_status/ya.make b/contrib/libs/grpc/src/python/grpcio_status/ya.make index 68e8961a22..61a3ee4336 100644 --- a/contrib/libs/grpc/src/python/grpcio_status/ya.make +++ b/contrib/libs/grpc/src/python/grpcio_status/ya.make @@ -1,51 +1,51 @@ -PY23_LIBRARY() - +PY23_LIBRARY() + LICENSE(Apache-2.0) LICENSE_TEXTS(.yandex_meta/licenses.list.txt) -OWNER( - akastornov - dvshkurko - g:contrib - g:cpp-contrib -) - -PEERDIR( +OWNER( + akastornov + dvshkurko + g:contrib + g:cpp-contrib +) + +PEERDIR( contrib/python/google-api-core - contrib/libs/grpc/grpc - contrib/python/six -) - -IF (PYTHON2) - PEERDIR( - contrib/python/enum34 - contrib/python/futures - ) -ENDIF() - -ADDINCL( + contrib/libs/grpc/grpc + contrib/python/six +) + +IF (PYTHON2) + PEERDIR( + contrib/python/enum34 + contrib/python/futures + ) +ENDIF() + +ADDINCL( ${ARCADIA_BUILD_ROOT}/contrib/libs/grpc - contrib/libs/grpc - contrib/libs/grpc/include -) - + contrib/libs/grpc + contrib/libs/grpc/include +) + IF (SANITIZER_TYPE == undefined) - # https://github.com/grpc/grpc/blob/v1.15.1/tools/bazel.rc#L43 - CXXFLAGS(-fno-sanitize=function) -ENDIF() - -NO_LINT() - -NO_COMPILER_WARNINGS() - -PY_SRCS( - TOP_LEVEL - grpc_status/__init__.py + # https://github.com/grpc/grpc/blob/v1.15.1/tools/bazel.rc#L43 + CXXFLAGS(-fno-sanitize=function) +ENDIF() + +NO_LINT() + +NO_COMPILER_WARNINGS() + +PY_SRCS( + TOP_LEVEL + grpc_status/__init__.py grpc_status/_common.py grpc_status/rpc_status.py -) - +) + IF (PYTHON3) PY_SRCS( TOP_LEVEL @@ -53,4 +53,4 @@ IF (PYTHON3) ) ENDIF() -END() +END() diff --git a/contrib/libs/grpc/src/python/grpcio_testing/README.rst b/contrib/libs/grpc/src/python/grpcio_testing/README.rst index 4d7a594d90..968dec8507 100644 --- a/contrib/libs/grpc/src/python/grpcio_testing/README.rst +++ b/contrib/libs/grpc/src/python/grpcio_testing/README.rst @@ -3,14 +3,14 @@ gRPC Python Testing Package Testing utilities for gRPC Python -Supported Python Versions -------------------------- -Python >= 3.5 - -Deprecated Python Versions --------------------------- -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. - +Supported Python Versions +------------------------- +Python >= 3.5 + +Deprecated Python Versions +-------------------------- +Python == 2.7. Python 2.7 support will be removed on January 1, 2020. + Dependencies ------------ diff --git a/contrib/libs/grpc/src/python/grpcio_testing/grpc_testing/_server/_service.py b/contrib/libs/grpc/src/python/grpcio_testing/grpc_testing/_server/_service.py index 59d901491a..a65628a121 100644 --- a/contrib/libs/grpc/src/python/grpcio_testing/grpc_testing/_server/_service.py +++ b/contrib/libs/grpc/src/python/grpcio_testing/grpc_testing/_server/_service.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import copy +import copy import grpc @@ -60,7 +60,7 @@ def _stream_response(argument, implementation, rpc, servicer_context): else: while True: try: - response = copy.deepcopy(next(response_iterator)) + response = copy.deepcopy(next(response_iterator)) except StopIteration: rpc.stream_response_complete() break diff --git a/contrib/libs/grpc/src/python/grpcio_testing/grpc_testing/_server/_servicer_context.py b/contrib/libs/grpc/src/python/grpcio_testing/grpc_testing/_server/_servicer_context.py index abf6c6b565..c63750f978 100644 --- a/contrib/libs/grpc/src/python/grpcio_testing/grpc_testing/_server/_servicer_context.py +++ b/contrib/libs/grpc/src/python/grpcio_testing/grpc_testing/_server/_servicer_context.py @@ -56,9 +56,9 @@ class ServicerContext(grpc.ServicerContext): def auth_context(self): raise NotImplementedError() - def set_compression(self): - raise NotImplementedError() - + def set_compression(self): + raise NotImplementedError() + def send_initial_metadata(self, initial_metadata): initial_metadata_sent = self._rpc.send_initial_metadata( _common.fuss_with_metadata(initial_metadata)) @@ -66,17 +66,17 @@ class ServicerContext(grpc.ServicerContext): raise ValueError( 'ServicerContext.send_initial_metadata called too late!') - def disable_next_message_compression(self): - raise NotImplementedError() - + def disable_next_message_compression(self): + raise NotImplementedError() + def set_trailing_metadata(self, trailing_metadata): self._rpc.set_trailing_metadata( _common.fuss_with_metadata(trailing_metadata)) def abort(self, code, details): - with self._rpc._condition: - self._rpc._abort(code, details) - raise Exception() + with self._rpc._condition: + self._rpc._abort(code, details) + raise Exception() def abort_with_status(self, status): raise NotImplementedError() diff --git a/contrib/libs/grpc/src/python/grpcio_testing/setup.py b/contrib/libs/grpc/src/python/grpcio_testing/setup.py index ff54dd72e6..1946793fe6 100644 --- a/contrib/libs/grpc/src/python/grpcio_testing/setup.py +++ b/contrib/libs/grpc/src/python/grpcio_testing/setup.py @@ -18,9 +18,9 @@ import sys import setuptools -_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__)) -_README_PATH = os.path.join(_PACKAGE_PATH, 'README.rst') - +_PACKAGE_PATH = os.path.realpath(os.path.dirname(__file__)) +_README_PATH = os.path.join(_PACKAGE_PATH, 'README.rst') + # Ensure we're in the proper directory whether or not we're being used by pip. os.chdir(os.path.dirname(os.path.abspath(__file__))) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/commands.py b/contrib/libs/grpc/src/python/grpcio_tests/commands.py index d421d6fe61..889b0bd9dc 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/commands.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/commands.py @@ -1,111 +1,111 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Provides distutils command classes for the gRPC Python setup process.""" - -from distutils import errors as _errors -import glob -import os -import os.path -import platform -import re -import shutil -import sys - -import setuptools -from setuptools.command import build_ext -from setuptools.command import build_py -from setuptools.command import easy_install -from setuptools.command import install -from setuptools.command import test - -PYTHON_STEM = os.path.dirname(os.path.abspath(__file__)) -GRPC_STEM = os.path.abspath(PYTHON_STEM + '../../../../') -GRPC_PROTO_STEM = os.path.join(GRPC_STEM, 'src', 'proto') -PROTO_STEM = os.path.join(PYTHON_STEM, 'src', 'proto') -PYTHON_PROTO_TOP_LEVEL = os.path.join(PYTHON_STEM, 'src') - - -class CommandError(object): - pass - - -class GatherProto(setuptools.Command): - - description = 'gather proto dependencies' - user_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - # TODO(atash) ensure that we're running from the repository directory when - # this command is used - try: - shutil.rmtree(PROTO_STEM) - except Exception as error: - # We don't care if this command fails - pass - shutil.copytree(GRPC_PROTO_STEM, PROTO_STEM) - for root, _, _ in os.walk(PYTHON_PROTO_TOP_LEVEL): - path = os.path.join(root, '__init__.py') - open(path, 'a').close() - - -class BuildPy(build_py.build_py): - """Custom project build command.""" - - def run(self): - try: - self.run_command('build_package_protos') - except CommandError as error: - sys.stderr.write('warning: %s\n' % error.message) - build_py.build_py.run(self) - - -class TestLite(setuptools.Command): - """Command to run tests without fetching or building anything.""" - - description = 'run tests without fetching or building anything.' - user_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - # distutils requires this override. - pass - - def run(self): - self._add_eggs_to_path() - - import tests - loader = tests.Loader() - loader.loadTestsFromNames(['tests']) - runner = tests.Runner(dedicated_threads=True) - result = runner.run(loader.suite) - if not result.wasSuccessful(): - sys.exit('Test failure') - - def _add_eggs_to_path(self): - """Fetch install and test requirements""" - self.distribution.fetch_build_eggs(self.distribution.install_requires) - self.distribution.fetch_build_eggs(self.distribution.tests_require) - - +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Provides distutils command classes for the gRPC Python setup process.""" + +from distutils import errors as _errors +import glob +import os +import os.path +import platform +import re +import shutil +import sys + +import setuptools +from setuptools.command import build_ext +from setuptools.command import build_py +from setuptools.command import easy_install +from setuptools.command import install +from setuptools.command import test + +PYTHON_STEM = os.path.dirname(os.path.abspath(__file__)) +GRPC_STEM = os.path.abspath(PYTHON_STEM + '../../../../') +GRPC_PROTO_STEM = os.path.join(GRPC_STEM, 'src', 'proto') +PROTO_STEM = os.path.join(PYTHON_STEM, 'src', 'proto') +PYTHON_PROTO_TOP_LEVEL = os.path.join(PYTHON_STEM, 'src') + + +class CommandError(object): + pass + + +class GatherProto(setuptools.Command): + + description = 'gather proto dependencies' + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + # TODO(atash) ensure that we're running from the repository directory when + # this command is used + try: + shutil.rmtree(PROTO_STEM) + except Exception as error: + # We don't care if this command fails + pass + shutil.copytree(GRPC_PROTO_STEM, PROTO_STEM) + for root, _, _ in os.walk(PYTHON_PROTO_TOP_LEVEL): + path = os.path.join(root, '__init__.py') + open(path, 'a').close() + + +class BuildPy(build_py.build_py): + """Custom project build command.""" + + def run(self): + try: + self.run_command('build_package_protos') + except CommandError as error: + sys.stderr.write('warning: %s\n' % error.message) + build_py.build_py.run(self) + + +class TestLite(setuptools.Command): + """Command to run tests without fetching or building anything.""" + + description = 'run tests without fetching or building anything.' + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + # distutils requires this override. + pass + + def run(self): + self._add_eggs_to_path() + + import tests + loader = tests.Loader() + loader.loadTestsFromNames(['tests']) + runner = tests.Runner(dedicated_threads=True) + result = runner.run(loader.suite) + if not result.wasSuccessful(): + sys.exit('Test failure') + + def _add_eggs_to_path(self): + """Fetch install and test requirements""" + self.distribution.fetch_build_eggs(self.distribution.install_requires) + self.distribution.fetch_build_eggs(self.distribution.tests_require) + + class TestPy3Only(setuptools.Command): """Command to run tests for Python 3+ features. @@ -137,170 +137,170 @@ class TestPy3Only(setuptools.Command): self.distribution.fetch_build_eggs(self.distribution.tests_require) -class TestAio(setuptools.Command): - """Command to run aio tests without fetching or building anything.""" - - description = 'run aio tests without fetching or building anything.' - user_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - self._add_eggs_to_path() - - import tests - loader = tests.Loader() - loader.loadTestsFromNames(['tests_aio']) - # Even without dedicated threads, the framework will somehow spawn a - # new thread for tests to run upon. New thread doesn't have event loop - # attached by default, so initialization is needed. 
- runner = tests.Runner(dedicated_threads=False) - result = runner.run(loader.suite) - if not result.wasSuccessful(): - sys.exit('Test failure') - - def _add_eggs_to_path(self): - """Fetch install and test requirements""" - self.distribution.fetch_build_eggs(self.distribution.install_requires) - self.distribution.fetch_build_eggs(self.distribution.tests_require) - - -class TestGevent(setuptools.Command): - """Command to run tests w/gevent.""" - - BANNED_TESTS = ( - # Fork support is not compatible with gevent - 'fork._fork_interop_test.ForkInteropTest', - # These tests send a lot of RPCs and are really slow on gevent. They will - # eventually succeed, but need to dig into performance issues. - 'unit._cython._no_messages_server_completion_queue_per_call_test.Test.test_rpcs', - 'unit._cython._no_messages_single_server_completion_queue_test.Test.test_rpcs', - 'unit._compression_test', - # TODO(https://github.com/grpc/grpc/issues/16890) enable this test - 'unit._cython._channel_test.ChannelTest.test_multiple_channels_lonely_connectivity', - # I have no idea why this doesn't work in gevent, but it shouldn't even be - # using the c-core - 'testing._client_test.ClientTest.test_infinite_request_stream_real_time', - # TODO(https://github.com/grpc/grpc/issues/15743) enable this test - 'unit._session_cache_test.SSLSessionCacheTest.testSSLSessionCacheLRU', - # TODO(https://github.com/grpc/grpc/issues/14789) enable this test - 'unit._server_ssl_cert_config_test', - # TODO(https://github.com/grpc/grpc/issues/14901) enable this test - 'protoc_plugin._python_plugin_test.PythonPluginTest', +class TestAio(setuptools.Command): + """Command to run aio tests without fetching or building anything.""" + + description = 'run aio tests without fetching or building anything.' + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + self._add_eggs_to_path() + + import tests + loader = tests.Loader() + loader.loadTestsFromNames(['tests_aio']) + # Even without dedicated threads, the framework will somehow spawn a + # new thread for tests to run upon. New thread doesn't have event loop + # attached by default, so initialization is needed. + runner = tests.Runner(dedicated_threads=False) + result = runner.run(loader.suite) + if not result.wasSuccessful(): + sys.exit('Test failure') + + def _add_eggs_to_path(self): + """Fetch install and test requirements""" + self.distribution.fetch_build_eggs(self.distribution.install_requires) + self.distribution.fetch_build_eggs(self.distribution.tests_require) + + +class TestGevent(setuptools.Command): + """Command to run tests w/gevent.""" + + BANNED_TESTS = ( + # Fork support is not compatible with gevent + 'fork._fork_interop_test.ForkInteropTest', + # These tests send a lot of RPCs and are really slow on gevent. They will + # eventually succeed, but need to dig into performance issues. 
+ 'unit._cython._no_messages_server_completion_queue_per_call_test.Test.test_rpcs', + 'unit._cython._no_messages_single_server_completion_queue_test.Test.test_rpcs', + 'unit._compression_test', + # TODO(https://github.com/grpc/grpc/issues/16890) enable this test + 'unit._cython._channel_test.ChannelTest.test_multiple_channels_lonely_connectivity', + # I have no idea why this doesn't work in gevent, but it shouldn't even be + # using the c-core + 'testing._client_test.ClientTest.test_infinite_request_stream_real_time', + # TODO(https://github.com/grpc/grpc/issues/15743) enable this test + 'unit._session_cache_test.SSLSessionCacheTest.testSSLSessionCacheLRU', + # TODO(https://github.com/grpc/grpc/issues/14789) enable this test + 'unit._server_ssl_cert_config_test', + # TODO(https://github.com/grpc/grpc/issues/14901) enable this test + 'protoc_plugin._python_plugin_test.PythonPluginTest', 'protoc_plugin._python_plugin_test.SimpleStubsPluginTest', - # Beta API is unsupported for gevent - 'protoc_plugin.beta_python_plugin_test', - 'unit.beta._beta_features_test', - # TODO(https://github.com/grpc/grpc/issues/15411) unpin gevent version - # This test will stuck while running higher version of gevent - 'unit._auth_context_test.AuthContextTest.testSessionResumption', - # TODO(https://github.com/grpc/grpc/issues/15411) enable these tests - 'unit._channel_ready_future_test.ChannelReadyFutureTest.test_immediately_connectable_channel_connectivity', - "unit._cython._channel_test.ChannelTest.test_single_channel_lonely_connectivity", - 'unit._exit_test.ExitTest.test_in_flight_unary_unary_call', - 'unit._exit_test.ExitTest.test_in_flight_unary_stream_call', - 'unit._exit_test.ExitTest.test_in_flight_stream_unary_call', - 'unit._exit_test.ExitTest.test_in_flight_stream_stream_call', - 'unit._exit_test.ExitTest.test_in_flight_partial_unary_stream_call', - 'unit._exit_test.ExitTest.test_in_flight_partial_stream_unary_call', - 'unit._exit_test.ExitTest.test_in_flight_partial_stream_stream_call', - # TODO(https://github.com/grpc/grpc/issues/18980): Reenable. 
- 'unit._signal_handling_test.SignalHandlingTest', - 'unit._metadata_flags_test', - 'health_check._health_servicer_test.HealthServicerTest.test_cancelled_watch_removed_from_watch_list', - # TODO(https://github.com/grpc/grpc/issues/17330) enable these three tests - 'channelz._channelz_servicer_test.ChannelzServicerTest.test_many_subchannels', - 'channelz._channelz_servicer_test.ChannelzServicerTest.test_many_subchannels_and_sockets', - 'channelz._channelz_servicer_test.ChannelzServicerTest.test_streaming_rpc', - # TODO(https://github.com/grpc/grpc/issues/15411) enable this test - 'unit._cython._channel_test.ChannelTest.test_negative_deadline_connectivity', - # TODO(https://github.com/grpc/grpc/issues/15411) enable this test - 'unit._local_credentials_test.LocalCredentialsTest', + # Beta API is unsupported for gevent + 'protoc_plugin.beta_python_plugin_test', + 'unit.beta._beta_features_test', + # TODO(https://github.com/grpc/grpc/issues/15411) unpin gevent version + # This test will stuck while running higher version of gevent + 'unit._auth_context_test.AuthContextTest.testSessionResumption', + # TODO(https://github.com/grpc/grpc/issues/15411) enable these tests + 'unit._channel_ready_future_test.ChannelReadyFutureTest.test_immediately_connectable_channel_connectivity', + "unit._cython._channel_test.ChannelTest.test_single_channel_lonely_connectivity", + 'unit._exit_test.ExitTest.test_in_flight_unary_unary_call', + 'unit._exit_test.ExitTest.test_in_flight_unary_stream_call', + 'unit._exit_test.ExitTest.test_in_flight_stream_unary_call', + 'unit._exit_test.ExitTest.test_in_flight_stream_stream_call', + 'unit._exit_test.ExitTest.test_in_flight_partial_unary_stream_call', + 'unit._exit_test.ExitTest.test_in_flight_partial_stream_unary_call', + 'unit._exit_test.ExitTest.test_in_flight_partial_stream_stream_call', + # TODO(https://github.com/grpc/grpc/issues/18980): Reenable. + 'unit._signal_handling_test.SignalHandlingTest', + 'unit._metadata_flags_test', + 'health_check._health_servicer_test.HealthServicerTest.test_cancelled_watch_removed_from_watch_list', + # TODO(https://github.com/grpc/grpc/issues/17330) enable these three tests + 'channelz._channelz_servicer_test.ChannelzServicerTest.test_many_subchannels', + 'channelz._channelz_servicer_test.ChannelzServicerTest.test_many_subchannels_and_sockets', + 'channelz._channelz_servicer_test.ChannelzServicerTest.test_streaming_rpc', + # TODO(https://github.com/grpc/grpc/issues/15411) enable this test + 'unit._cython._channel_test.ChannelTest.test_negative_deadline_connectivity', + # TODO(https://github.com/grpc/grpc/issues/15411) enable this test + 'unit._local_credentials_test.LocalCredentialsTest', # TODO(https://github.com/grpc/grpc/issues/22020) LocalCredentials # aren't supported with custom io managers. 'unit._contextvars_propagation_test', - 'testing._time_test.StrictRealTimeTest', - ) - BANNED_WINDOWS_TESTS = ( - # TODO(https://github.com/grpc/grpc/pull/15411) enable this test + 'testing._time_test.StrictRealTimeTest', + ) + BANNED_WINDOWS_TESTS = ( + # TODO(https://github.com/grpc/grpc/pull/15411) enable this test 'unit._dns_resolver_test.DNSResolverTest.test_connect_loopback', # TODO(https://github.com/grpc/grpc/pull/15411) enable this test 'unit._server_test.ServerTest.test_failed_port_binding_exception', ) - description = 'run tests with gevent. Assumes grpc/gevent are installed' - user_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - # distutils requires this override. 
- pass - - def run(self): - from gevent import monkey - monkey.patch_all() - - import tests - - import grpc.experimental.gevent - grpc.experimental.gevent.init_gevent() - - import gevent - - import tests - loader = tests.Loader() - loader.loadTestsFromNames(['tests']) - runner = tests.Runner() - if sys.platform == 'win32': - runner.skip_tests(self.BANNED_TESTS + self.BANNED_WINDOWS_TESTS) - else: - runner.skip_tests(self.BANNED_TESTS) - result = gevent.spawn(runner.run, loader.suite) - result.join() - if not result.value.wasSuccessful(): - sys.exit('Test failure') - - -class RunInterop(test.test): - - description = 'run interop test client/server' + description = 'run tests with gevent. Assumes grpc/gevent are installed' + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + # distutils requires this override. + pass + + def run(self): + from gevent import monkey + monkey.patch_all() + + import tests + + import grpc.experimental.gevent + grpc.experimental.gevent.init_gevent() + + import gevent + + import tests + loader = tests.Loader() + loader.loadTestsFromNames(['tests']) + runner = tests.Runner() + if sys.platform == 'win32': + runner.skip_tests(self.BANNED_TESTS + self.BANNED_WINDOWS_TESTS) + else: + runner.skip_tests(self.BANNED_TESTS) + result = gevent.spawn(runner.run, loader.suite) + result.join() + if not result.value.wasSuccessful(): + sys.exit('Test failure') + + +class RunInterop(test.test): + + description = 'run interop test client/server' user_options = [ ('args=', None, 'pass-thru arguments for the client/server'), ('client', None, 'flag indicating to run the client'), ('server', None, 'flag indicating to run the server'), ('use-asyncio', None, 'flag indicating to run the asyncio stack') ] - - def initialize_options(self): - self.args = '' - self.client = False - self.server = False + + def initialize_options(self): + self.args = '' + self.client = False + self.server = False self.use_asyncio = False - - def finalize_options(self): - if self.client and self.server: - raise _errors.DistutilsOptionError( - 'you may only specify one of client or server') - - def run(self): - if self.distribution.install_requires: - self.distribution.fetch_build_eggs( - self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) - if self.client: - self.run_client() - elif self.server: - self.run_server() - - def run_server(self): - # We import here to ensure that our setuptools parent has had a chance to - # edit the Python system path. + + def finalize_options(self): + if self.client and self.server: + raise _errors.DistutilsOptionError( + 'you may only specify one of client or server') + + def run(self): + if self.distribution.install_requires: + self.distribution.fetch_build_eggs( + self.distribution.install_requires) + if self.distribution.tests_require: + self.distribution.fetch_build_eggs(self.distribution.tests_require) + if self.client: + self.run_client() + elif self.server: + self.run_server() + + def run_server(self): + # We import here to ensure that our setuptools parent has had a chance to + # edit the Python system path. if self.use_asyncio: import asyncio from tests_aio.interop import server @@ -310,35 +310,35 @@ class RunInterop(test.test): from tests.interop import server sys.argv[1:] = self.args.split() server.serve() - - def run_client(self): - # We import here to ensure that our setuptools parent has had a chance to - # edit the Python system path. 
- from tests.interop import client - sys.argv[1:] = self.args.split() - client.test_interoperability() - - -class RunFork(test.test): - - description = 'run fork test client' - user_options = [('args=', 'a', 'pass-thru arguments for the client')] - - def initialize_options(self): - self.args = '' - - def finalize_options(self): - # distutils requires this override. - pass - - def run(self): - if self.distribution.install_requires: - self.distribution.fetch_build_eggs( - self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) - # We import here to ensure that our setuptools parent has had a chance to - # edit the Python system path. - from tests.fork import client - sys.argv[1:] = self.args.split() - client.test_fork() + + def run_client(self): + # We import here to ensure that our setuptools parent has had a chance to + # edit the Python system path. + from tests.interop import client + sys.argv[1:] = self.args.split() + client.test_interoperability() + + +class RunFork(test.test): + + description = 'run fork test client' + user_options = [('args=', 'a', 'pass-thru arguments for the client')] + + def initialize_options(self): + self.args = '' + + def finalize_options(self): + # distutils requires this override. + pass + + def run(self): + if self.distribution.install_requires: + self.distribution.fetch_build_eggs( + self.distribution.install_requires) + if self.distribution.tests_require: + self.distribution.fetch_build_eggs(self.distribution.tests_require) + # We import here to ensure that our setuptools parent has had a chance to + # edit the Python system path. + from tests.fork import client + sys.argv[1:] = self.args.split() + client.test_fork() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py b/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py index 852194b4b0..219b336a42 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py @@ -1,17 +1,17 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_tests/grpc_version.py.template`!!! - +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_tests/grpc_version.py.template`!!! 
+ VERSION = '1.33.2' diff --git a/contrib/libs/grpc/src/python/grpcio_tests/setup.py b/contrib/libs/grpc/src/python/grpcio_tests/setup.py index e4167f51dd..87cccda425 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/setup.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/setup.py @@ -1,99 +1,99 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""A setup module for the gRPC Python package.""" - +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""A setup module for the gRPC Python package.""" + import multiprocessing -import os -import os.path -import sys - -import setuptools - -import grpc_tools.command - -PY3 = sys.version_info.major == 3 - -# Ensure we're in the proper directory whether or not we're being used by pip. -os.chdir(os.path.dirname(os.path.abspath(__file__))) - -# Break import-style to ensure we can actually find our in-repo dependencies. -import commands -import grpc_version - -LICENSE = 'Apache License 2.0' - -PACKAGE_DIRECTORIES = { - '': '.', -} - -INSTALL_REQUIRES = ( +import os +import os.path +import sys + +import setuptools + +import grpc_tools.command + +PY3 = sys.version_info.major == 3 + +# Ensure we're in the proper directory whether or not we're being used by pip. +os.chdir(os.path.dirname(os.path.abspath(__file__))) + +# Break import-style to ensure we can actually find our in-repo dependencies. +import commands +import grpc_version + +LICENSE = 'Apache License 2.0' + +PACKAGE_DIRECTORIES = { + '': '.', +} + +INSTALL_REQUIRES = ( 'coverage>=4.0', 'grpcio>={version}'.format(version=grpc_version.VERSION), - 'grpcio-channelz>={version}'.format(version=grpc_version.VERSION), - 'grpcio-status>={version}'.format(version=grpc_version.VERSION), - 'grpcio-tools>={version}'.format(version=grpc_version.VERSION), - 'grpcio-health-checking>={version}'.format(version=grpc_version.VERSION), + 'grpcio-channelz>={version}'.format(version=grpc_version.VERSION), + 'grpcio-status>={version}'.format(version=grpc_version.VERSION), + 'grpcio-tools>={version}'.format(version=grpc_version.VERSION), + 'grpcio-health-checking>={version}'.format(version=grpc_version.VERSION), 'oauth2client>=1.4.7', 'protobuf>=3.6.0', 'six>=1.10', 'google-auth>=1.17.2', 'requests>=2.14.2') - -if not PY3: + +if not PY3: INSTALL_REQUIRES += ('futures>=2.2.0', 'enum34>=1.0.4') - -COMMAND_CLASS = { - # Run `preprocess` *before* doing any packaging! 
- 'preprocess': commands.GatherProto, - 'build_package_protos': grpc_tools.command.BuildPackageProtos, - 'build_py': commands.BuildPy, - 'run_fork': commands.RunFork, - 'run_interop': commands.RunInterop, - 'test_lite': commands.TestLite, - 'test_gevent': commands.TestGevent, - 'test_aio': commands.TestAio, + +COMMAND_CLASS = { + # Run `preprocess` *before* doing any packaging! + 'preprocess': commands.GatherProto, + 'build_package_protos': grpc_tools.command.BuildPackageProtos, + 'build_py': commands.BuildPy, + 'run_fork': commands.RunFork, + 'run_interop': commands.RunInterop, + 'test_lite': commands.TestLite, + 'test_gevent': commands.TestGevent, + 'test_aio': commands.TestAio, 'test_py3_only': commands.TestPy3Only, -} - -PACKAGE_DATA = { - 'tests.interop': [ - 'credentials/ca.pem', - 'credentials/server1.key', - 'credentials/server1.pem', - ], +} + +PACKAGE_DATA = { + 'tests.interop': [ + 'credentials/ca.pem', + 'credentials/server1.key', + 'credentials/server1.pem', + ], 'tests.protoc_plugin.protos.invocation_testing': ['same.proto',], - 'tests.protoc_plugin.protos.invocation_testing.split_messages': [ - 'messages.proto', - ], - 'tests.protoc_plugin.protos.invocation_testing.split_services': [ - 'services.proto', - ], - 'tests.testing.proto': [ - 'requests.proto', - 'services.proto', - ], - 'tests.unit': [ - 'credentials/ca.pem', - 'credentials/server1.key', - 'credentials/server1.pem', - ], - 'tests': ['tests.json'], -} - -TEST_SUITE = 'tests' -TEST_LOADER = 'tests:Loader' -TEST_RUNNER = 'tests:Runner' -TESTS_REQUIRE = INSTALL_REQUIRES - -PACKAGES = setuptools.find_packages('.') - + 'tests.protoc_plugin.protos.invocation_testing.split_messages': [ + 'messages.proto', + ], + 'tests.protoc_plugin.protos.invocation_testing.split_services': [ + 'services.proto', + ], + 'tests.testing.proto': [ + 'requests.proto', + 'services.proto', + ], + 'tests.unit': [ + 'credentials/ca.pem', + 'credentials/server1.key', + 'credentials/server1.pem', + ], + 'tests': ['tests.json'], +} + +TEST_SUITE = 'tests' +TEST_LOADER = 'tests:Loader' +TEST_RUNNER = 'tests:Runner' +TESTS_REQUIRE = INSTALL_REQUIRES + +PACKAGES = setuptools.find_packages('.') + if __name__ == "__main__": multiprocessing.freeze_support() setuptools.setup( diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py index ac7756c090..d2466fd022 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py @@ -1,21 +1,21 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -from tests import _loader -from tests import _runner - -Loader = _loader.Loader -Runner = _runner.Runner +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
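The tests/__init__.py diff that starts just above re-exports Loader and Runner. As an aside (not from the patch), this is roughly how the test commands drive them, the same sequence TestGevent.run uses earlier minus the gevent spawning; it assumes grpcio and the in-repo tests package are importable.

    # Sketch only - discover *_test modules under tests/ and run them.
    import sys

    import tests

    loader = tests.Loader()
    loader.loadTestsFromNames(['tests'])
    runner = tests.Runner()
    result = runner.run(loader.suite)       # a unittest.TestResult subclass
    if not result.wasSuccessful():
        sys.exit('Test failure')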
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from tests import _loader +from tests import _runner + +Loader = _loader.Loader +Runner = _runner.Runner diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py index dfdbfc9fe0..80c107aa8e 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py @@ -1,106 +1,106 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import importlib -import pkgutil -import re -import unittest - -import coverage - -TEST_MODULE_REGEX = r'^.*_test$' - - -class Loader(object): - """Test loader for setuptools test suite support. - - Attributes: - suite (unittest.TestSuite): All tests collected by the loader. - loader (unittest.TestLoader): Standard Python unittest loader to be ran per - module discovered. - module_matcher (re.RegexObject): A regular expression object to match - against module names and determine whether or not the discovered module - contributes to the test suite. - """ - - def __init__(self): - self.suite = unittest.TestSuite() - self.loader = unittest.TestLoader() - self.module_matcher = re.compile(TEST_MODULE_REGEX) - - def loadTestsFromNames(self, names, module=None): - """Function mirroring TestLoader::loadTestsFromNames, as expected by - setuptools.setup argument `test_loader`.""" - # ensure that we capture decorators and definitions (else our coverage - # measure unnecessarily suffers) - coverage_context = coverage.Coverage(data_suffix=True) - coverage_context.start() - imported_modules = tuple( - importlib.import_module(name) for name in names) - for imported_module in imported_modules: - self.visit_module(imported_module) - for imported_module in imported_modules: - try: - package_paths = imported_module.__path__ - except AttributeError: - continue - self.walk_packages(package_paths) - coverage_context.stop() - coverage_context.save() - return self.suite - - def walk_packages(self, package_paths): - """Walks over the packages, dispatching `visit_module` calls. - - Args: - package_paths (list): A list of paths over which to walk through modules - along. - """ - for importer, module_name, is_package in ( - pkgutil.walk_packages(package_paths)): - module = importer.find_module(module_name).load_module(module_name) - self.visit_module(module) - - def visit_module(self, module): - """Visits the module, adding discovered tests to the test suite. 
- - Args: - module (module): Module to match against self.module_matcher; if matched - it has its tests loaded via self.loader into self.suite. - """ - if self.module_matcher.match(module.__name__): - module_suite = self.loader.loadTestsFromModule(module) - self.suite.addTest(module_suite) - - -def iterate_suite_cases(suite): - """Generator over all unittest.TestCases in a unittest.TestSuite. - - Args: - suite (unittest.TestSuite): Suite to iterate over in the generator. - - Returns: - generator: A generator over all unittest.TestCases in `suite`. - """ - for item in suite: - if isinstance(item, unittest.TestSuite): - for child_item in iterate_suite_cases(item): - yield child_item - elif isinstance(item, unittest.TestCase): - yield item - else: - raise ValueError('unexpected suite item of type {}'.format( - type(item))) +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import importlib +import pkgutil +import re +import unittest + +import coverage + +TEST_MODULE_REGEX = r'^.*_test$' + + +class Loader(object): + """Test loader for setuptools test suite support. + + Attributes: + suite (unittest.TestSuite): All tests collected by the loader. + loader (unittest.TestLoader): Standard Python unittest loader to be ran per + module discovered. + module_matcher (re.RegexObject): A regular expression object to match + against module names and determine whether or not the discovered module + contributes to the test suite. + """ + + def __init__(self): + self.suite = unittest.TestSuite() + self.loader = unittest.TestLoader() + self.module_matcher = re.compile(TEST_MODULE_REGEX) + + def loadTestsFromNames(self, names, module=None): + """Function mirroring TestLoader::loadTestsFromNames, as expected by + setuptools.setup argument `test_loader`.""" + # ensure that we capture decorators and definitions (else our coverage + # measure unnecessarily suffers) + coverage_context = coverage.Coverage(data_suffix=True) + coverage_context.start() + imported_modules = tuple( + importlib.import_module(name) for name in names) + for imported_module in imported_modules: + self.visit_module(imported_module) + for imported_module in imported_modules: + try: + package_paths = imported_module.__path__ + except AttributeError: + continue + self.walk_packages(package_paths) + coverage_context.stop() + coverage_context.save() + return self.suite + + def walk_packages(self, package_paths): + """Walks over the packages, dispatching `visit_module` calls. + + Args: + package_paths (list): A list of paths over which to walk through modules + along. + """ + for importer, module_name, is_package in ( + pkgutil.walk_packages(package_paths)): + module = importer.find_module(module_name).load_module(module_name) + self.visit_module(module) + + def visit_module(self, module): + """Visits the module, adding discovered tests to the test suite. 
+ + Args: + module (module): Module to match against self.module_matcher; if matched + it has its tests loaded via self.loader into self.suite. + """ + if self.module_matcher.match(module.__name__): + module_suite = self.loader.loadTestsFromModule(module) + self.suite.addTest(module_suite) + + +def iterate_suite_cases(suite): + """Generator over all unittest.TestCases in a unittest.TestSuite. + + Args: + suite (unittest.TestSuite): Suite to iterate over in the generator. + + Returns: + generator: A generator over all unittest.TestCases in `suite`. + """ + for item in suite: + if isinstance(item, unittest.TestSuite): + for child_item in iterate_suite_cases(item): + yield child_item + elif isinstance(item, unittest.TestCase): + yield item + else: + raise ValueError('unexpected suite item of type {}'.format( + type(item))) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py index c390fe9d69..389d5f4f96 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py @@ -1,111 +1,111 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import collections -import itertools -import traceback -import unittest -from xml.etree import ElementTree - -import coverage -from six import moves - -from tests import _loader - - -class CaseResult( - collections.namedtuple('CaseResult', [ - 'id', 'name', 'kind', 'stdout', 'stderr', 'skip_reason', 'traceback' - ])): - """A serializable result of a single test case. - - Attributes: - id (object): Any serializable object used to denote the identity of this - test case. - name (str or None): A human-readable name of the test case. - kind (CaseResult.Kind): The kind of test result. - stdout (object or None): Output on stdout, or None if nothing was captured. - stderr (object or None): Output on stderr, or None if nothing was captured. - skip_reason (object or None): The reason the test was skipped. Must be - something if self.kind is CaseResult.Kind.SKIP, else None. - traceback (object or None): The traceback of the test. Must be something if - self.kind is CaseResult.Kind.{ERROR, FAILURE, EXPECTED_FAILURE}, else - None. - """ - - class Kind(object): - UNTESTED = 'untested' - RUNNING = 'running' - ERROR = 'error' - FAILURE = 'failure' - SUCCESS = 'success' - SKIP = 'skip' - EXPECTED_FAILURE = 'expected failure' - UNEXPECTED_SUCCESS = 'unexpected success' - - def __new__(cls, - id=None, - name=None, - kind=None, - stdout=None, - stderr=None, - skip_reason=None, - traceback=None): - """Helper keyword constructor for the namedtuple. 
- - See this class' attributes for information on the arguments.""" - assert id is not None - assert name is None or isinstance(name, str) - if kind is CaseResult.Kind.UNTESTED: - pass - elif kind is CaseResult.Kind.RUNNING: - pass - elif kind is CaseResult.Kind.ERROR: - assert traceback is not None - elif kind is CaseResult.Kind.FAILURE: - assert traceback is not None - elif kind is CaseResult.Kind.SUCCESS: - pass - elif kind is CaseResult.Kind.SKIP: - assert skip_reason is not None - elif kind is CaseResult.Kind.EXPECTED_FAILURE: - assert traceback is not None - elif kind is CaseResult.Kind.UNEXPECTED_SUCCESS: - pass - else: - assert False - return super(cls, CaseResult).__new__(cls, id, name, kind, stdout, - stderr, skip_reason, traceback) - - def updated(self, - name=None, - kind=None, - stdout=None, - stderr=None, - skip_reason=None, - traceback=None): - """Get a new validated CaseResult with the fields updated. - - See this class' attributes for information on the arguments.""" - name = self.name if name is None else name - kind = self.kind if kind is None else kind - stdout = self.stdout if stdout is None else stdout - stderr = self.stderr if stderr is None else stderr - skip_reason = self.skip_reason if skip_reason is None else skip_reason - traceback = self.traceback if traceback is None else traceback +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import collections +import itertools +import traceback +import unittest +from xml.etree import ElementTree + +import coverage +from six import moves + +from tests import _loader + + +class CaseResult( + collections.namedtuple('CaseResult', [ + 'id', 'name', 'kind', 'stdout', 'stderr', 'skip_reason', 'traceback' + ])): + """A serializable result of a single test case. + + Attributes: + id (object): Any serializable object used to denote the identity of this + test case. + name (str or None): A human-readable name of the test case. + kind (CaseResult.Kind): The kind of test result. + stdout (object or None): Output on stdout, or None if nothing was captured. + stderr (object or None): Output on stderr, or None if nothing was captured. + skip_reason (object or None): The reason the test was skipped. Must be + something if self.kind is CaseResult.Kind.SKIP, else None. + traceback (object or None): The traceback of the test. Must be something if + self.kind is CaseResult.Kind.{ERROR, FAILURE, EXPECTED_FAILURE}, else + None. + """ + + class Kind(object): + UNTESTED = 'untested' + RUNNING = 'running' + ERROR = 'error' + FAILURE = 'failure' + SUCCESS = 'success' + SKIP = 'skip' + EXPECTED_FAILURE = 'expected failure' + UNEXPECTED_SUCCESS = 'unexpected success' + + def __new__(cls, + id=None, + name=None, + kind=None, + stdout=None, + stderr=None, + skip_reason=None, + traceback=None): + """Helper keyword constructor for the namedtuple. 
+ + See this class' attributes for information on the arguments.""" + assert id is not None + assert name is None or isinstance(name, str) + if kind is CaseResult.Kind.UNTESTED: + pass + elif kind is CaseResult.Kind.RUNNING: + pass + elif kind is CaseResult.Kind.ERROR: + assert traceback is not None + elif kind is CaseResult.Kind.FAILURE: + assert traceback is not None + elif kind is CaseResult.Kind.SUCCESS: + pass + elif kind is CaseResult.Kind.SKIP: + assert skip_reason is not None + elif kind is CaseResult.Kind.EXPECTED_FAILURE: + assert traceback is not None + elif kind is CaseResult.Kind.UNEXPECTED_SUCCESS: + pass + else: + assert False + return super(cls, CaseResult).__new__(cls, id, name, kind, stdout, + stderr, skip_reason, traceback) + + def updated(self, + name=None, + kind=None, + stdout=None, + stderr=None, + skip_reason=None, + traceback=None): + """Get a new validated CaseResult with the fields updated. + + See this class' attributes for information on the arguments.""" + name = self.name if name is None else name + kind = self.kind if kind is None else kind + stdout = self.stdout if stdout is None else stdout + stderr = self.stderr if stderr is None else stderr + skip_reason = self.skip_reason if skip_reason is None else skip_reason + traceback = self.traceback if traceback is None else traceback return CaseResult(id=self.id, name=name, kind=kind, @@ -113,288 +113,288 @@ class CaseResult( stderr=stderr, skip_reason=skip_reason, traceback=traceback) - - -class AugmentedResult(unittest.TestResult): - """unittest.Result that keeps track of additional information. - - Uses CaseResult objects to store test-case results, providing additional - information beyond that of the standard Python unittest library, such as - standard output. - - Attributes: - id_map (callable): A unary callable mapping unittest.TestCase objects to - unique identifiers. - cases (dict): A dictionary mapping from the identifiers returned by id_map - to CaseResult objects corresponding to those IDs. - """ - - def __init__(self, id_map): - """Initialize the object with an identifier mapping. - - Arguments: - id_map (callable): Corresponds to the attribute `id_map`.""" - super(AugmentedResult, self).__init__() - self.id_map = id_map - self.cases = None - - def startTestRun(self): - """See unittest.TestResult.startTestRun.""" - super(AugmentedResult, self).startTestRun() - self.cases = dict() - - def startTest(self, test): - """See unittest.TestResult.startTest.""" - super(AugmentedResult, self).startTest(test) - case_id = self.id_map(test) + + +class AugmentedResult(unittest.TestResult): + """unittest.Result that keeps track of additional information. + + Uses CaseResult objects to store test-case results, providing additional + information beyond that of the standard Python unittest library, such as + standard output. + + Attributes: + id_map (callable): A unary callable mapping unittest.TestCase objects to + unique identifiers. + cases (dict): A dictionary mapping from the identifiers returned by id_map + to CaseResult objects corresponding to those IDs. + """ + + def __init__(self, id_map): + """Initialize the object with an identifier mapping. 
+ + Arguments: + id_map (callable): Corresponds to the attribute `id_map`.""" + super(AugmentedResult, self).__init__() + self.id_map = id_map + self.cases = None + + def startTestRun(self): + """See unittest.TestResult.startTestRun.""" + super(AugmentedResult, self).startTestRun() + self.cases = dict() + + def startTest(self, test): + """See unittest.TestResult.startTest.""" + super(AugmentedResult, self).startTest(test) + case_id = self.id_map(test) self.cases[case_id] = CaseResult(id=case_id, name=test.id(), kind=CaseResult.Kind.RUNNING) - - def addError(self, test, err): - """See unittest.TestResult.addError.""" - super(AugmentedResult, self).addError(test, err) - case_id = self.id_map(test) - self.cases[case_id] = self.cases[case_id].updated( - kind=CaseResult.Kind.ERROR, traceback=err) - - def addFailure(self, test, err): - """See unittest.TestResult.addFailure.""" - super(AugmentedResult, self).addFailure(test, err) - case_id = self.id_map(test) - self.cases[case_id] = self.cases[case_id].updated( - kind=CaseResult.Kind.FAILURE, traceback=err) - - def addSuccess(self, test): - """See unittest.TestResult.addSuccess.""" - super(AugmentedResult, self).addSuccess(test) - case_id = self.id_map(test) - self.cases[case_id] = self.cases[case_id].updated( - kind=CaseResult.Kind.SUCCESS) - - def addSkip(self, test, reason): - """See unittest.TestResult.addSkip.""" - super(AugmentedResult, self).addSkip(test, reason) - case_id = self.id_map(test) - self.cases[case_id] = self.cases[case_id].updated( - kind=CaseResult.Kind.SKIP, skip_reason=reason) - - def addExpectedFailure(self, test, err): - """See unittest.TestResult.addExpectedFailure.""" - super(AugmentedResult, self).addExpectedFailure(test, err) - case_id = self.id_map(test) - self.cases[case_id] = self.cases[case_id].updated( - kind=CaseResult.Kind.EXPECTED_FAILURE, traceback=err) - - def addUnexpectedSuccess(self, test): - """See unittest.TestResult.addUnexpectedSuccess.""" - super(AugmentedResult, self).addUnexpectedSuccess(test) - case_id = self.id_map(test) - self.cases[case_id] = self.cases[case_id].updated( - kind=CaseResult.Kind.UNEXPECTED_SUCCESS) - - def set_output(self, test, stdout, stderr): - """Set the output attributes for the CaseResult corresponding to a test. - - Args: - test (unittest.TestCase): The TestCase to set the outputs of. - stdout (str): Output from stdout to assign to self.id_map(test). - stderr (str): Output from stderr to assign to self.id_map(test). - """ - case_id = self.id_map(test) - self.cases[case_id] = self.cases[case_id].updated( - stdout=stdout.decode(), stderr=stderr.decode()) - - def augmented_results(self, filter): - """Convenience method to retrieve filtered case results. - - Args: - filter (callable): A unary predicate to filter over CaseResult objects. - """ - return (self.cases[case_id] - for case_id in self.cases - if filter(self.cases[case_id])) - - -class CoverageResult(AugmentedResult): - """Extension to AugmentedResult adding coverage.py support per test.\ - - Attributes: - coverage_context (coverage.Coverage): coverage.py management object. - """ - - def __init__(self, id_map): - """See AugmentedResult.__init__.""" - super(CoverageResult, self).__init__(id_map=id_map) - self.coverage_context = None - - def startTest(self, test): - """See unittest.TestResult.startTest. 
- - Additionally initializes and begins code coverage tracking.""" - super(CoverageResult, self).startTest(test) - self.coverage_context = coverage.Coverage(data_suffix=True) - self.coverage_context.start() - - def stopTest(self, test): - """See unittest.TestResult.stopTest. - - Additionally stops and deinitializes code coverage tracking.""" - super(CoverageResult, self).stopTest(test) - self.coverage_context.stop() - self.coverage_context.save() - self.coverage_context = None - - -class _Colors(object): - """Namespaced constants for terminal color magic numbers.""" - HEADER = '\033[95m' - INFO = '\033[94m' - OK = '\033[92m' - WARN = '\033[93m' - FAIL = '\033[91m' - BOLD = '\033[1m' - UNDERLINE = '\033[4m' - END = '\033[0m' - - -class TerminalResult(CoverageResult): - """Extension to CoverageResult adding basic terminal reporting.""" - - def __init__(self, out, id_map): - """Initialize the result object. - - Args: - out (file-like): Output file to which terminal-colored live results will - be written. - id_map (callable): See AugmentedResult.__init__. - """ - super(TerminalResult, self).__init__(id_map=id_map) - self.out = out - - def startTestRun(self): - """See unittest.TestResult.startTestRun.""" - super(TerminalResult, self).startTestRun() + + def addError(self, test, err): + """See unittest.TestResult.addError.""" + super(AugmentedResult, self).addError(test, err) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.ERROR, traceback=err) + + def addFailure(self, test, err): + """See unittest.TestResult.addFailure.""" + super(AugmentedResult, self).addFailure(test, err) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.FAILURE, traceback=err) + + def addSuccess(self, test): + """See unittest.TestResult.addSuccess.""" + super(AugmentedResult, self).addSuccess(test) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.SUCCESS) + + def addSkip(self, test, reason): + """See unittest.TestResult.addSkip.""" + super(AugmentedResult, self).addSkip(test, reason) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.SKIP, skip_reason=reason) + + def addExpectedFailure(self, test, err): + """See unittest.TestResult.addExpectedFailure.""" + super(AugmentedResult, self).addExpectedFailure(test, err) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.EXPECTED_FAILURE, traceback=err) + + def addUnexpectedSuccess(self, test): + """See unittest.TestResult.addUnexpectedSuccess.""" + super(AugmentedResult, self).addUnexpectedSuccess(test) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.UNEXPECTED_SUCCESS) + + def set_output(self, test, stdout, stderr): + """Set the output attributes for the CaseResult corresponding to a test. + + Args: + test (unittest.TestCase): The TestCase to set the outputs of. + stdout (str): Output from stdout to assign to self.id_map(test). + stderr (str): Output from stderr to assign to self.id_map(test). + """ + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + stdout=stdout.decode(), stderr=stderr.decode()) + + def augmented_results(self, filter): + """Convenience method to retrieve filtered case results. + + Args: + filter (callable): A unary predicate to filter over CaseResult objects. 
+ """ + return (self.cases[case_id] + for case_id in self.cases + if filter(self.cases[case_id])) + + +class CoverageResult(AugmentedResult): + """Extension to AugmentedResult adding coverage.py support per test.\ + + Attributes: + coverage_context (coverage.Coverage): coverage.py management object. + """ + + def __init__(self, id_map): + """See AugmentedResult.__init__.""" + super(CoverageResult, self).__init__(id_map=id_map) + self.coverage_context = None + + def startTest(self, test): + """See unittest.TestResult.startTest. + + Additionally initializes and begins code coverage tracking.""" + super(CoverageResult, self).startTest(test) + self.coverage_context = coverage.Coverage(data_suffix=True) + self.coverage_context.start() + + def stopTest(self, test): + """See unittest.TestResult.stopTest. + + Additionally stops and deinitializes code coverage tracking.""" + super(CoverageResult, self).stopTest(test) + self.coverage_context.stop() + self.coverage_context.save() + self.coverage_context = None + + +class _Colors(object): + """Namespaced constants for terminal color magic numbers.""" + HEADER = '\033[95m' + INFO = '\033[94m' + OK = '\033[92m' + WARN = '\033[93m' + FAIL = '\033[91m' + BOLD = '\033[1m' + UNDERLINE = '\033[4m' + END = '\033[0m' + + +class TerminalResult(CoverageResult): + """Extension to CoverageResult adding basic terminal reporting.""" + + def __init__(self, out, id_map): + """Initialize the result object. + + Args: + out (file-like): Output file to which terminal-colored live results will + be written. + id_map (callable): See AugmentedResult.__init__. + """ + super(TerminalResult, self).__init__(id_map=id_map) + self.out = out + + def startTestRun(self): + """See unittest.TestResult.startTestRun.""" + super(TerminalResult, self).startTestRun() self.out.write(_Colors.HEADER + 'Testing gRPC Python...\n' + _Colors.END) - - def stopTestRun(self): - """See unittest.TestResult.stopTestRun.""" - super(TerminalResult, self).stopTestRun() - self.out.write(summary(self)) - self.out.flush() - - def addError(self, test, err): - """See unittest.TestResult.addError.""" - super(TerminalResult, self).addError(test, err) + + def stopTestRun(self): + """See unittest.TestResult.stopTestRun.""" + super(TerminalResult, self).stopTestRun() + self.out.write(summary(self)) + self.out.flush() + + def addError(self, test, err): + """See unittest.TestResult.addError.""" + super(TerminalResult, self).addError(test, err) self.out.write(_Colors.FAIL + 'ERROR {}\n'.format(test.id()) + _Colors.END) - self.out.flush() - - def addFailure(self, test, err): - """See unittest.TestResult.addFailure.""" - super(TerminalResult, self).addFailure(test, err) + self.out.flush() + + def addFailure(self, test, err): + """See unittest.TestResult.addFailure.""" + super(TerminalResult, self).addFailure(test, err) self.out.write(_Colors.FAIL + 'FAILURE {}\n'.format(test.id()) + _Colors.END) - self.out.flush() - - def addSuccess(self, test): - """See unittest.TestResult.addSuccess.""" - super(TerminalResult, self).addSuccess(test) + self.out.flush() + + def addSuccess(self, test): + """See unittest.TestResult.addSuccess.""" + super(TerminalResult, self).addSuccess(test) self.out.write(_Colors.OK + 'SUCCESS {}\n'.format(test.id()) + _Colors.END) - self.out.flush() - - def addSkip(self, test, reason): - """See unittest.TestResult.addSkip.""" - super(TerminalResult, self).addSkip(test, reason) + self.out.flush() + + def addSkip(self, test, reason): + """See unittest.TestResult.addSkip.""" + super(TerminalResult, 
self).addSkip(test, reason) self.out.write(_Colors.INFO + 'SKIP {}\n'.format(test.id()) + _Colors.END) - self.out.flush() - - def addExpectedFailure(self, test, err): - """See unittest.TestResult.addExpectedFailure.""" - super(TerminalResult, self).addExpectedFailure(test, err) + self.out.flush() + + def addExpectedFailure(self, test, err): + """See unittest.TestResult.addExpectedFailure.""" + super(TerminalResult, self).addExpectedFailure(test, err) self.out.write(_Colors.INFO + 'FAILURE_OK {}\n'.format(test.id()) + _Colors.END) - self.out.flush() - - def addUnexpectedSuccess(self, test): - """See unittest.TestResult.addUnexpectedSuccess.""" - super(TerminalResult, self).addUnexpectedSuccess(test) + self.out.flush() + + def addUnexpectedSuccess(self, test): + """See unittest.TestResult.addUnexpectedSuccess.""" + super(TerminalResult, self).addUnexpectedSuccess(test) self.out.write(_Colors.INFO + 'UNEXPECTED_OK {}\n'.format(test.id()) + _Colors.END) - self.out.flush() - - -def _traceback_string(type, value, trace): - """Generate a descriptive string of a Python exception traceback. - - Args: - type (class): The type of the exception. - value (Exception): The value of the exception. - trace (traceback): Traceback of the exception. - - Returns: - str: Formatted exception descriptive string. - """ - buffer = moves.cStringIO() - traceback.print_exception(type, value, trace, file=buffer) - return buffer.getvalue() - - -def summary(result): - """A summary string of a result object. - - Args: - result (AugmentedResult): The result object to get the summary of. - - Returns: - str: The summary string. - """ - assert isinstance(result, AugmentedResult) - untested = list( - result.augmented_results( - lambda case_result: case_result.kind is CaseResult.Kind.UNTESTED)) - running = list( - result.augmented_results( - lambda case_result: case_result.kind is CaseResult.Kind.RUNNING)) - failures = list( - result.augmented_results( - lambda case_result: case_result.kind is CaseResult.Kind.FAILURE)) - errors = list( - result.augmented_results( - lambda case_result: case_result.kind is CaseResult.Kind.ERROR)) - successes = list( - result.augmented_results( - lambda case_result: case_result.kind is CaseResult.Kind.SUCCESS)) - skips = list( - result.augmented_results( - lambda case_result: case_result.kind is CaseResult.Kind.SKIP)) - expected_failures = list( + self.out.flush() + + +def _traceback_string(type, value, trace): + """Generate a descriptive string of a Python exception traceback. + + Args: + type (class): The type of the exception. + value (Exception): The value of the exception. + trace (traceback): Traceback of the exception. + + Returns: + str: Formatted exception descriptive string. + """ + buffer = moves.cStringIO() + traceback.print_exception(type, value, trace, file=buffer) + return buffer.getvalue() + + +def summary(result): + """A summary string of a result object. + + Args: + result (AugmentedResult): The result object to get the summary of. + + Returns: + str: The summary string. 
+ """ + assert isinstance(result, AugmentedResult) + untested = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.UNTESTED)) + running = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.RUNNING)) + failures = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.FAILURE)) + errors = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.ERROR)) + successes = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.SUCCESS)) + skips = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.SKIP)) + expected_failures = list( result.augmented_results(lambda case_result: case_result.kind is CaseResult.Kind.EXPECTED_FAILURE)) - unexpected_successes = list( + unexpected_successes = list( result.augmented_results(lambda case_result: case_result.kind is CaseResult.Kind.UNEXPECTED_SUCCESS)) - running_names = [case.name for case in running] - finished_count = (len(failures) + len(errors) + len(successes) + - len(expected_failures) + len(unexpected_successes)) - statistics = ('{finished} tests finished:\n' - '\t{successful} successful\n' - '\t{unsuccessful} unsuccessful\n' - '\t{skipped} skipped\n' - '\t{expected_fail} expected failures\n' - '\t{unexpected_successful} unexpected successes\n' - 'Interrupted Tests:\n' - '\t{interrupted}\n'.format( - finished=finished_count, - successful=len(successes), - unsuccessful=(len(failures) + len(errors)), - skipped=len(skips), - expected_fail=len(expected_failures), - unexpected_successful=len(unexpected_successes), - interrupted=str(running_names))) + running_names = [case.name for case in running] + finished_count = (len(failures) + len(errors) + len(successes) + + len(expected_failures) + len(unexpected_successes)) + statistics = ('{finished} tests finished:\n' + '\t{successful} successful\n' + '\t{unsuccessful} unsuccessful\n' + '\t{skipped} skipped\n' + '\t{expected_fail} expected failures\n' + '\t{unexpected_successful} unexpected successes\n' + 'Interrupted Tests:\n' + '\t{interrupted}\n'.format( + finished=finished_count, + successful=len(successes), + unsuccessful=(len(failures) + len(errors)), + skipped=len(skips), + expected_fail=len(expected_failures), + unexpected_successful=len(unexpected_successes), + interrupted=str(running_names))) tracebacks = '\n\n'.join([ (_Colors.FAIL + '{test_name}' + _Colors.END + '\n' + _Colors.BOLD + 'traceback:' + _Colors.END + '\n' + '{traceback}\n' + _Colors.BOLD + @@ -406,34 +406,34 @@ def summary(result): stderr=result.stderr) for result in itertools.chain(failures, errors) ]) - notes = 'Unexpected successes: {}\n'.format( - [result.name for result in unexpected_successes]) - return statistics + '\nErrors/Failures: \n' + tracebacks + '\n' + notes - - -def jenkins_junit_xml(result): - """An XML tree object that when written is recognizable by Jenkins. - - Args: - result (AugmentedResult): The result object to get the junit xml output of. - - Returns: - ElementTree.ElementTree: The XML tree. 
- """ - assert isinstance(result, AugmentedResult) - root = ElementTree.Element('testsuites') - suite = ElementTree.SubElement(root, 'testsuite', { - 'name': 'Python gRPC tests', - }) - for case in result.cases.values(): - if case.kind is CaseResult.Kind.SUCCESS: - ElementTree.SubElement(suite, 'testcase', { - 'name': case.name, - }) - elif case.kind in (CaseResult.Kind.ERROR, CaseResult.Kind.FAILURE): - case_xml = ElementTree.SubElement(suite, 'testcase', { - 'name': case.name, - }) - error_xml = ElementTree.SubElement(case_xml, 'error', {}) - error_xml.text = ''.format(case.stderr, case.traceback) - return ElementTree.ElementTree(element=root) + notes = 'Unexpected successes: {}\n'.format( + [result.name for result in unexpected_successes]) + return statistics + '\nErrors/Failures: \n' + tracebacks + '\n' + notes + + +def jenkins_junit_xml(result): + """An XML tree object that when written is recognizable by Jenkins. + + Args: + result (AugmentedResult): The result object to get the junit xml output of. + + Returns: + ElementTree.ElementTree: The XML tree. + """ + assert isinstance(result, AugmentedResult) + root = ElementTree.Element('testsuites') + suite = ElementTree.SubElement(root, 'testsuite', { + 'name': 'Python gRPC tests', + }) + for case in result.cases.values(): + if case.kind is CaseResult.Kind.SUCCESS: + ElementTree.SubElement(suite, 'testcase', { + 'name': case.name, + }) + elif case.kind in (CaseResult.Kind.ERROR, CaseResult.Kind.FAILURE): + case_xml = ElementTree.SubElement(suite, 'testcase', { + 'name': case.name, + }) + error_xml = ElementTree.SubElement(case_xml, 'error', {}) + error_xml.text = ''.format(case.stderr, case.traceback) + return ElementTree.ElementTree(element=root) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py index 7b1546a46c..39da0399b0 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py @@ -1,239 +1,239 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import collections -import os -import select -import signal -import sys -import tempfile -import threading -import time -import unittest -import uuid - -import six -from six import moves - -from tests import _loader -from tests import _result - - -class CaptureFile(object): - """A context-managed file to redirect output to a byte array. - - Use by invoking `start` (`__enter__`) and at some point invoking `stop` - (`__exit__`). At any point after the initial call to `start` call `output` to - get the current redirected output. Note that we don't currently use file - locking, so calling `output` between calls to `start` and `stop` may muddle - the result (you should only be doing this during a Python-handled interrupt as - a last ditch effort to provide output to the user). - - Attributes: - _redirected_fd (int): File descriptor of file to redirect writes from. 
- _saved_fd (int): A copy of the original value of the redirected file - descriptor. - _into_file (TemporaryFile or None): File to which writes are redirected. - Only non-None when self is started. - """ - - def __init__(self, fd): - self._redirected_fd = fd - self._saved_fd = os.dup(self._redirected_fd) - self._into_file = None - - def output(self): - """Get all output from the redirected-to file if it exists.""" - if self._into_file: - self._into_file.seek(0) - return bytes(self._into_file.read()) - else: - return bytes() - - def start(self): - """Start redirection of writes to the file descriptor.""" - self._into_file = tempfile.TemporaryFile() - os.dup2(self._into_file.fileno(), self._redirected_fd) - - def stop(self): - """Stop redirection of writes to the file descriptor.""" - # n.b. this dup2 call auto-closes self._redirected_fd - os.dup2(self._saved_fd, self._redirected_fd) - - def write_bypass(self, value): - """Bypass the redirection and write directly to the original file. - - Arguments: - value (str): What to write to the original file. - """ - if six.PY3 and not isinstance(value, six.binary_type): - value = bytes(value, 'ascii') - if self._saved_fd is None: - os.write(self._redirect_fd, value) - else: - os.write(self._saved_fd, value) - - def __enter__(self): - self.start() - return self - - def __exit__(self, type, value, traceback): - self.stop() - - def close(self): - """Close any resources used by self not closed by stop().""" - os.close(self._saved_fd) - - -class AugmentedCase(collections.namedtuple('AugmentedCase', ['case', 'id'])): - """A test case with a guaranteed unique externally specified identifier. - - Attributes: - case (unittest.TestCase): TestCase we're decorating with an additional - identifier. - id (object): Any identifier that may be considered 'unique' for testing - purposes. - """ - - def __new__(cls, case, id=None): - if id is None: - id = uuid.uuid4() - return super(cls, AugmentedCase).__new__(cls, case, id) - - +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import collections +import os +import select +import signal +import sys +import tempfile +import threading +import time +import unittest +import uuid + +import six +from six import moves + +from tests import _loader +from tests import _result + + +class CaptureFile(object): + """A context-managed file to redirect output to a byte array. + + Use by invoking `start` (`__enter__`) and at some point invoking `stop` + (`__exit__`). At any point after the initial call to `start` call `output` to + get the current redirected output. Note that we don't currently use file + locking, so calling `output` between calls to `start` and `stop` may muddle + the result (you should only be doing this during a Python-handled interrupt as + a last ditch effort to provide output to the user). + + Attributes: + _redirected_fd (int): File descriptor of file to redirect writes from. 
+ _saved_fd (int): A copy of the original value of the redirected file + descriptor. + _into_file (TemporaryFile or None): File to which writes are redirected. + Only non-None when self is started. + """ + + def __init__(self, fd): + self._redirected_fd = fd + self._saved_fd = os.dup(self._redirected_fd) + self._into_file = None + + def output(self): + """Get all output from the redirected-to file if it exists.""" + if self._into_file: + self._into_file.seek(0) + return bytes(self._into_file.read()) + else: + return bytes() + + def start(self): + """Start redirection of writes to the file descriptor.""" + self._into_file = tempfile.TemporaryFile() + os.dup2(self._into_file.fileno(), self._redirected_fd) + + def stop(self): + """Stop redirection of writes to the file descriptor.""" + # n.b. this dup2 call auto-closes self._redirected_fd + os.dup2(self._saved_fd, self._redirected_fd) + + def write_bypass(self, value): + """Bypass the redirection and write directly to the original file. + + Arguments: + value (str): What to write to the original file. + """ + if six.PY3 and not isinstance(value, six.binary_type): + value = bytes(value, 'ascii') + if self._saved_fd is None: + os.write(self._redirect_fd, value) + else: + os.write(self._saved_fd, value) + + def __enter__(self): + self.start() + return self + + def __exit__(self, type, value, traceback): + self.stop() + + def close(self): + """Close any resources used by self not closed by stop().""" + os.close(self._saved_fd) + + +class AugmentedCase(collections.namedtuple('AugmentedCase', ['case', 'id'])): + """A test case with a guaranteed unique externally specified identifier. + + Attributes: + case (unittest.TestCase): TestCase we're decorating with an additional + identifier. + id (object): Any identifier that may be considered 'unique' for testing + purposes. + """ + + def __new__(cls, case, id=None): + if id is None: + id = uuid.uuid4() + return super(cls, AugmentedCase).__new__(cls, case, id) + + # NOTE(lidiz) This complex wrapper is not triggering setUpClass nor # tearDownClass. Do not use those methods, or fix this wrapper! -class Runner(object): - - def __init__(self, dedicated_threads=False): - """Constructs the Runner object. - - Args: - dedicated_threads: A bool indicates whether to spawn each unit test - in separate thread or not. - """ - self._skipped_tests = [] - self._dedicated_threads = dedicated_threads - - def skip_tests(self, tests): - self._skipped_tests = tests - - def run(self, suite): - """See setuptools' test_runner setup argument for information.""" - # only run test cases with id starting with given prefix - testcase_filter = os.getenv('GRPC_PYTHON_TESTRUNNER_FILTER') - filtered_cases = [] - for case in _loader.iterate_suite_cases(suite): - if not testcase_filter or case.id().startswith(testcase_filter): - filtered_cases.append(case) - - # Ensure that every test case has no collision with any other test case in - # the augmented results. - augmented_cases = [ - AugmentedCase(case, uuid.uuid4()) for case in filtered_cases - ] - case_id_by_case = dict((augmented_case.case, augmented_case.id) - for augmented_case in augmented_cases) - result_out = moves.cStringIO() - result = _result.TerminalResult( - result_out, id_map=lambda case: case_id_by_case[case]) - stdout_pipe = CaptureFile(sys.stdout.fileno()) - stderr_pipe = CaptureFile(sys.stderr.fileno()) - kill_flag = [False] - - def sigint_handler(signal_number, frame): - if signal_number == signal.SIGINT: - kill_flag[0] = True # Python 2.7 not having 'local'... 
:-( - signal.signal(signal_number, signal.SIG_DFL) - - def fault_handler(signal_number, frame): - stdout_pipe.write_bypass( - 'Received fault signal {}\nstdout:\n{}\n\nstderr:{}\n'.format( - signal_number, stdout_pipe.output(), stderr_pipe.output())) - os._exit(1) - - def check_kill_self(): - if kill_flag[0]: - stdout_pipe.write_bypass('Stopping tests short...') - result.stopTestRun() - stdout_pipe.write_bypass(result_out.getvalue()) - stdout_pipe.write_bypass('\ninterrupted stdout:\n{}\n'.format( - stdout_pipe.output().decode())) - stderr_pipe.write_bypass('\ninterrupted stderr:\n{}\n'.format( - stderr_pipe.output().decode())) - os._exit(1) - - def try_set_handler(name, handler): - try: - signal.signal(getattr(signal, name), handler) - except AttributeError: - pass - - try_set_handler('SIGINT', sigint_handler) - try_set_handler('SIGBUS', fault_handler) - try_set_handler('SIGABRT', fault_handler) - try_set_handler('SIGFPE', fault_handler) - try_set_handler('SIGILL', fault_handler) - # Sometimes output will lag after a test has successfully finished; we - # ignore such writes to our pipes. - try_set_handler('SIGPIPE', signal.SIG_IGN) - - # Run the tests - result.startTestRun() - for augmented_case in augmented_cases: - for skipped_test in self._skipped_tests: - if skipped_test in augmented_case.case.id(): - break - else: - sys.stdout.write('Running {}\n'.format( - augmented_case.case.id())) - sys.stdout.flush() - if self._dedicated_threads: - # (Deprecated) Spawns dedicated thread for each test case. - case_thread = threading.Thread( - target=augmented_case.case.run, args=(result,)) - try: - with stdout_pipe, stderr_pipe: - case_thread.start() - # If the thread is exited unexpected, stop testing. - while case_thread.is_alive(): - check_kill_self() - time.sleep(0) - case_thread.join() - except: # pylint: disable=try-except-raise - # re-raise the exception after forcing the with-block to end - raise - # Records the result of the test case run. - result.set_output(augmented_case.case, stdout_pipe.output(), - stderr_pipe.output()) - sys.stdout.write(result_out.getvalue()) - sys.stdout.flush() - result_out.truncate(0) - check_kill_self() - else: - # Donates current thread to test case execution. - augmented_case.case.run(result) - result.stopTestRun() - stdout_pipe.close() - stderr_pipe.close() - - # Report results - sys.stdout.write(result_out.getvalue()) - sys.stdout.flush() - signal.signal(signal.SIGINT, signal.SIG_DFL) - with open('report.xml', 'wb') as report_xml_file: - _result.jenkins_junit_xml(result).write(report_xml_file) - return result +class Runner(object): + + def __init__(self, dedicated_threads=False): + """Constructs the Runner object. + + Args: + dedicated_threads: A bool indicates whether to spawn each unit test + in separate thread or not. + """ + self._skipped_tests = [] + self._dedicated_threads = dedicated_threads + + def skip_tests(self, tests): + self._skipped_tests = tests + + def run(self, suite): + """See setuptools' test_runner setup argument for information.""" + # only run test cases with id starting with given prefix + testcase_filter = os.getenv('GRPC_PYTHON_TESTRUNNER_FILTER') + filtered_cases = [] + for case in _loader.iterate_suite_cases(suite): + if not testcase_filter or case.id().startswith(testcase_filter): + filtered_cases.append(case) + + # Ensure that every test case has no collision with any other test case in + # the augmented results. 
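Not from the patch: a short usage sketch for the CaptureFile helper defined in _runner.py above. Because it redirects the OS-level descriptor with dup2, it also catches output written by C extensions, which io-level redirection would miss; os.write is used here to sidestep Python's own stdout buffering.

    # Sketch only - capture fd-level stdout with the helper shown above.
    import os
    import sys

    from tests._runner import CaptureFile

    capture = CaptureFile(sys.stdout.fileno())
    with capture:                                   # __enter__/__exit__ call start()/stop()
        os.write(sys.stdout.fileno(), b'written while redirected\n')
    print(capture.output())                         # b'written while redirected\n'
    capture.close()                                 # release the saved descriptor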
+ augmented_cases = [ + AugmentedCase(case, uuid.uuid4()) for case in filtered_cases + ] + case_id_by_case = dict((augmented_case.case, augmented_case.id) + for augmented_case in augmented_cases) + result_out = moves.cStringIO() + result = _result.TerminalResult( + result_out, id_map=lambda case: case_id_by_case[case]) + stdout_pipe = CaptureFile(sys.stdout.fileno()) + stderr_pipe = CaptureFile(sys.stderr.fileno()) + kill_flag = [False] + + def sigint_handler(signal_number, frame): + if signal_number == signal.SIGINT: + kill_flag[0] = True # Python 2.7 not having 'local'... :-( + signal.signal(signal_number, signal.SIG_DFL) + + def fault_handler(signal_number, frame): + stdout_pipe.write_bypass( + 'Received fault signal {}\nstdout:\n{}\n\nstderr:{}\n'.format( + signal_number, stdout_pipe.output(), stderr_pipe.output())) + os._exit(1) + + def check_kill_self(): + if kill_flag[0]: + stdout_pipe.write_bypass('Stopping tests short...') + result.stopTestRun() + stdout_pipe.write_bypass(result_out.getvalue()) + stdout_pipe.write_bypass('\ninterrupted stdout:\n{}\n'.format( + stdout_pipe.output().decode())) + stderr_pipe.write_bypass('\ninterrupted stderr:\n{}\n'.format( + stderr_pipe.output().decode())) + os._exit(1) + + def try_set_handler(name, handler): + try: + signal.signal(getattr(signal, name), handler) + except AttributeError: + pass + + try_set_handler('SIGINT', sigint_handler) + try_set_handler('SIGBUS', fault_handler) + try_set_handler('SIGABRT', fault_handler) + try_set_handler('SIGFPE', fault_handler) + try_set_handler('SIGILL', fault_handler) + # Sometimes output will lag after a test has successfully finished; we + # ignore such writes to our pipes. + try_set_handler('SIGPIPE', signal.SIG_IGN) + + # Run the tests + result.startTestRun() + for augmented_case in augmented_cases: + for skipped_test in self._skipped_tests: + if skipped_test in augmented_case.case.id(): + break + else: + sys.stdout.write('Running {}\n'.format( + augmented_case.case.id())) + sys.stdout.flush() + if self._dedicated_threads: + # (Deprecated) Spawns dedicated thread for each test case. + case_thread = threading.Thread( + target=augmented_case.case.run, args=(result,)) + try: + with stdout_pipe, stderr_pipe: + case_thread.start() + # If the thread is exited unexpected, stop testing. + while case_thread.is_alive(): + check_kill_self() + time.sleep(0) + case_thread.join() + except: # pylint: disable=try-except-raise + # re-raise the exception after forcing the with-block to end + raise + # Records the result of the test case run. + result.set_output(augmented_case.case, stdout_pipe.output(), + stderr_pipe.output()) + sys.stdout.write(result_out.getvalue()) + sys.stdout.flush() + result_out.truncate(0) + check_kill_self() + else: + # Donates current thread to test case execution. + augmented_case.case.run(result) + result.stopTestRun() + stdout_pipe.close() + stderr_pipe.close() + + # Report results + sys.stdout.write(result_out.getvalue()) + sys.stdout.flush() + signal.signal(signal.SIGINT, signal.SIG_DFL) + with open('report.xml', 'wb') as report_xml_file: + _result.jenkins_junit_xml(result).write(report_xml_file) + return result diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py index 8d89990e82..5772620b60 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2016 gRPC authors. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py index 0cd18af931..3aa92f37fb 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py @@ -1,48 +1,48 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import pkgutil -import unittest - -import six - -import tests - - -class SanityTest(unittest.TestCase): - - maxDiff = 32768 - - TEST_PKG_MODULE_NAME = 'tests' - TEST_PKG_PATH = 'tests' - - def testTestsJsonUpToDate(self): - """Autodiscovers all test suites and checks that tests.json is up to date""" - loader = tests.Loader() - loader.loadTestsFromNames([self.TEST_PKG_MODULE_NAME]) - test_suite_names = sorted({ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import pkgutil +import unittest + +import six + +import tests + + +class SanityTest(unittest.TestCase): + + maxDiff = 32768 + + TEST_PKG_MODULE_NAME = 'tests' + TEST_PKG_PATH = 'tests' + + def testTestsJsonUpToDate(self): + """Autodiscovers all test suites and checks that tests.json is up to date""" + loader = tests.Loader() + loader.loadTestsFromNames([self.TEST_PKG_MODULE_NAME]) + test_suite_names = sorted({ test_case_class.id().rsplit('.', 1)[0] for test_case_class in tests._loader.iterate_suite_cases(loader.suite) - }) - - tests_json_string = pkgutil.get_data(self.TEST_PKG_PATH, 'tests.json') + }) + + tests_json_string = pkgutil.get_data(self.TEST_PKG_PATH, 'tests.json') tests_json = json.loads( tests_json_string.decode() if six.PY3 else tests_json_string) - - self.assertSequenceEqual(tests_json, test_suite_names) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + + self.assertSequenceEqual(tests_json, test_suite_names) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py index d0a646536a..994a8e1e80 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py @@ -1,40 +1,40 @@ -# Copyright 2019 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import site -import sys - -_GRPC_BAZEL_RUNTIME_ENV = "GRPC_BAZEL_RUNTIME" - - -# TODO(https://github.com/bazelbuild/bazel/issues/6844) Bazel failed to -# interpret namespace packages correctly. This monkey patch will force the -# Python process to parse the .pth file in the sys.path to resolve namespace -# package in the right place. -# Analysis in depth: https://github.com/bazelbuild/rules_python/issues/55 -def sys_path_to_site_dir_hack(): - """Add valid sys.path item to site directory to parse the .pth files.""" - # Only run within our Bazel environment - if not os.environ.get(_GRPC_BAZEL_RUNTIME_ENV): - return - items = [] - for item in sys.path: - if os.path.exists(item): - # The only difference between sys.path and site-directory is - # whether the .pth file will be parsed or not. A site-directory - # will always exist in sys.path, but not another way around. - items.append(item) - for item in items: - site.addsitedir(item) +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import site +import sys + +_GRPC_BAZEL_RUNTIME_ENV = "GRPC_BAZEL_RUNTIME" + + +# TODO(https://github.com/bazelbuild/bazel/issues/6844) Bazel failed to +# interpret namespace packages correctly. This monkey patch will force the +# Python process to parse the .pth file in the sys.path to resolve namespace +# package in the right place. +# Analysis in depth: https://github.com/bazelbuild/rules_python/issues/55 +def sys_path_to_site_dir_hack(): + """Add valid sys.path item to site directory to parse the .pth files.""" + # Only run within our Bazel environment + if not os.environ.get(_GRPC_BAZEL_RUNTIME_ENV): + return + items = [] + for item in sys.path: + if os.path.exists(item): + # The only difference between sys.path and site-directory is + # whether the .pth file will be parsed or not. A site-directory + # will always exist in sys.path, but not another way around. + items.append(item) + for item in items: + site.addsitedir(item) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py index 10b401503b..38fdfc9c5c 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2018 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2018 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py index ccbe40df84..784307ae00 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py @@ -1,469 +1,469 @@ -# Copyright 2018 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests of grpc_channelz.v1.channelz.""" - -import unittest - -from concurrent import futures - -import grpc - -from grpc_channelz.v1 import channelz -from grpc_channelz.v1 import channelz_pb2 -from grpc_channelz.v1 import channelz_pb2_grpc - -from tests.unit import test_common -from tests.unit.framework.common import test_constants - -_SUCCESSFUL_UNARY_UNARY = '/test/SuccessfulUnaryUnary' -_FAILED_UNARY_UNARY = '/test/FailedUnaryUnary' -_SUCCESSFUL_STREAM_STREAM = '/test/SuccessfulStreamStream' - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x01\x01\x01' - -_DISABLE_REUSE_PORT = (('grpc.so_reuseport', 0),) -_ENABLE_CHANNELZ = (('grpc.enable_channelz', 1),) -_DISABLE_CHANNELZ = (('grpc.enable_channelz', 0),) - - -def _successful_unary_unary(request, servicer_context): - return _RESPONSE - - -def _failed_unary_unary(request, servicer_context): - servicer_context.set_code(grpc.StatusCode.INTERNAL) - servicer_context.set_details("Channelz Test Intended Failure") - - -def _successful_stream_stream(request_iterator, servicer_context): - for _ in request_iterator: - yield _RESPONSE - - -class _GenericHandler(grpc.GenericRpcHandler): - - def service(self, handler_call_details): - if handler_call_details.method == _SUCCESSFUL_UNARY_UNARY: - return grpc.unary_unary_rpc_method_handler(_successful_unary_unary) - elif handler_call_details.method == _FAILED_UNARY_UNARY: - return grpc.unary_unary_rpc_method_handler(_failed_unary_unary) - elif handler_call_details.method == _SUCCESSFUL_STREAM_STREAM: - return grpc.stream_stream_rpc_method_handler( - _successful_stream_stream) - else: - return None - - -class _ChannelServerPair(object): - - def __init__(self): - # Server will enable channelz service +# Copyright 2018 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests of grpc_channelz.v1.channelz.""" + +import unittest + +from concurrent import futures + +import grpc + +from grpc_channelz.v1 import channelz +from grpc_channelz.v1 import channelz_pb2 +from grpc_channelz.v1 import channelz_pb2_grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_SUCCESSFUL_UNARY_UNARY = '/test/SuccessfulUnaryUnary' +_FAILED_UNARY_UNARY = '/test/FailedUnaryUnary' +_SUCCESSFUL_STREAM_STREAM = '/test/SuccessfulStreamStream' + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x01\x01\x01' + +_DISABLE_REUSE_PORT = (('grpc.so_reuseport', 0),) +_ENABLE_CHANNELZ = (('grpc.enable_channelz', 1),) +_DISABLE_CHANNELZ = (('grpc.enable_channelz', 0),) + + +def _successful_unary_unary(request, servicer_context): + return _RESPONSE + + +def _failed_unary_unary(request, servicer_context): + servicer_context.set_code(grpc.StatusCode.INTERNAL) + servicer_context.set_details("Channelz Test Intended Failure") + + +def _successful_stream_stream(request_iterator, servicer_context): + for _ in request_iterator: + yield _RESPONSE + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _SUCCESSFUL_UNARY_UNARY: + return grpc.unary_unary_rpc_method_handler(_successful_unary_unary) + elif handler_call_details.method == _FAILED_UNARY_UNARY: + return grpc.unary_unary_rpc_method_handler(_failed_unary_unary) + elif handler_call_details.method == _SUCCESSFUL_STREAM_STREAM: + return grpc.stream_stream_rpc_method_handler( + _successful_stream_stream) + else: + return None + + +class _ChannelServerPair(object): + + def __init__(self): + # Server will enable channelz service self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=3), options=_DISABLE_REUSE_PORT + _ENABLE_CHANNELZ) - port = self.server.add_insecure_port('[::]:0') - self.server.add_generic_rpc_handlers((_GenericHandler(),)) - self.server.start() - - # Channel will enable channelz service... 
- self.channel = grpc.insecure_channel('localhost:%d' % port, - _ENABLE_CHANNELZ) - - -def _generate_channel_server_pairs(n): - return [_ChannelServerPair() for i in range(n)] - - -def _close_channel_server_pairs(pairs): - for pair in pairs: - pair.server.stop(None) - pair.channel.close() - - -class ChannelzServicerTest(unittest.TestCase): - - def _send_successful_unary_unary(self, idx): - _, r = self._pairs[idx].channel.unary_unary( - _SUCCESSFUL_UNARY_UNARY).with_call(_REQUEST) - self.assertEqual(r.code(), grpc.StatusCode.OK) - - def _send_failed_unary_unary(self, idx): - try: - self._pairs[idx].channel.unary_unary(_FAILED_UNARY_UNARY).with_call( - _REQUEST) - except grpc.RpcError: - return - else: - self.fail("This call supposed to fail") - - def _send_successful_stream_stream(self, idx): - response_iterator = self._pairs[idx].channel.stream_stream( - _SUCCESSFUL_STREAM_STREAM).__call__( - iter([_REQUEST] * test_constants.STREAM_LENGTH)) - cnt = 0 - for _ in response_iterator: - cnt += 1 - self.assertEqual(cnt, test_constants.STREAM_LENGTH) - - def _get_channel_id(self, idx): - """Channel id may not be consecutive""" - resp = self._channelz_stub.GetTopChannels( - channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) - self.assertGreater(len(resp.channel), idx) - return resp.channel[idx].ref.channel_id - - def setUp(self): - self._pairs = [] - # This server is for Channelz info fetching only - # It self should not enable Channelz + port = self.server.add_insecure_port('[::]:0') + self.server.add_generic_rpc_handlers((_GenericHandler(),)) + self.server.start() + + # Channel will enable channelz service... + self.channel = grpc.insecure_channel('localhost:%d' % port, + _ENABLE_CHANNELZ) + + +def _generate_channel_server_pairs(n): + return [_ChannelServerPair() for i in range(n)] + + +def _close_channel_server_pairs(pairs): + for pair in pairs: + pair.server.stop(None) + pair.channel.close() + + +class ChannelzServicerTest(unittest.TestCase): + + def _send_successful_unary_unary(self, idx): + _, r = self._pairs[idx].channel.unary_unary( + _SUCCESSFUL_UNARY_UNARY).with_call(_REQUEST) + self.assertEqual(r.code(), grpc.StatusCode.OK) + + def _send_failed_unary_unary(self, idx): + try: + self._pairs[idx].channel.unary_unary(_FAILED_UNARY_UNARY).with_call( + _REQUEST) + except grpc.RpcError: + return + else: + self.fail("This call supposed to fail") + + def _send_successful_stream_stream(self, idx): + response_iterator = self._pairs[idx].channel.stream_stream( + _SUCCESSFUL_STREAM_STREAM).__call__( + iter([_REQUEST] * test_constants.STREAM_LENGTH)) + cnt = 0 + for _ in response_iterator: + cnt += 1 + self.assertEqual(cnt, test_constants.STREAM_LENGTH) + + def _get_channel_id(self, idx): + """Channel id may not be consecutive""" + resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) + self.assertGreater(len(resp.channel), idx) + return resp.channel[idx].ref.channel_id + + def setUp(self): + self._pairs = [] + # This server is for Channelz info fetching only + # It self should not enable Channelz self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=3), options=_DISABLE_REUSE_PORT + _DISABLE_CHANNELZ) - port = self._server.add_insecure_port('[::]:0') - channelz.add_channelz_servicer(self._server) - self._server.start() - - # This channel is used to fetch Channelz info only - # Channelz should not be enabled - self._channel = grpc.insecure_channel('localhost:%d' % port, - _DISABLE_CHANNELZ) - self._channelz_stub = 
channelz_pb2_grpc.ChannelzStub(self._channel) - - def tearDown(self): - self._server.stop(None) - self._channel.close() - _close_channel_server_pairs(self._pairs) - - def test_get_top_channels_basic(self): - self._pairs = _generate_channel_server_pairs(1) - resp = self._channelz_stub.GetTopChannels( - channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) - self.assertEqual(len(resp.channel), 1) - self.assertEqual(resp.end, True) - - def test_get_top_channels_high_start_id(self): - self._pairs = _generate_channel_server_pairs(1) - resp = self._channelz_stub.GetTopChannels( - channelz_pb2.GetTopChannelsRequest(start_channel_id=10000)) - self.assertEqual(len(resp.channel), 0) - self.assertEqual(resp.end, True) - - def test_successful_request(self): - self._pairs = _generate_channel_server_pairs(1) - self._send_successful_unary_unary(0) - resp = self._channelz_stub.GetChannel( - channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) - self.assertEqual(resp.channel.data.calls_started, 1) - self.assertEqual(resp.channel.data.calls_succeeded, 1) - self.assertEqual(resp.channel.data.calls_failed, 0) - - def test_failed_request(self): - self._pairs = _generate_channel_server_pairs(1) - self._send_failed_unary_unary(0) - resp = self._channelz_stub.GetChannel( - channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) - self.assertEqual(resp.channel.data.calls_started, 1) - self.assertEqual(resp.channel.data.calls_succeeded, 0) - self.assertEqual(resp.channel.data.calls_failed, 1) - - def test_many_requests(self): - self._pairs = _generate_channel_server_pairs(1) - k_success = 7 - k_failed = 9 - for i in range(k_success): - self._send_successful_unary_unary(0) - for i in range(k_failed): - self._send_failed_unary_unary(0) - resp = self._channelz_stub.GetChannel( - channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) - self.assertEqual(resp.channel.data.calls_started, k_success + k_failed) - self.assertEqual(resp.channel.data.calls_succeeded, k_success) - self.assertEqual(resp.channel.data.calls_failed, k_failed) - - def test_many_channel(self): - k_channels = 4 - self._pairs = _generate_channel_server_pairs(k_channels) - resp = self._channelz_stub.GetTopChannels( - channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) - self.assertEqual(len(resp.channel), k_channels) - - def test_many_requests_many_channel(self): - k_channels = 4 - self._pairs = _generate_channel_server_pairs(k_channels) - k_success = 11 - k_failed = 13 - for i in range(k_success): - self._send_successful_unary_unary(0) - self._send_successful_unary_unary(2) - for i in range(k_failed): - self._send_failed_unary_unary(1) - self._send_failed_unary_unary(2) - - # The first channel saw only successes - resp = self._channelz_stub.GetChannel( - channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) - self.assertEqual(resp.channel.data.calls_started, k_success) - self.assertEqual(resp.channel.data.calls_succeeded, k_success) - self.assertEqual(resp.channel.data.calls_failed, 0) - - # The second channel saw only failures - resp = self._channelz_stub.GetChannel( - channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(1))) - self.assertEqual(resp.channel.data.calls_started, k_failed) - self.assertEqual(resp.channel.data.calls_succeeded, 0) - self.assertEqual(resp.channel.data.calls_failed, k_failed) - - # The third channel saw both successes and failures - resp = self._channelz_stub.GetChannel( - channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(2))) - 
self.assertEqual(resp.channel.data.calls_started, k_success + k_failed) - self.assertEqual(resp.channel.data.calls_succeeded, k_success) - self.assertEqual(resp.channel.data.calls_failed, k_failed) - - # The fourth channel saw nothing - resp = self._channelz_stub.GetChannel( - channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(3))) - self.assertEqual(resp.channel.data.calls_started, 0) - self.assertEqual(resp.channel.data.calls_succeeded, 0) - self.assertEqual(resp.channel.data.calls_failed, 0) - - def test_many_subchannels(self): - k_channels = 4 - self._pairs = _generate_channel_server_pairs(k_channels) - k_success = 17 - k_failed = 19 - for i in range(k_success): - self._send_successful_unary_unary(0) - self._send_successful_unary_unary(2) - for i in range(k_failed): - self._send_failed_unary_unary(1) - self._send_failed_unary_unary(2) - - gtc_resp = self._channelz_stub.GetTopChannels( - channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) - self.assertEqual(len(gtc_resp.channel), k_channels) - for i in range(k_channels): - # If no call performed in the channel, there shouldn't be any subchannel - if gtc_resp.channel[i].data.calls_started == 0: - self.assertEqual(len(gtc_resp.channel[i].subchannel_ref), 0) - continue - - # Otherwise, the subchannel should exist - self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0) - gsc_resp = self._channelz_stub.GetSubchannel( - channelz_pb2.GetSubchannelRequest( + port = self._server.add_insecure_port('[::]:0') + channelz.add_channelz_servicer(self._server) + self._server.start() + + # This channel is used to fetch Channelz info only + # Channelz should not be enabled + self._channel = grpc.insecure_channel('localhost:%d' % port, + _DISABLE_CHANNELZ) + self._channelz_stub = channelz_pb2_grpc.ChannelzStub(self._channel) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + _close_channel_server_pairs(self._pairs) + + def test_get_top_channels_basic(self): + self._pairs = _generate_channel_server_pairs(1) + resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) + self.assertEqual(len(resp.channel), 1) + self.assertEqual(resp.end, True) + + def test_get_top_channels_high_start_id(self): + self._pairs = _generate_channel_server_pairs(1) + resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=10000)) + self.assertEqual(len(resp.channel), 0) + self.assertEqual(resp.end, True) + + def test_successful_request(self): + self._pairs = _generate_channel_server_pairs(1) + self._send_successful_unary_unary(0) + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) + self.assertEqual(resp.channel.data.calls_started, 1) + self.assertEqual(resp.channel.data.calls_succeeded, 1) + self.assertEqual(resp.channel.data.calls_failed, 0) + + def test_failed_request(self): + self._pairs = _generate_channel_server_pairs(1) + self._send_failed_unary_unary(0) + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) + self.assertEqual(resp.channel.data.calls_started, 1) + self.assertEqual(resp.channel.data.calls_succeeded, 0) + self.assertEqual(resp.channel.data.calls_failed, 1) + + def test_many_requests(self): + self._pairs = _generate_channel_server_pairs(1) + k_success = 7 + k_failed = 9 + for i in range(k_success): + self._send_successful_unary_unary(0) + for i in range(k_failed): + self._send_failed_unary_unary(0) + resp = 
self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) + self.assertEqual(resp.channel.data.calls_started, k_success + k_failed) + self.assertEqual(resp.channel.data.calls_succeeded, k_success) + self.assertEqual(resp.channel.data.calls_failed, k_failed) + + def test_many_channel(self): + k_channels = 4 + self._pairs = _generate_channel_server_pairs(k_channels) + resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) + self.assertEqual(len(resp.channel), k_channels) + + def test_many_requests_many_channel(self): + k_channels = 4 + self._pairs = _generate_channel_server_pairs(k_channels) + k_success = 11 + k_failed = 13 + for i in range(k_success): + self._send_successful_unary_unary(0) + self._send_successful_unary_unary(2) + for i in range(k_failed): + self._send_failed_unary_unary(1) + self._send_failed_unary_unary(2) + + # The first channel saw only successes + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) + self.assertEqual(resp.channel.data.calls_started, k_success) + self.assertEqual(resp.channel.data.calls_succeeded, k_success) + self.assertEqual(resp.channel.data.calls_failed, 0) + + # The second channel saw only failures + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(1))) + self.assertEqual(resp.channel.data.calls_started, k_failed) + self.assertEqual(resp.channel.data.calls_succeeded, 0) + self.assertEqual(resp.channel.data.calls_failed, k_failed) + + # The third channel saw both successes and failures + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(2))) + self.assertEqual(resp.channel.data.calls_started, k_success + k_failed) + self.assertEqual(resp.channel.data.calls_succeeded, k_success) + self.assertEqual(resp.channel.data.calls_failed, k_failed) + + # The fourth channel saw nothing + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(3))) + self.assertEqual(resp.channel.data.calls_started, 0) + self.assertEqual(resp.channel.data.calls_succeeded, 0) + self.assertEqual(resp.channel.data.calls_failed, 0) + + def test_many_subchannels(self): + k_channels = 4 + self._pairs = _generate_channel_server_pairs(k_channels) + k_success = 17 + k_failed = 19 + for i in range(k_success): + self._send_successful_unary_unary(0) + self._send_successful_unary_unary(2) + for i in range(k_failed): + self._send_failed_unary_unary(1) + self._send_failed_unary_unary(2) + + gtc_resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) + self.assertEqual(len(gtc_resp.channel), k_channels) + for i in range(k_channels): + # If no call performed in the channel, there shouldn't be any subchannel + if gtc_resp.channel[i].data.calls_started == 0: + self.assertEqual(len(gtc_resp.channel[i].subchannel_ref), 0) + continue + + # Otherwise, the subchannel should exist + self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0) + gsc_resp = self._channelz_stub.GetSubchannel( + channelz_pb2.GetSubchannelRequest( subchannel_id=gtc_resp.channel[i].subchannel_ref[0]. 
subchannel_id)) - self.assertEqual(gtc_resp.channel[i].data.calls_started, - gsc_resp.subchannel.data.calls_started) - self.assertEqual(gtc_resp.channel[i].data.calls_succeeded, - gsc_resp.subchannel.data.calls_succeeded) - self.assertEqual(gtc_resp.channel[i].data.calls_failed, - gsc_resp.subchannel.data.calls_failed) - - def test_server_basic(self): - self._pairs = _generate_channel_server_pairs(1) - resp = self._channelz_stub.GetServers( - channelz_pb2.GetServersRequest(start_server_id=0)) - self.assertEqual(len(resp.server), 1) - - def test_get_one_server(self): - self._pairs = _generate_channel_server_pairs(1) - gss_resp = self._channelz_stub.GetServers( - channelz_pb2.GetServersRequest(start_server_id=0)) - self.assertEqual(len(gss_resp.server), 1) - gs_resp = self._channelz_stub.GetServer( - channelz_pb2.GetServerRequest( - server_id=gss_resp.server[0].ref.server_id)) - self.assertEqual(gss_resp.server[0].ref.server_id, - gs_resp.server.ref.server_id) - - def test_server_call(self): - self._pairs = _generate_channel_server_pairs(1) - k_success = 23 - k_failed = 29 - for i in range(k_success): - self._send_successful_unary_unary(0) - for i in range(k_failed): - self._send_failed_unary_unary(0) - - resp = self._channelz_stub.GetServers( - channelz_pb2.GetServersRequest(start_server_id=0)) - self.assertEqual(len(resp.server), 1) - self.assertEqual(resp.server[0].data.calls_started, - k_success + k_failed) - self.assertEqual(resp.server[0].data.calls_succeeded, k_success) - self.assertEqual(resp.server[0].data.calls_failed, k_failed) - - def test_many_subchannels_and_sockets(self): - k_channels = 4 - self._pairs = _generate_channel_server_pairs(k_channels) - k_success = 3 - k_failed = 5 - for i in range(k_success): - self._send_successful_unary_unary(0) - self._send_successful_unary_unary(2) - for i in range(k_failed): - self._send_failed_unary_unary(1) - self._send_failed_unary_unary(2) - - gtc_resp = self._channelz_stub.GetTopChannels( - channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) - self.assertEqual(len(gtc_resp.channel), k_channels) - for i in range(k_channels): - # If no call performed in the channel, there shouldn't be any subchannel - if gtc_resp.channel[i].data.calls_started == 0: - self.assertEqual(len(gtc_resp.channel[i].subchannel_ref), 0) - continue - - # Otherwise, the subchannel should exist - self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0) - gsc_resp = self._channelz_stub.GetSubchannel( - channelz_pb2.GetSubchannelRequest( + self.assertEqual(gtc_resp.channel[i].data.calls_started, + gsc_resp.subchannel.data.calls_started) + self.assertEqual(gtc_resp.channel[i].data.calls_succeeded, + gsc_resp.subchannel.data.calls_succeeded) + self.assertEqual(gtc_resp.channel[i].data.calls_failed, + gsc_resp.subchannel.data.calls_failed) + + def test_server_basic(self): + self._pairs = _generate_channel_server_pairs(1) + resp = self._channelz_stub.GetServers( + channelz_pb2.GetServersRequest(start_server_id=0)) + self.assertEqual(len(resp.server), 1) + + def test_get_one_server(self): + self._pairs = _generate_channel_server_pairs(1) + gss_resp = self._channelz_stub.GetServers( + channelz_pb2.GetServersRequest(start_server_id=0)) + self.assertEqual(len(gss_resp.server), 1) + gs_resp = self._channelz_stub.GetServer( + channelz_pb2.GetServerRequest( + server_id=gss_resp.server[0].ref.server_id)) + self.assertEqual(gss_resp.server[0].ref.server_id, + gs_resp.server.ref.server_id) + + def test_server_call(self): + self._pairs = _generate_channel_server_pairs(1) + 
k_success = 23 + k_failed = 29 + for i in range(k_success): + self._send_successful_unary_unary(0) + for i in range(k_failed): + self._send_failed_unary_unary(0) + + resp = self._channelz_stub.GetServers( + channelz_pb2.GetServersRequest(start_server_id=0)) + self.assertEqual(len(resp.server), 1) + self.assertEqual(resp.server[0].data.calls_started, + k_success + k_failed) + self.assertEqual(resp.server[0].data.calls_succeeded, k_success) + self.assertEqual(resp.server[0].data.calls_failed, k_failed) + + def test_many_subchannels_and_sockets(self): + k_channels = 4 + self._pairs = _generate_channel_server_pairs(k_channels) + k_success = 3 + k_failed = 5 + for i in range(k_success): + self._send_successful_unary_unary(0) + self._send_successful_unary_unary(2) + for i in range(k_failed): + self._send_failed_unary_unary(1) + self._send_failed_unary_unary(2) + + gtc_resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) + self.assertEqual(len(gtc_resp.channel), k_channels) + for i in range(k_channels): + # If no call performed in the channel, there shouldn't be any subchannel + if gtc_resp.channel[i].data.calls_started == 0: + self.assertEqual(len(gtc_resp.channel[i].subchannel_ref), 0) + continue + + # Otherwise, the subchannel should exist + self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0) + gsc_resp = self._channelz_stub.GetSubchannel( + channelz_pb2.GetSubchannelRequest( subchannel_id=gtc_resp.channel[i].subchannel_ref[0]. subchannel_id)) - self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1) - - gs_resp = self._channelz_stub.GetSocket( - channelz_pb2.GetSocketRequest( - socket_id=gsc_resp.subchannel.socket_ref[0].socket_id)) - self.assertEqual(gsc_resp.subchannel.data.calls_started, - gs_resp.socket.data.streams_started) - self.assertEqual(gsc_resp.subchannel.data.calls_started, - gs_resp.socket.data.streams_succeeded) - # Calls started == messages sent, only valid for unary calls - self.assertEqual(gsc_resp.subchannel.data.calls_started, - gs_resp.socket.data.messages_sent) - # Only receive responses when the RPC was successful - self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, - gs_resp.socket.data.messages_received) - - def test_streaming_rpc(self): - self._pairs = _generate_channel_server_pairs(1) - # In C++, the argument for _send_successful_stream_stream is message length. - # Here the argument is still channel idx, to be consistent with the other two. 
- self._send_successful_stream_stream(0) - - gc_resp = self._channelz_stub.GetChannel( - channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) - self.assertEqual(gc_resp.channel.data.calls_started, 1) - self.assertEqual(gc_resp.channel.data.calls_succeeded, 1) - self.assertEqual(gc_resp.channel.data.calls_failed, 0) - # Subchannel exists - self.assertGreater(len(gc_resp.channel.subchannel_ref), 0) - - gsc_resp = self._channelz_stub.GetSubchannel( - channelz_pb2.GetSubchannelRequest( - subchannel_id=gc_resp.channel.subchannel_ref[0].subchannel_id)) - self.assertEqual(gsc_resp.subchannel.data.calls_started, 1) - self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, 1) - self.assertEqual(gsc_resp.subchannel.data.calls_failed, 0) - # Socket exists - self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1) - - gs_resp = self._channelz_stub.GetSocket( - channelz_pb2.GetSocketRequest( - socket_id=gsc_resp.subchannel.socket_ref[0].socket_id)) - self.assertEqual(gs_resp.socket.data.streams_started, 1) - self.assertEqual(gs_resp.socket.data.streams_succeeded, 1) - self.assertEqual(gs_resp.socket.data.streams_failed, 0) - self.assertEqual(gs_resp.socket.data.messages_sent, - test_constants.STREAM_LENGTH) - self.assertEqual(gs_resp.socket.data.messages_received, - test_constants.STREAM_LENGTH) - - def test_server_sockets(self): - self._pairs = _generate_channel_server_pairs(1) - self._send_successful_unary_unary(0) - self._send_failed_unary_unary(0) - - gs_resp = self._channelz_stub.GetServers( - channelz_pb2.GetServersRequest(start_server_id=0)) - self.assertEqual(len(gs_resp.server), 1) - self.assertEqual(gs_resp.server[0].data.calls_started, 2) - self.assertEqual(gs_resp.server[0].data.calls_succeeded, 1) - self.assertEqual(gs_resp.server[0].data.calls_failed, 1) - - gss_resp = self._channelz_stub.GetServerSockets( - channelz_pb2.GetServerSocketsRequest( - server_id=gs_resp.server[0].ref.server_id, start_socket_id=0)) - # If the RPC call failed, it will raise a grpc.RpcError - # So, if there is no exception raised, considered pass - - def test_server_listen_sockets(self): - self._pairs = _generate_channel_server_pairs(1) - - gss_resp = self._channelz_stub.GetServers( - channelz_pb2.GetServersRequest(start_server_id=0)) - self.assertEqual(len(gss_resp.server), 1) - self.assertEqual(len(gss_resp.server[0].listen_socket), 1) - - gs_resp = self._channelz_stub.GetSocket( - channelz_pb2.GetSocketRequest( - socket_id=gss_resp.server[0].listen_socket[0].socket_id)) - # If the RPC call failed, it will raise a grpc.RpcError - # So, if there is no exception raised, considered pass - - def test_invalid_query_get_server(self): - try: - self._channelz_stub.GetServer( - channelz_pb2.GetServerRequest(server_id=10000)) - except BaseException as e: - self.assertIn('StatusCode.NOT_FOUND', str(e)) - else: - self.fail('Invalid query not detected') - - def test_invalid_query_get_channel(self): - try: - self._channelz_stub.GetChannel( - channelz_pb2.GetChannelRequest(channel_id=10000)) - except BaseException as e: - self.assertIn('StatusCode.NOT_FOUND', str(e)) - else: - self.fail('Invalid query not detected') - - def test_invalid_query_get_subchannel(self): - try: - self._channelz_stub.GetSubchannel( - channelz_pb2.GetSubchannelRequest(subchannel_id=10000)) - except BaseException as e: - self.assertIn('StatusCode.NOT_FOUND', str(e)) - else: - self.fail('Invalid query not detected') - - def test_invalid_query_get_socket(self): - try: - self._channelz_stub.GetSocket( - 
channelz_pb2.GetSocketRequest(socket_id=10000)) - except BaseException as e: - self.assertIn('StatusCode.NOT_FOUND', str(e)) - else: - self.fail('Invalid query not detected') - - def test_invalid_query_get_server_sockets(self): - try: - self._channelz_stub.GetServerSockets( - channelz_pb2.GetServerSocketsRequest( - server_id=10000, - start_socket_id=0, - )) - except BaseException as e: - self.assertIn('StatusCode.NOT_FOUND', str(e)) - else: - self.fail('Invalid query not detected') - - -if __name__ == '__main__': - unittest.main(verbosity=2) + self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1) + + gs_resp = self._channelz_stub.GetSocket( + channelz_pb2.GetSocketRequest( + socket_id=gsc_resp.subchannel.socket_ref[0].socket_id)) + self.assertEqual(gsc_resp.subchannel.data.calls_started, + gs_resp.socket.data.streams_started) + self.assertEqual(gsc_resp.subchannel.data.calls_started, + gs_resp.socket.data.streams_succeeded) + # Calls started == messages sent, only valid for unary calls + self.assertEqual(gsc_resp.subchannel.data.calls_started, + gs_resp.socket.data.messages_sent) + # Only receive responses when the RPC was successful + self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, + gs_resp.socket.data.messages_received) + + def test_streaming_rpc(self): + self._pairs = _generate_channel_server_pairs(1) + # In C++, the argument for _send_successful_stream_stream is message length. + # Here the argument is still channel idx, to be consistent with the other two. + self._send_successful_stream_stream(0) + + gc_resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) + self.assertEqual(gc_resp.channel.data.calls_started, 1) + self.assertEqual(gc_resp.channel.data.calls_succeeded, 1) + self.assertEqual(gc_resp.channel.data.calls_failed, 0) + # Subchannel exists + self.assertGreater(len(gc_resp.channel.subchannel_ref), 0) + + gsc_resp = self._channelz_stub.GetSubchannel( + channelz_pb2.GetSubchannelRequest( + subchannel_id=gc_resp.channel.subchannel_ref[0].subchannel_id)) + self.assertEqual(gsc_resp.subchannel.data.calls_started, 1) + self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, 1) + self.assertEqual(gsc_resp.subchannel.data.calls_failed, 0) + # Socket exists + self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1) + + gs_resp = self._channelz_stub.GetSocket( + channelz_pb2.GetSocketRequest( + socket_id=gsc_resp.subchannel.socket_ref[0].socket_id)) + self.assertEqual(gs_resp.socket.data.streams_started, 1) + self.assertEqual(gs_resp.socket.data.streams_succeeded, 1) + self.assertEqual(gs_resp.socket.data.streams_failed, 0) + self.assertEqual(gs_resp.socket.data.messages_sent, + test_constants.STREAM_LENGTH) + self.assertEqual(gs_resp.socket.data.messages_received, + test_constants.STREAM_LENGTH) + + def test_server_sockets(self): + self._pairs = _generate_channel_server_pairs(1) + self._send_successful_unary_unary(0) + self._send_failed_unary_unary(0) + + gs_resp = self._channelz_stub.GetServers( + channelz_pb2.GetServersRequest(start_server_id=0)) + self.assertEqual(len(gs_resp.server), 1) + self.assertEqual(gs_resp.server[0].data.calls_started, 2) + self.assertEqual(gs_resp.server[0].data.calls_succeeded, 1) + self.assertEqual(gs_resp.server[0].data.calls_failed, 1) + + gss_resp = self._channelz_stub.GetServerSockets( + channelz_pb2.GetServerSocketsRequest( + server_id=gs_resp.server[0].ref.server_id, start_socket_id=0)) + # If the RPC call failed, it will raise a grpc.RpcError + # So, if there is no exception 
raised, considered pass + + def test_server_listen_sockets(self): + self._pairs = _generate_channel_server_pairs(1) + + gss_resp = self._channelz_stub.GetServers( + channelz_pb2.GetServersRequest(start_server_id=0)) + self.assertEqual(len(gss_resp.server), 1) + self.assertEqual(len(gss_resp.server[0].listen_socket), 1) + + gs_resp = self._channelz_stub.GetSocket( + channelz_pb2.GetSocketRequest( + socket_id=gss_resp.server[0].listen_socket[0].socket_id)) + # If the RPC call failed, it will raise a grpc.RpcError + # So, if there is no exception raised, considered pass + + def test_invalid_query_get_server(self): + try: + self._channelz_stub.GetServer( + channelz_pb2.GetServerRequest(server_id=10000)) + except BaseException as e: + self.assertIn('StatusCode.NOT_FOUND', str(e)) + else: + self.fail('Invalid query not detected') + + def test_invalid_query_get_channel(self): + try: + self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=10000)) + except BaseException as e: + self.assertIn('StatusCode.NOT_FOUND', str(e)) + else: + self.fail('Invalid query not detected') + + def test_invalid_query_get_subchannel(self): + try: + self._channelz_stub.GetSubchannel( + channelz_pb2.GetSubchannelRequest(subchannel_id=10000)) + except BaseException as e: + self.assertIn('StatusCode.NOT_FOUND', str(e)) + else: + self.fail('Invalid query not detected') + + def test_invalid_query_get_socket(self): + try: + self._channelz_stub.GetSocket( + channelz_pb2.GetSocketRequest(socket_id=10000)) + except BaseException as e: + self.assertIn('StatusCode.NOT_FOUND', str(e)) + else: + self.fail('Invalid query not detected') + + def test_invalid_query_get_server_sockets(self): + try: + self._channelz_stub.GetServerSockets( + channelz_pb2.GetServerSocketsRequest( + server_id=10000, + start_socket_id=0, + )) + except BaseException as e: + self.assertIn('StatusCode.NOT_FOUND', str(e)) + else: + self.fail('Invalid query not detected') + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py index e9db7e5970..9a26bac010 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
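For reference, the channelz servicer test restored above exercises one small, repeatable pattern: start a grpc.server with the ('grpc.enable_channelz', 1) option, register the servicer with channelz.add_channelz_servicer, and query it through channelz_pb2_grpc.ChannelzStub over a separate channel that has channelz disabled, so the query traffic itself does not show up in the results. A minimal standalone sketch of that pattern, assuming grpcio and grpcio-channelz are installed (the helper name _channelz_demo is illustrative and not part of this diff), might look like:

    # Minimal sketch of the channelz pattern used by the test above.
    from concurrent import futures

    import grpc
    from grpc_channelz.v1 import channelz
    from grpc_channelz.v1 import channelz_pb2
    from grpc_channelz.v1 import channelz_pb2_grpc


    def _channelz_demo():  # hypothetical helper, for illustration only
        # Server under observation: channelz explicitly enabled.
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=3),
                             options=(('grpc.enable_channelz', 1),))
        channelz.add_channelz_servicer(server)
        port = server.add_insecure_port('[::]:0')
        server.start()

        # Query channel: channelz disabled so the introspection RPCs are
        # not counted among the channels being inspected.
        with grpc.insecure_channel('localhost:%d' % port,
                                   options=(('grpc.enable_channelz', 0),)) as channel:
            stub = channelz_pb2_grpc.ChannelzStub(channel)
            resp = stub.GetServers(
                channelz_pb2.GetServersRequest(start_server_id=0))
            print('servers known to channelz:', len(resp.server))

        server.stop(None)

The same stub also serves GetTopChannels, GetChannel, GetSubchannel and GetSocket requests, which is how the assertions in the test file above drill from channel-level call counts down to per-socket stream and message counts.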
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py index a1b77bb48e..e2eff257fa 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py @@ -1,151 +1,151 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client-side fork interop tests as a unit test.""" - -import six -import subprocess -import sys -import threading -import unittest -from grpc._cython import cygrpc -from tests.fork import methods - -# New instance of multiprocessing.Process using fork without exec can and will -# hang if the Python process has any other threads running. This includes the -# additional thread spawned by our _runner.py class. So in order to test our -# compatibility with multiprocessing, we first fork+exec a new process to ensure -# we don't have any conflicting background threads. -_CLIENT_FORK_SCRIPT_TEMPLATE = """if True: - import os - import sys - from grpc._cython import cygrpc - from tests.fork import methods - - cygrpc._GRPC_ENABLE_FORK_SUPPORT = True - os.environ['GRPC_POLL_STRATEGY'] = 'epoll1' - methods.TestCase.%s.run_test({ - 'server_host': 'localhost', - 'server_port': %d, - 'use_tls': False - }) -""" -_SUBPROCESS_TIMEOUT_S = 30 - - -@unittest.skipUnless( - sys.platform.startswith("linux"), - "not supported on windows, and fork+exec networking blocked on mac") -@unittest.skipUnless(six.PY2, "https://github.com/grpc/grpc/issues/18075") -class ForkInteropTest(unittest.TestCase): - - def setUp(self): - start_server_script = """if True: - import sys - import time - - import grpc - from src.proto.grpc.testing import test_pb2_grpc - from tests.interop import service as interop_service - from tests.unit import test_common - - server = test_common.test_server() - test_pb2_grpc.add_TestServiceServicer_to_server( - interop_service.TestService(), server) - port = server.add_insecure_port('[::]:0') - server.start() - print(port) - sys.stdout.flush() - while True: - time.sleep(1) - """ - self._server_process = subprocess.Popen( - [sys.executable, '-c', start_server_script], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - timer = threading.Timer(_SUBPROCESS_TIMEOUT_S, - self._server_process.kill) - try: - timer.start() - self._port = int(self._server_process.stdout.readline()) - except ValueError: - raise Exception('Failed to get port from server') - finally: - timer.cancel() - - def testConnectivityWatch(self): - self._verifyTestCase(methods.TestCase.CONNECTIVITY_WATCH) - - def testCloseChannelBeforeFork(self): - self._verifyTestCase(methods.TestCase.CLOSE_CHANNEL_BEFORE_FORK) - - def testAsyncUnarySameChannel(self): - self._verifyTestCase(methods.TestCase.ASYNC_UNARY_SAME_CHANNEL) - - def testAsyncUnaryNewChannel(self): - self._verifyTestCase(methods.TestCase.ASYNC_UNARY_NEW_CHANNEL) - - def testBlockingUnarySameChannel(self): - 
self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_SAME_CHANNEL) - - def testBlockingUnaryNewChannel(self): - self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_NEW_CHANNEL) - - def testInProgressBidiContinueCall(self): - self._verifyTestCase(methods.TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL) - - def testInProgressBidiSameChannelAsyncCall(self): - self._verifyTestCase( - methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL) - - def testInProgressBidiSameChannelBlockingCall(self): - self._verifyTestCase( - methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL) - - def testInProgressBidiNewChannelAsyncCall(self): - self._verifyTestCase( - methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL) - - def testInProgressBidiNewChannelBlockingCall(self): - self._verifyTestCase( - methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL) - - def tearDown(self): - self._server_process.kill() - - def _verifyTestCase(self, test_case): - script = _CLIENT_FORK_SCRIPT_TEMPLATE % (test_case.name, self._port) +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Client-side fork interop tests as a unit test.""" + +import six +import subprocess +import sys +import threading +import unittest +from grpc._cython import cygrpc +from tests.fork import methods + +# New instance of multiprocessing.Process using fork without exec can and will +# hang if the Python process has any other threads running. This includes the +# additional thread spawned by our _runner.py class. So in order to test our +# compatibility with multiprocessing, we first fork+exec a new process to ensure +# we don't have any conflicting background threads. 
+_CLIENT_FORK_SCRIPT_TEMPLATE = """if True: + import os + import sys + from grpc._cython import cygrpc + from tests.fork import methods + + cygrpc._GRPC_ENABLE_FORK_SUPPORT = True + os.environ['GRPC_POLL_STRATEGY'] = 'epoll1' + methods.TestCase.%s.run_test({ + 'server_host': 'localhost', + 'server_port': %d, + 'use_tls': False + }) +""" +_SUBPROCESS_TIMEOUT_S = 30 + + +@unittest.skipUnless( + sys.platform.startswith("linux"), + "not supported on windows, and fork+exec networking blocked on mac") +@unittest.skipUnless(six.PY2, "https://github.com/grpc/grpc/issues/18075") +class ForkInteropTest(unittest.TestCase): + + def setUp(self): + start_server_script = """if True: + import sys + import time + + import grpc + from src.proto.grpc.testing import test_pb2_grpc + from tests.interop import service as interop_service + from tests.unit import test_common + + server = test_common.test_server() + test_pb2_grpc.add_TestServiceServicer_to_server( + interop_service.TestService(), server) + port = server.add_insecure_port('[::]:0') + server.start() + print(port) + sys.stdout.flush() + while True: + time.sleep(1) + """ + self._server_process = subprocess.Popen( + [sys.executable, '-c', start_server_script], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + timer = threading.Timer(_SUBPROCESS_TIMEOUT_S, + self._server_process.kill) + try: + timer.start() + self._port = int(self._server_process.stdout.readline()) + except ValueError: + raise Exception('Failed to get port from server') + finally: + timer.cancel() + + def testConnectivityWatch(self): + self._verifyTestCase(methods.TestCase.CONNECTIVITY_WATCH) + + def testCloseChannelBeforeFork(self): + self._verifyTestCase(methods.TestCase.CLOSE_CHANNEL_BEFORE_FORK) + + def testAsyncUnarySameChannel(self): + self._verifyTestCase(methods.TestCase.ASYNC_UNARY_SAME_CHANNEL) + + def testAsyncUnaryNewChannel(self): + self._verifyTestCase(methods.TestCase.ASYNC_UNARY_NEW_CHANNEL) + + def testBlockingUnarySameChannel(self): + self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_SAME_CHANNEL) + + def testBlockingUnaryNewChannel(self): + self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_NEW_CHANNEL) + + def testInProgressBidiContinueCall(self): + self._verifyTestCase(methods.TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL) + + def testInProgressBidiSameChannelAsyncCall(self): + self._verifyTestCase( + methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL) + + def testInProgressBidiSameChannelBlockingCall(self): + self._verifyTestCase( + methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL) + + def testInProgressBidiNewChannelAsyncCall(self): + self._verifyTestCase( + methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL) + + def testInProgressBidiNewChannelBlockingCall(self): + self._verifyTestCase( + methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL) + + def tearDown(self): + self._server_process.kill() + + def _verifyTestCase(self, test_case): + script = _CLIENT_FORK_SCRIPT_TEMPLATE % (test_case.name, self._port) process = subprocess.Popen([sys.executable, '-c', script], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - timer = threading.Timer(_SUBPROCESS_TIMEOUT_S, process.kill) - try: - timer.start() - try: - out, err = process.communicate(timeout=_SUBPROCESS_TIMEOUT_S) - except TypeError: - # The timeout parameter was added in Python 3.3. 
- out, err = process.communicate() - except subprocess.TimeoutExpired: - process.kill() - raise RuntimeError('Process failed to terminate') - finally: - timer.cancel() - self.assertEqual( - 0, process.returncode, - 'process failed with exit code %d (stdout: %s, stderr: %s)' % - (process.returncode, out, err)) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + timer = threading.Timer(_SUBPROCESS_TIMEOUT_S, process.kill) + try: + timer.start() + try: + out, err = process.communicate(timeout=_SUBPROCESS_TIMEOUT_S) + except TypeError: + # The timeout parameter was added in Python 3.3. + out, err = process.communicate() + except subprocess.TimeoutExpired: + process.kill() + raise RuntimeError('Process failed to terminate') + finally: + timer.cancel() + self.assertEqual( + 0, process.returncode, + 'process failed with exit code %d (stdout: %s, stderr: %s)' % + (process.returncode, out, err)) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py index f68e8f731d..852e6da4d6 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py @@ -1,35 +1,35 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""The Python implementation of the GRPC interoperability test client.""" - -import argparse -import logging -import sys - -from tests.fork import methods - - -def _args(): - - def parse_bool(value): - if value == 'true': - return True - if value == 'false': - return False - raise argparse.ArgumentTypeError('Only true/false allowed') - - parser = argparse.ArgumentParser() +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
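Aside on the setUp() pattern above: the server subprocess reports its ephemeral port back to the test by printing it as the first line of stdout, which the parent reads while a kill-timer is armed in case the child never gets that far. A self-contained sketch of the same handshake (the child body here is a stand-in, not the real interop server):

# Illustrative stand-in for the stdout port handshake used in setUp() above.
import subprocess
import sys

_CHILD_SOURCE = """if True:
    import sys
    port = 54321  # stand-in for server.add_insecure_port('[::]:0')
    print(port)
    sys.stdout.flush()
"""

child = subprocess.Popen([sys.executable, '-c', _CHILD_SOURCE],
                         stdout=subprocess.PIPE)
port = int(child.stdout.readline())
child.wait()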
+"""The Python implementation of the GRPC interoperability test client.""" + +import argparse +import logging +import sys + +from tests.fork import methods + + +def _args(): + + def parse_bool(value): + if value == 'true': + return True + if value == 'false': + return False + raise argparse.ArgumentTypeError('Only true/false allowed') + + parser = argparse.ArgumentParser() parser.add_argument('--server_host', default="localhost", type=str, @@ -46,27 +46,27 @@ def _args(): default=False, type=parse_bool, help='require a secure connection') - return parser.parse_args() - - -def _test_case_from_arg(test_case_arg): - for test_case in methods.TestCase: - if test_case_arg == test_case.value: - return test_case - else: - raise ValueError('No test case "%s"!' % test_case_arg) - - -def test_fork(): - logging.basicConfig(level=logging.INFO) - args = vars(_args()) - if args['test_case'] == "all": - for test_case in methods.TestCase: - test_case.run_test(args) - else: - test_case = _test_case_from_arg(args['test_case']) - test_case.run_test(args) - - -if __name__ == '__main__': - test_fork() + return parser.parse_args() + + +def _test_case_from_arg(test_case_arg): + for test_case in methods.TestCase: + if test_case_arg == test_case.value: + return test_case + else: + raise ValueError('No test case "%s"!' % test_case_arg) + + +def test_fork(): + logging.basicConfig(level=logging.INFO) + args = vars(_args()) + if args['test_case'] == "all": + for test_case in methods.TestCase: + test_case.run_test(args) + else: + test_case = _test_case_from_arg(args['test_case']) + test_case.run_test(args) + + +if __name__ == '__main__': + test_fork() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py index 08674a5903..2123c69916 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py @@ -1,451 +1,451 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Implementations of fork support test methods.""" - -import enum -import json -import logging -import multiprocessing -import os -import threading -import time - -import grpc - -from six.moves import queue - -from src.proto.grpc.testing import empty_pb2 -from src.proto.grpc.testing import messages_pb2 -from src.proto.grpc.testing import test_pb2_grpc - -_LOGGER = logging.getLogger(__name__) -_RPC_TIMEOUT_S = 10 -_CHILD_FINISH_TIMEOUT_S = 60 - - -def _channel(args): - target = '{}:{}'.format(args['server_host'], args['server_port']) - if args['use_tls']: - channel_credentials = grpc.ssl_channel_credentials() - channel = grpc.secure_channel(target, channel_credentials) - else: - channel = grpc.insecure_channel(target) - return channel - - -def _validate_payload_type_and_length(response, expected_type, expected_length): - if response.payload.type is not expected_type: - raise ValueError('expected payload type %s, got %s' % - (expected_type, type(response.payload.type))) - elif len(response.payload.body) != expected_length: - raise ValueError('expected payload body size %d, got %d' % - (expected_length, len(response.payload.body))) - - -def _async_unary(stub): - size = 314159 - request = messages_pb2.SimpleRequest( - response_type=messages_pb2.COMPRESSABLE, - response_size=size, - payload=messages_pb2.Payload(body=b'\x00' * 271828)) - response_future = stub.UnaryCall.future(request, timeout=_RPC_TIMEOUT_S) - response = response_future.result() - _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size) - - -def _blocking_unary(stub): - size = 314159 - request = messages_pb2.SimpleRequest( - response_type=messages_pb2.COMPRESSABLE, - response_size=size, - payload=messages_pb2.Payload(body=b'\x00' * 271828)) - response = stub.UnaryCall(request, timeout=_RPC_TIMEOUT_S) - _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size) - - -class _Pipe(object): - - def __init__(self): - self._condition = threading.Condition() - self._values = [] - self._open = True - - def __iter__(self): - return self - - def __next__(self): - return self.next() - - def next(self): - with self._condition: - while not self._values and self._open: - self._condition.wait() - if self._values: - return self._values.pop(0) - else: - raise StopIteration() - - def add(self, value): - with self._condition: - self._values.append(value) - self._condition.notify() - - def close(self): - with self._condition: - self._open = False - self._condition.notify() - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - self.close() - - -class _ChildProcess(object): - - def __init__(self, task, args=None): - if args is None: - args = () - self._exceptions = multiprocessing.Queue() - - def record_exceptions(): - try: - task(*args) - except grpc.RpcError as rpc_error: - self._exceptions.put('RpcError: %s' % rpc_error) - except Exception as e: # pylint: disable=broad-except - self._exceptions.put(e) - - self._process = multiprocessing.Process(target=record_exceptions) - - def start(self): - self._process.start() - - def finish(self): - self._process.join(timeout=_CHILD_FINISH_TIMEOUT_S) - if self._process.is_alive(): - raise RuntimeError('Child process did not terminate') - if self._process.exitcode != 0: - raise ValueError('Child process failed with exitcode %d' % - self._process.exitcode) - try: - exception = self._exceptions.get(block=False) - raise ValueError('Child process failed: %s' % exception) - except queue.Empty: - pass - - -def 
_async_unary_same_channel(channel): - - def child_target(): - try: - _async_unary(stub) - raise Exception( - 'Child should not be able to re-use channel after fork') - except ValueError as expected_value_error: - pass - - stub = test_pb2_grpc.TestServiceStub(channel) - _async_unary(stub) - child_process = _ChildProcess(child_target) - child_process.start() - _async_unary(stub) - child_process.finish() - - -def _async_unary_new_channel(channel, args): - - def child_target(): - with _channel(args) as child_channel: - child_stub = test_pb2_grpc.TestServiceStub(child_channel) - _async_unary(child_stub) - child_channel.close() - - stub = test_pb2_grpc.TestServiceStub(channel) - _async_unary(stub) - child_process = _ChildProcess(child_target) - child_process.start() - _async_unary(stub) - child_process.finish() - - -def _blocking_unary_same_channel(channel): - - def child_target(): - try: - _blocking_unary(stub) - raise Exception( - 'Child should not be able to re-use channel after fork') - except ValueError as expected_value_error: - pass - - stub = test_pb2_grpc.TestServiceStub(channel) - _blocking_unary(stub) - child_process = _ChildProcess(child_target) - child_process.start() - child_process.finish() - - -def _blocking_unary_new_channel(channel, args): - - def child_target(): - with _channel(args) as child_channel: - child_stub = test_pb2_grpc.TestServiceStub(child_channel) - _blocking_unary(child_stub) - - stub = test_pb2_grpc.TestServiceStub(channel) - _blocking_unary(stub) - child_process = _ChildProcess(child_target) - child_process.start() - _blocking_unary(stub) - child_process.finish() - - -# Verify that the fork channel registry can handle already closed channels -def _close_channel_before_fork(channel, args): - - def child_target(): - new_channel.close() - with _channel(args) as child_channel: - child_stub = test_pb2_grpc.TestServiceStub(child_channel) - _blocking_unary(child_stub) - - stub = test_pb2_grpc.TestServiceStub(channel) - _blocking_unary(stub) - channel.close() - - with _channel(args) as new_channel: - new_stub = test_pb2_grpc.TestServiceStub(new_channel) - child_process = _ChildProcess(child_target) - child_process.start() - _blocking_unary(new_stub) - child_process.finish() - - -def _connectivity_watch(channel, args): - - parent_states = [] - parent_channel_ready_event = threading.Event() - - def child_target(): - - child_channel_ready_event = threading.Event() - - def child_connectivity_callback(state): - if state is grpc.ChannelConnectivity.READY: - child_channel_ready_event.set() - - with _channel(args) as child_channel: - child_stub = test_pb2_grpc.TestServiceStub(child_channel) - child_channel.subscribe(child_connectivity_callback) - _async_unary(child_stub) - if not child_channel_ready_event.wait(timeout=_RPC_TIMEOUT_S): - raise ValueError('Channel did not move to READY') - if len(parent_states) > 1: - raise ValueError( - 'Received connectivity updates on parent callback', - parent_states) - child_channel.unsubscribe(child_connectivity_callback) - - def parent_connectivity_callback(state): - parent_states.append(state) - if state is grpc.ChannelConnectivity.READY: - parent_channel_ready_event.set() - - channel.subscribe(parent_connectivity_callback) - stub = test_pb2_grpc.TestServiceStub(channel) - child_process = _ChildProcess(child_target) - child_process.start() - _async_unary(stub) - if not parent_channel_ready_event.wait(timeout=_RPC_TIMEOUT_S): - raise ValueError('Channel did not move to READY') - channel.unsubscribe(parent_connectivity_callback) - 
child_process.finish() - - -def _ping_pong_with_child_processes_after_first_response( - channel, args, child_target, run_after_close=True): - request_response_sizes = ( - 31415, - 9, - 2653, - 58979, - ) - request_payload_sizes = ( - 27182, - 8, - 1828, - 45904, - ) - stub = test_pb2_grpc.TestServiceStub(channel) - pipe = _Pipe() - parent_bidi_call = stub.FullDuplexCall(pipe) - child_processes = [] - first_message_received = False - for response_size, payload_size in zip(request_response_sizes, - request_payload_sizes): - request = messages_pb2.StreamingOutputCallRequest( - response_type=messages_pb2.COMPRESSABLE, +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Implementations of fork support test methods.""" + +import enum +import json +import logging +import multiprocessing +import os +import threading +import time + +import grpc + +from six.moves import queue + +from src.proto.grpc.testing import empty_pb2 +from src.proto.grpc.testing import messages_pb2 +from src.proto.grpc.testing import test_pb2_grpc + +_LOGGER = logging.getLogger(__name__) +_RPC_TIMEOUT_S = 10 +_CHILD_FINISH_TIMEOUT_S = 60 + + +def _channel(args): + target = '{}:{}'.format(args['server_host'], args['server_port']) + if args['use_tls']: + channel_credentials = grpc.ssl_channel_credentials() + channel = grpc.secure_channel(target, channel_credentials) + else: + channel = grpc.insecure_channel(target) + return channel + + +def _validate_payload_type_and_length(response, expected_type, expected_length): + if response.payload.type is not expected_type: + raise ValueError('expected payload type %s, got %s' % + (expected_type, type(response.payload.type))) + elif len(response.payload.body) != expected_length: + raise ValueError('expected payload body size %d, got %d' % + (expected_length, len(response.payload.body))) + + +def _async_unary(stub): + size = 314159 + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=size, + payload=messages_pb2.Payload(body=b'\x00' * 271828)) + response_future = stub.UnaryCall.future(request, timeout=_RPC_TIMEOUT_S) + response = response_future.result() + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size) + + +def _blocking_unary(stub): + size = 314159 + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=size, + payload=messages_pb2.Payload(body=b'\x00' * 271828)) + response = stub.UnaryCall(request, timeout=_RPC_TIMEOUT_S) + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size) + + +class _Pipe(object): + + def __init__(self): + self._condition = threading.Condition() + self._values = [] + self._open = True + + def __iter__(self): + return self + + def __next__(self): + return self.next() + + def next(self): + with self._condition: + while not self._values and self._open: + self._condition.wait() + if self._values: + return self._values.pop(0) + else: + raise StopIteration() + + def add(self, value): + with 
self._condition: + self._values.append(value) + self._condition.notify() + + def close(self): + with self._condition: + self._open = False + self._condition.notify() + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + +class _ChildProcess(object): + + def __init__(self, task, args=None): + if args is None: + args = () + self._exceptions = multiprocessing.Queue() + + def record_exceptions(): + try: + task(*args) + except grpc.RpcError as rpc_error: + self._exceptions.put('RpcError: %s' % rpc_error) + except Exception as e: # pylint: disable=broad-except + self._exceptions.put(e) + + self._process = multiprocessing.Process(target=record_exceptions) + + def start(self): + self._process.start() + + def finish(self): + self._process.join(timeout=_CHILD_FINISH_TIMEOUT_S) + if self._process.is_alive(): + raise RuntimeError('Child process did not terminate') + if self._process.exitcode != 0: + raise ValueError('Child process failed with exitcode %d' % + self._process.exitcode) + try: + exception = self._exceptions.get(block=False) + raise ValueError('Child process failed: %s' % exception) + except queue.Empty: + pass + + +def _async_unary_same_channel(channel): + + def child_target(): + try: + _async_unary(stub) + raise Exception( + 'Child should not be able to re-use channel after fork') + except ValueError as expected_value_error: + pass + + stub = test_pb2_grpc.TestServiceStub(channel) + _async_unary(stub) + child_process = _ChildProcess(child_target) + child_process.start() + _async_unary(stub) + child_process.finish() + + +def _async_unary_new_channel(channel, args): + + def child_target(): + with _channel(args) as child_channel: + child_stub = test_pb2_grpc.TestServiceStub(child_channel) + _async_unary(child_stub) + child_channel.close() + + stub = test_pb2_grpc.TestServiceStub(channel) + _async_unary(stub) + child_process = _ChildProcess(child_target) + child_process.start() + _async_unary(stub) + child_process.finish() + + +def _blocking_unary_same_channel(channel): + + def child_target(): + try: + _blocking_unary(stub) + raise Exception( + 'Child should not be able to re-use channel after fork') + except ValueError as expected_value_error: + pass + + stub = test_pb2_grpc.TestServiceStub(channel) + _blocking_unary(stub) + child_process = _ChildProcess(child_target) + child_process.start() + child_process.finish() + + +def _blocking_unary_new_channel(channel, args): + + def child_target(): + with _channel(args) as child_channel: + child_stub = test_pb2_grpc.TestServiceStub(child_channel) + _blocking_unary(child_stub) + + stub = test_pb2_grpc.TestServiceStub(channel) + _blocking_unary(stub) + child_process = _ChildProcess(child_target) + child_process.start() + _blocking_unary(stub) + child_process.finish() + + +# Verify that the fork channel registry can handle already closed channels +def _close_channel_before_fork(channel, args): + + def child_target(): + new_channel.close() + with _channel(args) as child_channel: + child_stub = test_pb2_grpc.TestServiceStub(child_channel) + _blocking_unary(child_stub) + + stub = test_pb2_grpc.TestServiceStub(channel) + _blocking_unary(stub) + channel.close() + + with _channel(args) as new_channel: + new_stub = test_pb2_grpc.TestServiceStub(new_channel) + child_process = _ChildProcess(child_target) + child_process.start() + _blocking_unary(new_stub) + child_process.finish() + + +def _connectivity_watch(channel, args): + + parent_states = [] + parent_channel_ready_event = threading.Event() + + def 
child_target(): + + child_channel_ready_event = threading.Event() + + def child_connectivity_callback(state): + if state is grpc.ChannelConnectivity.READY: + child_channel_ready_event.set() + + with _channel(args) as child_channel: + child_stub = test_pb2_grpc.TestServiceStub(child_channel) + child_channel.subscribe(child_connectivity_callback) + _async_unary(child_stub) + if not child_channel_ready_event.wait(timeout=_RPC_TIMEOUT_S): + raise ValueError('Channel did not move to READY') + if len(parent_states) > 1: + raise ValueError( + 'Received connectivity updates on parent callback', + parent_states) + child_channel.unsubscribe(child_connectivity_callback) + + def parent_connectivity_callback(state): + parent_states.append(state) + if state is grpc.ChannelConnectivity.READY: + parent_channel_ready_event.set() + + channel.subscribe(parent_connectivity_callback) + stub = test_pb2_grpc.TestServiceStub(channel) + child_process = _ChildProcess(child_target) + child_process.start() + _async_unary(stub) + if not parent_channel_ready_event.wait(timeout=_RPC_TIMEOUT_S): + raise ValueError('Channel did not move to READY') + channel.unsubscribe(parent_connectivity_callback) + child_process.finish() + + +def _ping_pong_with_child_processes_after_first_response( + channel, args, child_target, run_after_close=True): + request_response_sizes = ( + 31415, + 9, + 2653, + 58979, + ) + request_payload_sizes = ( + 27182, + 8, + 1828, + 45904, + ) + stub = test_pb2_grpc.TestServiceStub(channel) + pipe = _Pipe() + parent_bidi_call = stub.FullDuplexCall(pipe) + child_processes = [] + first_message_received = False + for response_size, payload_size in zip(request_response_sizes, + request_payload_sizes): + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, response_parameters=(messages_pb2.ResponseParameters( size=response_size),), - payload=messages_pb2.Payload(body=b'\x00' * payload_size)) - pipe.add(request) - if first_message_received: - child_process = _ChildProcess(child_target, - (parent_bidi_call, channel, args)) - child_process.start() - child_processes.append(child_process) - response = next(parent_bidi_call) - first_message_received = True - child_process = _ChildProcess(child_target, - (parent_bidi_call, channel, args)) - child_process.start() - child_processes.append(child_process) - _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, - response_size) - pipe.close() - if run_after_close: - child_process = _ChildProcess(child_target, - (parent_bidi_call, channel, args)) - child_process.start() - child_processes.append(child_process) - for child_process in child_processes: - child_process.finish() - - -def _in_progress_bidi_continue_call(channel): - - def child_target(parent_bidi_call, parent_channel, args): - stub = test_pb2_grpc.TestServiceStub(parent_channel) - try: - _async_unary(stub) - raise Exception( - 'Child should not be able to re-use channel after fork') - except ValueError as expected_value_error: - pass - inherited_code = parent_bidi_call.code() - inherited_details = parent_bidi_call.details() - if inherited_code != grpc.StatusCode.CANCELLED: + payload=messages_pb2.Payload(body=b'\x00' * payload_size)) + pipe.add(request) + if first_message_received: + child_process = _ChildProcess(child_target, + (parent_bidi_call, channel, args)) + child_process.start() + child_processes.append(child_process) + response = next(parent_bidi_call) + first_message_received = True + child_process = _ChildProcess(child_target, + 
(parent_bidi_call, channel, args)) + child_process.start() + child_processes.append(child_process) + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, + response_size) + pipe.close() + if run_after_close: + child_process = _ChildProcess(child_target, + (parent_bidi_call, channel, args)) + child_process.start() + child_processes.append(child_process) + for child_process in child_processes: + child_process.finish() + + +def _in_progress_bidi_continue_call(channel): + + def child_target(parent_bidi_call, parent_channel, args): + stub = test_pb2_grpc.TestServiceStub(parent_channel) + try: + _async_unary(stub) + raise Exception( + 'Child should not be able to re-use channel after fork') + except ValueError as expected_value_error: + pass + inherited_code = parent_bidi_call.code() + inherited_details = parent_bidi_call.details() + if inherited_code != grpc.StatusCode.CANCELLED: raise ValueError('Expected inherited code CANCELLED, got %s' % inherited_code) - if inherited_details != 'Channel closed due to fork': - raise ValueError( - 'Expected inherited details Channel closed due to fork, got %s' - % inherited_details) - - # Don't run child_target after closing the parent call, as the call may have - # received a status from the server before fork occurs. + if inherited_details != 'Channel closed due to fork': + raise ValueError( + 'Expected inherited details Channel closed due to fork, got %s' + % inherited_details) + + # Don't run child_target after closing the parent call, as the call may have + # received a status from the server before fork occurs. _ping_pong_with_child_processes_after_first_response(channel, None, child_target, run_after_close=False) - - -def _in_progress_bidi_same_channel_async_call(channel): - - def child_target(parent_bidi_call, parent_channel, args): - stub = test_pb2_grpc.TestServiceStub(parent_channel) - try: - _async_unary(stub) - raise Exception( - 'Child should not be able to re-use channel after fork') - except ValueError as expected_value_error: - pass - - _ping_pong_with_child_processes_after_first_response( - channel, None, child_target) - - -def _in_progress_bidi_same_channel_blocking_call(channel): - - def child_target(parent_bidi_call, parent_channel, args): - stub = test_pb2_grpc.TestServiceStub(parent_channel) - try: - _blocking_unary(stub) - raise Exception( - 'Child should not be able to re-use channel after fork') - except ValueError as expected_value_error: - pass - - _ping_pong_with_child_processes_after_first_response( - channel, None, child_target) - - -def _in_progress_bidi_new_channel_async_call(channel, args): - - def child_target(parent_bidi_call, parent_channel, args): - with _channel(args) as channel: - stub = test_pb2_grpc.TestServiceStub(channel) - _async_unary(stub) - - _ping_pong_with_child_processes_after_first_response( - channel, args, child_target) - - -def _in_progress_bidi_new_channel_blocking_call(channel, args): - - def child_target(parent_bidi_call, parent_channel, args): - with _channel(args) as channel: - stub = test_pb2_grpc.TestServiceStub(channel) - _blocking_unary(stub) - - _ping_pong_with_child_processes_after_first_response( - channel, args, child_target) - - -@enum.unique -class TestCase(enum.Enum): - - CONNECTIVITY_WATCH = 'connectivity_watch' - CLOSE_CHANNEL_BEFORE_FORK = 'close_channel_before_fork' - ASYNC_UNARY_SAME_CHANNEL = 'async_unary_same_channel' - ASYNC_UNARY_NEW_CHANNEL = 'async_unary_new_channel' - BLOCKING_UNARY_SAME_CHANNEL = 'blocking_unary_same_channel' - BLOCKING_UNARY_NEW_CHANNEL = 
'blocking_unary_new_channel' - IN_PROGRESS_BIDI_CONTINUE_CALL = 'in_progress_bidi_continue_call' - IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL = 'in_progress_bidi_same_channel_async_call' - IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL = 'in_progress_bidi_same_channel_blocking_call' - IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL = 'in_progress_bidi_new_channel_async_call' - IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL = 'in_progress_bidi_new_channel_blocking_call' - - def run_test(self, args): - _LOGGER.info("Running %s", self) - channel = _channel(args) - if self is TestCase.ASYNC_UNARY_SAME_CHANNEL: - _async_unary_same_channel(channel) - elif self is TestCase.ASYNC_UNARY_NEW_CHANNEL: - _async_unary_new_channel(channel, args) - elif self is TestCase.BLOCKING_UNARY_SAME_CHANNEL: - _blocking_unary_same_channel(channel) - elif self is TestCase.BLOCKING_UNARY_NEW_CHANNEL: - _blocking_unary_new_channel(channel, args) - elif self is TestCase.CLOSE_CHANNEL_BEFORE_FORK: - _close_channel_before_fork(channel, args) - elif self is TestCase.CONNECTIVITY_WATCH: - _connectivity_watch(channel, args) - elif self is TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL: - _in_progress_bidi_continue_call(channel) - elif self is TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL: - _in_progress_bidi_same_channel_async_call(channel) - elif self is TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL: - _in_progress_bidi_same_channel_blocking_call(channel) - elif self is TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL: - _in_progress_bidi_new_channel_async_call(channel, args) - elif self is TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL: - _in_progress_bidi_new_channel_blocking_call(channel, args) - else: + + +def _in_progress_bidi_same_channel_async_call(channel): + + def child_target(parent_bidi_call, parent_channel, args): + stub = test_pb2_grpc.TestServiceStub(parent_channel) + try: + _async_unary(stub) + raise Exception( + 'Child should not be able to re-use channel after fork') + except ValueError as expected_value_error: + pass + + _ping_pong_with_child_processes_after_first_response( + channel, None, child_target) + + +def _in_progress_bidi_same_channel_blocking_call(channel): + + def child_target(parent_bidi_call, parent_channel, args): + stub = test_pb2_grpc.TestServiceStub(parent_channel) + try: + _blocking_unary(stub) + raise Exception( + 'Child should not be able to re-use channel after fork') + except ValueError as expected_value_error: + pass + + _ping_pong_with_child_processes_after_first_response( + channel, None, child_target) + + +def _in_progress_bidi_new_channel_async_call(channel, args): + + def child_target(parent_bidi_call, parent_channel, args): + with _channel(args) as channel: + stub = test_pb2_grpc.TestServiceStub(channel) + _async_unary(stub) + + _ping_pong_with_child_processes_after_first_response( + channel, args, child_target) + + +def _in_progress_bidi_new_channel_blocking_call(channel, args): + + def child_target(parent_bidi_call, parent_channel, args): + with _channel(args) as channel: + stub = test_pb2_grpc.TestServiceStub(channel) + _blocking_unary(stub) + + _ping_pong_with_child_processes_after_first_response( + channel, args, child_target) + + +@enum.unique +class TestCase(enum.Enum): + + CONNECTIVITY_WATCH = 'connectivity_watch' + CLOSE_CHANNEL_BEFORE_FORK = 'close_channel_before_fork' + ASYNC_UNARY_SAME_CHANNEL = 'async_unary_same_channel' + ASYNC_UNARY_NEW_CHANNEL = 'async_unary_new_channel' + BLOCKING_UNARY_SAME_CHANNEL = 'blocking_unary_same_channel' + BLOCKING_UNARY_NEW_CHANNEL = 
'blocking_unary_new_channel' + IN_PROGRESS_BIDI_CONTINUE_CALL = 'in_progress_bidi_continue_call' + IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL = 'in_progress_bidi_same_channel_async_call' + IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL = 'in_progress_bidi_same_channel_blocking_call' + IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL = 'in_progress_bidi_new_channel_async_call' + IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL = 'in_progress_bidi_new_channel_blocking_call' + + def run_test(self, args): + _LOGGER.info("Running %s", self) + channel = _channel(args) + if self is TestCase.ASYNC_UNARY_SAME_CHANNEL: + _async_unary_same_channel(channel) + elif self is TestCase.ASYNC_UNARY_NEW_CHANNEL: + _async_unary_new_channel(channel, args) + elif self is TestCase.BLOCKING_UNARY_SAME_CHANNEL: + _blocking_unary_same_channel(channel) + elif self is TestCase.BLOCKING_UNARY_NEW_CHANNEL: + _blocking_unary_new_channel(channel, args) + elif self is TestCase.CLOSE_CHANNEL_BEFORE_FORK: + _close_channel_before_fork(channel, args) + elif self is TestCase.CONNECTIVITY_WATCH: + _connectivity_watch(channel, args) + elif self is TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL: + _in_progress_bidi_continue_call(channel) + elif self is TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL: + _in_progress_bidi_same_channel_async_call(channel) + elif self is TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL: + _in_progress_bidi_same_channel_blocking_call(channel) + elif self is TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL: + _in_progress_bidi_new_channel_async_call(channel, args) + elif self is TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL: + _in_progress_bidi_new_channel_blocking_call(channel, args) + else: raise NotImplementedError('Test case "%s" not implemented!' % self.name) - channel.close() + channel.close() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py index 8d89990e82..5772620b60 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
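For orientation, TestCase.run_test() above is the single entry point that both the CLI client and the fork interop unit test go through. A hedged example of driving one case directly, mirroring the script template earlier in this diff (the port is illustrative):

# Illustrative: run one fork test case against a server already listening on
# the given port; the dictionary keys match those read by methods.py above.
from tests.fork import methods

methods.TestCase.BLOCKING_UNARY_NEW_CHANNEL.run_test({
    'server_host': 'localhost',
    'server_port': 50051,
    'use_tls': False,
})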
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py index 9484a9edde..01345aaca0 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py @@ -1,282 +1,282 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests of grpc_health.v1.health.""" - -import logging -import threading -import time -import unittest - -import grpc - -from grpc_health.v1 import health -from grpc_health.v1 import health_pb2 -from grpc_health.v1 import health_pb2_grpc - -from tests.unit import test_common -from tests.unit import thread_pool -from tests.unit.framework.common import test_constants - -from six.moves import queue - -_SERVING_SERVICE = 'grpc.test.TestServiceServing' -_UNKNOWN_SERVICE = 'grpc.test.TestServiceUnknown' -_NOT_SERVING_SERVICE = 'grpc.test.TestServiceNotServing' -_WATCH_SERVICE = 'grpc.test.WatchService' - - -def _consume_responses(response_iterator, response_queue): - for response in response_iterator: - response_queue.put(response) - - -class BaseWatchTests(object): - - class WatchTests(unittest.TestCase): - - def start_server(self, non_blocking=False, thread_pool=None): - self._thread_pool = thread_pool - self._servicer = health.HealthServicer( - experimental_non_blocking=non_blocking, - experimental_thread_pool=thread_pool) - self._servicer.set(_SERVING_SERVICE, - health_pb2.HealthCheckResponse.SERVING) - self._servicer.set(_UNKNOWN_SERVICE, - health_pb2.HealthCheckResponse.UNKNOWN) - self._servicer.set(_NOT_SERVING_SERVICE, - health_pb2.HealthCheckResponse.NOT_SERVING) - self._server = test_common.test_server() - port = self._server.add_insecure_port('[::]:0') - health_pb2_grpc.add_HealthServicer_to_server( - self._servicer, self._server) - self._server.start() - - self._channel = grpc.insecure_channel('localhost:%d' % port) - self._stub = health_pb2_grpc.HealthStub(self._channel) - - def tearDown(self): - self._server.stop(None) - self._channel.close() - - def test_watch_empty_service(self): - request = health_pb2.HealthCheckRequest(service='') - response_queue = queue.Queue() - rendezvous = self._stub.Watch(request) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
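The watch tests in this file all start from the same server-side arrangement as the start_server() helper defined just below: a HealthServicer registered on a test server with a few services pre-set to known statuses. A minimal standalone sketch of that setup (the service name and worker count are illustrative):

# Minimal sketch of the server-side arrangement these tests exercise.
from concurrent import futures

import grpc
from grpc_health.v1 import health
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
servicer = health.HealthServicer()
servicer.set('my.package.MyService', health_pb2.HealthCheckResponse.SERVING)
health_pb2_grpc.add_HealthServicer_to_server(servicer, server)
port = server.add_insecure_port('[::]:0')
server.start()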
+"""Tests of grpc_health.v1.health.""" + +import logging +import threading +import time +import unittest + +import grpc + +from grpc_health.v1 import health +from grpc_health.v1 import health_pb2 +from grpc_health.v1 import health_pb2_grpc + +from tests.unit import test_common +from tests.unit import thread_pool +from tests.unit.framework.common import test_constants + +from six.moves import queue + +_SERVING_SERVICE = 'grpc.test.TestServiceServing' +_UNKNOWN_SERVICE = 'grpc.test.TestServiceUnknown' +_NOT_SERVING_SERVICE = 'grpc.test.TestServiceNotServing' +_WATCH_SERVICE = 'grpc.test.WatchService' + + +def _consume_responses(response_iterator, response_queue): + for response in response_iterator: + response_queue.put(response) + + +class BaseWatchTests(object): + + class WatchTests(unittest.TestCase): + + def start_server(self, non_blocking=False, thread_pool=None): + self._thread_pool = thread_pool + self._servicer = health.HealthServicer( + experimental_non_blocking=non_blocking, + experimental_thread_pool=thread_pool) + self._servicer.set(_SERVING_SERVICE, + health_pb2.HealthCheckResponse.SERVING) + self._servicer.set(_UNKNOWN_SERVICE, + health_pb2.HealthCheckResponse.UNKNOWN) + self._servicer.set(_NOT_SERVING_SERVICE, + health_pb2.HealthCheckResponse.NOT_SERVING) + self._server = test_common.test_server() + port = self._server.add_insecure_port('[::]:0') + health_pb2_grpc.add_HealthServicer_to_server( + self._servicer, self._server) + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + self._stub = health_pb2_grpc.HealthStub(self._channel) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + + def test_watch_empty_service(self): + request = health_pb2.HealthCheckRequest(service='') + response_queue = queue.Queue() + rendezvous = self._stub.Watch(request) thread = threading.Thread(target=_consume_responses, args=(rendezvous, response_queue)) - thread.start() - - response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) - self.assertEqual(health_pb2.HealthCheckResponse.SERVING, - response.status) - - rendezvous.cancel() - thread.join() - self.assertTrue(response_queue.empty()) - - if self._thread_pool is not None: - self.assertTrue(self._thread_pool.was_used()) - - def test_watch_new_service(self): - request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) - response_queue = queue.Queue() - rendezvous = self._stub.Watch(request) + thread.start() + + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, + response.status) + + rendezvous.cancel() + thread.join() + self.assertTrue(response_queue.empty()) + + if self._thread_pool is not None: + self.assertTrue(self._thread_pool.was_used()) + + def test_watch_new_service(self): + request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) + response_queue = queue.Queue() + rendezvous = self._stub.Watch(request) thread = threading.Thread(target=_consume_responses, args=(rendezvous, response_queue)) - thread.start() - - response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) - self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, - response.status) - - self._servicer.set(_WATCH_SERVICE, - health_pb2.HealthCheckResponse.SERVING) - response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) - self.assertEqual(health_pb2.HealthCheckResponse.SERVING, - response.status) - - self._servicer.set(_WATCH_SERVICE, - health_pb2.HealthCheckResponse.NOT_SERVING) - response = 
response_queue.get(timeout=test_constants.SHORT_TIMEOUT) - self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, - response.status) - - rendezvous.cancel() - thread.join() - self.assertTrue(response_queue.empty()) - - def test_watch_service_isolation(self): - request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) - response_queue = queue.Queue() - rendezvous = self._stub.Watch(request) + thread.start() + + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, + response.status) + + self._servicer.set(_WATCH_SERVICE, + health_pb2.HealthCheckResponse.SERVING) + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, + response.status) + + self._servicer.set(_WATCH_SERVICE, + health_pb2.HealthCheckResponse.NOT_SERVING) + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, + response.status) + + rendezvous.cancel() + thread.join() + self.assertTrue(response_queue.empty()) + + def test_watch_service_isolation(self): + request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) + response_queue = queue.Queue() + rendezvous = self._stub.Watch(request) thread = threading.Thread(target=_consume_responses, args=(rendezvous, response_queue)) - thread.start() - - response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) - self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, - response.status) - - self._servicer.set('some-other-service', - health_pb2.HealthCheckResponse.SERVING) - with self.assertRaises(queue.Empty): - response_queue.get(timeout=test_constants.SHORT_TIMEOUT) - - rendezvous.cancel() - thread.join() - self.assertTrue(response_queue.empty()) - - def test_two_watchers(self): - request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) - response_queue1 = queue.Queue() - response_queue2 = queue.Queue() - rendezvous1 = self._stub.Watch(request) - rendezvous2 = self._stub.Watch(request) + thread.start() + + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, + response.status) + + self._servicer.set('some-other-service', + health_pb2.HealthCheckResponse.SERVING) + with self.assertRaises(queue.Empty): + response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + + rendezvous.cancel() + thread.join() + self.assertTrue(response_queue.empty()) + + def test_two_watchers(self): + request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) + response_queue1 = queue.Queue() + response_queue2 = queue.Queue() + rendezvous1 = self._stub.Watch(request) + rendezvous2 = self._stub.Watch(request) thread1 = threading.Thread(target=_consume_responses, args=(rendezvous1, response_queue1)) thread2 = threading.Thread(target=_consume_responses, args=(rendezvous2, response_queue2)) - thread1.start() - thread2.start() - - response1 = response_queue1.get( - timeout=test_constants.SHORT_TIMEOUT) - response2 = response_queue2.get( - timeout=test_constants.SHORT_TIMEOUT) - self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, - response1.status) - self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, - response2.status) - - self._servicer.set(_WATCH_SERVICE, - health_pb2.HealthCheckResponse.SERVING) - response1 = response_queue1.get( - timeout=test_constants.SHORT_TIMEOUT) - response2 = response_queue2.get( - 
timeout=test_constants.SHORT_TIMEOUT) - self.assertEqual(health_pb2.HealthCheckResponse.SERVING, - response1.status) - self.assertEqual(health_pb2.HealthCheckResponse.SERVING, - response2.status) - - rendezvous1.cancel() - rendezvous2.cancel() - thread1.join() - thread2.join() - self.assertTrue(response_queue1.empty()) - self.assertTrue(response_queue2.empty()) - - @unittest.skip("https://github.com/grpc/grpc/issues/18127") - def test_cancelled_watch_removed_from_watch_list(self): - request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) - response_queue = queue.Queue() - rendezvous = self._stub.Watch(request) + thread1.start() + thread2.start() + + response1 = response_queue1.get( + timeout=test_constants.SHORT_TIMEOUT) + response2 = response_queue2.get( + timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, + response1.status) + self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, + response2.status) + + self._servicer.set(_WATCH_SERVICE, + health_pb2.HealthCheckResponse.SERVING) + response1 = response_queue1.get( + timeout=test_constants.SHORT_TIMEOUT) + response2 = response_queue2.get( + timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, + response1.status) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, + response2.status) + + rendezvous1.cancel() + rendezvous2.cancel() + thread1.join() + thread2.join() + self.assertTrue(response_queue1.empty()) + self.assertTrue(response_queue2.empty()) + + @unittest.skip("https://github.com/grpc/grpc/issues/18127") + def test_cancelled_watch_removed_from_watch_list(self): + request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) + response_queue = queue.Queue() + rendezvous = self._stub.Watch(request) thread = threading.Thread(target=_consume_responses, args=(rendezvous, response_queue)) - thread.start() - - response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) - self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, - response.status) - - rendezvous.cancel() - self._servicer.set(_WATCH_SERVICE, - health_pb2.HealthCheckResponse.SERVING) - thread.join() - - # Wait, if necessary, for serving thread to process client cancellation - timeout = time.time() + test_constants.TIME_ALLOWANCE + thread.start() + + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, + response.status) + + rendezvous.cancel() + self._servicer.set(_WATCH_SERVICE, + health_pb2.HealthCheckResponse.SERVING) + thread.join() + + # Wait, if necessary, for serving thread to process client cancellation + timeout = time.time() + test_constants.TIME_ALLOWANCE while (time.time() < timeout and self._servicer._send_response_callbacks[_WATCH_SERVICE]): - time.sleep(1) - self.assertFalse( - self._servicer._send_response_callbacks[_WATCH_SERVICE], - 'watch set should be empty') - self.assertTrue(response_queue.empty()) - - def test_graceful_shutdown(self): - request = health_pb2.HealthCheckRequest(service='') - response_queue = queue.Queue() - rendezvous = self._stub.Watch(request) + time.sleep(1) + self.assertFalse( + self._servicer._send_response_callbacks[_WATCH_SERVICE], + 'watch set should be empty') + self.assertTrue(response_queue.empty()) + + def test_graceful_shutdown(self): + request = health_pb2.HealthCheckRequest(service='') + response_queue = queue.Queue() + rendezvous = self._stub.Watch(request) thread = 
threading.Thread(target=_consume_responses, args=(rendezvous, response_queue)) - thread.start() - - response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) - self.assertEqual(health_pb2.HealthCheckResponse.SERVING, - response.status) - - self._servicer.enter_graceful_shutdown() - response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) - self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, - response.status) - - # This should be a no-op. - self._servicer.set('', health_pb2.HealthCheckResponse.SERVING) - - rendezvous.cancel() - thread.join() - self.assertTrue(response_queue.empty()) - - -class HealthServicerTest(BaseWatchTests.WatchTests): - - def setUp(self): - self._thread_pool = thread_pool.RecordingThreadPool(max_workers=None) + thread.start() + + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, + response.status) + + self._servicer.enter_graceful_shutdown() + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, + response.status) + + # This should be a no-op. + self._servicer.set('', health_pb2.HealthCheckResponse.SERVING) + + rendezvous.cancel() + thread.join() + self.assertTrue(response_queue.empty()) + + +class HealthServicerTest(BaseWatchTests.WatchTests): + + def setUp(self): + self._thread_pool = thread_pool.RecordingThreadPool(max_workers=None) super(HealthServicerTest, self).start_server(non_blocking=True, thread_pool=self._thread_pool) - - def test_check_empty_service(self): - request = health_pb2.HealthCheckRequest() - resp = self._stub.Check(request) - self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status) - - def test_check_serving_service(self): - request = health_pb2.HealthCheckRequest(service=_SERVING_SERVICE) - resp = self._stub.Check(request) - self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status) - - def test_check_unknown_service(self): - request = health_pb2.HealthCheckRequest(service=_UNKNOWN_SERVICE) - resp = self._stub.Check(request) - self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status) - - def test_check_not_serving_service(self): - request = health_pb2.HealthCheckRequest(service=_NOT_SERVING_SERVICE) - resp = self._stub.Check(request) - self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, - resp.status) - - def test_check_not_found_service(self): - request = health_pb2.HealthCheckRequest(service='not-found') - with self.assertRaises(grpc.RpcError) as context: - resp = self._stub.Check(request) - - self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code()) - - def test_health_service_name(self): - self.assertEqual(health.SERVICE_NAME, 'grpc.health.v1.Health') - - -class HealthServicerBackwardsCompatibleWatchTest(BaseWatchTests.WatchTests): - - def setUp(self): + + def test_check_empty_service(self): + request = health_pb2.HealthCheckRequest() + resp = self._stub.Check(request) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status) + + def test_check_serving_service(self): + request = health_pb2.HealthCheckRequest(service=_SERVING_SERVICE) + resp = self._stub.Check(request) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status) + + def test_check_unknown_service(self): + request = health_pb2.HealthCheckRequest(service=_UNKNOWN_SERVICE) + resp = self._stub.Check(request) + self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status) + + def 
test_check_not_serving_service(self): + request = health_pb2.HealthCheckRequest(service=_NOT_SERVING_SERVICE) + resp = self._stub.Check(request) + self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, + resp.status) + + def test_check_not_found_service(self): + request = health_pb2.HealthCheckRequest(service='not-found') + with self.assertRaises(grpc.RpcError) as context: + resp = self._stub.Check(request) + + self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code()) + + def test_health_service_name(self): + self.assertEqual(health.SERVICE_NAME, 'grpc.health.v1.Health') + + +class HealthServicerBackwardsCompatibleWatchTest(BaseWatchTests.WatchTests): + + def setUp(self): super(HealthServicerBackwardsCompatibleWatchTest, self).start_server(non_blocking=False, thread_pool=None) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py index 05e65243e1..0753872b5e 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py @@ -1,131 +1,131 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""The Python client used to test negative http2 conditions.""" - -import argparse - -import grpc -import time -from src.proto.grpc.testing import test_pb2_grpc -from src.proto.grpc.testing import messages_pb2 - - -def _validate_payload_type_and_length(response, expected_type, expected_length): - if response.payload.type is not expected_type: - raise ValueError('expected payload type %s, got %s' % - (expected_type, type(response.payload.type))) - elif len(response.payload.body) != expected_length: - raise ValueError('expected payload body size %d, got %d' % - (expected_length, len(response.payload.body))) - - -def _expect_status_code(call, expected_code): - if call.code() != expected_code: +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
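On the client side, the health tests above drive the service through the two RPCs sketched here: a one-shot Check and a streaming Watch whose responses the tests drain on a helper thread into a queue. A simplified standalone version (the target address is illustrative, and the watch loop is cut off after the first update instead of being cancelled):

# Illustrative client-side counterpart to the health tests above.
import grpc
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc

channel = grpc.insecure_channel('localhost:50051')  # address is illustrative
stub = health_pb2_grpc.HealthStub(channel)

# One-shot status query.
response = stub.Check(health_pb2.HealthCheckRequest(service=''))
print(response.status)

# Streaming watch: every status change for the service yields a new response.
for update in stub.Watch(health_pb2.HealthCheckRequest(service='')):
    print(update.status)
    break  # the stream stays open until cancelled; stop after the first update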
+"""The Python client used to test negative http2 conditions.""" + +import argparse + +import grpc +import time +from src.proto.grpc.testing import test_pb2_grpc +from src.proto.grpc.testing import messages_pb2 + + +def _validate_payload_type_and_length(response, expected_type, expected_length): + if response.payload.type is not expected_type: + raise ValueError('expected payload type %s, got %s' % + (expected_type, type(response.payload.type))) + elif len(response.payload.body) != expected_length: + raise ValueError('expected payload body size %d, got %d' % + (expected_length, len(response.payload.body))) + + +def _expect_status_code(call, expected_code): + if call.code() != expected_code: raise ValueError('expected code %s, got %s' % (expected_code, call.code())) - - -def _expect_status_details(call, expected_details): - if call.details() != expected_details: + + +def _expect_status_details(call, expected_details): + if call.details() != expected_details: raise ValueError('expected message %s, got %s' % (expected_details, call.details())) - - -def _validate_status_code_and_details(call, expected_code, expected_details): - _expect_status_code(call, expected_code) - _expect_status_details(call, expected_details) - - -# common requests -_REQUEST_SIZE = 314159 -_RESPONSE_SIZE = 271828 - -_SIMPLE_REQUEST = messages_pb2.SimpleRequest( - response_type=messages_pb2.COMPRESSABLE, - response_size=_RESPONSE_SIZE, - payload=messages_pb2.Payload(body=b'\x00' * _REQUEST_SIZE)) - - -def _goaway(stub): - first_response = stub.UnaryCall(_SIMPLE_REQUEST) - _validate_payload_type_and_length(first_response, messages_pb2.COMPRESSABLE, - _RESPONSE_SIZE) - time.sleep(1) - second_response = stub.UnaryCall(_SIMPLE_REQUEST) - _validate_payload_type_and_length(second_response, - messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) - - -def _rst_after_header(stub): - resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) - _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, - "Received RST_STREAM with error code 0") - - -def _rst_during_data(stub): - resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) - _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, - "Received RST_STREAM with error code 0") - - -def _rst_after_data(stub): - resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) - _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, - "Received RST_STREAM with error code 0") - - -def _ping(stub): - response = stub.UnaryCall(_SIMPLE_REQUEST) - _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, - _RESPONSE_SIZE) - - -def _max_streams(stub): - # send one req to ensure server sets MAX_STREAMS - response = stub.UnaryCall(_SIMPLE_REQUEST) - _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, - _RESPONSE_SIZE) - - # give the streams a workout - futures = [] - for _ in range(15): - futures.append(stub.UnaryCall.future(_SIMPLE_REQUEST)) - for future in futures: + + +def _validate_status_code_and_details(call, expected_code, expected_details): + _expect_status_code(call, expected_code) + _expect_status_details(call, expected_details) + + +# common requests +_REQUEST_SIZE = 314159 +_RESPONSE_SIZE = 271828 + +_SIMPLE_REQUEST = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=_RESPONSE_SIZE, + payload=messages_pb2.Payload(body=b'\x00' * _REQUEST_SIZE)) + + +def _goaway(stub): + first_response = stub.UnaryCall(_SIMPLE_REQUEST) + _validate_payload_type_and_length(first_response, 
messages_pb2.COMPRESSABLE, + _RESPONSE_SIZE) + time.sleep(1) + second_response = stub.UnaryCall(_SIMPLE_REQUEST) + _validate_payload_type_and_length(second_response, + messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) + + +def _rst_after_header(stub): + resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) + _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, + "Received RST_STREAM with error code 0") + + +def _rst_during_data(stub): + resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) + _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, + "Received RST_STREAM with error code 0") + + +def _rst_after_data(stub): + resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) + _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, + "Received RST_STREAM with error code 0") + + +def _ping(stub): + response = stub.UnaryCall(_SIMPLE_REQUEST) + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, + _RESPONSE_SIZE) + + +def _max_streams(stub): + # send one req to ensure server sets MAX_STREAMS + response = stub.UnaryCall(_SIMPLE_REQUEST) + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, + _RESPONSE_SIZE) + + # give the streams a workout + futures = [] + for _ in range(15): + futures.append(stub.UnaryCall.future(_SIMPLE_REQUEST)) + for future in futures: _validate_payload_type_and_length(future.result(), messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) - - -def _run_test_case(test_case, stub): - if test_case == 'goaway': - _goaway(stub) - elif test_case == 'rst_after_header': - _rst_after_header(stub) - elif test_case == 'rst_during_data': - _rst_during_data(stub) - elif test_case == 'rst_after_data': - _rst_after_data(stub) - elif test_case == 'ping': - _ping(stub) - elif test_case == 'max_streams': - _max_streams(stub) - else: - raise ValueError("Invalid test case: %s" % test_case) - - -def _args(): - parser = argparse.ArgumentParser() + + +def _run_test_case(test_case, stub): + if test_case == 'goaway': + _goaway(stub) + elif test_case == 'rst_after_header': + _rst_after_header(stub) + elif test_case == 'rst_during_data': + _rst_during_data(stub) + elif test_case == 'rst_after_data': + _rst_after_data(stub) + elif test_case == 'ping': + _ping(stub) + elif test_case == 'max_streams': + _max_streams(stub) + else: + raise ValueError("Invalid test case: %s" % test_case) + + +def _args(): + parser = argparse.ArgumentParser() parser.add_argument('--server_host', help='the host to which to connect', type=str, @@ -138,21 +138,21 @@ def _args(): help='the test case to execute', type=str, default="goaway") - return parser.parse_args() - - -def _stub(server_host, server_port): - target = '{}:{}'.format(server_host, server_port) - channel = grpc.insecure_channel(target) - grpc.channel_ready_future(channel).result() - return test_pb2_grpc.TestServiceStub(channel) - - -def main(): - args = _args() - stub = _stub(args.server_host, args.server_port) - _run_test_case(args.test_case, stub) - - -if __name__ == '__main__': - main() + return parser.parse_args() + + +def _stub(server_host, server_port): + target = '{}:{}'.format(server_host, server_port) + channel = grpc.insecure_channel(target) + grpc.channel_ready_future(channel).result() + return test_pb2_grpc.TestServiceStub(channel) + + +def main(): + args = _args() + stub = _stub(args.server_host, args.server_port) + _run_test_case(args.test_case, stub) + + +if __name__ == '__main__': + main() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py 
b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py index dc985eebb4..5fb4f3c3cf 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py index ff89d9dc7f..fecf31767a 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py @@ -1,44 +1,44 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Insecure client-server interoperability as a unit test.""" - -import unittest - -import grpc -from src.proto.grpc.testing import test_pb2_grpc - -from tests.interop import _intraop_test_case -from tests.interop import service -from tests.interop import server -from tests.unit import test_common - - -class InsecureIntraopTest(_intraop_test_case.IntraopTestCase, - unittest.TestCase): - - def setUp(self): - self.server = test_common.test_server() - test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(), - self.server) - port = self.server.add_insecure_port('[::]:0') - self.server.start() - self.stub = test_pb2_grpc.TestServiceStub( - grpc.insecure_channel('localhost:{}'.format(port))) - - def tearDown(self): - self.server.stop(None) - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Insecure client-server interoperability as a unit test.""" + +import unittest + +import grpc +from src.proto.grpc.testing import test_pb2_grpc + +from tests.interop import _intraop_test_case +from tests.interop import service +from tests.interop import server +from tests.unit import test_common + + +class InsecureIntraopTest(_intraop_test_case.IntraopTestCase, + unittest.TestCase): + + def setUp(self): + self.server = test_common.test_server() + test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(), + self.server) + port = self.server.add_insecure_port('[::]:0') + self.server.start() + self.stub = test_pb2_grpc.TestServiceStub( + grpc.insecure_channel('localhost:{}'.format(port))) + + def tearDown(self): + self.server.stop(None) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py index 964d3a6269..007db7ab41 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py @@ -1,51 +1,51 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Common code for unit tests of the interoperability test code.""" - -from tests.interop import methods - - -class IntraopTestCase(object): - """Unit test methods. - - This class must be mixed in with unittest.TestCase and a class that defines - setUp and tearDown methods that manage a stub attribute. - """ - - def testEmptyUnary(self): - methods.TestCase.EMPTY_UNARY.test_interoperability(self.stub, None) - - def testLargeUnary(self): - methods.TestCase.LARGE_UNARY.test_interoperability(self.stub, None) - - def testServerStreaming(self): - methods.TestCase.SERVER_STREAMING.test_interoperability(self.stub, None) - - def testClientStreaming(self): - methods.TestCase.CLIENT_STREAMING.test_interoperability(self.stub, None) - - def testPingPong(self): - methods.TestCase.PING_PONG.test_interoperability(self.stub, None) - - def testCancelAfterBegin(self): - methods.TestCase.CANCEL_AFTER_BEGIN.test_interoperability( - self.stub, None) - - def testCancelAfterFirstResponse(self): - methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE.test_interoperability( - self.stub, None) - - def testTimeoutOnSleepingServer(self): - methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER.test_interoperability( - self.stub, None) +# Copyright 2015 gRPC authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Common code for unit tests of the interoperability test code.""" + +from tests.interop import methods + + +class IntraopTestCase(object): + """Unit test methods. + + This class must be mixed in with unittest.TestCase and a class that defines + setUp and tearDown methods that manage a stub attribute. + """ + + def testEmptyUnary(self): + methods.TestCase.EMPTY_UNARY.test_interoperability(self.stub, None) + + def testLargeUnary(self): + methods.TestCase.LARGE_UNARY.test_interoperability(self.stub, None) + + def testServerStreaming(self): + methods.TestCase.SERVER_STREAMING.test_interoperability(self.stub, None) + + def testClientStreaming(self): + methods.TestCase.CLIENT_STREAMING.test_interoperability(self.stub, None) + + def testPingPong(self): + methods.TestCase.PING_PONG.test_interoperability(self.stub, None) + + def testCancelAfterBegin(self): + methods.TestCase.CANCEL_AFTER_BEGIN.test_interoperability( + self.stub, None) + + def testCancelAfterFirstResponse(self): + methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE.test_interoperability( + self.stub, None) + + def testTimeoutOnSleepingServer(self): + methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER.test_interoperability( + self.stub, None) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py index af3a94e15a..bf1f1b118b 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py @@ -1,43 +1,43 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
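The intraop test cases above follow the usual in-process pattern: stand up a server on an ephemeral port, then point a stub at it over localhost. A rough sketch of that pattern, with the generated add_FooServicer_to_server/FooStub helpers left as commented placeholders since they come from protoc output:

from concurrent import futures

import grpc

server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
# add_FooServicer_to_server(FooServicer(), server)  # generated registration (placeholder)
port = server.add_insecure_port('[::]:0')           # 0 asks the OS for a free port
server.start()

channel = grpc.insecure_channel('localhost:{}'.format(port))
# stub = FooStub(channel)                           # generated stub (placeholder)
# ... exercise the stub against the in-process server ...

server.stop(None)                                   # None = stop without a grace period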
-"""Secure client-server interoperability as a unit test.""" - -import unittest - -import grpc -from src.proto.grpc.testing import test_pb2_grpc - -from tests.interop import _intraop_test_case -from tests.interop import service -from tests.interop import resources -from tests.unit import test_common - -_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' - - -class SecureIntraopTest(_intraop_test_case.IntraopTestCase, unittest.TestCase): - - def setUp(self): - self.server = test_common.test_server() - test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(), - self.server) - port = self.server.add_secure_port( - '[::]:0', - grpc.ssl_server_credentials([(resources.private_key(), - resources.certificate_chain())])) - self.server.start() - self.stub = test_pb2_grpc.TestServiceStub( +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Secure client-server interoperability as a unit test.""" + +import unittest + +import grpc +from src.proto.grpc.testing import test_pb2_grpc + +from tests.interop import _intraop_test_case +from tests.interop import service +from tests.interop import resources +from tests.unit import test_common + +_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' + + +class SecureIntraopTest(_intraop_test_case.IntraopTestCase, unittest.TestCase): + + def setUp(self): + self.server = test_common.test_server() + test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(), + self.server) + port = self.server.add_secure_port( + '[::]:0', + grpc.ssl_server_credentials([(resources.private_key(), + resources.certificate_chain())])) + self.server.start() + self.stub = test_pb2_grpc.TestServiceStub( grpc.secure_channel( 'localhost:{}'.format(port), grpc.ssl_channel_credentials( @@ -45,10 +45,10 @@ class SecureIntraopTest(_intraop_test_case.IntraopTestCase, unittest.TestCase): 'grpc.ssl_target_name_override', _SERVER_HOST_OVERRIDE, ),))) - - def tearDown(self): - self.server.stop(None) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + + def tearDown(self): + self.server.stop(None) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py index a47b0834d0..4d35f7ca32 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py @@ -1,32 +1,32 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""The Python implementation of the GRPC interoperability test client.""" - -import argparse -import os - -from google import auth as google_auth -from google.auth import jwt as google_auth_jwt -import grpc -from src.proto.grpc.testing import test_pb2_grpc - -from tests.interop import methods -from tests.interop import resources - - +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The Python implementation of the GRPC interoperability test client.""" + +import argparse +import os + +from google import auth as google_auth +from google.auth import jwt as google_auth_jwt +import grpc +from src.proto.grpc.testing import test_pb2_grpc + +from tests.interop import methods +from tests.interop import resources + + def parse_interop_client_args(): - parser = argparse.ArgumentParser() + parser = argparse.ArgumentParser() parser.add_argument('--server_host', default="localhost", type=str, @@ -72,29 +72,29 @@ def parse_interop_client_args(): + "grpc::CreateTestChannel, that configures the grpclb LB policy " + "with a child policy being the value of this flag (e.g. round_robin " + "or pick_first).")) - return parser.parse_args() - - + return parser.parse_args() + + def _create_call_credentials(args): - if args.test_case == 'oauth2_auth_token': - google_credentials, unused_project_id = google_auth.default( - scopes=[args.oauth_scope]) - google_credentials.refresh(google_auth.transport.requests.Request()) + if args.test_case == 'oauth2_auth_token': + google_credentials, unused_project_id = google_auth.default( + scopes=[args.oauth_scope]) + google_credentials.refresh(google_auth.transport.requests.Request()) return grpc.access_token_call_credentials(google_credentials.token) - elif args.test_case == 'compute_engine_creds': - google_credentials, unused_project_id = google_auth.default( - scopes=[args.oauth_scope]) + elif args.test_case == 'compute_engine_creds': + google_credentials, unused_project_id = google_auth.default( + scopes=[args.oauth_scope]) return grpc.metadata_call_credentials( - google_auth.transport.grpc.AuthMetadataPlugin( - credentials=google_credentials, - request=google_auth.transport.requests.Request())) - elif args.test_case == 'jwt_token_creds': - google_credentials = google_auth_jwt.OnDemandCredentials.from_service_account_file( - os.environ[google_auth.environment_vars.CREDENTIALS]) + google_auth.transport.grpc.AuthMetadataPlugin( + credentials=google_credentials, + request=google_auth.transport.requests.Request())) + elif args.test_case == 'jwt_token_creds': + google_credentials = google_auth_jwt.OnDemandCredentials.from_service_account_file( + os.environ[google_auth.environment_vars.CREDENTIALS]) return grpc.metadata_call_credentials( - google_auth.transport.grpc.AuthMetadataPlugin( - credentials=google_credentials, request=None)) - else: + google_auth.transport.grpc.AuthMetadataPlugin( + credentials=google_credentials, request=None)) + else: 
return None @@ -151,30 +151,30 @@ def _create_channel(args): return grpc.secure_channel(target, channel_credentials, options) else: return grpc.insecure_channel(target) - - + + def create_stub(channel, args): - if args.test_case == "unimplemented_service": - return test_pb2_grpc.UnimplementedServiceStub(channel) - else: - return test_pb2_grpc.TestServiceStub(channel) - - -def _test_case_from_arg(test_case_arg): - for test_case in methods.TestCase: - if test_case_arg == test_case.value: - return test_case - else: - raise ValueError('No test case "%s"!' % test_case_arg) - - -def test_interoperability(): + if args.test_case == "unimplemented_service": + return test_pb2_grpc.UnimplementedServiceStub(channel) + else: + return test_pb2_grpc.TestServiceStub(channel) + + +def _test_case_from_arg(test_case_arg): + for test_case in methods.TestCase: + if test_case_arg == test_case.value: + return test_case + else: + raise ValueError('No test case "%s"!' % test_case_arg) + + +def test_interoperability(): args = parse_interop_client_args() channel = _create_channel(args) stub = create_stub(channel, args) - test_case = _test_case_from_arg(args.test_case) - test_case.test_interoperability(stub, args) - - -if __name__ == '__main__': - test_interoperability() + test_case = _test_case_from_arg(args.test_case) + test_case.test_interoperability(stub, args) + + +if __name__ == '__main__': + test_interoperability() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py index c57093ea73..44a1c38bb9 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py @@ -1,482 +1,482 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Implementations of interoperability test methods.""" - -# NOTE(lidiz) This module only exists in Bazel BUILD file, for more details -# please refer to comments in the "bazel_namespace_package_hack" module. -try: - from tests import bazel_namespace_package_hack - bazel_namespace_package_hack.sys_path_to_site_dir_hack() -except ImportError: - pass - -import enum -import json -import os -import threading -import time - -from google import auth as google_auth -from google.auth import environment_vars as google_auth_environment_vars -from google.auth.transport import grpc as google_auth_transport_grpc -from google.auth.transport import requests as google_auth_transport_requests -import grpc - -from src.proto.grpc.testing import empty_pb2 -from src.proto.grpc.testing import messages_pb2 - -_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial" -_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin" - - -def _expect_status_code(call, expected_code): - if call.code() != expected_code: +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Implementations of interoperability test methods.""" + +# NOTE(lidiz) This module only exists in Bazel BUILD file, for more details +# please refer to comments in the "bazel_namespace_package_hack" module. +try: + from tests import bazel_namespace_package_hack + bazel_namespace_package_hack.sys_path_to_site_dir_hack() +except ImportError: + pass + +import enum +import json +import os +import threading +import time + +from google import auth as google_auth +from google.auth import environment_vars as google_auth_environment_vars +from google.auth.transport import grpc as google_auth_transport_grpc +from google.auth.transport import requests as google_auth_transport_requests +import grpc + +from src.proto.grpc.testing import empty_pb2 +from src.proto.grpc.testing import messages_pb2 + +_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial" +_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin" + + +def _expect_status_code(call, expected_code): + if call.code() != expected_code: raise ValueError('expected code %s, got %s' % (expected_code, call.code())) - - -def _expect_status_details(call, expected_details): - if call.details() != expected_details: + + +def _expect_status_details(call, expected_details): + if call.details() != expected_details: raise ValueError('expected message %s, got %s' % (expected_details, call.details())) - - -def _validate_status_code_and_details(call, expected_code, expected_details): - _expect_status_code(call, expected_code) - _expect_status_details(call, expected_details) - - -def _validate_payload_type_and_length(response, expected_type, expected_length): - if response.payload.type is not expected_type: - raise ValueError('expected payload type %s, got %s' % - (expected_type, type(response.payload.type))) - elif len(response.payload.body) != expected_length: - raise ValueError('expected payload body size %d, got %d' % - (expected_length, len(response.payload.body))) - - -def _large_unary_common_behavior(stub, fill_username, fill_oauth_scope, - call_credentials): - size = 314159 - request = messages_pb2.SimpleRequest( - response_type=messages_pb2.COMPRESSABLE, - response_size=size, - payload=messages_pb2.Payload(body=b'\x00' * 271828), - fill_username=fill_username, - fill_oauth_scope=fill_oauth_scope) + + +def _validate_status_code_and_details(call, expected_code, expected_details): + _expect_status_code(call, expected_code) + _expect_status_details(call, expected_details) + + +def _validate_payload_type_and_length(response, expected_type, expected_length): + if response.payload.type is not expected_type: + raise ValueError('expected payload type %s, got %s' % + (expected_type, type(response.payload.type))) + elif len(response.payload.body) != expected_length: + raise ValueError('expected payload body size %d, got %d' % + (expected_length, len(response.payload.body))) + + +def _large_unary_common_behavior(stub, fill_username, fill_oauth_scope, + call_credentials): + size = 314159 + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=size, + payload=messages_pb2.Payload(body=b'\x00' * 271828), + 
fill_username=fill_username, + fill_oauth_scope=fill_oauth_scope) response_future = stub.UnaryCall.future(request, credentials=call_credentials) - response = response_future.result() - _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size) - return response - - -def _empty_unary(stub): - response = stub.EmptyCall(empty_pb2.Empty()) - if not isinstance(response, empty_pb2.Empty): + response = response_future.result() + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size) + return response + + +def _empty_unary(stub): + response = stub.EmptyCall(empty_pb2.Empty()) + if not isinstance(response, empty_pb2.Empty): raise TypeError('response is of type "%s", not empty_pb2.Empty!' % type(response)) - - -def _large_unary(stub): - _large_unary_common_behavior(stub, False, False, None) - - -def _client_streaming(stub): - payload_body_sizes = ( - 27182, - 8, - 1828, - 45904, - ) - payloads = (messages_pb2.Payload(body=b'\x00' * size) - for size in payload_body_sizes) - requests = (messages_pb2.StreamingInputCallRequest(payload=payload) - for payload in payloads) - response = stub.StreamingInputCall(requests) - if response.aggregated_payload_size != 74922: + + +def _large_unary(stub): + _large_unary_common_behavior(stub, False, False, None) + + +def _client_streaming(stub): + payload_body_sizes = ( + 27182, + 8, + 1828, + 45904, + ) + payloads = (messages_pb2.Payload(body=b'\x00' * size) + for size in payload_body_sizes) + requests = (messages_pb2.StreamingInputCallRequest(payload=payload) + for payload in payloads) + response = stub.StreamingInputCall(requests) + if response.aggregated_payload_size != 74922: raise ValueError('incorrect size %d!' % response.aggregated_payload_size) - - -def _server_streaming(stub): - sizes = ( - 31415, - 9, - 2653, - 58979, - ) - - request = messages_pb2.StreamingOutputCallRequest( - response_type=messages_pb2.COMPRESSABLE, - response_parameters=( - messages_pb2.ResponseParameters(size=sizes[0]), - messages_pb2.ResponseParameters(size=sizes[1]), - messages_pb2.ResponseParameters(size=sizes[2]), - messages_pb2.ResponseParameters(size=sizes[3]), - )) - response_iterator = stub.StreamingOutputCall(request) - for index, response in enumerate(response_iterator): - _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, - sizes[index]) - - -class _Pipe(object): - - def __init__(self): - self._condition = threading.Condition() - self._values = [] - self._open = True - - def __iter__(self): - return self - - def __next__(self): - return self.next() - - def next(self): - with self._condition: - while not self._values and self._open: - self._condition.wait() - if self._values: - return self._values.pop(0) - else: - raise StopIteration() - - def add(self, value): - with self._condition: - self._values.append(value) - self._condition.notify() - - def close(self): - with self._condition: - self._open = False - self._condition.notify() - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - self.close() - - -def _ping_pong(stub): - request_response_sizes = ( - 31415, - 9, - 2653, - 58979, - ) - request_payload_sizes = ( - 27182, - 8, - 1828, - 45904, - ) - - with _Pipe() as pipe: - response_iterator = stub.FullDuplexCall(pipe) - for response_size, payload_size in zip(request_response_sizes, - request_payload_sizes): - request = messages_pb2.StreamingOutputCallRequest( - response_type=messages_pb2.COMPRESSABLE, + + +def _server_streaming(stub): + sizes = ( + 31415, + 9, + 2653, + 58979, + ) 
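The _client_streaming helper above works because a stream-unary stub accepts any iterator of request messages, so a plain generator is enough. A hedged sketch using the same generated messages_pb2 types, assuming `stub` is a TestServiceStub already connected to a compatible server:

from src.proto.grpc.testing import messages_pb2

def _streaming_requests(sizes):
    # Yield one request per payload size; the stub drains this lazily.
    for size in sizes:
        yield messages_pb2.StreamingInputCallRequest(
            payload=messages_pb2.Payload(body=b'\x00' * size))

sizes = (27182, 8, 1828, 45904)
response = stub.StreamingInputCall(_streaming_requests(sizes))
assert response.aggregated_payload_size == sum(sizes)   # 74922, as the test checks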
+ + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + response_parameters=( + messages_pb2.ResponseParameters(size=sizes[0]), + messages_pb2.ResponseParameters(size=sizes[1]), + messages_pb2.ResponseParameters(size=sizes[2]), + messages_pb2.ResponseParameters(size=sizes[3]), + )) + response_iterator = stub.StreamingOutputCall(request) + for index, response in enumerate(response_iterator): + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, + sizes[index]) + + +class _Pipe(object): + + def __init__(self): + self._condition = threading.Condition() + self._values = [] + self._open = True + + def __iter__(self): + return self + + def __next__(self): + return self.next() + + def next(self): + with self._condition: + while not self._values and self._open: + self._condition.wait() + if self._values: + return self._values.pop(0) + else: + raise StopIteration() + + def add(self, value): + with self._condition: + self._values.append(value) + self._condition.notify() + + def close(self): + with self._condition: + self._open = False + self._condition.notify() + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + +def _ping_pong(stub): + request_response_sizes = ( + 31415, + 9, + 2653, + 58979, + ) + request_payload_sizes = ( + 27182, + 8, + 1828, + 45904, + ) + + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe) + for response_size, payload_size in zip(request_response_sizes, + request_payload_sizes): + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, response_parameters=(messages_pb2.ResponseParameters( size=response_size),), - payload=messages_pb2.Payload(body=b'\x00' * payload_size)) - pipe.add(request) - response = next(response_iterator) + payload=messages_pb2.Payload(body=b'\x00' * payload_size)) + pipe.add(request) + response = next(response_iterator) _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, response_size) - - -def _cancel_after_begin(stub): - with _Pipe() as pipe: - response_future = stub.StreamingInputCall.future(pipe) - response_future.cancel() - if not response_future.cancelled(): - raise ValueError('expected cancelled method to return True') - if response_future.code() is not grpc.StatusCode.CANCELLED: - raise ValueError('expected status code CANCELLED') - - -def _cancel_after_first_response(stub): - request_response_sizes = ( - 31415, - 9, - 2653, - 58979, - ) - request_payload_sizes = ( - 27182, - 8, - 1828, - 45904, - ) - with _Pipe() as pipe: - response_iterator = stub.FullDuplexCall(pipe) - - response_size = request_response_sizes[0] - payload_size = request_payload_sizes[0] - request = messages_pb2.StreamingOutputCallRequest( - response_type=messages_pb2.COMPRESSABLE, + + +def _cancel_after_begin(stub): + with _Pipe() as pipe: + response_future = stub.StreamingInputCall.future(pipe) + response_future.cancel() + if not response_future.cancelled(): + raise ValueError('expected cancelled method to return True') + if response_future.code() is not grpc.StatusCode.CANCELLED: + raise ValueError('expected status code CANCELLED') + + +def _cancel_after_first_response(stub): + request_response_sizes = ( + 31415, + 9, + 2653, + 58979, + ) + request_payload_sizes = ( + 27182, + 8, + 1828, + 45904, + ) + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe) + + response_size = request_response_sizes[0] + payload_size = request_payload_sizes[0] + request = 
messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, response_parameters=(messages_pb2.ResponseParameters( size=response_size),), - payload=messages_pb2.Payload(body=b'\x00' * payload_size)) - pipe.add(request) - response = next(response_iterator) - # We test the contents of `response` in the Ping Pong test - don't check - # them here. - response_iterator.cancel() - - try: - next(response_iterator) - except grpc.RpcError as rpc_error: - if rpc_error.code() is not grpc.StatusCode.CANCELLED: - raise - else: - raise ValueError('expected call to be cancelled') - - -def _timeout_on_sleeping_server(stub): - request_payload_size = 27182 - with _Pipe() as pipe: - response_iterator = stub.FullDuplexCall(pipe, timeout=0.001) - - request = messages_pb2.StreamingOutputCallRequest( - response_type=messages_pb2.COMPRESSABLE, - payload=messages_pb2.Payload(body=b'\x00' * request_payload_size)) - pipe.add(request) - try: - next(response_iterator) - except grpc.RpcError as rpc_error: - if rpc_error.code() is not grpc.StatusCode.DEADLINE_EXCEEDED: - raise - else: - raise ValueError('expected call to exceed deadline') - - -def _empty_stream(stub): - with _Pipe() as pipe: - response_iterator = stub.FullDuplexCall(pipe) - pipe.close() - try: - next(response_iterator) - raise ValueError('expected exactly 0 responses') - except StopIteration: - pass - - -def _status_code_and_message(stub): - details = 'test status message' - code = 2 - status = grpc.StatusCode.UNKNOWN # code = 2 - - # Test with a UnaryCall - request = messages_pb2.SimpleRequest( - response_type=messages_pb2.COMPRESSABLE, - response_size=1, - payload=messages_pb2.Payload(body=b'\x00'), - response_status=messages_pb2.EchoStatus(code=code, message=details)) - response_future = stub.UnaryCall.future(request) - _validate_status_code_and_details(response_future, status, details) - - # Test with a FullDuplexCall - with _Pipe() as pipe: - response_iterator = stub.FullDuplexCall(pipe) - request = messages_pb2.StreamingOutputCallRequest( - response_type=messages_pb2.COMPRESSABLE, - response_parameters=(messages_pb2.ResponseParameters(size=1),), - payload=messages_pb2.Payload(body=b'\x00'), - response_status=messages_pb2.EchoStatus(code=code, message=details)) - pipe.add(request) # sends the initial request. + payload=messages_pb2.Payload(body=b'\x00' * payload_size)) + pipe.add(request) + response = next(response_iterator) + # We test the contents of `response` in the Ping Pong test - don't check + # them here. 
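The _Pipe class above exists because a stream-stream stub consumes its request argument as an iterator, so the caller needs a thread-safe way to keep feeding requests while reading responses. A rough queue-backed equivalent (class and usage names are illustrative, not part of the gRPC API):

import queue

class RequestPipe(object):
    _CLOSED = object()

    def __init__(self):
        self._queue = queue.Queue()

    def add(self, request):
        self._queue.put(request)

    def close(self):
        self._queue.put(self._CLOSED)

    def __iter__(self):
        return self

    def __next__(self):
        item = self._queue.get()   # blocks until a request or close() arrives
        if item is self._CLOSED:
            raise StopIteration()
        return item

# pipe = RequestPipe()
# responses = stub.FullDuplexCall(pipe)   # stub from generated code (assumed)
# pipe.add(request); reply = next(responses); pipe.close()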
+ response_iterator.cancel() + + try: + next(response_iterator) + except grpc.RpcError as rpc_error: + if rpc_error.code() is not grpc.StatusCode.CANCELLED: + raise + else: + raise ValueError('expected call to be cancelled') + + +def _timeout_on_sleeping_server(stub): + request_payload_size = 27182 + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe, timeout=0.001) + + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + payload=messages_pb2.Payload(body=b'\x00' * request_payload_size)) + pipe.add(request) + try: + next(response_iterator) + except grpc.RpcError as rpc_error: + if rpc_error.code() is not grpc.StatusCode.DEADLINE_EXCEEDED: + raise + else: + raise ValueError('expected call to exceed deadline') + + +def _empty_stream(stub): + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe) + pipe.close() + try: + next(response_iterator) + raise ValueError('expected exactly 0 responses') + except StopIteration: + pass + + +def _status_code_and_message(stub): + details = 'test status message' + code = 2 + status = grpc.StatusCode.UNKNOWN # code = 2 + + # Test with a UnaryCall + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=1, + payload=messages_pb2.Payload(body=b'\x00'), + response_status=messages_pb2.EchoStatus(code=code, message=details)) + response_future = stub.UnaryCall.future(request) + _validate_status_code_and_details(response_future, status, details) + + # Test with a FullDuplexCall + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe) + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + response_parameters=(messages_pb2.ResponseParameters(size=1),), + payload=messages_pb2.Payload(body=b'\x00'), + response_status=messages_pb2.EchoStatus(code=code, message=details)) + pipe.add(request) # sends the initial request. 
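The cancellation and deadline tests above lean on two surfaces of the stub API: the .future() form of a call and the per-call timeout keyword. A small sketch, with `stub` and `request` standing in for a generated TestServiceStub and a concrete message:

import grpc

def check_cancel_and_deadline(stub, request):
    # Cancel an in-flight unary RPC issued through the future interface.
    response_future = stub.UnaryCall.future(request)
    if response_future.cancel():                 # may lose the race with completion
        assert response_future.code() is grpc.StatusCode.CANCELLED

    # A very small timeout (in seconds) surfaces as DEADLINE_EXCEEDED.
    try:
        stub.UnaryCall(request, timeout=0.001)
    except grpc.RpcError as rpc_error:
        assert rpc_error.code() is grpc.StatusCode.DEADLINE_EXCEEDED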
try: next(response_iterator) except grpc.RpcError as rpc_error: assert rpc_error.code() == status - # Dropping out of with block closes the pipe - _validate_status_code_and_details(response_iterator, status, details) - - -def _unimplemented_method(test_service_stub): - response_future = (test_service_stub.UnimplementedCall.future( - empty_pb2.Empty())) - _expect_status_code(response_future, grpc.StatusCode.UNIMPLEMENTED) - - -def _unimplemented_service(unimplemented_service_stub): - response_future = (unimplemented_service_stub.UnimplementedCall.future( - empty_pb2.Empty())) - _expect_status_code(response_future, grpc.StatusCode.UNIMPLEMENTED) - - -def _custom_metadata(stub): - initial_metadata_value = "test_initial_metadata_value" - trailing_metadata_value = b"\x0a\x0b\x0a\x0b\x0a\x0b" - metadata = ((_INITIAL_METADATA_KEY, initial_metadata_value), - (_TRAILING_METADATA_KEY, trailing_metadata_value)) - - def _validate_metadata(response): - initial_metadata = dict(response.initial_metadata()) - if initial_metadata[_INITIAL_METADATA_KEY] != initial_metadata_value: - raise ValueError('expected initial metadata %s, got %s' % - (initial_metadata_value, - initial_metadata[_INITIAL_METADATA_KEY])) - trailing_metadata = dict(response.trailing_metadata()) - if trailing_metadata[_TRAILING_METADATA_KEY] != trailing_metadata_value: - raise ValueError('expected trailing metadata %s, got %s' % - (trailing_metadata_value, - trailing_metadata[_TRAILING_METADATA_KEY])) - - # Testing with UnaryCall - request = messages_pb2.SimpleRequest( - response_type=messages_pb2.COMPRESSABLE, - response_size=1, - payload=messages_pb2.Payload(body=b'\x00')) - response_future = stub.UnaryCall.future(request, metadata=metadata) - _validate_metadata(response_future) - - # Testing with FullDuplexCall - with _Pipe() as pipe: - response_iterator = stub.FullDuplexCall(pipe, metadata=metadata) - request = messages_pb2.StreamingOutputCallRequest( - response_type=messages_pb2.COMPRESSABLE, - response_parameters=(messages_pb2.ResponseParameters(size=1),)) - pipe.add(request) # Sends the request - next(response_iterator) # Causes server to send trailing metadata - # Dropping out of the with block closes the pipe - _validate_metadata(response_iterator) - - -def _compute_engine_creds(stub, args): - response = _large_unary_common_behavior(stub, True, True, None) - if args.default_service_account != response.username: - raise ValueError('expected username %s, got %s' % - (args.default_service_account, response.username)) - - -def _oauth2_auth_token(stub, args): - json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS] - wanted_email = json.load(open(json_key_filename, 'r'))['client_email'] - response = _large_unary_common_behavior(stub, True, True, None) - if wanted_email != response.username: + # Dropping out of with block closes the pipe + _validate_status_code_and_details(response_iterator, status, details) + + +def _unimplemented_method(test_service_stub): + response_future = (test_service_stub.UnimplementedCall.future( + empty_pb2.Empty())) + _expect_status_code(response_future, grpc.StatusCode.UNIMPLEMENTED) + + +def _unimplemented_service(unimplemented_service_stub): + response_future = (unimplemented_service_stub.UnimplementedCall.future( + empty_pb2.Empty())) + _expect_status_code(response_future, grpc.StatusCode.UNIMPLEMENTED) + + +def _custom_metadata(stub): + initial_metadata_value = "test_initial_metadata_value" + trailing_metadata_value = b"\x0a\x0b\x0a\x0b\x0a\x0b" + metadata = ((_INITIAL_METADATA_KEY, 
initial_metadata_value), + (_TRAILING_METADATA_KEY, trailing_metadata_value)) + + def _validate_metadata(response): + initial_metadata = dict(response.initial_metadata()) + if initial_metadata[_INITIAL_METADATA_KEY] != initial_metadata_value: + raise ValueError('expected initial metadata %s, got %s' % + (initial_metadata_value, + initial_metadata[_INITIAL_METADATA_KEY])) + trailing_metadata = dict(response.trailing_metadata()) + if trailing_metadata[_TRAILING_METADATA_KEY] != trailing_metadata_value: + raise ValueError('expected trailing metadata %s, got %s' % + (trailing_metadata_value, + trailing_metadata[_TRAILING_METADATA_KEY])) + + # Testing with UnaryCall + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=1, + payload=messages_pb2.Payload(body=b'\x00')) + response_future = stub.UnaryCall.future(request, metadata=metadata) + _validate_metadata(response_future) + + # Testing with FullDuplexCall + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe, metadata=metadata) + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + response_parameters=(messages_pb2.ResponseParameters(size=1),)) + pipe.add(request) # Sends the request + next(response_iterator) # Causes server to send trailing metadata + # Dropping out of the with block closes the pipe + _validate_metadata(response_iterator) + + +def _compute_engine_creds(stub, args): + response = _large_unary_common_behavior(stub, True, True, None) + if args.default_service_account != response.username: + raise ValueError('expected username %s, got %s' % + (args.default_service_account, response.username)) + + +def _oauth2_auth_token(stub, args): + json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS] + wanted_email = json.load(open(json_key_filename, 'r'))['client_email'] + response = _large_unary_common_behavior(stub, True, True, None) + if wanted_email != response.username: raise ValueError('expected username %s, got %s' % (wanted_email, response.username)) - if args.oauth_scope.find(response.oauth_scope) == -1: - raise ValueError( - 'expected to find oauth scope "{}" in received "{}"'.format( - response.oauth_scope, args.oauth_scope)) - - -def _jwt_token_creds(stub, args): - json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS] - wanted_email = json.load(open(json_key_filename, 'r'))['client_email'] - response = _large_unary_common_behavior(stub, True, False, None) - if wanted_email != response.username: + if args.oauth_scope.find(response.oauth_scope) == -1: + raise ValueError( + 'expected to find oauth scope "{}" in received "{}"'.format( + response.oauth_scope, args.oauth_scope)) + + +def _jwt_token_creds(stub, args): + json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS] + wanted_email = json.load(open(json_key_filename, 'r'))['client_email'] + response = _large_unary_common_behavior(stub, True, False, None) + if wanted_email != response.username: raise ValueError('expected username %s, got %s' % (wanted_email, response.username)) - - -def _per_rpc_creds(stub, args): - json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS] - wanted_email = json.load(open(json_key_filename, 'r'))['client_email'] - google_credentials, unused_project_id = google_auth.default( - scopes=[args.oauth_scope]) - call_credentials = grpc.metadata_call_credentials( - google_auth_transport_grpc.AuthMetadataPlugin( - credentials=google_credentials, - 
request=google_auth_transport_requests.Request())) - response = _large_unary_common_behavior(stub, True, False, call_credentials) - if wanted_email != response.username: + + +def _per_rpc_creds(stub, args): + json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS] + wanted_email = json.load(open(json_key_filename, 'r'))['client_email'] + google_credentials, unused_project_id = google_auth.default( + scopes=[args.oauth_scope]) + call_credentials = grpc.metadata_call_credentials( + google_auth_transport_grpc.AuthMetadataPlugin( + credentials=google_credentials, + request=google_auth_transport_requests.Request())) + response = _large_unary_common_behavior(stub, True, False, call_credentials) + if wanted_email != response.username: raise ValueError('expected username %s, got %s' % (wanted_email, response.username)) - - -def _special_status_message(stub, args): - details = b'\t\ntest with whitespace\r\nand Unicode BMP \xe2\x98\xba and non-BMP \xf0\x9f\x98\x88\t\n'.decode( - 'utf-8') - code = 2 - status = grpc.StatusCode.UNKNOWN # code = 2 - - # Test with a UnaryCall - request = messages_pb2.SimpleRequest( - response_type=messages_pb2.COMPRESSABLE, - response_size=1, - payload=messages_pb2.Payload(body=b'\x00'), - response_status=messages_pb2.EchoStatus(code=code, message=details)) - response_future = stub.UnaryCall.future(request) - _validate_status_code_and_details(response_future, status, details) - - -@enum.unique -class TestCase(enum.Enum): - EMPTY_UNARY = 'empty_unary' - LARGE_UNARY = 'large_unary' - SERVER_STREAMING = 'server_streaming' - CLIENT_STREAMING = 'client_streaming' - PING_PONG = 'ping_pong' - CANCEL_AFTER_BEGIN = 'cancel_after_begin' - CANCEL_AFTER_FIRST_RESPONSE = 'cancel_after_first_response' - EMPTY_STREAM = 'empty_stream' - STATUS_CODE_AND_MESSAGE = 'status_code_and_message' - UNIMPLEMENTED_METHOD = 'unimplemented_method' - UNIMPLEMENTED_SERVICE = 'unimplemented_service' - CUSTOM_METADATA = "custom_metadata" - COMPUTE_ENGINE_CREDS = 'compute_engine_creds' - OAUTH2_AUTH_TOKEN = 'oauth2_auth_token' - JWT_TOKEN_CREDS = 'jwt_token_creds' - PER_RPC_CREDS = 'per_rpc_creds' - TIMEOUT_ON_SLEEPING_SERVER = 'timeout_on_sleeping_server' - SPECIAL_STATUS_MESSAGE = 'special_status_message' - - def test_interoperability(self, stub, args): - if self is TestCase.EMPTY_UNARY: - _empty_unary(stub) - elif self is TestCase.LARGE_UNARY: - _large_unary(stub) - elif self is TestCase.SERVER_STREAMING: - _server_streaming(stub) - elif self is TestCase.CLIENT_STREAMING: - _client_streaming(stub) - elif self is TestCase.PING_PONG: - _ping_pong(stub) - elif self is TestCase.CANCEL_AFTER_BEGIN: - _cancel_after_begin(stub) - elif self is TestCase.CANCEL_AFTER_FIRST_RESPONSE: - _cancel_after_first_response(stub) - elif self is TestCase.TIMEOUT_ON_SLEEPING_SERVER: - _timeout_on_sleeping_server(stub) - elif self is TestCase.EMPTY_STREAM: - _empty_stream(stub) - elif self is TestCase.STATUS_CODE_AND_MESSAGE: - _status_code_and_message(stub) - elif self is TestCase.UNIMPLEMENTED_METHOD: - _unimplemented_method(stub) - elif self is TestCase.UNIMPLEMENTED_SERVICE: - _unimplemented_service(stub) - elif self is TestCase.CUSTOM_METADATA: - _custom_metadata(stub) - elif self is TestCase.COMPUTE_ENGINE_CREDS: - _compute_engine_creds(stub, args) - elif self is TestCase.OAUTH2_AUTH_TOKEN: - _oauth2_auth_token(stub, args) - elif self is TestCase.JWT_TOKEN_CREDS: - _jwt_token_creds(stub, args) - elif self is TestCase.PER_RPC_CREDS: - _per_rpc_creds(stub, args) - elif self is 
TestCase.SPECIAL_STATUS_MESSAGE: - _special_status_message(stub, args) - else: + + +def _special_status_message(stub, args): + details = b'\t\ntest with whitespace\r\nand Unicode BMP \xe2\x98\xba and non-BMP \xf0\x9f\x98\x88\t\n'.decode( + 'utf-8') + code = 2 + status = grpc.StatusCode.UNKNOWN # code = 2 + + # Test with a UnaryCall + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=1, + payload=messages_pb2.Payload(body=b'\x00'), + response_status=messages_pb2.EchoStatus(code=code, message=details)) + response_future = stub.UnaryCall.future(request) + _validate_status_code_and_details(response_future, status, details) + + +@enum.unique +class TestCase(enum.Enum): + EMPTY_UNARY = 'empty_unary' + LARGE_UNARY = 'large_unary' + SERVER_STREAMING = 'server_streaming' + CLIENT_STREAMING = 'client_streaming' + PING_PONG = 'ping_pong' + CANCEL_AFTER_BEGIN = 'cancel_after_begin' + CANCEL_AFTER_FIRST_RESPONSE = 'cancel_after_first_response' + EMPTY_STREAM = 'empty_stream' + STATUS_CODE_AND_MESSAGE = 'status_code_and_message' + UNIMPLEMENTED_METHOD = 'unimplemented_method' + UNIMPLEMENTED_SERVICE = 'unimplemented_service' + CUSTOM_METADATA = "custom_metadata" + COMPUTE_ENGINE_CREDS = 'compute_engine_creds' + OAUTH2_AUTH_TOKEN = 'oauth2_auth_token' + JWT_TOKEN_CREDS = 'jwt_token_creds' + PER_RPC_CREDS = 'per_rpc_creds' + TIMEOUT_ON_SLEEPING_SERVER = 'timeout_on_sleeping_server' + SPECIAL_STATUS_MESSAGE = 'special_status_message' + + def test_interoperability(self, stub, args): + if self is TestCase.EMPTY_UNARY: + _empty_unary(stub) + elif self is TestCase.LARGE_UNARY: + _large_unary(stub) + elif self is TestCase.SERVER_STREAMING: + _server_streaming(stub) + elif self is TestCase.CLIENT_STREAMING: + _client_streaming(stub) + elif self is TestCase.PING_PONG: + _ping_pong(stub) + elif self is TestCase.CANCEL_AFTER_BEGIN: + _cancel_after_begin(stub) + elif self is TestCase.CANCEL_AFTER_FIRST_RESPONSE: + _cancel_after_first_response(stub) + elif self is TestCase.TIMEOUT_ON_SLEEPING_SERVER: + _timeout_on_sleeping_server(stub) + elif self is TestCase.EMPTY_STREAM: + _empty_stream(stub) + elif self is TestCase.STATUS_CODE_AND_MESSAGE: + _status_code_and_message(stub) + elif self is TestCase.UNIMPLEMENTED_METHOD: + _unimplemented_method(stub) + elif self is TestCase.UNIMPLEMENTED_SERVICE: + _unimplemented_service(stub) + elif self is TestCase.CUSTOM_METADATA: + _custom_metadata(stub) + elif self is TestCase.COMPUTE_ENGINE_CREDS: + _compute_engine_creds(stub, args) + elif self is TestCase.OAUTH2_AUTH_TOKEN: + _oauth2_auth_token(stub, args) + elif self is TestCase.JWT_TOKEN_CREDS: + _jwt_token_creds(stub, args) + elif self is TestCase.PER_RPC_CREDS: + _per_rpc_creds(stub, args) + elif self is TestCase.SPECIAL_STATUS_MESSAGE: + _special_status_message(stub, args) + else: raise NotImplementedError('Test case "%s" not implemented!' % self.name) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py index ddded80a3f..a55919a60a 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py @@ -1,42 +1,42 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Constants and functions for data used in interoperability testing.""" - -import argparse -import pkgutil -import os - -_ROOT_CERTIFICATES_RESOURCE_PATH = 'credentials/ca.pem' -_PRIVATE_KEY_RESOURCE_PATH = 'credentials/server1.key' -_CERTIFICATE_CHAIN_RESOURCE_PATH = 'credentials/server1.pem' - - -def test_root_certificates(): - return pkgutil.get_data(__name__, _ROOT_CERTIFICATES_RESOURCE_PATH) - - -def private_key(): - return pkgutil.get_data(__name__, _PRIVATE_KEY_RESOURCE_PATH) - - -def certificate_chain(): - return pkgutil.get_data(__name__, _CERTIFICATE_CHAIN_RESOURCE_PATH) - - -def parse_bool(value): - if value == 'true': - return True - if value == 'false': - return False - raise argparse.ArgumentTypeError('Only true/false allowed') +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Constants and functions for data used in interoperability testing.""" + +import argparse +import pkgutil +import os + +_ROOT_CERTIFICATES_RESOURCE_PATH = 'credentials/ca.pem' +_PRIVATE_KEY_RESOURCE_PATH = 'credentials/server1.key' +_CERTIFICATE_CHAIN_RESOURCE_PATH = 'credentials/server1.pem' + + +def test_root_certificates(): + return pkgutil.get_data(__name__, _ROOT_CERTIFICATES_RESOURCE_PATH) + + +def private_key(): + return pkgutil.get_data(__name__, _PRIVATE_KEY_RESOURCE_PATH) + + +def certificate_chain(): + return pkgutil.get_data(__name__, _CERTIFICATE_CHAIN_RESOURCE_PATH) + + +def parse_bool(value): + if value == 'true': + return True + if value == 'false': + return False + raise argparse.ArgumentTypeError('Only true/false allowed') diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py index 5ac6983e58..c85adb0b0b 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py @@ -1,35 +1,35 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
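The resources helpers above hand back the private key and certificate chain as bytes, and the interop server below feeds them to add_secure_port. A minimal sketch of that wiring, reading placeholder file paths instead of pkgutil resources:

import grpc

with open('server1.key', 'rb') as key_file:
    private_key = key_file.read()
with open('server1.pem', 'rb') as chain_file:
    certificate_chain = chain_file.read()

server_credentials = grpc.ssl_server_credentials(
    [(private_key, certificate_chain)])
# server.add_secure_port('[::]:50051', server_credentials)   # port is a placeholder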
-"""The Python implementation of the GRPC interoperability test server.""" - -import argparse -from concurrent import futures -import logging - -import grpc -from src.proto.grpc.testing import test_pb2_grpc - -from tests.interop import service -from tests.interop import resources -from tests.unit import test_common - -logging.basicConfig() -_LOGGER = logging.getLogger(__name__) - - +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The Python implementation of the GRPC interoperability test server.""" + +import argparse +from concurrent import futures +import logging + +import grpc +from src.proto.grpc.testing import test_pb2_grpc + +from tests.interop import service +from tests.interop import resources +from tests.unit import test_common + +logging.basicConfig() +_LOGGER = logging.getLogger(__name__) + + def parse_interop_server_arguments(): - parser = argparse.ArgumentParser() + parser = argparse.ArgumentParser() parser.add_argument('--port', type=int, required=True, @@ -43,7 +43,7 @@ def parse_interop_server_arguments(): type=resources.parse_bool, help='require an ALTS connection') return parser.parse_args() - + def get_server_credentials(use_tls): if use_tls: @@ -57,20 +57,20 @@ def get_server_credentials(use_tls): def serve(): args = parse_interop_server_arguments() - server = test_common.test_server() - test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(), - server) + server = test_common.test_server() + test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(), + server) if args.use_tls or args.use_alts: credentials = get_server_credentials(args.use_tls) - server.add_secure_port('[::]:{}'.format(args.port), credentials) - else: - server.add_insecure_port('[::]:{}'.format(args.port)) - - server.start() - _LOGGER.info('Server serving.') - server.wait_for_termination() - _LOGGER.info('Server stopped; exiting.') - - -if __name__ == '__main__': - serve() + server.add_secure_port('[::]:{}'.format(args.port), credentials) + else: + server.add_insecure_port('[::]:{}'.format(args.port)) + + server.start() + _LOGGER.info('Server serving.') + server.wait_for_termination() + _LOGGER.info('Server stopped; exiting.') + + +if __name__ == '__main__': + serve() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py index 1b29f7adbf..08bb0c45a2 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py @@ -1,96 +1,96 @@ -# Copyright 2019 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""The Python implementation of the TestServicer.""" - -import time - -import grpc - -from src.proto.grpc.testing import empty_pb2 -from src.proto.grpc.testing import messages_pb2 -from src.proto.grpc.testing import test_pb2_grpc - -_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial" -_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin" -_US_IN_A_SECOND = 1000 * 1000 - - -def _maybe_echo_metadata(servicer_context): - """Copies metadata from request to response if it is present.""" - invocation_metadata = dict(servicer_context.invocation_metadata()) - if _INITIAL_METADATA_KEY in invocation_metadata: - initial_metadatum = (_INITIAL_METADATA_KEY, - invocation_metadata[_INITIAL_METADATA_KEY]) - servicer_context.send_initial_metadata((initial_metadatum,)) - if _TRAILING_METADATA_KEY in invocation_metadata: - trailing_metadatum = (_TRAILING_METADATA_KEY, - invocation_metadata[_TRAILING_METADATA_KEY]) - servicer_context.set_trailing_metadata((trailing_metadatum,)) - - -def _maybe_echo_status_and_message(request, servicer_context): - """Sets the response context code and details if the request asks for them""" - if request.HasField('response_status'): - servicer_context.set_code(request.response_status.code) - servicer_context.set_details(request.response_status.message) - - -class TestService(test_pb2_grpc.TestServiceServicer): - - def EmptyCall(self, request, context): - _maybe_echo_metadata(context) - return empty_pb2.Empty() - - def UnaryCall(self, request, context): - _maybe_echo_metadata(context) - _maybe_echo_status_and_message(request, context) - return messages_pb2.SimpleResponse( +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The Python implementation of the TestServicer.""" + +import time + +import grpc + +from src.proto.grpc.testing import empty_pb2 +from src.proto.grpc.testing import messages_pb2 +from src.proto.grpc.testing import test_pb2_grpc + +_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial" +_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin" +_US_IN_A_SECOND = 1000 * 1000 + + +def _maybe_echo_metadata(servicer_context): + """Copies metadata from request to response if it is present.""" + invocation_metadata = dict(servicer_context.invocation_metadata()) + if _INITIAL_METADATA_KEY in invocation_metadata: + initial_metadatum = (_INITIAL_METADATA_KEY, + invocation_metadata[_INITIAL_METADATA_KEY]) + servicer_context.send_initial_metadata((initial_metadatum,)) + if _TRAILING_METADATA_KEY in invocation_metadata: + trailing_metadatum = (_TRAILING_METADATA_KEY, + invocation_metadata[_TRAILING_METADATA_KEY]) + servicer_context.set_trailing_metadata((trailing_metadatum,)) + + +def _maybe_echo_status_and_message(request, servicer_context): + """Sets the response context code and details if the request asks for them""" + if request.HasField('response_status'): + servicer_context.set_code(request.response_status.code) + servicer_context.set_details(request.response_status.message) + + +class TestService(test_pb2_grpc.TestServiceServicer): + + def EmptyCall(self, request, context): + _maybe_echo_metadata(context) + return empty_pb2.Empty() + + def UnaryCall(self, request, context): + _maybe_echo_metadata(context) + _maybe_echo_status_and_message(request, context) + return messages_pb2.SimpleResponse( payload=messages_pb2.Payload(type=messages_pb2.COMPRESSABLE, body=b'\x00' * request.response_size)) - - def StreamingOutputCall(self, request, context): - _maybe_echo_status_and_message(request, context) - for response_parameters in request.response_parameters: - if response_parameters.interval_us != 0: - time.sleep(response_parameters.interval_us / _US_IN_A_SECOND) - yield messages_pb2.StreamingOutputCallResponse( + + def StreamingOutputCall(self, request, context): + _maybe_echo_status_and_message(request, context) + for response_parameters in request.response_parameters: + if response_parameters.interval_us != 0: + time.sleep(response_parameters.interval_us / _US_IN_A_SECOND) + yield messages_pb2.StreamingOutputCallResponse( payload=messages_pb2.Payload(type=request.response_type, body=b'\x00' * response_parameters.size)) - - def StreamingInputCall(self, request_iterator, context): - aggregate_size = 0 - for request in request_iterator: - if request.payload is not None and request.payload.body: - aggregate_size += len(request.payload.body) - return messages_pb2.StreamingInputCallResponse( - aggregated_payload_size=aggregate_size) - - def FullDuplexCall(self, request_iterator, context): - _maybe_echo_metadata(context) - for request in request_iterator: - _maybe_echo_status_and_message(request, context) - for response_parameters in request.response_parameters: - if response_parameters.interval_us != 0: + + def StreamingInputCall(self, request_iterator, context): + aggregate_size = 0 + for request in request_iterator: + if request.payload is not None and request.payload.body: + aggregate_size += len(request.payload.body) + return messages_pb2.StreamingInputCallResponse( + aggregated_payload_size=aggregate_size) + + def FullDuplexCall(self, request_iterator, context): + _maybe_echo_metadata(context) + for request in request_iterator: + _maybe_echo_status_and_message(request, context) + for response_parameters 
in request.response_parameters: + if response_parameters.interval_us != 0: time.sleep(response_parameters.interval_us / _US_IN_A_SECOND) - yield messages_pb2.StreamingOutputCallResponse( + yield messages_pb2.StreamingOutputCallResponse( payload=messages_pb2.Payload(type=request.payload.type, body=b'\x00' * response_parameters.size)) - - # NOTE(nathaniel): Apparently this is the same as the full-duplex call? - # NOTE(atash): It isn't even called in the interop spec (Oct 22 2015)... - def HalfDuplexCall(self, request_iterator, context): - return self.FullDuplexCall(request_iterator, context) + + # NOTE(nathaniel): Apparently this is the same as the full-duplex call? + # NOTE(atash): It isn't even called in the interop spec (Oct 22 2015)... + def HalfDuplexCall(self, request_iterator, context): + return self.FullDuplexCall(request_iterator, context) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py index 8d89990e82..5772620b60 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py index 60ac8f02b9..17835e7c0d 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py @@ -1,202 +1,202 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Defines test client behaviors (UNARY/STREAMING) (SYNC/ASYNC).""" - -import abc -import threading -import time - -from concurrent import futures -from six.moves import queue - -import grpc -from src.proto.grpc.testing import messages_pb2 -from src.proto.grpc.testing import benchmark_service_pb2_grpc -from tests.unit import resources -from tests.unit import test_common - -_TIMEOUT = 60 * 60 * 24 - - -class GenericStub(object): - - def __init__(self, channel): - self.UnaryCall = channel.unary_unary( - '/grpc.testing.BenchmarkService/UnaryCall') - self.StreamingCall = channel.stream_stream( - '/grpc.testing.BenchmarkService/StreamingCall') - - -class BenchmarkClient: - """Benchmark client interface that exposes a non-blocking send_request().""" - - __metaclass__ = abc.ABCMeta - - def __init__(self, server, config, hist): - # Create the stub - if config.HasField('security_params'): - creds = grpc.ssl_channel_credentials( - resources.test_root_certificates()) - channel = test_common.test_secure_channel( - server, creds, config.security_params.server_host_override) - else: - channel = grpc.insecure_channel(server) - - # waits for the channel to be ready before we start sending messages - grpc.channel_ready_future(channel).result() - - if config.payload_config.WhichOneof('payload') == 'simple_params': - self._generic = False - self._stub = benchmark_service_pb2_grpc.BenchmarkServiceStub( - channel) - payload = messages_pb2.Payload( +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Defines test client behaviors (UNARY/STREAMING) (SYNC/ASYNC).""" + +import abc +import threading +import time + +from concurrent import futures +from six.moves import queue + +import grpc +from src.proto.grpc.testing import messages_pb2 +from src.proto.grpc.testing import benchmark_service_pb2_grpc +from tests.unit import resources +from tests.unit import test_common + +_TIMEOUT = 60 * 60 * 24 + + +class GenericStub(object): + + def __init__(self, channel): + self.UnaryCall = channel.unary_unary( + '/grpc.testing.BenchmarkService/UnaryCall') + self.StreamingCall = channel.stream_stream( + '/grpc.testing.BenchmarkService/StreamingCall') + + +class BenchmarkClient: + """Benchmark client interface that exposes a non-blocking send_request().""" + + __metaclass__ = abc.ABCMeta + + def __init__(self, server, config, hist): + # Create the stub + if config.HasField('security_params'): + creds = grpc.ssl_channel_credentials( + resources.test_root_certificates()) + channel = test_common.test_secure_channel( + server, creds, config.security_params.server_host_override) + else: + channel = grpc.insecure_channel(server) + + # waits for the channel to be ready before we start sending messages + grpc.channel_ready_future(channel).result() + + if config.payload_config.WhichOneof('payload') == 'simple_params': + self._generic = False + self._stub = benchmark_service_pb2_grpc.BenchmarkServiceStub( + channel) + payload = messages_pb2.Payload( body=bytes(b'\0' * config.payload_config.simple_params.req_size)) - self._request = messages_pb2.SimpleRequest( - payload=payload, - response_size=config.payload_config.simple_params.resp_size) - else: - self._generic = True - self._stub = GenericStub(channel) + self._request = messages_pb2.SimpleRequest( + payload=payload, + response_size=config.payload_config.simple_params.resp_size) + else: + self._generic = True + self._stub = GenericStub(channel) self._request = bytes(b'\0' * config.payload_config.bytebuf_params.req_size) - - self._hist = hist - self._response_callbacks = [] - - def add_response_callback(self, callback): - """callback will be invoked as callback(client, query_time)""" - self._response_callbacks.append(callback) - - @abc.abstractmethod - def send_request(self): - """Non-blocking wrapper for a client's request operation.""" - raise NotImplementedError() - - def start(self): - pass - - def stop(self): - pass - - def _handle_response(self, client, query_time): - self._hist.add(query_time * 1e9) # Report times in nanoseconds - for callback in self._response_callbacks: - callback(client, query_time) - - -class UnarySyncBenchmarkClient(BenchmarkClient): - - def __init__(self, server, config, hist): - super(UnarySyncBenchmarkClient, self).__init__(server, config, hist) - self._pool = futures.ThreadPoolExecutor( - max_workers=config.outstanding_rpcs_per_channel) - - def send_request(self): - # Send requests in separate threads to support multiple outstanding rpcs - # (See src/proto/grpc/testing/control.proto) - self._pool.submit(self._dispatch_request) - - def stop(self): - self._pool.shutdown(wait=True) - self._stub = None - - def _dispatch_request(self): - start_time = time.time() - self._stub.UnaryCall(self._request, _TIMEOUT) - end_time = time.time() - self._handle_response(self, end_time - start_time) - - -class UnaryAsyncBenchmarkClient(BenchmarkClient): - - def send_request(self): - # Use the Future callback api to support multiple outstanding rpcs - start_time = time.time() - response_future = self._stub.UnaryCall.future(self._request, _TIMEOUT) 
- response_future.add_done_callback( - lambda resp: self._response_received(start_time, resp)) - - def _response_received(self, start_time, resp): - resp.result() - end_time = time.time() - self._handle_response(self, end_time - start_time) - - def stop(self): - self._stub = None - - -class _SyncStream(object): - - def __init__(self, stub, generic, request, handle_response): - self._stub = stub - self._generic = generic - self._request = request - self._handle_response = handle_response - self._is_streaming = False - self._request_queue = queue.Queue() - self._send_time_queue = queue.Queue() - - def send_request(self): - self._send_time_queue.put(time.time()) - self._request_queue.put(self._request) - - def start(self): - self._is_streaming = True - response_stream = self._stub.StreamingCall(self._request_generator(), - _TIMEOUT) - for _ in response_stream: - self._handle_response( - self, - time.time() - self._send_time_queue.get_nowait()) - - def stop(self): - self._is_streaming = False - - def _request_generator(self): - while self._is_streaming: - try: - request = self._request_queue.get(block=True, timeout=1.0) - yield request - except queue.Empty: - pass - - -class StreamingSyncBenchmarkClient(BenchmarkClient): - - def __init__(self, server, config, hist): - super(StreamingSyncBenchmarkClient, self).__init__(server, config, hist) - self._pool = futures.ThreadPoolExecutor( - max_workers=config.outstanding_rpcs_per_channel) - self._streams = [ - _SyncStream(self._stub, self._generic, self._request, - self._handle_response) - for _ in range(config.outstanding_rpcs_per_channel) - ] - self._curr_stream = 0 - - def send_request(self): - # Use a round_robin scheduler to determine what stream to send on - self._streams[self._curr_stream].send_request() - self._curr_stream = (self._curr_stream + 1) % len(self._streams) - - def start(self): - for stream in self._streams: - self._pool.submit(stream.start) - - def stop(self): - for stream in self._streams: - stream.stop() - self._pool.shutdown(wait=True) - self._stub = None + + self._hist = hist + self._response_callbacks = [] + + def add_response_callback(self, callback): + """callback will be invoked as callback(client, query_time)""" + self._response_callbacks.append(callback) + + @abc.abstractmethod + def send_request(self): + """Non-blocking wrapper for a client's request operation.""" + raise NotImplementedError() + + def start(self): + pass + + def stop(self): + pass + + def _handle_response(self, client, query_time): + self._hist.add(query_time * 1e9) # Report times in nanoseconds + for callback in self._response_callbacks: + callback(client, query_time) + + +class UnarySyncBenchmarkClient(BenchmarkClient): + + def __init__(self, server, config, hist): + super(UnarySyncBenchmarkClient, self).__init__(server, config, hist) + self._pool = futures.ThreadPoolExecutor( + max_workers=config.outstanding_rpcs_per_channel) + + def send_request(self): + # Send requests in separate threads to support multiple outstanding rpcs + # (See src/proto/grpc/testing/control.proto) + self._pool.submit(self._dispatch_request) + + def stop(self): + self._pool.shutdown(wait=True) + self._stub = None + + def _dispatch_request(self): + start_time = time.time() + self._stub.UnaryCall(self._request, _TIMEOUT) + end_time = time.time() + self._handle_response(self, end_time - start_time) + + +class UnaryAsyncBenchmarkClient(BenchmarkClient): + + def send_request(self): + # Use the Future callback api to support multiple outstanding rpcs + start_time = time.time() + 
response_future = self._stub.UnaryCall.future(self._request, _TIMEOUT) + response_future.add_done_callback( + lambda resp: self._response_received(start_time, resp)) + + def _response_received(self, start_time, resp): + resp.result() + end_time = time.time() + self._handle_response(self, end_time - start_time) + + def stop(self): + self._stub = None + + +class _SyncStream(object): + + def __init__(self, stub, generic, request, handle_response): + self._stub = stub + self._generic = generic + self._request = request + self._handle_response = handle_response + self._is_streaming = False + self._request_queue = queue.Queue() + self._send_time_queue = queue.Queue() + + def send_request(self): + self._send_time_queue.put(time.time()) + self._request_queue.put(self._request) + + def start(self): + self._is_streaming = True + response_stream = self._stub.StreamingCall(self._request_generator(), + _TIMEOUT) + for _ in response_stream: + self._handle_response( + self, + time.time() - self._send_time_queue.get_nowait()) + + def stop(self): + self._is_streaming = False + + def _request_generator(self): + while self._is_streaming: + try: + request = self._request_queue.get(block=True, timeout=1.0) + yield request + except queue.Empty: + pass + + +class StreamingSyncBenchmarkClient(BenchmarkClient): + + def __init__(self, server, config, hist): + super(StreamingSyncBenchmarkClient, self).__init__(server, config, hist) + self._pool = futures.ThreadPoolExecutor( + max_workers=config.outstanding_rpcs_per_channel) + self._streams = [ + _SyncStream(self._stub, self._generic, self._request, + self._handle_response) + for _ in range(config.outstanding_rpcs_per_channel) + ] + self._curr_stream = 0 + + def send_request(self): + # Use a round_robin scheduler to determine what stream to send on + self._streams[self._curr_stream].send_request() + self._curr_stream = (self._curr_stream + 1) % len(self._streams) + + def start(self): + for stream in self._streams: + self._pool.submit(stream.start) + + def stop(self): + for stream in self._streams: + stream.stop() + self._pool.shutdown(wait=True) + self._stub = None diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py index 6b6b9ea380..75280bd771 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py @@ -1,44 +1,44 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from src.proto.grpc.testing import messages_pb2 -from src.proto.grpc.testing import benchmark_service_pb2_grpc - - -class BenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer): - """Synchronous Server implementation for the Benchmark service.""" - - def UnaryCall(self, request, context): +# Copyright 2016 gRPC authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from src.proto.grpc.testing import messages_pb2 +from src.proto.grpc.testing import benchmark_service_pb2_grpc + + +class BenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer): + """Synchronous Server implementation for the Benchmark service.""" + + def UnaryCall(self, request, context): payload = messages_pb2.Payload(body=b'\0' * request.response_size) - return messages_pb2.SimpleResponse(payload=payload) - - def StreamingCall(self, request_iterator, context): - for request in request_iterator: + return messages_pb2.SimpleResponse(payload=payload) + + def StreamingCall(self, request_iterator, context): + for request in request_iterator: payload = messages_pb2.Payload(body=b'\0' * request.response_size) - yield messages_pb2.SimpleResponse(payload=payload) - - + yield messages_pb2.SimpleResponse(payload=payload) + + class GenericBenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer ): - """Generic Server implementation for the Benchmark service.""" - - def __init__(self, resp_size): + """Generic Server implementation for the Benchmark service.""" + + def __init__(self, resp_size): self._response = b'\0' * resp_size - - def UnaryCall(self, request, context): - return self._response - - def StreamingCall(self, request_iterator, context): - for request in request_iterator: - yield self._response + + def UnaryCall(self, request, context): + return self._response + + def StreamingCall(self, request_iterator, context): + for request in request_iterator: + yield self._response diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py index c8c48183ac..c5d299f646 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py @@ -1,90 +1,90 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Defines behavior for WHEN clients send requests. - -Each client exposes a non-blocking send_request() method that the -ClientRunner invokes either periodically or in response to some event. 
-""" - -import abc -import threading -import time - - -class ClientRunner: - """Abstract interface for sending requests from clients.""" - - __metaclass__ = abc.ABCMeta - - def __init__(self, client): - self._client = client - - @abc.abstractmethod - def start(self): - raise NotImplementedError() - - @abc.abstractmethod - def stop(self): - raise NotImplementedError() - - -class OpenLoopClientRunner(ClientRunner): - - def __init__(self, client, interval_generator): - super(OpenLoopClientRunner, self).__init__(client) - self._is_running = False - self._interval_generator = interval_generator +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Defines behavior for WHEN clients send requests. + +Each client exposes a non-blocking send_request() method that the +ClientRunner invokes either periodically or in response to some event. +""" + +import abc +import threading +import time + + +class ClientRunner: + """Abstract interface for sending requests from clients.""" + + __metaclass__ = abc.ABCMeta + + def __init__(self, client): + self._client = client + + @abc.abstractmethod + def start(self): + raise NotImplementedError() + + @abc.abstractmethod + def stop(self): + raise NotImplementedError() + + +class OpenLoopClientRunner(ClientRunner): + + def __init__(self, client, interval_generator): + super(OpenLoopClientRunner, self).__init__(client) + self._is_running = False + self._interval_generator = interval_generator self._dispatch_thread = threading.Thread(target=self._dispatch_requests, args=()) - - def start(self): - self._is_running = True - self._client.start() - self._dispatch_thread.start() - - def stop(self): - self._is_running = False - self._client.stop() - self._dispatch_thread.join() - self._client = None - - def _dispatch_requests(self): - while self._is_running: - self._client.send_request() - time.sleep(next(self._interval_generator)) - - -class ClosedLoopClientRunner(ClientRunner): - - def __init__(self, client, request_count): - super(ClosedLoopClientRunner, self).__init__(client) - self._is_running = False - self._request_count = request_count - # Send a new request on each response for closed loop - self._client.add_response_callback(self._send_request) - - def start(self): - self._is_running = True - self._client.start() - for _ in range(self._request_count): - self._client.send_request() - - def stop(self): - self._is_running = False - self._client.stop() - self._client = None - - def _send_request(self, client, response_time): - if self._is_running: - client.send_request() + + def start(self): + self._is_running = True + self._client.start() + self._dispatch_thread.start() + + def stop(self): + self._is_running = False + self._client.stop() + self._dispatch_thread.join() + self._client = None + + def _dispatch_requests(self): + while self._is_running: + self._client.send_request() + time.sleep(next(self._interval_generator)) + + +class ClosedLoopClientRunner(ClientRunner): + + def __init__(self, client, request_count): + 
super(ClosedLoopClientRunner, self).__init__(client) + self._is_running = False + self._request_count = request_count + # Send a new request on each response for closed loop + self._client.add_response_callback(self._send_request) + + def start(self): + self._is_running = True + self._client.start() + for _ in range(self._request_count): + self._client.send_request() + + def stop(self): + self._is_running = False + self._client.stop() + self._client = None + + def _send_request(self, client, response_time): + if self._is_running: + client.send_request() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py index 33ef27961e..8139a6ee2f 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py @@ -1,70 +1,70 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import math -import threading - -from src.proto.grpc.testing import stats_pb2 - - -class Histogram(object): - """Histogram class used for recording performance testing data. - - This class is thread safe. - """ - - def __init__(self, resolution, max_possible): - self._lock = threading.Lock() - self._resolution = resolution - self._max_possible = max_possible - self._sum = 0 - self._sum_of_squares = 0 - self.multiplier = 1.0 + self._resolution - self._count = 0 - self._min = self._max_possible - self._max = 0 - self._buckets = [0] * (self._bucket_for(self._max_possible) + 1) - - def reset(self): - with self._lock: - self._sum = 0 - self._sum_of_squares = 0 - self._count = 0 - self._min = self._max_possible - self._max = 0 - self._buckets = [0] * (self._bucket_for(self._max_possible) + 1) - - def add(self, val): - with self._lock: - self._sum += val - self._sum_of_squares += val * val - self._count += 1 - self._min = min(self._min, val) - self._max = max(self._max, val) - self._buckets[self._bucket_for(val)] += 1 - - def get_data(self): - with self._lock: - data = stats_pb2.HistogramData() - data.bucket.extend(self._buckets) - data.min_seen = self._min - data.max_seen = self._max - data.sum = self._sum - data.sum_of_squares = self._sum_of_squares - data.count = self._count - return data - +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import math +import threading + +from src.proto.grpc.testing import stats_pb2 + + +class Histogram(object): + """Histogram class used for recording performance testing data. + + This class is thread safe. + """ + + def __init__(self, resolution, max_possible): + self._lock = threading.Lock() + self._resolution = resolution + self._max_possible = max_possible + self._sum = 0 + self._sum_of_squares = 0 + self.multiplier = 1.0 + self._resolution + self._count = 0 + self._min = self._max_possible + self._max = 0 + self._buckets = [0] * (self._bucket_for(self._max_possible) + 1) + + def reset(self): + with self._lock: + self._sum = 0 + self._sum_of_squares = 0 + self._count = 0 + self._min = self._max_possible + self._max = 0 + self._buckets = [0] * (self._bucket_for(self._max_possible) + 1) + + def add(self, val): + with self._lock: + self._sum += val + self._sum_of_squares += val * val + self._count += 1 + self._min = min(self._min, val) + self._max = max(self._max, val) + self._buckets[self._bucket_for(val)] += 1 + + def get_data(self): + with self._lock: + data = stats_pb2.HistogramData() + data.bucket.extend(self._buckets) + data.min_seen = self._min + data.max_seen = self._max + data.sum = self._sum + data.sum_of_squares = self._sum_of_squares + data.count = self._count + return data + def merge(self, another_data): with self._lock: for i in range(len(self._buckets)): @@ -75,6 +75,6 @@ class Histogram(object): self._sum_of_squares += another_data.sum_of_squares self._count += another_data.count - def _bucket_for(self, val): - val = min(val, self._max_possible) - return int(math.log(val, self.multiplier)) + def _bucket_for(self, val): + val = min(val, self._max_possible) + return int(math.log(val, self.multiplier)) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py index a78751bda1..a7e692821a 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py @@ -1,46 +1,46 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""The entry point for the qps worker.""" - -import argparse -import time - -import grpc -from src.proto.grpc.testing import worker_service_pb2_grpc - -from tests.qps import worker_server -from tests.unit import test_common - - -def run_worker_server(port): - server = test_common.test_server() - servicer = worker_server.WorkerServer() - worker_service_pb2_grpc.add_WorkerServiceServicer_to_server( - servicer, server) - server.add_insecure_port('[::]:{}'.format(port)) - server.start() - servicer.wait_for_quit() - server.stop(0) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description='gRPC Python performance testing worker') +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The entry point for the qps worker.""" + +import argparse +import time + +import grpc +from src.proto.grpc.testing import worker_service_pb2_grpc + +from tests.qps import worker_server +from tests.unit import test_common + + +def run_worker_server(port): + server = test_common.test_server() + servicer = worker_server.WorkerServer() + worker_service_pb2_grpc.add_WorkerServiceServicer_to_server( + servicer, server) + server.add_insecure_port('[::]:{}'.format(port)) + server.start() + servicer.wait_for_quit() + server.stop(0) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description='gRPC Python performance testing worker') parser.add_argument('--driver_port', type=int, dest='port', help='The port the worker should listen on') - args = parser.parse_args() - - run_worker_server(args.port) + args = parser.parse_args() + + run_worker_server(args.port) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py index c6c1ed27d8..65b081e5d1 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py @@ -1,186 +1,186 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import multiprocessing -import random -import threading -import time - -from concurrent import futures -import grpc -from src.proto.grpc.testing import control_pb2 -from src.proto.grpc.testing import benchmark_service_pb2_grpc -from src.proto.grpc.testing import worker_service_pb2_grpc -from src.proto.grpc.testing import stats_pb2 - -from tests.qps import benchmark_client -from tests.qps import benchmark_server -from tests.qps import client_runner -from tests.qps import histogram -from tests.unit import resources -from tests.unit import test_common - - -class WorkerServer(worker_service_pb2_grpc.WorkerServiceServicer): - """Python Worker Server implementation.""" - - def __init__(self): - self._quit_event = threading.Event() - - def RunServer(self, request_iterator, context): - config = next(request_iterator).setup #pylint: disable=stop-iteration-return - server, port = self._create_server(config) - cores = multiprocessing.cpu_count() - server.start() - start_time = time.time() - yield self._get_server_status(start_time, start_time, port, cores) - - for request in request_iterator: - end_time = time.time() - status = self._get_server_status(start_time, end_time, port, cores) - if request.mark.reset: - start_time = end_time - yield status - server.stop(None) - - def _get_server_status(self, start_time, end_time, port, cores): - end_time = time.time() - elapsed_time = end_time - start_time +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import multiprocessing +import random +import threading +import time + +from concurrent import futures +import grpc +from src.proto.grpc.testing import control_pb2 +from src.proto.grpc.testing import benchmark_service_pb2_grpc +from src.proto.grpc.testing import worker_service_pb2_grpc +from src.proto.grpc.testing import stats_pb2 + +from tests.qps import benchmark_client +from tests.qps import benchmark_server +from tests.qps import client_runner +from tests.qps import histogram +from tests.unit import resources +from tests.unit import test_common + + +class WorkerServer(worker_service_pb2_grpc.WorkerServiceServicer): + """Python Worker Server implementation.""" + + def __init__(self): + self._quit_event = threading.Event() + + def RunServer(self, request_iterator, context): + config = next(request_iterator).setup #pylint: disable=stop-iteration-return + server, port = self._create_server(config) + cores = multiprocessing.cpu_count() + server.start() + start_time = time.time() + yield self._get_server_status(start_time, start_time, port, cores) + + for request in request_iterator: + end_time = time.time() + status = self._get_server_status(start_time, end_time, port, cores) + if request.mark.reset: + start_time = end_time + yield status + server.stop(None) + + def _get_server_status(self, start_time, end_time, port, cores): + end_time = time.time() + elapsed_time = end_time - start_time stats = stats_pb2.ServerStats(time_elapsed=elapsed_time, time_user=elapsed_time, time_system=elapsed_time) - return control_pb2.ServerStatus(stats=stats, port=port, cores=cores) - - def _create_server(self, config): - if config.async_server_threads == 0: - # This is the default concurrent.futures thread pool size, but - # None doesn't seem to work - server_threads = multiprocessing.cpu_count() * 5 - else: - server_threads = config.async_server_threads - server = test_common.test_server(max_workers=server_threads) - if config.server_type == control_pb2.ASYNC_SERVER: - servicer = benchmark_server.BenchmarkServer() - benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server( - servicer, server) - elif config.server_type == control_pb2.ASYNC_GENERIC_SERVER: - resp_size = config.payload_config.bytebuf_params.resp_size - servicer = benchmark_server.GenericBenchmarkServer(resp_size) - method_implementations = { - 'StreamingCall': + return control_pb2.ServerStatus(stats=stats, port=port, cores=cores) + + def _create_server(self, config): + if config.async_server_threads == 0: + # This is the default concurrent.futures thread pool size, but + # None doesn't seem to work + server_threads = multiprocessing.cpu_count() * 5 + else: + server_threads = config.async_server_threads + server = test_common.test_server(max_workers=server_threads) + if config.server_type == control_pb2.ASYNC_SERVER: + servicer = benchmark_server.BenchmarkServer() + benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server( + servicer, server) + elif config.server_type == control_pb2.ASYNC_GENERIC_SERVER: + resp_size = config.payload_config.bytebuf_params.resp_size + servicer = benchmark_server.GenericBenchmarkServer(resp_size) + method_implementations = { + 'StreamingCall': grpc.stream_stream_rpc_method_handler(servicer.StreamingCall ), - 'UnaryCall': + 'UnaryCall': grpc.unary_unary_rpc_method_handler(servicer.UnaryCall), - } - handler = grpc.method_handlers_generic_handler( - 'grpc.testing.BenchmarkService', method_implementations) - server.add_generic_rpc_handlers((handler,)) - else: - raise Exception('Unsupported server 
type {}'.format( - config.server_type)) - - if config.HasField('security_params'): # Use SSL - server_creds = grpc.ssl_server_credentials( - ((resources.private_key(), resources.certificate_chain()),)) - port = server.add_secure_port('[::]:{}'.format(config.port), - server_creds) - else: - port = server.add_insecure_port('[::]:{}'.format(config.port)) - - return (server, port) - - def RunClient(self, request_iterator, context): - config = next(request_iterator).setup #pylint: disable=stop-iteration-return - client_runners = [] - qps_data = histogram.Histogram(config.histogram_params.resolution, - config.histogram_params.max_possible) - start_time = time.time() - - # Create a client for each channel - for i in range(config.client_channels): - server = config.server_targets[i % len(config.server_targets)] - runner = self._create_client_runner(server, config, qps_data) - client_runners.append(runner) - runner.start() - - end_time = time.time() - yield self._get_client_status(start_time, end_time, qps_data) - - # Respond to stat requests - for request in request_iterator: - end_time = time.time() - status = self._get_client_status(start_time, end_time, qps_data) - if request.mark.reset: - qps_data.reset() - start_time = time.time() - yield status - - # Cleanup the clients - for runner in client_runners: - runner.stop() - - def _get_client_status(self, start_time, end_time, qps_data): - latencies = qps_data.get_data() - end_time = time.time() - elapsed_time = end_time - start_time + } + handler = grpc.method_handlers_generic_handler( + 'grpc.testing.BenchmarkService', method_implementations) + server.add_generic_rpc_handlers((handler,)) + else: + raise Exception('Unsupported server type {}'.format( + config.server_type)) + + if config.HasField('security_params'): # Use SSL + server_creds = grpc.ssl_server_credentials( + ((resources.private_key(), resources.certificate_chain()),)) + port = server.add_secure_port('[::]:{}'.format(config.port), + server_creds) + else: + port = server.add_insecure_port('[::]:{}'.format(config.port)) + + return (server, port) + + def RunClient(self, request_iterator, context): + config = next(request_iterator).setup #pylint: disable=stop-iteration-return + client_runners = [] + qps_data = histogram.Histogram(config.histogram_params.resolution, + config.histogram_params.max_possible) + start_time = time.time() + + # Create a client for each channel + for i in range(config.client_channels): + server = config.server_targets[i % len(config.server_targets)] + runner = self._create_client_runner(server, config, qps_data) + client_runners.append(runner) + runner.start() + + end_time = time.time() + yield self._get_client_status(start_time, end_time, qps_data) + + # Respond to stat requests + for request in request_iterator: + end_time = time.time() + status = self._get_client_status(start_time, end_time, qps_data) + if request.mark.reset: + qps_data.reset() + start_time = time.time() + yield status + + # Cleanup the clients + for runner in client_runners: + runner.stop() + + def _get_client_status(self, start_time, end_time, qps_data): + latencies = qps_data.get_data() + end_time = time.time() + elapsed_time = end_time - start_time stats = stats_pb2.ClientStats(latencies=latencies, time_elapsed=elapsed_time, time_user=elapsed_time, time_system=elapsed_time) - return control_pb2.ClientStatus(stats=stats) - - def _create_client_runner(self, server, config, qps_data): - if config.client_type == control_pb2.SYNC_CLIENT: - if config.rpc_type == control_pb2.UNARY: - client = 
benchmark_client.UnarySyncBenchmarkClient( - server, config, qps_data) - elif config.rpc_type == control_pb2.STREAMING: - client = benchmark_client.StreamingSyncBenchmarkClient( - server, config, qps_data) - elif config.client_type == control_pb2.ASYNC_CLIENT: - if config.rpc_type == control_pb2.UNARY: - client = benchmark_client.UnaryAsyncBenchmarkClient( - server, config, qps_data) - else: - raise Exception('Async streaming client not supported') - else: - raise Exception('Unsupported client type {}'.format( - config.client_type)) - - # In multi-channel tests, we split the load across all channels - load_factor = float(config.client_channels) - if config.load_params.WhichOneof('load') == 'closed_loop': - runner = client_runner.ClosedLoopClientRunner( - client, config.outstanding_rpcs_per_channel) - else: # Open loop Poisson - alpha = config.load_params.poisson.offered_load / load_factor - - def poisson(): - while True: - yield random.expovariate(alpha) - - runner = client_runner.OpenLoopClientRunner(client, poisson()) - - return runner - - def CoreCount(self, request, context): - return control_pb2.CoreResponse(cores=multiprocessing.cpu_count()) - - def QuitWorker(self, request, context): - self._quit_event.set() - return control_pb2.Void() - - def wait_for_quit(self): - self._quit_event.wait() + return control_pb2.ClientStatus(stats=stats) + + def _create_client_runner(self, server, config, qps_data): + if config.client_type == control_pb2.SYNC_CLIENT: + if config.rpc_type == control_pb2.UNARY: + client = benchmark_client.UnarySyncBenchmarkClient( + server, config, qps_data) + elif config.rpc_type == control_pb2.STREAMING: + client = benchmark_client.StreamingSyncBenchmarkClient( + server, config, qps_data) + elif config.client_type == control_pb2.ASYNC_CLIENT: + if config.rpc_type == control_pb2.UNARY: + client = benchmark_client.UnaryAsyncBenchmarkClient( + server, config, qps_data) + else: + raise Exception('Async streaming client not supported') + else: + raise Exception('Unsupported client type {}'.format( + config.client_type)) + + # In multi-channel tests, we split the load across all channels + load_factor = float(config.client_channels) + if config.load_params.WhichOneof('load') == 'closed_loop': + runner = client_runner.ClosedLoopClientRunner( + client, config.outstanding_rpcs_per_channel) + else: # Open loop Poisson + alpha = config.load_params.poisson.offered_load / load_factor + + def poisson(): + while True: + yield random.expovariate(alpha) + + runner = client_runner.OpenLoopClientRunner(client, poisson()) + + return runner + + def CoreCount(self, request, context): + return control_pb2.CoreResponse(cores=multiprocessing.cpu_count()) + + def QuitWorker(self, request, context): + self._quit_event.set() + return control_pb2.Void() + + def wait_for_quit(self): + self._quit_event.wait() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py index 8d89990e82..5772620b60 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py index 2650da3d7d..169e55022d 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py @@ -1,195 +1,195 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests of grpc_reflection.v1alpha.reflection.""" - -import unittest - -import grpc - -from grpc_reflection.v1alpha import reflection -from grpc_reflection.v1alpha import reflection_pb2 -from grpc_reflection.v1alpha import reflection_pb2_grpc - -from google.protobuf import descriptor_pool -from google.protobuf import descriptor_pb2 - -from src.proto.grpc.testing import empty_pb2 -from src.proto.grpc.testing.proto2 import empty2_extensions_pb2 - -from tests.unit import test_common - -_EMPTY_PROTO_FILE_NAME = 'src/proto/grpc/testing/empty.proto' -_EMPTY_PROTO_SYMBOL_NAME = 'grpc.testing.Empty' -_SERVICE_NAMES = ('Angstrom', 'Bohr', 'Curie', 'Dyson', 'Einstein', 'Feynman', - 'Galilei') -_EMPTY_EXTENSIONS_SYMBOL_NAME = 'grpc.testing.proto2.EmptyWithExtensions' -_EMPTY_EXTENSIONS_NUMBERS = ( - 124, - 125, - 126, - 127, - 128, -) - - -def _file_descriptor_to_proto(descriptor): - proto = descriptor_pb2.FileDescriptorProto() - descriptor.CopyToProto(proto) - return proto.SerializeToString() - - -class ReflectionServicerTest(unittest.TestCase): - - # TODO(https://github.com/grpc/grpc/issues/17844) - # Bazel + Python 3 will result in creating two different instance of - # DESCRIPTOR for each message. So, the equal comparison between protobuf - # returned by stub and manually crafted protobuf will always fail. 
- def _assert_sequence_of_proto_equal(self, x, y): - self.assertSequenceEqual( - tuple(proto.SerializeToString() for proto in x), - tuple(proto.SerializeToString() for proto in y), - ) - - def setUp(self): - self._server = test_common.test_server() - reflection.enable_server_reflection(_SERVICE_NAMES, self._server) - port = self._server.add_insecure_port('[::]:0') - self._server.start() - - self._channel = grpc.insecure_channel('localhost:%d' % port) - self._stub = reflection_pb2_grpc.ServerReflectionStub(self._channel) - - def tearDown(self): - self._server.stop(None) - self._channel.close() - - def testFileByName(self): - requests = ( - reflection_pb2.ServerReflectionRequest( - file_by_filename=_EMPTY_PROTO_FILE_NAME), - reflection_pb2.ServerReflectionRequest( - file_by_filename='i-donut-exist'), - ) - responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) - expected_responses = ( - reflection_pb2.ServerReflectionResponse( - valid_host='', - file_descriptor_response=reflection_pb2.FileDescriptorResponse( - file_descriptor_proto=( - _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))), - reflection_pb2.ServerReflectionResponse( - valid_host='', - error_response=reflection_pb2.ErrorResponse( - error_code=grpc.StatusCode.NOT_FOUND.value[0], - error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), - )), - ) - self._assert_sequence_of_proto_equal(expected_responses, responses) - - def testFileBySymbol(self): - requests = ( - reflection_pb2.ServerReflectionRequest( - file_containing_symbol=_EMPTY_PROTO_SYMBOL_NAME), - reflection_pb2.ServerReflectionRequest( - file_containing_symbol='i.donut.exist.co.uk.org.net.me.name.foo' - ), - ) - responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) - expected_responses = ( - reflection_pb2.ServerReflectionResponse( - valid_host='', - file_descriptor_response=reflection_pb2.FileDescriptorResponse( - file_descriptor_proto=( - _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))), - reflection_pb2.ServerReflectionResponse( - valid_host='', - error_response=reflection_pb2.ErrorResponse( - error_code=grpc.StatusCode.NOT_FOUND.value[0], - error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), - )), - ) - self._assert_sequence_of_proto_equal(expected_responses, responses) - - def testFileContainingExtension(self): - requests = ( - reflection_pb2.ServerReflectionRequest( - file_containing_extension=reflection_pb2.ExtensionRequest( - containing_type=_EMPTY_EXTENSIONS_SYMBOL_NAME, - extension_number=125, - ),), - reflection_pb2.ServerReflectionRequest( - file_containing_extension=reflection_pb2.ExtensionRequest( - containing_type='i.donut.exist.co.uk.org.net.me.name.foo', - extension_number=55, - ),), - ) - responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) - expected_responses = ( - reflection_pb2.ServerReflectionResponse( - valid_host='', - file_descriptor_response=reflection_pb2.FileDescriptorResponse( - file_descriptor_proto=(_file_descriptor_to_proto( - empty2_extensions_pb2.DESCRIPTOR),))), - reflection_pb2.ServerReflectionResponse( - valid_host='', - error_response=reflection_pb2.ErrorResponse( - error_code=grpc.StatusCode.NOT_FOUND.value[0], - error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), - )), - ) - self._assert_sequence_of_proto_equal(expected_responses, responses) - - def testExtensionNumbersOfType(self): - requests = ( - reflection_pb2.ServerReflectionRequest( - all_extension_numbers_of_type=_EMPTY_EXTENSIONS_SYMBOL_NAME), - reflection_pb2.ServerReflectionRequest( - 
all_extension_numbers_of_type='i.donut.exist.co.uk.net.name.foo' - ), - ) - responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) - expected_responses = ( - reflection_pb2.ServerReflectionResponse( - valid_host='', - all_extension_numbers_response=reflection_pb2. - ExtensionNumberResponse( - base_type_name=_EMPTY_EXTENSIONS_SYMBOL_NAME, - extension_number=_EMPTY_EXTENSIONS_NUMBERS)), - reflection_pb2.ServerReflectionResponse( - valid_host='', - error_response=reflection_pb2.ErrorResponse( - error_code=grpc.StatusCode.NOT_FOUND.value[0], - error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), - )), - ) - self._assert_sequence_of_proto_equal(expected_responses, responses) - - def testListServices(self): - requests = (reflection_pb2.ServerReflectionRequest(list_services='',),) - responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) - expected_responses = (reflection_pb2.ServerReflectionResponse( - valid_host='', - list_services_response=reflection_pb2.ListServiceResponse( - service=tuple( - reflection_pb2.ServiceResponse(name=name) - for name in _SERVICE_NAMES))),) - self._assert_sequence_of_proto_equal(expected_responses, responses) - - def testReflectionServiceName(self): - self.assertEqual(reflection.SERVICE_NAME, - 'grpc.reflection.v1alpha.ServerReflection') - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests of grpc_reflection.v1alpha.reflection.""" + +import unittest + +import grpc + +from grpc_reflection.v1alpha import reflection +from grpc_reflection.v1alpha import reflection_pb2 +from grpc_reflection.v1alpha import reflection_pb2_grpc + +from google.protobuf import descriptor_pool +from google.protobuf import descriptor_pb2 + +from src.proto.grpc.testing import empty_pb2 +from src.proto.grpc.testing.proto2 import empty2_extensions_pb2 + +from tests.unit import test_common + +_EMPTY_PROTO_FILE_NAME = 'src/proto/grpc/testing/empty.proto' +_EMPTY_PROTO_SYMBOL_NAME = 'grpc.testing.Empty' +_SERVICE_NAMES = ('Angstrom', 'Bohr', 'Curie', 'Dyson', 'Einstein', 'Feynman', + 'Galilei') +_EMPTY_EXTENSIONS_SYMBOL_NAME = 'grpc.testing.proto2.EmptyWithExtensions' +_EMPTY_EXTENSIONS_NUMBERS = ( + 124, + 125, + 126, + 127, + 128, +) + + +def _file_descriptor_to_proto(descriptor): + proto = descriptor_pb2.FileDescriptorProto() + descriptor.CopyToProto(proto) + return proto.SerializeToString() + + +class ReflectionServicerTest(unittest.TestCase): + + # TODO(https://github.com/grpc/grpc/issues/17844) + # Bazel + Python 3 will result in creating two different instance of + # DESCRIPTOR for each message. So, the equal comparison between protobuf + # returned by stub and manually crafted protobuf will always fail. 
+ def _assert_sequence_of_proto_equal(self, x, y): + self.assertSequenceEqual( + tuple(proto.SerializeToString() for proto in x), + tuple(proto.SerializeToString() for proto in y), + ) + + def setUp(self): + self._server = test_common.test_server() + reflection.enable_server_reflection(_SERVICE_NAMES, self._server) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + self._stub = reflection_pb2_grpc.ServerReflectionStub(self._channel) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + + def testFileByName(self): + requests = ( + reflection_pb2.ServerReflectionRequest( + file_by_filename=_EMPTY_PROTO_FILE_NAME), + reflection_pb2.ServerReflectionRequest( + file_by_filename='i-donut-exist'), + ) + responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) + expected_responses = ( + reflection_pb2.ServerReflectionResponse( + valid_host='', + file_descriptor_response=reflection_pb2.FileDescriptorResponse( + file_descriptor_proto=( + _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))), + reflection_pb2.ServerReflectionResponse( + valid_host='', + error_response=reflection_pb2.ErrorResponse( + error_code=grpc.StatusCode.NOT_FOUND.value[0], + error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), + )), + ) + self._assert_sequence_of_proto_equal(expected_responses, responses) + + def testFileBySymbol(self): + requests = ( + reflection_pb2.ServerReflectionRequest( + file_containing_symbol=_EMPTY_PROTO_SYMBOL_NAME), + reflection_pb2.ServerReflectionRequest( + file_containing_symbol='i.donut.exist.co.uk.org.net.me.name.foo' + ), + ) + responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) + expected_responses = ( + reflection_pb2.ServerReflectionResponse( + valid_host='', + file_descriptor_response=reflection_pb2.FileDescriptorResponse( + file_descriptor_proto=( + _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))), + reflection_pb2.ServerReflectionResponse( + valid_host='', + error_response=reflection_pb2.ErrorResponse( + error_code=grpc.StatusCode.NOT_FOUND.value[0], + error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), + )), + ) + self._assert_sequence_of_proto_equal(expected_responses, responses) + + def testFileContainingExtension(self): + requests = ( + reflection_pb2.ServerReflectionRequest( + file_containing_extension=reflection_pb2.ExtensionRequest( + containing_type=_EMPTY_EXTENSIONS_SYMBOL_NAME, + extension_number=125, + ),), + reflection_pb2.ServerReflectionRequest( + file_containing_extension=reflection_pb2.ExtensionRequest( + containing_type='i.donut.exist.co.uk.org.net.me.name.foo', + extension_number=55, + ),), + ) + responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) + expected_responses = ( + reflection_pb2.ServerReflectionResponse( + valid_host='', + file_descriptor_response=reflection_pb2.FileDescriptorResponse( + file_descriptor_proto=(_file_descriptor_to_proto( + empty2_extensions_pb2.DESCRIPTOR),))), + reflection_pb2.ServerReflectionResponse( + valid_host='', + error_response=reflection_pb2.ErrorResponse( + error_code=grpc.StatusCode.NOT_FOUND.value[0], + error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), + )), + ) + self._assert_sequence_of_proto_equal(expected_responses, responses) + + def testExtensionNumbersOfType(self): + requests = ( + reflection_pb2.ServerReflectionRequest( + all_extension_numbers_of_type=_EMPTY_EXTENSIONS_SYMBOL_NAME), + reflection_pb2.ServerReflectionRequest( + 
all_extension_numbers_of_type='i.donut.exist.co.uk.net.name.foo' + ), + ) + responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) + expected_responses = ( + reflection_pb2.ServerReflectionResponse( + valid_host='', + all_extension_numbers_response=reflection_pb2. + ExtensionNumberResponse( + base_type_name=_EMPTY_EXTENSIONS_SYMBOL_NAME, + extension_number=_EMPTY_EXTENSIONS_NUMBERS)), + reflection_pb2.ServerReflectionResponse( + valid_host='', + error_response=reflection_pb2.ErrorResponse( + error_code=grpc.StatusCode.NOT_FOUND.value[0], + error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), + )), + ) + self._assert_sequence_of_proto_equal(expected_responses, responses) + + def testListServices(self): + requests = (reflection_pb2.ServerReflectionRequest(list_services='',),) + responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) + expected_responses = (reflection_pb2.ServerReflectionResponse( + valid_host='', + list_services_response=reflection_pb2.ListServiceResponse( + service=tuple( + reflection_pb2.ServiceResponse(name=name) + for name in _SERVICE_NAMES))),) + self._assert_sequence_of_proto_equal(expected_responses, responses) + + def testReflectionServiceName(self): + self.assertEqual(reflection.SERVICE_NAME, + 'grpc.reflection.v1alpha.ServerReflection') + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py index 10b401503b..38fdfc9c5c 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2018 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2018 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py index 186ea21095..54a3b62420 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py @@ -1,180 +1,180 @@ -# Copyright 2018 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests of grpc_status.""" - -# NOTE(lidiz) This module only exists in Bazel BUILD file, for more details -# please refer to comments in the "bazel_namespace_package_hack" module. -try: - from tests import bazel_namespace_package_hack - bazel_namespace_package_hack.sys_path_to_site_dir_hack() -except ImportError: - pass - -import unittest - -import logging -import traceback - -import grpc -from grpc_status import rpc_status - -from tests.unit import test_common - -from google.protobuf import any_pb2 -from google.rpc import code_pb2, status_pb2, error_details_pb2 - -_STATUS_OK = '/test/StatusOK' -_STATUS_NOT_OK = '/test/StatusNotOk' -_ERROR_DETAILS = '/test/ErrorDetails' -_INCONSISTENT = '/test/Inconsistent' -_INVALID_CODE = '/test/InvalidCode' - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x01\x01\x01' - -_GRPC_DETAILS_METADATA_KEY = 'grpc-status-details-bin' - -_STATUS_DETAILS = 'This is an error detail' -_STATUS_DETAILS_ANOTHER = 'This is another error detail' - - -def _ok_unary_unary(request, servicer_context): - return _RESPONSE - - -def _not_ok_unary_unary(request, servicer_context): - servicer_context.abort(grpc.StatusCode.INTERNAL, _STATUS_DETAILS) - - -def _error_details_unary_unary(request, servicer_context): - details = any_pb2.Any() - details.Pack( +# Copyright 2018 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests of grpc_status.""" + +# NOTE(lidiz) This module only exists in Bazel BUILD file, for more details +# please refer to comments in the "bazel_namespace_package_hack" module. 
+try: + from tests import bazel_namespace_package_hack + bazel_namespace_package_hack.sys_path_to_site_dir_hack() +except ImportError: + pass + +import unittest + +import logging +import traceback + +import grpc +from grpc_status import rpc_status + +from tests.unit import test_common + +from google.protobuf import any_pb2 +from google.rpc import code_pb2, status_pb2, error_details_pb2 + +_STATUS_OK = '/test/StatusOK' +_STATUS_NOT_OK = '/test/StatusNotOk' +_ERROR_DETAILS = '/test/ErrorDetails' +_INCONSISTENT = '/test/Inconsistent' +_INVALID_CODE = '/test/InvalidCode' + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x01\x01\x01' + +_GRPC_DETAILS_METADATA_KEY = 'grpc-status-details-bin' + +_STATUS_DETAILS = 'This is an error detail' +_STATUS_DETAILS_ANOTHER = 'This is another error detail' + + +def _ok_unary_unary(request, servicer_context): + return _RESPONSE + + +def _not_ok_unary_unary(request, servicer_context): + servicer_context.abort(grpc.StatusCode.INTERNAL, _STATUS_DETAILS) + + +def _error_details_unary_unary(request, servicer_context): + details = any_pb2.Any() + details.Pack( error_details_pb2.DebugInfo(stack_entries=traceback.format_stack(), detail='Intentionally invoked')) - rich_status = status_pb2.Status( - code=code_pb2.INTERNAL, - message=_STATUS_DETAILS, - details=[details], - ) - servicer_context.abort_with_status(rpc_status.to_status(rich_status)) - - -def _inconsistent_unary_unary(request, servicer_context): - rich_status = status_pb2.Status( - code=code_pb2.INTERNAL, - message=_STATUS_DETAILS, - ) - servicer_context.set_code(grpc.StatusCode.NOT_FOUND) - servicer_context.set_details(_STATUS_DETAILS_ANOTHER) - # User put inconsistent status information in trailing metadata + rich_status = status_pb2.Status( + code=code_pb2.INTERNAL, + message=_STATUS_DETAILS, + details=[details], + ) + servicer_context.abort_with_status(rpc_status.to_status(rich_status)) + + +def _inconsistent_unary_unary(request, servicer_context): + rich_status = status_pb2.Status( + code=code_pb2.INTERNAL, + message=_STATUS_DETAILS, + ) + servicer_context.set_code(grpc.StatusCode.NOT_FOUND) + servicer_context.set_details(_STATUS_DETAILS_ANOTHER) + # User put inconsistent status information in trailing metadata servicer_context.set_trailing_metadata( ((_GRPC_DETAILS_METADATA_KEY, rich_status.SerializeToString()),)) - - -def _invalid_code_unary_unary(request, servicer_context): - rich_status = status_pb2.Status( - code=42, - message='Invalid code', - ) - servicer_context.abort_with_status(rpc_status.to_status(rich_status)) - - -class _GenericHandler(grpc.GenericRpcHandler): - - def service(self, handler_call_details): - if handler_call_details.method == _STATUS_OK: - return grpc.unary_unary_rpc_method_handler(_ok_unary_unary) - elif handler_call_details.method == _STATUS_NOT_OK: - return grpc.unary_unary_rpc_method_handler(_not_ok_unary_unary) - elif handler_call_details.method == _ERROR_DETAILS: - return grpc.unary_unary_rpc_method_handler( - _error_details_unary_unary) - elif handler_call_details.method == _INCONSISTENT: - return grpc.unary_unary_rpc_method_handler( - _inconsistent_unary_unary) - elif handler_call_details.method == _INVALID_CODE: - return grpc.unary_unary_rpc_method_handler( - _invalid_code_unary_unary) - else: - return None - - -class StatusTest(unittest.TestCase): - - def setUp(self): - self._server = test_common.test_server() - self._server.add_generic_rpc_handlers((_GenericHandler(),)) - port = self._server.add_insecure_port('[::]:0') - self._server.start() - - self._channel = 
grpc.insecure_channel('localhost:%d' % port) - - def tearDown(self): - self._server.stop(None) - self._channel.close() - - def test_status_ok(self): - _, call = self._channel.unary_unary(_STATUS_OK).with_call(_REQUEST) - - # Succeed RPC doesn't have status - status = rpc_status.from_call(call) - self.assertIs(status, None) - - def test_status_not_ok(self): - with self.assertRaises(grpc.RpcError) as exception_context: - self._channel.unary_unary(_STATUS_NOT_OK).with_call(_REQUEST) - rpc_error = exception_context.exception - - self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) - # Failed RPC doesn't automatically generate status - status = rpc_status.from_call(rpc_error) - self.assertIs(status, None) - - def test_error_details(self): - with self.assertRaises(grpc.RpcError) as exception_context: - self._channel.unary_unary(_ERROR_DETAILS).with_call(_REQUEST) - rpc_error = exception_context.exception - - status = rpc_status.from_call(rpc_error) - self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) - self.assertEqual(status.code, code_pb2.Code.Value('INTERNAL')) - - # Check if the underlying proto message is intact + + +def _invalid_code_unary_unary(request, servicer_context): + rich_status = status_pb2.Status( + code=42, + message='Invalid code', + ) + servicer_context.abort_with_status(rpc_status.to_status(rich_status)) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _STATUS_OK: + return grpc.unary_unary_rpc_method_handler(_ok_unary_unary) + elif handler_call_details.method == _STATUS_NOT_OK: + return grpc.unary_unary_rpc_method_handler(_not_ok_unary_unary) + elif handler_call_details.method == _ERROR_DETAILS: + return grpc.unary_unary_rpc_method_handler( + _error_details_unary_unary) + elif handler_call_details.method == _INCONSISTENT: + return grpc.unary_unary_rpc_method_handler( + _inconsistent_unary_unary) + elif handler_call_details.method == _INVALID_CODE: + return grpc.unary_unary_rpc_method_handler( + _invalid_code_unary_unary) + else: + return None + + +class StatusTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + self._server.add_generic_rpc_handlers((_GenericHandler(),)) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + + def test_status_ok(self): + _, call = self._channel.unary_unary(_STATUS_OK).with_call(_REQUEST) + + # Succeed RPC doesn't have status + status = rpc_status.from_call(call) + self.assertIs(status, None) + + def test_status_not_ok(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_STATUS_NOT_OK).with_call(_REQUEST) + rpc_error = exception_context.exception + + self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) + # Failed RPC doesn't automatically generate status + status = rpc_status.from_call(rpc_error) + self.assertIs(status, None) + + def test_error_details(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_ERROR_DETAILS).with_call(_REQUEST) + rpc_error = exception_context.exception + + status = rpc_status.from_call(rpc_error) + self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) + self.assertEqual(status.code, code_pb2.Code.Value('INTERNAL')) + + # Check if the underlying proto message is intact self.assertEqual( 
status.details[0].Is(error_details_pb2.DebugInfo.DESCRIPTOR), True) - info = error_details_pb2.DebugInfo() - status.details[0].Unpack(info) - self.assertIn('_error_details_unary_unary', info.stack_entries[-1]) - - def test_code_message_validation(self): - with self.assertRaises(grpc.RpcError) as exception_context: - self._channel.unary_unary(_INCONSISTENT).with_call(_REQUEST) - rpc_error = exception_context.exception - self.assertEqual(rpc_error.code(), grpc.StatusCode.NOT_FOUND) - - # Code/Message validation failed - self.assertRaises(ValueError, rpc_status.from_call, rpc_error) - - def test_invalid_code(self): - with self.assertRaises(grpc.RpcError) as exception_context: - self._channel.unary_unary(_INVALID_CODE).with_call(_REQUEST) - rpc_error = exception_context.exception - self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN) - # Invalid status code exception raised during coversion - self.assertIn('Invalid status code', rpc_error.details()) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + info = error_details_pb2.DebugInfo() + status.details[0].Unpack(info) + self.assertIn('_error_details_unary_unary', info.stack_entries[-1]) + + def test_code_message_validation(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_INCONSISTENT).with_call(_REQUEST) + rpc_error = exception_context.exception + self.assertEqual(rpc_error.code(), grpc.StatusCode.NOT_FOUND) + + # Code/Message validation failed + self.assertRaises(ValueError, rpc_status.from_call, rpc_error) + + def test_invalid_code(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_INVALID_CODE).with_call(_REQUEST) + rpc_error = exception_context.exception + self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN) + # Invalid status code exception raised during coversion + self.assertIn('Invalid status code', rpc_error.details()) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py index 8d89990e82..5772620b60 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py index 5fe2d03605..01c14ba3e2 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py @@ -1,47 +1,47 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Entry point for running stress tests.""" - -import argparse -from concurrent import futures -import threading - -import grpc -from six.moves import queue -from src.proto.grpc.testing import metrics_pb2_grpc -from src.proto.grpc.testing import test_pb2_grpc - -from tests.interop import methods -from tests.interop import resources -from tests.qps import histogram -from tests.stress import metrics_server -from tests.stress import test_runner - - -def _args(): - parser = argparse.ArgumentParser( - description='gRPC Python stress test client') - parser.add_argument( - '--server_addresses', - help='comma separated list of hostname:port to run servers on', - default='localhost:8080', - type=str) - parser.add_argument( - '--test_cases', - help='comma separated list of testcase:weighting of tests to run', - default='large_unary:100', - type=str) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Entry point for running stress tests.""" + +import argparse +from concurrent import futures +import threading + +import grpc +from six.moves import queue +from src.proto.grpc.testing import metrics_pb2_grpc +from src.proto.grpc.testing import test_pb2_grpc + +from tests.interop import methods +from tests.interop import resources +from tests.qps import histogram +from tests.stress import metrics_server +from tests.stress import test_runner + + +def _args(): + parser = argparse.ArgumentParser( + description='gRPC Python stress test client') + parser.add_argument( + '--server_addresses', + help='comma separated list of hostname:port to run servers on', + default='localhost:8080', + type=str) + parser.add_argument( + '--test_cases', + help='comma separated list of testcase:weighting of tests to run', + default='large_unary:100', + type=str) parser.add_argument('--test_duration_secs', help='number of seconds to run the stress test', default=-1, @@ -58,11 +58,11 @@ def _args(): help='the port to listen for metrics requests on', default=8081, type=int) - parser.add_argument( - '--use_test_ca', - help='Whether to use our fake CA. Requires --use_tls=true', - default=False, - type=bool) + parser.add_argument( + '--use_test_ca', + help='Whether to use our fake CA. Requires --use_tls=true', + default=False, + type=bool) parser.add_argument('--use_tls', help='Whether to use TLS', default=False, @@ -70,90 +70,90 @@ def _args(): parser.add_argument('--server_host_override', help='the server host to which to claim to connect', type=str) - return parser.parse_args() - - -def _test_case_from_arg(test_case_arg): - for test_case in methods.TestCase: - if test_case_arg == test_case.value: - return test_case - else: - raise ValueError('No test case {}!'.format(test_case_arg)) - - -def _parse_weighted_test_cases(test_case_args): - weighted_test_cases = {} - for test_case_arg in test_case_args.split(','): - name, weight = test_case_arg.split(':', 1) - test_case = _test_case_from_arg(name) - weighted_test_cases[test_case] = int(weight) - return weighted_test_cases - - -def _get_channel(target, args): - if args.use_tls: - if args.use_test_ca: - root_certificates = resources.test_root_certificates() - else: - root_certificates = None # will load default roots. - channel_credentials = grpc.ssl_channel_credentials( - root_certificates=root_certificates) - options = (( - 'grpc.ssl_target_name_override', - args.server_host_override, - ),) + return parser.parse_args() + + +def _test_case_from_arg(test_case_arg): + for test_case in methods.TestCase: + if test_case_arg == test_case.value: + return test_case + else: + raise ValueError('No test case {}!'.format(test_case_arg)) + + +def _parse_weighted_test_cases(test_case_args): + weighted_test_cases = {} + for test_case_arg in test_case_args.split(','): + name, weight = test_case_arg.split(':', 1) + test_case = _test_case_from_arg(name) + weighted_test_cases[test_case] = int(weight) + return weighted_test_cases + + +def _get_channel(target, args): + if args.use_tls: + if args.use_test_ca: + root_certificates = resources.test_root_certificates() + else: + root_certificates = None # will load default roots. 
+ channel_credentials = grpc.ssl_channel_credentials( + root_certificates=root_certificates) + options = (( + 'grpc.ssl_target_name_override', + args.server_host_override, + ),) channel = grpc.secure_channel(target, channel_credentials, options=options) - else: - channel = grpc.insecure_channel(target) - - # waits for the channel to be ready before we start sending messages - grpc.channel_ready_future(channel).result() - return channel - - -def run_test(args): - test_cases = _parse_weighted_test_cases(args.test_cases) - test_server_targets = args.server_addresses.split(',') - # Propagate any client exceptions with a queue - exception_queue = queue.Queue() - stop_event = threading.Event() - hist = histogram.Histogram(1, 1) - runners = [] - - server = grpc.server(futures.ThreadPoolExecutor(max_workers=25)) - metrics_pb2_grpc.add_MetricsServiceServicer_to_server( - metrics_server.MetricsServer(hist), server) - server.add_insecure_port('[::]:{}'.format(args.metrics_port)) - server.start() - - for test_server_target in test_server_targets: - for _ in range(args.num_channels_per_server): - channel = _get_channel(test_server_target, args) - for _ in range(args.num_stubs_per_channel): - stub = test_pb2_grpc.TestServiceStub(channel) - runner = test_runner.TestRunner(stub, test_cases, hist, - exception_queue, stop_event) - runners.append(runner) - - for runner in runners: - runner.start() - try: - timeout_secs = args.test_duration_secs - if timeout_secs < 0: - timeout_secs = None - raise exception_queue.get(block=True, timeout=timeout_secs) - except queue.Empty: - # No exceptions thrown, success - pass - finally: - stop_event.set() - for runner in runners: - runner.join() - runner = None - server.stop(None) - - -if __name__ == '__main__': - run_test(_args()) + else: + channel = grpc.insecure_channel(target) + + # waits for the channel to be ready before we start sending messages + grpc.channel_ready_future(channel).result() + return channel + + +def run_test(args): + test_cases = _parse_weighted_test_cases(args.test_cases) + test_server_targets = args.server_addresses.split(',') + # Propagate any client exceptions with a queue + exception_queue = queue.Queue() + stop_event = threading.Event() + hist = histogram.Histogram(1, 1) + runners = [] + + server = grpc.server(futures.ThreadPoolExecutor(max_workers=25)) + metrics_pb2_grpc.add_MetricsServiceServicer_to_server( + metrics_server.MetricsServer(hist), server) + server.add_insecure_port('[::]:{}'.format(args.metrics_port)) + server.start() + + for test_server_target in test_server_targets: + for _ in range(args.num_channels_per_server): + channel = _get_channel(test_server_target, args) + for _ in range(args.num_stubs_per_channel): + stub = test_pb2_grpc.TestServiceStub(channel) + runner = test_runner.TestRunner(stub, test_cases, hist, + exception_queue, stop_event) + runners.append(runner) + + for runner in runners: + runner.start() + try: + timeout_secs = args.test_duration_secs + if timeout_secs < 0: + timeout_secs = None + raise exception_queue.get(block=True, timeout=timeout_secs) + except queue.Empty: + # No exceptions thrown, success + pass + finally: + stop_event.set() + for runner in runners: + runner.join() + runner = None + server.stop(None) + + +if __name__ == '__main__': + run_test(_args()) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py index 5adbe9e5b3..33a74b4a38 100644 --- 
a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py @@ -1,45 +1,45 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""MetricsService for publishing stress test qps data.""" - -import time - -from src.proto.grpc.testing import metrics_pb2 -from src.proto.grpc.testing import metrics_pb2_grpc - -GAUGE_NAME = 'python_overall_qps' - - -class MetricsServer(metrics_pb2_grpc.MetricsServiceServicer): - - def __init__(self, histogram): - self._start_time = time.time() - self._histogram = histogram - - def _get_qps(self): - count = self._histogram.get_data().count - delta = time.time() - self._start_time - self._histogram.reset() - self._start_time = time.time() - return int(count / delta) - - def GetAllGauges(self, request, context): - qps = self._get_qps() - return [metrics_pb2.GaugeResponse(name=GAUGE_NAME, long_value=qps)] - - def GetGauge(self, request, context): - if request.name != GAUGE_NAME: - raise Exception('Gauge {} does not exist'.format(request.name)) - qps = self._get_qps() - return metrics_pb2.GaugeResponse(name=GAUGE_NAME, long_value=qps) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""MetricsService for publishing stress test qps data.""" + +import time + +from src.proto.grpc.testing import metrics_pb2 +from src.proto.grpc.testing import metrics_pb2_grpc + +GAUGE_NAME = 'python_overall_qps' + + +class MetricsServer(metrics_pb2_grpc.MetricsServiceServicer): + + def __init__(self, histogram): + self._start_time = time.time() + self._histogram = histogram + + def _get_qps(self): + count = self._histogram.get_data().count + delta = time.time() - self._start_time + self._histogram.reset() + self._start_time = time.time() + return int(count / delta) + + def GetAllGauges(self, request, context): + qps = self._get_qps() + return [metrics_pb2.GaugeResponse(name=GAUGE_NAME, long_value=qps)] + + def GetGauge(self, request, context): + if request.name != GAUGE_NAME: + raise Exception('Gauge {} does not exist'.format(request.name)) + qps = self._get_qps() + return metrics_pb2.GaugeResponse(name=GAUGE_NAME, long_value=qps) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py index e672473177..1b6003fc69 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py @@ -1,58 +1,58 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Thread that sends random weighted requests on a TestService stub.""" - -import random -import threading -import time -import traceback - - -def _weighted_test_case_generator(weighted_cases): - weight_sum = sum(weighted_cases.itervalues()) - - while True: - val = random.uniform(0, weight_sum) - partial_sum = 0 - for case in weighted_cases: - partial_sum += weighted_cases[case] - if val <= partial_sum: - yield case - break - - -class TestRunner(threading.Thread): - - def __init__(self, stub, test_cases, hist, exception_queue, stop_event): - super(TestRunner, self).__init__() - self._exception_queue = exception_queue - self._stop_event = stop_event - self._stub = stub - self._test_cases = _weighted_test_case_generator(test_cases) - self._histogram = hist - - def run(self): - while not self._stop_event.is_set(): - try: - test_case = next(self._test_cases) - start_time = time.time() - test_case.test_interoperability(self._stub, None) - end_time = time.time() - self._histogram.add((end_time - start_time) * 1e9) - except Exception as e: # pylint: disable=broad-except - traceback.print_exc() - self._exception_queue.put( +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Thread that sends random weighted requests on a TestService stub.""" + +import random +import threading +import time +import traceback + + +def _weighted_test_case_generator(weighted_cases): + weight_sum = sum(weighted_cases.itervalues()) + + while True: + val = random.uniform(0, weight_sum) + partial_sum = 0 + for case in weighted_cases: + partial_sum += weighted_cases[case] + if val <= partial_sum: + yield case + break + + +class TestRunner(threading.Thread): + + def __init__(self, stub, test_cases, hist, exception_queue, stop_event): + super(TestRunner, self).__init__() + self._exception_queue = exception_queue + self._stop_event = stop_event + self._stub = stub + self._test_cases = _weighted_test_case_generator(test_cases) + self._histogram = hist + + def run(self): + while not self._stop_event.is_set(): + try: + test_case = next(self._test_cases) + start_time = time.time() + test_case.test_interoperability(self._stub, None) + end_time = time.time() + self._histogram.add((end_time - start_time) * 1e9) + except Exception as e: # pylint: disable=broad-except + traceback.print_exc() + self._exception_queue.put( Exception( "An exception occurred during test {}".format( test_case), e)) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py index 717ad58c60..cd872ece29 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py @@ -1,102 +1,102 @@ -# Copyright 2019 The gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import datetime -import threading -import grpc -import grpc.experimental -import subprocess -import sys -import time -import contextlib - -_PORT = 5741 -_MESSAGE_SIZE = 4 -_RESPONSE_COUNT = 32 * 1024 - -_SERVER_CODE = """ -import datetime -import threading -import grpc -from concurrent import futures -from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2 -from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc - -class Handler(unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceServicer): - - def Benchmark(self, request, context): - payload = b'\\x00\\x01' * int(request.message_size / 2) - for _ in range(request.response_count): - yield unary_stream_benchmark_pb2.BenchmarkResponse(response=payload) - - -server = grpc.server(futures.ThreadPoolExecutor(max_workers=1)) -server.add_insecure_port('[::]:%d') -unary_stream_benchmark_pb2_grpc.add_UnaryStreamBenchmarkServiceServicer_to_server(Handler(), server) -server.start() -server.wait_for_termination() -""" % _PORT - -try: - from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2 - from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc - - _GRPC_CHANNEL_OPTIONS = [ - ('grpc.max_metadata_size', 16 * 1024 * 1024), - ('grpc.max_receive_message_length', 64 * 1024 * 1024), - (grpc.experimental.ChannelOptions.SingleThreadedUnaryStream, 1), - ] - - @contextlib.contextmanager - def _running_server(): +# Copyright 2019 The gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime +import threading +import grpc +import grpc.experimental +import subprocess +import sys +import time +import contextlib + +_PORT = 5741 +_MESSAGE_SIZE = 4 +_RESPONSE_COUNT = 32 * 1024 + +_SERVER_CODE = """ +import datetime +import threading +import grpc +from concurrent import futures +from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2 +from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc + +class Handler(unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceServicer): + + def Benchmark(self, request, context): + payload = b'\\x00\\x01' * int(request.message_size / 2) + for _ in range(request.response_count): + yield unary_stream_benchmark_pb2.BenchmarkResponse(response=payload) + + +server = grpc.server(futures.ThreadPoolExecutor(max_workers=1)) +server.add_insecure_port('[::]:%d') +unary_stream_benchmark_pb2_grpc.add_UnaryStreamBenchmarkServiceServicer_to_server(Handler(), server) +server.start() +server.wait_for_termination() +""" % _PORT + +try: + from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2 + from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc + + _GRPC_CHANNEL_OPTIONS = [ + ('grpc.max_metadata_size', 16 * 1024 * 1024), + ('grpc.max_receive_message_length', 64 * 1024 * 1024), + (grpc.experimental.ChannelOptions.SingleThreadedUnaryStream, 1), + ] + + @contextlib.contextmanager + def _running_server(): server_process = subprocess.Popen([sys.executable, '-c', _SERVER_CODE], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - try: - yield - finally: - server_process.terminate() - server_process.wait() - sys.stdout.write("stdout: {}".format(server_process.stdout.read())) - sys.stdout.flush() - sys.stdout.write("stderr: {}".format(server_process.stderr.read())) - sys.stdout.flush() - - def profile(message_size, response_count): - request = unary_stream_benchmark_pb2.BenchmarkRequest( - message_size=message_size, response_count=response_count) + try: + yield + finally: + server_process.terminate() + server_process.wait() + sys.stdout.write("stdout: {}".format(server_process.stdout.read())) + sys.stdout.flush() + sys.stdout.write("stderr: {}".format(server_process.stderr.read())) + sys.stdout.flush() + + def profile(message_size, response_count): + request = unary_stream_benchmark_pb2.BenchmarkRequest( + message_size=message_size, response_count=response_count) with grpc.insecure_channel('[::]:{}'.format(_PORT), options=_GRPC_CHANNEL_OPTIONS) as channel: - stub = unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceStub( - channel) - start = datetime.datetime.now() - call = stub.Benchmark(request, wait_for_ready=True) - for message in call: - pass - end = datetime.datetime.now() - return end - start - - def main(): - with _running_server(): - for i in range(1000): - latency = profile(_MESSAGE_SIZE, 1024) - sys.stdout.write("{}\n".format(latency.total_seconds())) - sys.stdout.flush() - - if __name__ == '__main__': - main() - -except ImportError: - # NOTE(rbellevi): The test runner should not load this module. 
- pass + stub = unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceStub( + channel) + start = datetime.datetime.now() + call = stub.Benchmark(request, wait_for_ready=True) + for message in call: + pass + end = datetime.datetime.now() + return end - start + + def main(): + with _running_server(): + for i in range(1000): + latency = profile(_MESSAGE_SIZE, 1024) + sys.stdout.write("{}\n".format(latency.total_seconds())) + sys.stdout.flush() + + if __name__ == '__main__': + main() + +except ImportError: + # NOTE(rbellevi): The test runner should not load this module. + pass diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py index 0030911829..1e120359cf 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py index 19fe21b71b..3226d1fb02 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py @@ -1,43 +1,43 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""An example gRPC Python-using application's common code elements.""" - -from tests.testing.proto import requests_pb2 -from tests.testing.proto import services_pb2 - -SERVICE_NAME = 'tests_of_grpc_testing.FirstService' -UNARY_UNARY_METHOD_NAME = 'UnUn' -UNARY_STREAM_METHOD_NAME = 'UnStre' -STREAM_UNARY_METHOD_NAME = 'StreUn' -STREAM_STREAM_METHOD_NAME = 'StreStre' - -UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=2) -ERRONEOUS_UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=3) -UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=5) -ERRONEOUS_UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=7) -UNARY_STREAM_REQUEST = requests_pb2.Charm(first_charm_field=11) -STREAM_UNARY_REQUEST = requests_pb2.Charm(first_charm_field=13) -STREAM_UNARY_RESPONSE = services_pb2.Strange(first_strange_field=17) -STREAM_STREAM_REQUEST = requests_pb2.Top(first_top_field=19) -STREAM_STREAM_RESPONSE = services_pb2.Bottom(first_bottom_field=23) -TWO_STREAM_STREAM_RESPONSES = (STREAM_STREAM_RESPONSE,) * 2 -ABORT_REQUEST = requests_pb2.Up(first_up_field=42) -ABORT_SUCCESS_QUERY = requests_pb2.Up(first_up_field=43) -ABORT_NO_STATUS_RESPONSE = services_pb2.Down(first_down_field=50) -ABORT_SUCCESS_RESPONSE = services_pb2.Down(first_down_field=51) -ABORT_FAILURE_RESPONSE = services_pb2.Down(first_down_field=52) -STREAM_STREAM_MUTATING_REQUEST = requests_pb2.Top(first_top_field=24601) -STREAM_STREAM_MUTATING_COUNT = 2 - -INFINITE_REQUEST_STREAM_TIMEOUT = 0.2 +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""An example gRPC Python-using application's common code elements.""" + +from tests.testing.proto import requests_pb2 +from tests.testing.proto import services_pb2 + +SERVICE_NAME = 'tests_of_grpc_testing.FirstService' +UNARY_UNARY_METHOD_NAME = 'UnUn' +UNARY_STREAM_METHOD_NAME = 'UnStre' +STREAM_UNARY_METHOD_NAME = 'StreUn' +STREAM_STREAM_METHOD_NAME = 'StreStre' + +UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=2) +ERRONEOUS_UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=3) +UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=5) +ERRONEOUS_UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=7) +UNARY_STREAM_REQUEST = requests_pb2.Charm(first_charm_field=11) +STREAM_UNARY_REQUEST = requests_pb2.Charm(first_charm_field=13) +STREAM_UNARY_RESPONSE = services_pb2.Strange(first_strange_field=17) +STREAM_STREAM_REQUEST = requests_pb2.Top(first_top_field=19) +STREAM_STREAM_RESPONSE = services_pb2.Bottom(first_bottom_field=23) +TWO_STREAM_STREAM_RESPONSES = (STREAM_STREAM_RESPONSE,) * 2 +ABORT_REQUEST = requests_pb2.Up(first_up_field=42) +ABORT_SUCCESS_QUERY = requests_pb2.Up(first_up_field=43) +ABORT_NO_STATUS_RESPONSE = services_pb2.Down(first_down_field=50) +ABORT_SUCCESS_RESPONSE = services_pb2.Down(first_down_field=51) +ABORT_FAILURE_RESPONSE = services_pb2.Down(first_down_field=52) +STREAM_STREAM_MUTATING_REQUEST = requests_pb2.Top(first_top_field=24601) +STREAM_STREAM_MUTATING_COUNT = 2 + +INFINITE_REQUEST_STREAM_TIMEOUT = 0.2 diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py index 73a4552428..9c9e485a78 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py @@ -1,33 +1,33 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import grpc_testing - -from tests.testing.proto import requests_pb2 -from tests.testing.proto import services_pb2 - -# TODO(https://github.com/grpc/grpc/issues/11657): Eliminate this entirely. -# TODO(https://github.com/google/protobuf/issues/3452): Eliminate this if/else. -if services_pb2.DESCRIPTOR.services_by_name.get('FirstService') is None: - FIRST_SERVICE = 'Fix protobuf issue 3452!' - FIRST_SERVICE_UNUN = 'Fix protobuf issue 3452!' - FIRST_SERVICE_UNSTRE = 'Fix protobuf issue 3452!' - FIRST_SERVICE_STREUN = 'Fix protobuf issue 3452!' - FIRST_SERVICE_STRESTRE = 'Fix protobuf issue 3452!' -else: - FIRST_SERVICE = services_pb2.DESCRIPTOR.services_by_name['FirstService'] - FIRST_SERVICE_UNUN = FIRST_SERVICE.methods_by_name['UnUn'] - FIRST_SERVICE_UNSTRE = FIRST_SERVICE.methods_by_name['UnStre'] - FIRST_SERVICE_STREUN = FIRST_SERVICE.methods_by_name['StreUn'] - FIRST_SERVICE_STRESTRE = FIRST_SERVICE.methods_by_name['StreStre'] +# Copyright 2017 gRPC authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import grpc_testing + +from tests.testing.proto import requests_pb2 +from tests.testing.proto import services_pb2 + +# TODO(https://github.com/grpc/grpc/issues/11657): Eliminate this entirely. +# TODO(https://github.com/google/protobuf/issues/3452): Eliminate this if/else. +if services_pb2.DESCRIPTOR.services_by_name.get('FirstService') is None: + FIRST_SERVICE = 'Fix protobuf issue 3452!' + FIRST_SERVICE_UNUN = 'Fix protobuf issue 3452!' + FIRST_SERVICE_UNSTRE = 'Fix protobuf issue 3452!' + FIRST_SERVICE_STREUN = 'Fix protobuf issue 3452!' + FIRST_SERVICE_STRESTRE = 'Fix protobuf issue 3452!' +else: + FIRST_SERVICE = services_pb2.DESCRIPTOR.services_by_name['FirstService'] + FIRST_SERVICE_UNUN = FIRST_SERVICE.methods_by_name['UnUn'] + FIRST_SERVICE_UNSTRE = FIRST_SERVICE.methods_by_name['UnStre'] + FIRST_SERVICE_STREUN = FIRST_SERVICE.methods_by_name['StreUn'] + FIRST_SERVICE_STRESTRE = FIRST_SERVICE.methods_by_name['StreStre'] diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py index 9b36945501..57fa510913 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py @@ -1,236 +1,236 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""An example gRPC Python-using client-side application.""" - -import collections -import enum -import threading -import time - -import grpc -from tests.unit.framework.common import test_constants - -from tests.testing.proto import requests_pb2 -from tests.testing.proto import services_pb2 -from tests.testing.proto import services_pb2_grpc - -from tests.testing import _application_common - - -@enum.unique -class Scenario(enum.Enum): - UNARY_UNARY = 'unary unary' - UNARY_STREAM = 'unary stream' - STREAM_UNARY = 'stream unary' - STREAM_STREAM = 'stream stream' - CONCURRENT_STREAM_UNARY = 'concurrent stream unary' - CONCURRENT_STREAM_STREAM = 'concurrent stream stream' - CANCEL_UNARY_UNARY = 'cancel unary unary' - CANCEL_UNARY_STREAM = 'cancel unary stream' - INFINITE_REQUEST_STREAM = 'infinite request stream' - - -class Outcome(collections.namedtuple('Outcome', ('kind', 'code', 'details'))): - """Outcome of a client application scenario. - - Attributes: - kind: A Kind value describing the overall kind of scenario execution. 
- code: A grpc.StatusCode value. Only valid if kind is Kind.RPC_ERROR. - details: A status details string. Only valid if kind is Kind.RPC_ERROR. - """ - - @enum.unique - class Kind(enum.Enum): - SATISFACTORY = 'satisfactory' - UNSATISFACTORY = 'unsatisfactory' - RPC_ERROR = 'rpc error' - - -_SATISFACTORY_OUTCOME = Outcome(Outcome.Kind.SATISFACTORY, None, None) -_UNSATISFACTORY_OUTCOME = Outcome(Outcome.Kind.UNSATISFACTORY, None, None) - - -class _Pipe(object): - - def __init__(self): - self._condition = threading.Condition() - self._values = [] - self._open = True - - def __iter__(self): - return self - - def _next(self): - with self._condition: - while True: - if self._values: - return self._values.pop(0) - elif not self._open: - raise StopIteration() - else: - self._condition.wait() - - def __next__(self): # (Python 3 Iterator Protocol) - return self._next() - - def next(self): # (Python 2 Iterator Protocol) - return self._next() - - def add(self, value): - with self._condition: - self._values.append(value) - self._condition.notify_all() - - def close(self): - with self._condition: - self._open = False - self._condition.notify_all() - - -def _run_unary_unary(stub): - response = stub.UnUn(_application_common.UNARY_UNARY_REQUEST) - if _application_common.UNARY_UNARY_RESPONSE == response: - return _SATISFACTORY_OUTCOME - else: - return _UNSATISFACTORY_OUTCOME - - -def _run_unary_stream(stub): - response_iterator = stub.UnStre(_application_common.UNARY_STREAM_REQUEST) - try: - next(response_iterator) - except StopIteration: - return _SATISFACTORY_OUTCOME - else: - return _UNSATISFACTORY_OUTCOME - - -def _run_stream_unary(stub): - response, call = stub.StreUn.with_call( - iter((_application_common.STREAM_UNARY_REQUEST,) * 3)) - if (_application_common.STREAM_UNARY_RESPONSE == response and - call.code() is grpc.StatusCode.OK): - return _SATISFACTORY_OUTCOME - else: - return _UNSATISFACTORY_OUTCOME - - -def _run_stream_stream(stub): - request_pipe = _Pipe() - response_iterator = stub.StreStre(iter(request_pipe)) - request_pipe.add(_application_common.STREAM_STREAM_REQUEST) - first_responses = next(response_iterator), next(response_iterator) - request_pipe.add(_application_common.STREAM_STREAM_REQUEST) - second_responses = next(response_iterator), next(response_iterator) - request_pipe.close() - try: - next(response_iterator) - except StopIteration: - unexpected_extra_response = False - else: - unexpected_extra_response = True - if (first_responses == _application_common.TWO_STREAM_STREAM_RESPONSES and - second_responses == _application_common.TWO_STREAM_STREAM_RESPONSES - and not unexpected_extra_response): - return _SATISFACTORY_OUTCOME - else: - return _UNSATISFACTORY_OUTCOME - - -def _run_concurrent_stream_unary(stub): - future_calls = tuple( +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
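(Aside, not part of the diff: the `_Pipe` helper in this file, visible in the removed lines above and restored below, is a small condition-variable-backed queue exposed through the iterator protocol, so the client application can feed a request stream one message at a time while the stub consumes it from another thread. The following standalone sketch is illustrative only and uses a simplified, hypothetical `Pipe`; it demonstrates the blocking `next()` behavior that makes the pattern work.)

import threading


class Pipe(object):
    """Simplified, hypothetical stand-in for the _Pipe class in this file."""

    def __init__(self):
        self._condition = threading.Condition()
        self._values = []
        self._open = True

    def __iter__(self):
        return self

    def __next__(self):
        with self._condition:
            # Block until a value is available or the pipe is closed.
            while not self._values and self._open:
                self._condition.wait()
            if self._values:
                return self._values.pop(0)
            raise StopIteration()

    def add(self, value):
        with self._condition:
            self._values.append(value)
            self._condition.notify_all()

    def close(self):
        with self._condition:
            self._open = False
            self._condition.notify_all()


pipe = Pipe()
received = []
consumer = threading.Thread(target=lambda: received.extend(pipe))
consumer.start()
pipe.add('first request')   # wakes the blocked consumer
pipe.add('second request')
pipe.close()                # drains remaining values, then ends iteration
consumer.join()
assert received == ['first request', 'second request']

(End of aside; the restored client application below passes `iter(request_pipe)` to the stream-stream stub in exactly this way.)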
+"""An example gRPC Python-using client-side application.""" + +import collections +import enum +import threading +import time + +import grpc +from tests.unit.framework.common import test_constants + +from tests.testing.proto import requests_pb2 +from tests.testing.proto import services_pb2 +from tests.testing.proto import services_pb2_grpc + +from tests.testing import _application_common + + +@enum.unique +class Scenario(enum.Enum): + UNARY_UNARY = 'unary unary' + UNARY_STREAM = 'unary stream' + STREAM_UNARY = 'stream unary' + STREAM_STREAM = 'stream stream' + CONCURRENT_STREAM_UNARY = 'concurrent stream unary' + CONCURRENT_STREAM_STREAM = 'concurrent stream stream' + CANCEL_UNARY_UNARY = 'cancel unary unary' + CANCEL_UNARY_STREAM = 'cancel unary stream' + INFINITE_REQUEST_STREAM = 'infinite request stream' + + +class Outcome(collections.namedtuple('Outcome', ('kind', 'code', 'details'))): + """Outcome of a client application scenario. + + Attributes: + kind: A Kind value describing the overall kind of scenario execution. + code: A grpc.StatusCode value. Only valid if kind is Kind.RPC_ERROR. + details: A status details string. Only valid if kind is Kind.RPC_ERROR. + """ + + @enum.unique + class Kind(enum.Enum): + SATISFACTORY = 'satisfactory' + UNSATISFACTORY = 'unsatisfactory' + RPC_ERROR = 'rpc error' + + +_SATISFACTORY_OUTCOME = Outcome(Outcome.Kind.SATISFACTORY, None, None) +_UNSATISFACTORY_OUTCOME = Outcome(Outcome.Kind.UNSATISFACTORY, None, None) + + +class _Pipe(object): + + def __init__(self): + self._condition = threading.Condition() + self._values = [] + self._open = True + + def __iter__(self): + return self + + def _next(self): + with self._condition: + while True: + if self._values: + return self._values.pop(0) + elif not self._open: + raise StopIteration() + else: + self._condition.wait() + + def __next__(self): # (Python 3 Iterator Protocol) + return self._next() + + def next(self): # (Python 2 Iterator Protocol) + return self._next() + + def add(self, value): + with self._condition: + self._values.append(value) + self._condition.notify_all() + + def close(self): + with self._condition: + self._open = False + self._condition.notify_all() + + +def _run_unary_unary(stub): + response = stub.UnUn(_application_common.UNARY_UNARY_REQUEST) + if _application_common.UNARY_UNARY_RESPONSE == response: + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +def _run_unary_stream(stub): + response_iterator = stub.UnStre(_application_common.UNARY_STREAM_REQUEST) + try: + next(response_iterator) + except StopIteration: + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +def _run_stream_unary(stub): + response, call = stub.StreUn.with_call( + iter((_application_common.STREAM_UNARY_REQUEST,) * 3)) + if (_application_common.STREAM_UNARY_RESPONSE == response and + call.code() is grpc.StatusCode.OK): + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +def _run_stream_stream(stub): + request_pipe = _Pipe() + response_iterator = stub.StreStre(iter(request_pipe)) + request_pipe.add(_application_common.STREAM_STREAM_REQUEST) + first_responses = next(response_iterator), next(response_iterator) + request_pipe.add(_application_common.STREAM_STREAM_REQUEST) + second_responses = next(response_iterator), next(response_iterator) + request_pipe.close() + try: + next(response_iterator) + except StopIteration: + unexpected_extra_response = False + else: + unexpected_extra_response = True + if (first_responses == 
_application_common.TWO_STREAM_STREAM_RESPONSES and + second_responses == _application_common.TWO_STREAM_STREAM_RESPONSES + and not unexpected_extra_response): + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +def _run_concurrent_stream_unary(stub): + future_calls = tuple( stub.StreUn.future(iter((_application_common.STREAM_UNARY_REQUEST,) * 3)) - for _ in range(test_constants.THREAD_CONCURRENCY)) - for future_call in future_calls: - if future_call.code() is grpc.StatusCode.OK: - response = future_call.result() - if _application_common.STREAM_UNARY_RESPONSE != response: - return _UNSATISFACTORY_OUTCOME - else: - return _UNSATISFACTORY_OUTCOME - else: - return _SATISFACTORY_OUTCOME - - -def _run_concurrent_stream_stream(stub): - condition = threading.Condition() - outcomes = [None] * test_constants.RPC_CONCURRENCY - - def run_stream_stream(index): - outcome = _run_stream_stream(stub) - with condition: - outcomes[index] = outcome - condition.notify() - - for index in range(test_constants.RPC_CONCURRENCY): - thread = threading.Thread(target=run_stream_stream, args=(index,)) - thread.start() - with condition: - while True: - if all(outcomes): - for outcome in outcomes: - if outcome.kind is not Outcome.Kind.SATISFACTORY: - return _UNSATISFACTORY_OUTCOME - else: - return _SATISFACTORY_OUTCOME - else: - condition.wait() - - -def _run_cancel_unary_unary(stub): - response_future_call = stub.UnUn.future( - _application_common.UNARY_UNARY_REQUEST) - initial_metadata = response_future_call.initial_metadata() - cancelled = response_future_call.cancel() - if initial_metadata is not None and cancelled: - return _SATISFACTORY_OUTCOME - else: - return _UNSATISFACTORY_OUTCOME - - -def _run_infinite_request_stream(stub): - - def infinite_request_iterator(): - while True: - yield _application_common.STREAM_UNARY_REQUEST - - response_future_call = stub.StreUn.future( - infinite_request_iterator(), - timeout=_application_common.INFINITE_REQUEST_STREAM_TIMEOUT) - if response_future_call.code() is grpc.StatusCode.DEADLINE_EXCEEDED: - return _SATISFACTORY_OUTCOME - else: - return _UNSATISFACTORY_OUTCOME - - -_IMPLEMENTATIONS = { - Scenario.UNARY_UNARY: _run_unary_unary, - Scenario.UNARY_STREAM: _run_unary_stream, - Scenario.STREAM_UNARY: _run_stream_unary, - Scenario.STREAM_STREAM: _run_stream_stream, - Scenario.CONCURRENT_STREAM_UNARY: _run_concurrent_stream_unary, - Scenario.CONCURRENT_STREAM_STREAM: _run_concurrent_stream_stream, - Scenario.CANCEL_UNARY_UNARY: _run_cancel_unary_unary, - Scenario.INFINITE_REQUEST_STREAM: _run_infinite_request_stream, -} - - -def run(scenario, channel): - stub = services_pb2_grpc.FirstServiceStub(channel) - try: - return _IMPLEMENTATIONS[scenario](stub) - except grpc.RpcError as rpc_error: - return Outcome(Outcome.Kind.RPC_ERROR, rpc_error.code(), - rpc_error.details()) + for _ in range(test_constants.THREAD_CONCURRENCY)) + for future_call in future_calls: + if future_call.code() is grpc.StatusCode.OK: + response = future_call.result() + if _application_common.STREAM_UNARY_RESPONSE != response: + return _UNSATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + else: + return _SATISFACTORY_OUTCOME + + +def _run_concurrent_stream_stream(stub): + condition = threading.Condition() + outcomes = [None] * test_constants.RPC_CONCURRENCY + + def run_stream_stream(index): + outcome = _run_stream_stream(stub) + with condition: + outcomes[index] = outcome + condition.notify() + + for index in range(test_constants.RPC_CONCURRENCY): + thread = 
threading.Thread(target=run_stream_stream, args=(index,)) + thread.start() + with condition: + while True: + if all(outcomes): + for outcome in outcomes: + if outcome.kind is not Outcome.Kind.SATISFACTORY: + return _UNSATISFACTORY_OUTCOME + else: + return _SATISFACTORY_OUTCOME + else: + condition.wait() + + +def _run_cancel_unary_unary(stub): + response_future_call = stub.UnUn.future( + _application_common.UNARY_UNARY_REQUEST) + initial_metadata = response_future_call.initial_metadata() + cancelled = response_future_call.cancel() + if initial_metadata is not None and cancelled: + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +def _run_infinite_request_stream(stub): + + def infinite_request_iterator(): + while True: + yield _application_common.STREAM_UNARY_REQUEST + + response_future_call = stub.StreUn.future( + infinite_request_iterator(), + timeout=_application_common.INFINITE_REQUEST_STREAM_TIMEOUT) + if response_future_call.code() is grpc.StatusCode.DEADLINE_EXCEEDED: + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +_IMPLEMENTATIONS = { + Scenario.UNARY_UNARY: _run_unary_unary, + Scenario.UNARY_STREAM: _run_unary_stream, + Scenario.STREAM_UNARY: _run_stream_unary, + Scenario.STREAM_STREAM: _run_stream_stream, + Scenario.CONCURRENT_STREAM_UNARY: _run_concurrent_stream_unary, + Scenario.CONCURRENT_STREAM_STREAM: _run_concurrent_stream_stream, + Scenario.CANCEL_UNARY_UNARY: _run_cancel_unary_unary, + Scenario.INFINITE_REQUEST_STREAM: _run_infinite_request_stream, +} + + +def run(scenario, channel): + stub = services_pb2_grpc.FirstServiceStub(channel) + try: + return _IMPLEMENTATIONS[scenario](stub) + except grpc.RpcError as rpc_error: + return Outcome(Outcome.Kind.RPC_ERROR, rpc_error.code(), + rpc_error.details()) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py index b3173ba6f5..5b051c3939 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py @@ -1,308 +1,308 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from concurrent import futures -import time -import unittest - -import grpc -from grpc.framework.foundation import logging_pool -from tests.unit.framework.common import test_constants -import grpc_testing - -from tests.testing import _application_common -from tests.testing import _application_testing_common -from tests.testing import _client_application -from tests.testing.proto import requests_pb2 -from tests.testing.proto import services_pb2 - - -# TODO(https://github.com/google/protobuf/issues/3452): Drop this skip. 
-@unittest.skipIf( - services_pb2.DESCRIPTOR.services_by_name.get('FirstService') is None, - 'Fix protobuf issue 3452!') -class ClientTest(unittest.TestCase): - - def setUp(self): - # In this test the client-side application under test executes in - # a separate thread while we retain use of the test thread to "play - # server". - self._client_execution_thread_pool = logging_pool.pool(1) - - self._fake_time = grpc_testing.strict_fake_time(time.time()) - self._real_time = grpc_testing.strict_real_time() - self._fake_time_channel = grpc_testing.channel( - services_pb2.DESCRIPTOR.services_by_name.values(), self._fake_time) - self._real_time_channel = grpc_testing.channel( - services_pb2.DESCRIPTOR.services_by_name.values(), self._real_time) - - def tearDown(self): - self._client_execution_thread_pool.shutdown(wait=True) - - def test_successful_unary_unary(self): - application_future = self._client_execution_thread_pool.submit( - _client_application.run, _client_application.Scenario.UNARY_UNARY, - self._real_time_channel) - invocation_metadata, request, rpc = ( - self._real_time_channel.take_unary_unary( - _application_testing_common.FIRST_SERVICE_UNUN)) - rpc.send_initial_metadata(()) - rpc.terminate(_application_common.UNARY_UNARY_RESPONSE, (), - grpc.StatusCode.OK, '') - application_return_value = application_future.result() - - self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request) - self.assertIs(application_return_value.kind, - _client_application.Outcome.Kind.SATISFACTORY) - - def test_successful_unary_stream(self): - application_future = self._client_execution_thread_pool.submit( - _client_application.run, _client_application.Scenario.UNARY_STREAM, - self._fake_time_channel) - invocation_metadata, request, rpc = ( - self._fake_time_channel.take_unary_stream( - _application_testing_common.FIRST_SERVICE_UNSTRE)) - rpc.send_initial_metadata(()) - rpc.terminate((), grpc.StatusCode.OK, '') - application_return_value = application_future.result() - - self.assertEqual(_application_common.UNARY_STREAM_REQUEST, request) - self.assertIs(application_return_value.kind, - _client_application.Outcome.Kind.SATISFACTORY) - - def test_successful_stream_unary(self): - application_future = self._client_execution_thread_pool.submit( - _client_application.run, _client_application.Scenario.STREAM_UNARY, - self._real_time_channel) - invocation_metadata, rpc = self._real_time_channel.take_stream_unary( - _application_testing_common.FIRST_SERVICE_STREUN) - rpc.send_initial_metadata(()) - first_request = rpc.take_request() - second_request = rpc.take_request() - third_request = rpc.take_request() - rpc.requests_closed() - rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (), - grpc.StatusCode.OK, '') - application_return_value = application_future.result() - - self.assertEqual(_application_common.STREAM_UNARY_REQUEST, - first_request) - self.assertEqual(_application_common.STREAM_UNARY_REQUEST, - second_request) - self.assertEqual(_application_common.STREAM_UNARY_REQUEST, - third_request) - self.assertIs(application_return_value.kind, - _client_application.Outcome.Kind.SATISFACTORY) - - def test_successful_stream_stream(self): - application_future = self._client_execution_thread_pool.submit( - _client_application.run, _client_application.Scenario.STREAM_STREAM, - self._fake_time_channel) - invocation_metadata, rpc = self._fake_time_channel.take_stream_stream( - _application_testing_common.FIRST_SERVICE_STRESTRE) - first_request = rpc.take_request() - 
rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - second_request = rpc.take_request() - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.requests_closed() - rpc.terminate((), grpc.StatusCode.OK, '') - application_return_value = application_future.result() - - self.assertEqual(_application_common.STREAM_STREAM_REQUEST, - first_request) - self.assertEqual(_application_common.STREAM_STREAM_REQUEST, - second_request) - self.assertIs(application_return_value.kind, - _client_application.Outcome.Kind.SATISFACTORY) - - def test_concurrent_stream_stream(self): - application_future = self._client_execution_thread_pool.submit( - _client_application.run, - _client_application.Scenario.CONCURRENT_STREAM_STREAM, - self._real_time_channel) - rpcs = [] - for _ in range(test_constants.RPC_CONCURRENCY): - invocation_metadata, rpc = ( - self._real_time_channel.take_stream_stream( - _application_testing_common.FIRST_SERVICE_STRESTRE)) - rpcs.append(rpc) - requests = {} - for rpc in rpcs: - requests[rpc] = [rpc.take_request()] - for rpc in rpcs: - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - for rpc in rpcs: - requests[rpc].append(rpc.take_request()) - for rpc in rpcs: - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - for rpc in rpcs: - rpc.requests_closed() - for rpc in rpcs: - rpc.terminate((), grpc.StatusCode.OK, '') - application_return_value = application_future.result() - - for requests_of_one_rpc in requests.values(): - for request in requests_of_one_rpc: - self.assertEqual(_application_common.STREAM_STREAM_REQUEST, - request) - self.assertIs(application_return_value.kind, - _client_application.Outcome.Kind.SATISFACTORY) - - def test_cancelled_unary_unary(self): - application_future = self._client_execution_thread_pool.submit( - _client_application.run, - _client_application.Scenario.CANCEL_UNARY_UNARY, - self._fake_time_channel) - invocation_metadata, request, rpc = ( - self._fake_time_channel.take_unary_unary( - _application_testing_common.FIRST_SERVICE_UNUN)) - rpc.send_initial_metadata(()) - rpc.cancelled() - application_return_value = application_future.result() - - self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request) - self.assertIs(application_return_value.kind, - _client_application.Outcome.Kind.SATISFACTORY) - - def test_status_stream_unary(self): - application_future = self._client_execution_thread_pool.submit( - _client_application.run, - _client_application.Scenario.CONCURRENT_STREAM_UNARY, - self._fake_time_channel) - rpcs = tuple( - self._fake_time_channel.take_stream_unary( - _application_testing_common.FIRST_SERVICE_STREUN)[1] - for _ in range(test_constants.THREAD_CONCURRENCY)) - for rpc in rpcs: - rpc.take_request() - rpc.take_request() - rpc.take_request() - rpc.requests_closed() - rpc.send_initial_metadata((( - 'my_metadata_key', - 'My Metadata Value!', - ),)) - for rpc in rpcs[:-1]: - rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (), - grpc.StatusCode.OK, '') - rpcs[-1].terminate(_application_common.STREAM_UNARY_RESPONSE, (), - grpc.StatusCode.RESOURCE_EXHAUSTED, - 'nope; not able to handle all those RPCs!') - application_return_value = application_future.result() - - self.assertIs(application_return_value.kind, - 
_client_application.Outcome.Kind.UNSATISFACTORY) - - def test_status_stream_stream(self): - code = grpc.StatusCode.DEADLINE_EXCEEDED - details = 'test deadline exceeded!' - - application_future = self._client_execution_thread_pool.submit( - _client_application.run, _client_application.Scenario.STREAM_STREAM, - self._real_time_channel) - invocation_metadata, rpc = self._real_time_channel.take_stream_stream( - _application_testing_common.FIRST_SERVICE_STRESTRE) - first_request = rpc.take_request() - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - second_request = rpc.take_request() - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.requests_closed() - rpc.terminate((), code, details) - application_return_value = application_future.result() - - self.assertEqual(_application_common.STREAM_STREAM_REQUEST, - first_request) - self.assertEqual(_application_common.STREAM_STREAM_REQUEST, - second_request) - self.assertIs(application_return_value.kind, - _client_application.Outcome.Kind.RPC_ERROR) - self.assertIs(application_return_value.code, code) - self.assertEqual(application_return_value.details, details) - - def test_misbehaving_server_unary_unary(self): - application_future = self._client_execution_thread_pool.submit( - _client_application.run, _client_application.Scenario.UNARY_UNARY, - self._fake_time_channel) - invocation_metadata, request, rpc = ( - self._fake_time_channel.take_unary_unary( - _application_testing_common.FIRST_SERVICE_UNUN)) - rpc.send_initial_metadata(()) - rpc.terminate(_application_common.ERRONEOUS_UNARY_UNARY_RESPONSE, (), - grpc.StatusCode.OK, '') - application_return_value = application_future.result() - - self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request) - self.assertIs(application_return_value.kind, - _client_application.Outcome.Kind.UNSATISFACTORY) - - def test_misbehaving_server_stream_stream(self): - application_future = self._client_execution_thread_pool.submit( - _client_application.run, _client_application.Scenario.STREAM_STREAM, - self._real_time_channel) - invocation_metadata, rpc = self._real_time_channel.take_stream_stream( - _application_testing_common.FIRST_SERVICE_STRESTRE) - first_request = rpc.take_request() - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - second_request = rpc.take_request() - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) - rpc.requests_closed() - rpc.terminate((), grpc.StatusCode.OK, '') - application_return_value = application_future.result() - - self.assertEqual(_application_common.STREAM_STREAM_REQUEST, - first_request) - self.assertEqual(_application_common.STREAM_STREAM_REQUEST, - second_request) - self.assertIs(application_return_value.kind, - _client_application.Outcome.Kind.UNSATISFACTORY) - - def test_infinite_request_stream_real_time(self): - application_future = self._client_execution_thread_pool.submit( - _client_application.run, - _client_application.Scenario.INFINITE_REQUEST_STREAM, - self._real_time_channel) - invocation_metadata, rpc = self._real_time_channel.take_stream_unary( - _application_testing_common.FIRST_SERVICE_STREUN) - 
rpc.send_initial_metadata(()) - first_request = rpc.take_request() - second_request = rpc.take_request() - third_request = rpc.take_request() - self._real_time.sleep_for( - _application_common.INFINITE_REQUEST_STREAM_TIMEOUT) - rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (), - grpc.StatusCode.DEADLINE_EXCEEDED, '') - application_return_value = application_future.result() - - self.assertEqual(_application_common.STREAM_UNARY_REQUEST, - first_request) - self.assertEqual(_application_common.STREAM_UNARY_REQUEST, - second_request) - self.assertEqual(_application_common.STREAM_UNARY_REQUEST, - third_request) - self.assertIs(application_return_value.kind, - _client_application.Outcome.Kind.SATISFACTORY) - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from concurrent import futures +import time +import unittest + +import grpc +from grpc.framework.foundation import logging_pool +from tests.unit.framework.common import test_constants +import grpc_testing + +from tests.testing import _application_common +from tests.testing import _application_testing_common +from tests.testing import _client_application +from tests.testing.proto import requests_pb2 +from tests.testing.proto import services_pb2 + + +# TODO(https://github.com/google/protobuf/issues/3452): Drop this skip. +@unittest.skipIf( + services_pb2.DESCRIPTOR.services_by_name.get('FirstService') is None, + 'Fix protobuf issue 3452!') +class ClientTest(unittest.TestCase): + + def setUp(self): + # In this test the client-side application under test executes in + # a separate thread while we retain use of the test thread to "play + # server". 
+ self._client_execution_thread_pool = logging_pool.pool(1) + + self._fake_time = grpc_testing.strict_fake_time(time.time()) + self._real_time = grpc_testing.strict_real_time() + self._fake_time_channel = grpc_testing.channel( + services_pb2.DESCRIPTOR.services_by_name.values(), self._fake_time) + self._real_time_channel = grpc_testing.channel( + services_pb2.DESCRIPTOR.services_by_name.values(), self._real_time) + + def tearDown(self): + self._client_execution_thread_pool.shutdown(wait=True) + + def test_successful_unary_unary(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.UNARY_UNARY, + self._real_time_channel) + invocation_metadata, request, rpc = ( + self._real_time_channel.take_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN)) + rpc.send_initial_metadata(()) + rpc.terminate(_application_common.UNARY_UNARY_RESPONSE, (), + grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_successful_unary_stream(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.UNARY_STREAM, + self._fake_time_channel) + invocation_metadata, request, rpc = ( + self._fake_time_channel.take_unary_stream( + _application_testing_common.FIRST_SERVICE_UNSTRE)) + rpc.send_initial_metadata(()) + rpc.terminate((), grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.UNARY_STREAM_REQUEST, request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_successful_stream_unary(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.STREAM_UNARY, + self._real_time_channel) + invocation_metadata, rpc = self._real_time_channel.take_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN) + rpc.send_initial_metadata(()) + first_request = rpc.take_request() + second_request = rpc.take_request() + third_request = rpc.take_request() + rpc.requests_closed() + rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (), + grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + first_request) + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + second_request) + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + third_request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_successful_stream_stream(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.STREAM_STREAM, + self._fake_time_channel) + invocation_metadata, rpc = self._fake_time_channel.take_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE) + first_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + second_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.requests_closed() + rpc.terminate((), grpc.StatusCode.OK, '') + 
application_return_value = application_future.result() + + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + first_request) + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + second_request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_concurrent_stream_stream(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, + _client_application.Scenario.CONCURRENT_STREAM_STREAM, + self._real_time_channel) + rpcs = [] + for _ in range(test_constants.RPC_CONCURRENCY): + invocation_metadata, rpc = ( + self._real_time_channel.take_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE)) + rpcs.append(rpc) + requests = {} + for rpc in rpcs: + requests[rpc] = [rpc.take_request()] + for rpc in rpcs: + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + for rpc in rpcs: + requests[rpc].append(rpc.take_request()) + for rpc in rpcs: + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + for rpc in rpcs: + rpc.requests_closed() + for rpc in rpcs: + rpc.terminate((), grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + for requests_of_one_rpc in requests.values(): + for request in requests_of_one_rpc: + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_cancelled_unary_unary(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, + _client_application.Scenario.CANCEL_UNARY_UNARY, + self._fake_time_channel) + invocation_metadata, request, rpc = ( + self._fake_time_channel.take_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN)) + rpc.send_initial_metadata(()) + rpc.cancelled() + application_return_value = application_future.result() + + self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_status_stream_unary(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, + _client_application.Scenario.CONCURRENT_STREAM_UNARY, + self._fake_time_channel) + rpcs = tuple( + self._fake_time_channel.take_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN)[1] + for _ in range(test_constants.THREAD_CONCURRENCY)) + for rpc in rpcs: + rpc.take_request() + rpc.take_request() + rpc.take_request() + rpc.requests_closed() + rpc.send_initial_metadata((( + 'my_metadata_key', + 'My Metadata Value!', + ),)) + for rpc in rpcs[:-1]: + rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (), + grpc.StatusCode.OK, '') + rpcs[-1].terminate(_application_common.STREAM_UNARY_RESPONSE, (), + grpc.StatusCode.RESOURCE_EXHAUSTED, + 'nope; not able to handle all those RPCs!') + application_return_value = application_future.result() + + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.UNSATISFACTORY) + + def test_status_stream_stream(self): + code = grpc.StatusCode.DEADLINE_EXCEEDED + details = 'test deadline exceeded!' 
+ + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.STREAM_STREAM, + self._real_time_channel) + invocation_metadata, rpc = self._real_time_channel.take_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE) + first_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + second_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.requests_closed() + rpc.terminate((), code, details) + application_return_value = application_future.result() + + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + first_request) + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + second_request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.RPC_ERROR) + self.assertIs(application_return_value.code, code) + self.assertEqual(application_return_value.details, details) + + def test_misbehaving_server_unary_unary(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.UNARY_UNARY, + self._fake_time_channel) + invocation_metadata, request, rpc = ( + self._fake_time_channel.take_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN)) + rpc.send_initial_metadata(()) + rpc.terminate(_application_common.ERRONEOUS_UNARY_UNARY_RESPONSE, (), + grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.UNSATISFACTORY) + + def test_misbehaving_server_stream_stream(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.STREAM_STREAM, + self._real_time_channel) + invocation_metadata, rpc = self._real_time_channel.take_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE) + first_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + second_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.requests_closed() + rpc.terminate((), grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + first_request) + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + second_request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.UNSATISFACTORY) + + def test_infinite_request_stream_real_time(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, + _client_application.Scenario.INFINITE_REQUEST_STREAM, + self._real_time_channel) + invocation_metadata, rpc = self._real_time_channel.take_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN) + rpc.send_initial_metadata(()) + first_request = rpc.take_request() + second_request = rpc.take_request() + third_request = rpc.take_request() + self._real_time.sleep_for( + 
_application_common.INFINITE_REQUEST_STREAM_TIMEOUT) + rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (), + grpc.StatusCode.DEADLINE_EXCEEDED, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + first_request) + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + second_request) + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + third_request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py index 6268e4d997..51ed977b8f 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py @@ -1,95 +1,95 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""An example gRPC Python-using server-side application.""" - -import grpc - -import threading - -# requests_pb2 is a semantic dependency of this module. -from tests.testing import _application_common -from tests.testing.proto import requests_pb2 # pylint: disable=unused-import -from tests.testing.proto import services_pb2 -from tests.testing.proto import services_pb2_grpc - - -class FirstServiceServicer(services_pb2_grpc.FirstServiceServicer): - """Services RPCs.""" - - def __init__(self): - self._abort_lock = threading.RLock() - self._abort_response = _application_common.ABORT_NO_STATUS_RESPONSE - - def UnUn(self, request, context): - if request == _application_common.UNARY_UNARY_REQUEST: - return _application_common.UNARY_UNARY_RESPONSE - elif request == _application_common.ABORT_REQUEST: - with self._abort_lock: - try: - context.abort(grpc.StatusCode.PERMISSION_DENIED, - "Denying permission to test abort.") - except Exception as e: # pylint: disable=broad-except - self._abort_response = _application_common.ABORT_SUCCESS_RESPONSE - else: - self._abort_status = _application_common.ABORT_FAILURE_RESPONSE - return None # NOTE: For the linter. 
- elif request == _application_common.ABORT_SUCCESS_QUERY: - with self._abort_lock: - return self._abort_response - else: - context.set_code(grpc.StatusCode.INVALID_ARGUMENT) - context.set_details('Something is wrong with your request!') - return services_pb2.Down() - - def UnStre(self, request, context): - if _application_common.UNARY_STREAM_REQUEST != request: - context.set_code(grpc.StatusCode.INVALID_ARGUMENT) - context.set_details('Something is wrong with your request!') - return - yield services_pb2.Strange() # pylint: disable=unreachable - - def StreUn(self, request_iterator, context): - context.send_initial_metadata((( - 'server_application_metadata_key', - 'Hi there!', - ),)) - for request in request_iterator: - if request != _application_common.STREAM_UNARY_REQUEST: - context.set_code(grpc.StatusCode.INVALID_ARGUMENT) - context.set_details('Something is wrong with your request!') - return services_pb2.Strange() - elif not context.is_active(): - return services_pb2.Strange() - else: - return _application_common.STREAM_UNARY_RESPONSE - - def StreStre(self, request_iterator, context): - valid_requests = (_application_common.STREAM_STREAM_REQUEST, - _application_common.STREAM_STREAM_MUTATING_REQUEST) - for request in request_iterator: - if request not in valid_requests: - context.set_code(grpc.StatusCode.INVALID_ARGUMENT) - context.set_details('Something is wrong with your request!') - return - elif not context.is_active(): - return - elif request == _application_common.STREAM_STREAM_REQUEST: - yield _application_common.STREAM_STREAM_RESPONSE - yield _application_common.STREAM_STREAM_RESPONSE - elif request == _application_common.STREAM_STREAM_MUTATING_REQUEST: - response = services_pb2.Bottom() - for i in range( - _application_common.STREAM_STREAM_MUTATING_COUNT): - response.first_bottom_field = i - yield response +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""An example gRPC Python-using server-side application.""" + +import grpc + +import threading + +# requests_pb2 is a semantic dependency of this module. 
+from tests.testing import _application_common +from tests.testing.proto import requests_pb2 # pylint: disable=unused-import +from tests.testing.proto import services_pb2 +from tests.testing.proto import services_pb2_grpc + + +class FirstServiceServicer(services_pb2_grpc.FirstServiceServicer): + """Services RPCs.""" + + def __init__(self): + self._abort_lock = threading.RLock() + self._abort_response = _application_common.ABORT_NO_STATUS_RESPONSE + + def UnUn(self, request, context): + if request == _application_common.UNARY_UNARY_REQUEST: + return _application_common.UNARY_UNARY_RESPONSE + elif request == _application_common.ABORT_REQUEST: + with self._abort_lock: + try: + context.abort(grpc.StatusCode.PERMISSION_DENIED, + "Denying permission to test abort.") + except Exception as e: # pylint: disable=broad-except + self._abort_response = _application_common.ABORT_SUCCESS_RESPONSE + else: + self._abort_status = _application_common.ABORT_FAILURE_RESPONSE + return None # NOTE: For the linter. + elif request == _application_common.ABORT_SUCCESS_QUERY: + with self._abort_lock: + return self._abort_response + else: + context.set_code(grpc.StatusCode.INVALID_ARGUMENT) + context.set_details('Something is wrong with your request!') + return services_pb2.Down() + + def UnStre(self, request, context): + if _application_common.UNARY_STREAM_REQUEST != request: + context.set_code(grpc.StatusCode.INVALID_ARGUMENT) + context.set_details('Something is wrong with your request!') + return + yield services_pb2.Strange() # pylint: disable=unreachable + + def StreUn(self, request_iterator, context): + context.send_initial_metadata((( + 'server_application_metadata_key', + 'Hi there!', + ),)) + for request in request_iterator: + if request != _application_common.STREAM_UNARY_REQUEST: + context.set_code(grpc.StatusCode.INVALID_ARGUMENT) + context.set_details('Something is wrong with your request!') + return services_pb2.Strange() + elif not context.is_active(): + return services_pb2.Strange() + else: + return _application_common.STREAM_UNARY_RESPONSE + + def StreStre(self, request_iterator, context): + valid_requests = (_application_common.STREAM_STREAM_REQUEST, + _application_common.STREAM_STREAM_MUTATING_REQUEST) + for request in request_iterator: + if request not in valid_requests: + context.set_code(grpc.StatusCode.INVALID_ARGUMENT) + context.set_details('Something is wrong with your request!') + return + elif not context.is_active(): + return + elif request == _application_common.STREAM_STREAM_REQUEST: + yield _application_common.STREAM_STREAM_RESPONSE + yield _application_common.STREAM_STREAM_RESPONSE + elif request == _application_common.STREAM_STREAM_MUTATING_REQUEST: + response = services_pb2.Bottom() + for i in range( + _application_common.STREAM_STREAM_MUTATING_COUNT): + response.first_bottom_field = i + yield response diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py index dd8b258468..617a41b7e5 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py @@ -1,207 +1,207 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
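(Aside, not part of the diff: before moving on to `_server_test.py`, one detail of the `FirstServiceServicer.UnUn` handler restored just above is easy to misread. `grpc.ServicerContext.abort()` terminates the RPC by raising an exception, so the `except` branch is the path taken when the abort works, and that is where the success response for the later `ABORT_SUCCESS_QUERY` is recorded. A minimal, hypothetical sketch of that control flow, using a fake context object rather than a real servicer context:)

import grpc


class _FakeContext(object):
    """Hypothetical stand-in that mimics ServicerContext.abort() raising."""

    def abort(self, code, details):
        raise RuntimeError('RPC aborted: %s %s' % (code, details))


def record_abort_outcome(context):
    # Mirrors the pattern in FirstServiceServicer.UnUn: reaching the
    # except branch means abort() behaved as intended.
    try:
        context.abort(grpc.StatusCode.PERMISSION_DENIED,
                      'Denying permission to test abort.')
    except Exception:  # pylint: disable=broad-except
        return 'abort raised as expected'
    else:
        return 'abort unexpectedly returned'


assert record_abort_outcome(_FakeContext()) == 'abort raised as expected'

(End of aside.)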
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time -import unittest - -import grpc -import grpc_testing - -from tests.testing import _application_common -from tests.testing import _application_testing_common -from tests.testing import _server_application -from tests.testing.proto import services_pb2 - - -class FirstServiceServicerTest(unittest.TestCase): - - def setUp(self): - self._real_time = grpc_testing.strict_real_time() - self._fake_time = grpc_testing.strict_fake_time(time.time()) - servicer = _server_application.FirstServiceServicer() - descriptors_to_servicers = { - _application_testing_common.FIRST_SERVICE: servicer - } - self._real_time_server = grpc_testing.server_from_dictionary( - descriptors_to_servicers, self._real_time) - self._fake_time_server = grpc_testing.server_from_dictionary( - descriptors_to_servicers, self._fake_time) - - def test_successful_unary_unary(self): - rpc = self._real_time_server.invoke_unary_unary( - _application_testing_common.FIRST_SERVICE_UNUN, (), - _application_common.UNARY_UNARY_REQUEST, None) - initial_metadata = rpc.initial_metadata() - response, trailing_metadata, code, details = rpc.termination() - - self.assertEqual(_application_common.UNARY_UNARY_RESPONSE, response) - self.assertIs(code, grpc.StatusCode.OK) - - def test_successful_unary_stream(self): - rpc = self._real_time_server.invoke_unary_stream( - _application_testing_common.FIRST_SERVICE_UNSTRE, (), - _application_common.UNARY_STREAM_REQUEST, None) - initial_metadata = rpc.initial_metadata() - trailing_metadata, code, details = rpc.termination() - - self.assertIs(code, grpc.StatusCode.OK) - - def test_successful_stream_unary(self): - rpc = self._real_time_server.invoke_stream_unary( - _application_testing_common.FIRST_SERVICE_STREUN, (), None) - rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - rpc.requests_closed() - initial_metadata = rpc.initial_metadata() - response, trailing_metadata, code, details = rpc.termination() - - self.assertEqual(_application_common.STREAM_UNARY_RESPONSE, response) - self.assertIs(code, grpc.StatusCode.OK) - - def test_successful_stream_stream(self): - rpc = self._real_time_server.invoke_stream_stream( - _application_testing_common.FIRST_SERVICE_STRESTRE, (), None) - rpc.send_request(_application_common.STREAM_STREAM_REQUEST) - initial_metadata = rpc.initial_metadata() - responses = [ - rpc.take_response(), - rpc.take_response(), - ] - rpc.send_request(_application_common.STREAM_STREAM_REQUEST) - rpc.send_request(_application_common.STREAM_STREAM_REQUEST) - responses.extend([ - rpc.take_response(), - rpc.take_response(), - rpc.take_response(), - rpc.take_response(), - ]) - rpc.requests_closed() - trailing_metadata, code, details = rpc.termination() - - for response in responses: - self.assertEqual(_application_common.STREAM_STREAM_RESPONSE, - response) - self.assertIs(code, grpc.StatusCode.OK) - - def test_mutating_stream_stream(self): - rpc = self._real_time_server.invoke_stream_stream( - _application_testing_common.FIRST_SERVICE_STRESTRE, (), None) 
- rpc.send_request(_application_common.STREAM_STREAM_MUTATING_REQUEST) - initial_metadata = rpc.initial_metadata() - responses = [ - rpc.take_response() - for _ in range(_application_common.STREAM_STREAM_MUTATING_COUNT) - ] - rpc.send_request(_application_common.STREAM_STREAM_MUTATING_REQUEST) - responses.extend([ - rpc.take_response() - for _ in range(_application_common.STREAM_STREAM_MUTATING_COUNT) - ]) - rpc.requests_closed() - _, _, _ = rpc.termination() - expected_responses = ( - services_pb2.Bottom(first_bottom_field=0), - services_pb2.Bottom(first_bottom_field=1), - services_pb2.Bottom(first_bottom_field=0), - services_pb2.Bottom(first_bottom_field=1), - ) - self.assertSequenceEqual(expected_responses, responses) - - def test_server_rpc_idempotence(self): - rpc = self._real_time_server.invoke_unary_unary( - _application_testing_common.FIRST_SERVICE_UNUN, (), - _application_common.UNARY_UNARY_REQUEST, None) - first_initial_metadata = rpc.initial_metadata() - second_initial_metadata = rpc.initial_metadata() - third_initial_metadata = rpc.initial_metadata() - first_termination = rpc.termination() - second_termination = rpc.termination() - third_termination = rpc.termination() - - for later_initial_metadata in ( - second_initial_metadata, - third_initial_metadata, - ): - self.assertEqual(first_initial_metadata, later_initial_metadata) - response = first_termination[0] - terminal_metadata = first_termination[1] - code = first_termination[2] - details = first_termination[3] - for later_termination in ( - second_termination, - third_termination, - ): - self.assertEqual(response, later_termination[0]) - self.assertEqual(terminal_metadata, later_termination[1]) - self.assertIs(code, later_termination[2]) - self.assertEqual(details, later_termination[3]) - self.assertEqual(_application_common.UNARY_UNARY_RESPONSE, response) - self.assertIs(code, grpc.StatusCode.OK) - - def test_misbehaving_client_unary_unary(self): - rpc = self._real_time_server.invoke_unary_unary( - _application_testing_common.FIRST_SERVICE_UNUN, (), - _application_common.ERRONEOUS_UNARY_UNARY_REQUEST, None) - initial_metadata = rpc.initial_metadata() - response, trailing_metadata, code, details = rpc.termination() - - self.assertIsNot(code, grpc.StatusCode.OK) - - def test_infinite_request_stream_real_time(self): - rpc = self._real_time_server.invoke_stream_unary( - _application_testing_common.FIRST_SERVICE_STREUN, (), - _application_common.INFINITE_REQUEST_STREAM_TIMEOUT) - rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - initial_metadata = rpc.initial_metadata() - self._real_time.sleep_for( - _application_common.INFINITE_REQUEST_STREAM_TIMEOUT * 2) - rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - response, trailing_metadata, code, details = rpc.termination() - - self.assertIs(code, grpc.StatusCode.DEADLINE_EXCEEDED) - - def test_infinite_request_stream_fake_time(self): - rpc = self._fake_time_server.invoke_stream_unary( - _application_testing_common.FIRST_SERVICE_STREUN, (), - _application_common.INFINITE_REQUEST_STREAM_TIMEOUT) - rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - initial_metadata = rpc.initial_metadata() - self._fake_time.sleep_for( - _application_common.INFINITE_REQUEST_STREAM_TIMEOUT * 2) - 
rpc.send_request(_application_common.STREAM_UNARY_REQUEST) - response, trailing_metadata, code, details = rpc.termination() - - self.assertIs(code, grpc.StatusCode.DEADLINE_EXCEEDED) - - def test_servicer_context_abort(self): - rpc = self._real_time_server.invoke_unary_unary( - _application_testing_common.FIRST_SERVICE_UNUN, (), - _application_common.ABORT_REQUEST, None) - _, _, code, _ = rpc.termination() - self.assertIs(code, grpc.StatusCode.PERMISSION_DENIED) - rpc = self._real_time_server.invoke_unary_unary( - _application_testing_common.FIRST_SERVICE_UNUN, (), - _application_common.ABORT_SUCCESS_QUERY, None) - response, _, code, _ = rpc.termination() - self.assertEqual(_application_common.ABORT_SUCCESS_RESPONSE, response) - self.assertIs(code, grpc.StatusCode.OK) - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import unittest + +import grpc +import grpc_testing + +from tests.testing import _application_common +from tests.testing import _application_testing_common +from tests.testing import _server_application +from tests.testing.proto import services_pb2 + + +class FirstServiceServicerTest(unittest.TestCase): + + def setUp(self): + self._real_time = grpc_testing.strict_real_time() + self._fake_time = grpc_testing.strict_fake_time(time.time()) + servicer = _server_application.FirstServiceServicer() + descriptors_to_servicers = { + _application_testing_common.FIRST_SERVICE: servicer + } + self._real_time_server = grpc_testing.server_from_dictionary( + descriptors_to_servicers, self._real_time) + self._fake_time_server = grpc_testing.server_from_dictionary( + descriptors_to_servicers, self._fake_time) + + def test_successful_unary_unary(self): + rpc = self._real_time_server.invoke_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN, (), + _application_common.UNARY_UNARY_REQUEST, None) + initial_metadata = rpc.initial_metadata() + response, trailing_metadata, code, details = rpc.termination() + + self.assertEqual(_application_common.UNARY_UNARY_RESPONSE, response) + self.assertIs(code, grpc.StatusCode.OK) + + def test_successful_unary_stream(self): + rpc = self._real_time_server.invoke_unary_stream( + _application_testing_common.FIRST_SERVICE_UNSTRE, (), + _application_common.UNARY_STREAM_REQUEST, None) + initial_metadata = rpc.initial_metadata() + trailing_metadata, code, details = rpc.termination() + + self.assertIs(code, grpc.StatusCode.OK) + + def test_successful_stream_unary(self): + rpc = self._real_time_server.invoke_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN, (), None) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.requests_closed() + initial_metadata = rpc.initial_metadata() + response, trailing_metadata, code, details = rpc.termination() + + self.assertEqual(_application_common.STREAM_UNARY_RESPONSE, 
response) + self.assertIs(code, grpc.StatusCode.OK) + + def test_successful_stream_stream(self): + rpc = self._real_time_server.invoke_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE, (), None) + rpc.send_request(_application_common.STREAM_STREAM_REQUEST) + initial_metadata = rpc.initial_metadata() + responses = [ + rpc.take_response(), + rpc.take_response(), + ] + rpc.send_request(_application_common.STREAM_STREAM_REQUEST) + rpc.send_request(_application_common.STREAM_STREAM_REQUEST) + responses.extend([ + rpc.take_response(), + rpc.take_response(), + rpc.take_response(), + rpc.take_response(), + ]) + rpc.requests_closed() + trailing_metadata, code, details = rpc.termination() + + for response in responses: + self.assertEqual(_application_common.STREAM_STREAM_RESPONSE, + response) + self.assertIs(code, grpc.StatusCode.OK) + + def test_mutating_stream_stream(self): + rpc = self._real_time_server.invoke_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE, (), None) + rpc.send_request(_application_common.STREAM_STREAM_MUTATING_REQUEST) + initial_metadata = rpc.initial_metadata() + responses = [ + rpc.take_response() + for _ in range(_application_common.STREAM_STREAM_MUTATING_COUNT) + ] + rpc.send_request(_application_common.STREAM_STREAM_MUTATING_REQUEST) + responses.extend([ + rpc.take_response() + for _ in range(_application_common.STREAM_STREAM_MUTATING_COUNT) + ]) + rpc.requests_closed() + _, _, _ = rpc.termination() + expected_responses = ( + services_pb2.Bottom(first_bottom_field=0), + services_pb2.Bottom(first_bottom_field=1), + services_pb2.Bottom(first_bottom_field=0), + services_pb2.Bottom(first_bottom_field=1), + ) + self.assertSequenceEqual(expected_responses, responses) + + def test_server_rpc_idempotence(self): + rpc = self._real_time_server.invoke_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN, (), + _application_common.UNARY_UNARY_REQUEST, None) + first_initial_metadata = rpc.initial_metadata() + second_initial_metadata = rpc.initial_metadata() + third_initial_metadata = rpc.initial_metadata() + first_termination = rpc.termination() + second_termination = rpc.termination() + third_termination = rpc.termination() + + for later_initial_metadata in ( + second_initial_metadata, + third_initial_metadata, + ): + self.assertEqual(first_initial_metadata, later_initial_metadata) + response = first_termination[0] + terminal_metadata = first_termination[1] + code = first_termination[2] + details = first_termination[3] + for later_termination in ( + second_termination, + third_termination, + ): + self.assertEqual(response, later_termination[0]) + self.assertEqual(terminal_metadata, later_termination[1]) + self.assertIs(code, later_termination[2]) + self.assertEqual(details, later_termination[3]) + self.assertEqual(_application_common.UNARY_UNARY_RESPONSE, response) + self.assertIs(code, grpc.StatusCode.OK) + + def test_misbehaving_client_unary_unary(self): + rpc = self._real_time_server.invoke_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN, (), + _application_common.ERRONEOUS_UNARY_UNARY_REQUEST, None) + initial_metadata = rpc.initial_metadata() + response, trailing_metadata, code, details = rpc.termination() + + self.assertIsNot(code, grpc.StatusCode.OK) + + def test_infinite_request_stream_real_time(self): + rpc = self._real_time_server.invoke_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN, (), + _application_common.INFINITE_REQUEST_STREAM_TIMEOUT) + 
rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + initial_metadata = rpc.initial_metadata() + self._real_time.sleep_for( + _application_common.INFINITE_REQUEST_STREAM_TIMEOUT * 2) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + response, trailing_metadata, code, details = rpc.termination() + + self.assertIs(code, grpc.StatusCode.DEADLINE_EXCEEDED) + + def test_infinite_request_stream_fake_time(self): + rpc = self._fake_time_server.invoke_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN, (), + _application_common.INFINITE_REQUEST_STREAM_TIMEOUT) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + initial_metadata = rpc.initial_metadata() + self._fake_time.sleep_for( + _application_common.INFINITE_REQUEST_STREAM_TIMEOUT * 2) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + response, trailing_metadata, code, details = rpc.termination() + + self.assertIs(code, grpc.StatusCode.DEADLINE_EXCEEDED) + + def test_servicer_context_abort(self): + rpc = self._real_time_server.invoke_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN, (), + _application_common.ABORT_REQUEST, None) + _, _, code, _ = rpc.termination() + self.assertIs(code, grpc.StatusCode.PERMISSION_DENIED) + rpc = self._real_time_server.invoke_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN, (), + _application_common.ABORT_SUCCESS_QUERY, None) + response, _, code, _ = rpc.termination() + self.assertEqual(_application_common.ABORT_SUCCESS_RESPONSE, response) + self.assertIs(code, grpc.StatusCode.OK) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py index 4f1495a5f0..cab665c045 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py @@ -1,165 +1,165 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import random -import threading -import time -import unittest - -import grpc_testing - -_QUANTUM = 0.3 -_MANY = 10000 -# Tests that run in real time can either wait for the scheduler to -# eventually run what needs to be run (and risk timing out) or declare -# that the scheduler didn't schedule work reasonably fast enough. We -# choose the latter for this test. -_PATHOLOGICAL_SCHEDULING = 'pathological thread scheduling!' 
- - -class _TimeNoter(object): - - def __init__(self, time): - self._condition = threading.Condition() - self._time = time - self._call_times = [] - - def __call__(self): - with self._condition: - self._call_times.append(self._time.time()) - - def call_times(self): - with self._condition: - return tuple(self._call_times) - - -class TimeTest(object): - - def test_sleep_for(self): - start_time = self._time.time() - self._time.sleep_for(_QUANTUM) - end_time = self._time.time() - - self.assertLessEqual(start_time + _QUANTUM, end_time) - - def test_sleep_until(self): - start_time = self._time.time() - self._time.sleep_until(start_time + _QUANTUM) - end_time = self._time.time() - - self.assertLessEqual(start_time + _QUANTUM, end_time) - - def test_call_in(self): - time_noter = _TimeNoter(self._time) - - start_time = self._time.time() - self._time.call_in(time_noter, _QUANTUM) - self._time.sleep_for(_QUANTUM * 2) - call_times = time_noter.call_times() - - self.assertTrue(call_times, msg=_PATHOLOGICAL_SCHEDULING) - self.assertLessEqual(start_time + _QUANTUM, call_times[0]) - - def test_call_at(self): - time_noter = _TimeNoter(self._time) - - start_time = self._time.time() - self._time.call_at(time_noter, self._time.time() + _QUANTUM) - self._time.sleep_for(_QUANTUM * 2) - call_times = time_noter.call_times() - - self.assertTrue(call_times, msg=_PATHOLOGICAL_SCHEDULING) - self.assertLessEqual(start_time + _QUANTUM, call_times[0]) - - def test_cancel(self): - time_noter = _TimeNoter(self._time) - - future = self._time.call_in(time_noter, _QUANTUM * 2) - self._time.sleep_for(_QUANTUM) - cancelled = future.cancel() - self._time.sleep_for(_QUANTUM * 2) - call_times = time_noter.call_times() - - self.assertFalse(call_times, msg=_PATHOLOGICAL_SCHEDULING) - self.assertTrue(cancelled) - self.assertTrue(future.cancelled()) - - def test_many(self): - test_events = tuple(threading.Event() for _ in range(_MANY)) - possibly_cancelled_futures = {} - background_noise_futures = [] - - for test_event in test_events: - possibly_cancelled_futures[test_event] = self._time.call_in( - test_event.set, _QUANTUM * (2 + random.random())) - for _ in range(_MANY): - background_noise_futures.append( - self._time.call_in(threading.Event().set, - _QUANTUM * 1000 * random.random())) - self._time.sleep_for(_QUANTUM) - cancelled = set() - for test_event, test_future in possibly_cancelled_futures.items(): - if bool(random.randint(0, 1)) and test_future.cancel(): - cancelled.add(test_event) - self._time.sleep_for(_QUANTUM * 3) - - for test_event in test_events: +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import random +import threading +import time +import unittest + +import grpc_testing + +_QUANTUM = 0.3 +_MANY = 10000 +# Tests that run in real time can either wait for the scheduler to +# eventually run what needs to be run (and risk timing out) or declare +# that the scheduler didn't schedule work reasonably fast enough. We +# choose the latter for this test. 
+_PATHOLOGICAL_SCHEDULING = 'pathological thread scheduling!' + + +class _TimeNoter(object): + + def __init__(self, time): + self._condition = threading.Condition() + self._time = time + self._call_times = [] + + def __call__(self): + with self._condition: + self._call_times.append(self._time.time()) + + def call_times(self): + with self._condition: + return tuple(self._call_times) + + +class TimeTest(object): + + def test_sleep_for(self): + start_time = self._time.time() + self._time.sleep_for(_QUANTUM) + end_time = self._time.time() + + self.assertLessEqual(start_time + _QUANTUM, end_time) + + def test_sleep_until(self): + start_time = self._time.time() + self._time.sleep_until(start_time + _QUANTUM) + end_time = self._time.time() + + self.assertLessEqual(start_time + _QUANTUM, end_time) + + def test_call_in(self): + time_noter = _TimeNoter(self._time) + + start_time = self._time.time() + self._time.call_in(time_noter, _QUANTUM) + self._time.sleep_for(_QUANTUM * 2) + call_times = time_noter.call_times() + + self.assertTrue(call_times, msg=_PATHOLOGICAL_SCHEDULING) + self.assertLessEqual(start_time + _QUANTUM, call_times[0]) + + def test_call_at(self): + time_noter = _TimeNoter(self._time) + + start_time = self._time.time() + self._time.call_at(time_noter, self._time.time() + _QUANTUM) + self._time.sleep_for(_QUANTUM * 2) + call_times = time_noter.call_times() + + self.assertTrue(call_times, msg=_PATHOLOGICAL_SCHEDULING) + self.assertLessEqual(start_time + _QUANTUM, call_times[0]) + + def test_cancel(self): + time_noter = _TimeNoter(self._time) + + future = self._time.call_in(time_noter, _QUANTUM * 2) + self._time.sleep_for(_QUANTUM) + cancelled = future.cancel() + self._time.sleep_for(_QUANTUM * 2) + call_times = time_noter.call_times() + + self.assertFalse(call_times, msg=_PATHOLOGICAL_SCHEDULING) + self.assertTrue(cancelled) + self.assertTrue(future.cancelled()) + + def test_many(self): + test_events = tuple(threading.Event() for _ in range(_MANY)) + possibly_cancelled_futures = {} + background_noise_futures = [] + + for test_event in test_events: + possibly_cancelled_futures[test_event] = self._time.call_in( + test_event.set, _QUANTUM * (2 + random.random())) + for _ in range(_MANY): + background_noise_futures.append( + self._time.call_in(threading.Event().set, + _QUANTUM * 1000 * random.random())) + self._time.sleep_for(_QUANTUM) + cancelled = set() + for test_event, test_future in possibly_cancelled_futures.items(): + if bool(random.randint(0, 1)) and test_future.cancel(): + cancelled.add(test_event) + self._time.sleep_for(_QUANTUM * 3) + + for test_event in test_events: (self.assertFalse if test_event in cancelled else self.assertTrue)( test_event.is_set()) - for background_noise_future in background_noise_futures: - background_noise_future.cancel() - - def test_same_behavior_used_several_times(self): - time_noter = _TimeNoter(self._time) - - start_time = self._time.time() - first_future_at_one = self._time.call_in(time_noter, _QUANTUM) - second_future_at_one = self._time.call_in(time_noter, _QUANTUM) - first_future_at_three = self._time.call_in(time_noter, _QUANTUM * 3) - second_future_at_three = self._time.call_in(time_noter, _QUANTUM * 3) - self._time.sleep_for(_QUANTUM * 2) - first_future_at_one_cancelled = first_future_at_one.cancel() - second_future_at_one_cancelled = second_future_at_one.cancel() - first_future_at_three_cancelled = first_future_at_three.cancel() - self._time.sleep_for(_QUANTUM * 2) - second_future_at_three_cancelled = second_future_at_three.cancel() - 
first_future_at_three_cancelled_again = first_future_at_three.cancel() - call_times = time_noter.call_times() - - self.assertEqual(3, len(call_times), msg=_PATHOLOGICAL_SCHEDULING) - self.assertFalse(first_future_at_one_cancelled) - self.assertFalse(second_future_at_one_cancelled) - self.assertTrue(first_future_at_three_cancelled) - self.assertFalse(second_future_at_three_cancelled) - self.assertTrue(first_future_at_three_cancelled_again) - self.assertLessEqual(start_time + _QUANTUM, call_times[0]) - self.assertLessEqual(start_time + _QUANTUM, call_times[1]) - self.assertLessEqual(start_time + _QUANTUM * 3, call_times[2]) - - -class StrictRealTimeTest(TimeTest, unittest.TestCase): - - def setUp(self): - self._time = grpc_testing.strict_real_time() - - -class StrictFakeTimeTest(TimeTest, unittest.TestCase): - - def setUp(self): - self._time = grpc_testing.strict_fake_time( - random.randint(0, int(time.time()))) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + for background_noise_future in background_noise_futures: + background_noise_future.cancel() + + def test_same_behavior_used_several_times(self): + time_noter = _TimeNoter(self._time) + + start_time = self._time.time() + first_future_at_one = self._time.call_in(time_noter, _QUANTUM) + second_future_at_one = self._time.call_in(time_noter, _QUANTUM) + first_future_at_three = self._time.call_in(time_noter, _QUANTUM * 3) + second_future_at_three = self._time.call_in(time_noter, _QUANTUM * 3) + self._time.sleep_for(_QUANTUM * 2) + first_future_at_one_cancelled = first_future_at_one.cancel() + second_future_at_one_cancelled = second_future_at_one.cancel() + first_future_at_three_cancelled = first_future_at_three.cancel() + self._time.sleep_for(_QUANTUM * 2) + second_future_at_three_cancelled = second_future_at_three.cancel() + first_future_at_three_cancelled_again = first_future_at_three.cancel() + call_times = time_noter.call_times() + + self.assertEqual(3, len(call_times), msg=_PATHOLOGICAL_SCHEDULING) + self.assertFalse(first_future_at_one_cancelled) + self.assertFalse(second_future_at_one_cancelled) + self.assertTrue(first_future_at_three_cancelled) + self.assertFalse(second_future_at_three_cancelled) + self.assertTrue(first_future_at_three_cancelled_again) + self.assertLessEqual(start_time + _QUANTUM, call_times[0]) + self.assertLessEqual(start_time + _QUANTUM, call_times[1]) + self.assertLessEqual(start_time + _QUANTUM * 3, call_times[2]) + + +class StrictRealTimeTest(TimeTest, unittest.TestCase): + + def setUp(self): + self._time = grpc_testing.strict_real_time() + + +class StrictFakeTimeTest(TimeTest, unittest.TestCase): + + def setUp(self): + self._time = grpc_testing.strict_fake_time( + random.randint(0, int(time.time()))) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py index 0030911829..1e120359cf 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/__init__.py index dc985eebb4..5fb4f3c3cf 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py index ff16091c50..d2eaf97d5f 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py @@ -1,154 +1,154 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests server context abort mechanism""" - -import unittest -import collections -import gc -import logging -import weakref - -import grpc - -from tests.unit import test_common -from tests.unit.framework.common import test_constants - -_ABORT = '/test/abort' -_ABORT_WITH_STATUS = '/test/AbortWithStatus' -_INVALID_CODE = '/test/InvalidCode' - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x00\x00\x00' - -_ABORT_DETAILS = 'Abandon ship!' -_ABORT_METADATA = (('a-trailing-metadata', '42'),) - - -class _Status( +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests server context abort mechanism""" + +import unittest +import collections +import gc +import logging +import weakref + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_ABORT = '/test/abort' +_ABORT_WITH_STATUS = '/test/AbortWithStatus' +_INVALID_CODE = '/test/InvalidCode' + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_ABORT_DETAILS = 'Abandon ship!' +_ABORT_METADATA = (('a-trailing-metadata', '42'),) + + +class _Status( collections.namedtuple('_Status', ('code', 'details', 'trailing_metadata')), grpc.Status): - pass - - -class _Object(object): - pass - - -do_not_leak_me = _Object() - - -def abort_unary_unary(request, servicer_context): - this_should_not_be_leaked = do_not_leak_me - servicer_context.abort( - grpc.StatusCode.INTERNAL, - _ABORT_DETAILS, - ) - raise Exception('This line should not be executed!') - - -def abort_with_status_unary_unary(request, servicer_context): - servicer_context.abort_with_status( - _Status( - code=grpc.StatusCode.INTERNAL, - details=_ABORT_DETAILS, - trailing_metadata=_ABORT_METADATA, - )) - raise Exception('This line should not be executed!') - - -def invalid_code_unary_unary(request, servicer_context): - servicer_context.abort( - 42, - _ABORT_DETAILS, - ) - - -class _GenericHandler(grpc.GenericRpcHandler): - - def service(self, handler_call_details): - if handler_call_details.method == _ABORT: - return grpc.unary_unary_rpc_method_handler(abort_unary_unary) - elif handler_call_details.method == _ABORT_WITH_STATUS: - return grpc.unary_unary_rpc_method_handler( - abort_with_status_unary_unary) - elif handler_call_details.method == _INVALID_CODE: - return grpc.stream_stream_rpc_method_handler( - invalid_code_unary_unary) - else: - return None - - -class AbortTest(unittest.TestCase): - - def setUp(self): - self._server = test_common.test_server() - port = self._server.add_insecure_port('[::]:0') - self._server.add_generic_rpc_handlers((_GenericHandler(),)) - self._server.start() - - self._channel = grpc.insecure_channel('localhost:%d' % port) - - def tearDown(self): - self._channel.close() - self._server.stop(0) - - def test_abort(self): - with self.assertRaises(grpc.RpcError) as exception_context: - self._channel.unary_unary(_ABORT)(_REQUEST) - rpc_error = exception_context.exception - - self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) - self.assertEqual(rpc_error.details(), 
_ABORT_DETAILS) - - # This test ensures that abort() does not store the raised exception, which - # on Python 3 (via the `__traceback__` attribute) holds a reference to - # all local vars. Storing the raised exception can prevent GC and stop the - # grpc_call from being unref'ed, even after server shutdown. - @unittest.skip("https://github.com/grpc/grpc/issues/17927") - def test_abort_does_not_leak_local_vars(self): - global do_not_leak_me # pylint: disable=global-statement - weak_ref = weakref.ref(do_not_leak_me) - - # Servicer will abort() after creating a local ref to do_not_leak_me. - with self.assertRaises(grpc.RpcError): - self._channel.unary_unary(_ABORT)(_REQUEST) - - # Server may still have a stack frame reference to the exception even - # after client sees error, so ensure server has shutdown. - self._server.stop(None) - do_not_leak_me = None - self.assertIsNone(weak_ref()) - - def test_abort_with_status(self): - with self.assertRaises(grpc.RpcError) as exception_context: - self._channel.unary_unary(_ABORT_WITH_STATUS)(_REQUEST) - rpc_error = exception_context.exception - - self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) - self.assertEqual(rpc_error.details(), _ABORT_DETAILS) - self.assertEqual(rpc_error.trailing_metadata(), _ABORT_METADATA) - - def test_invalid_code(self): - with self.assertRaises(grpc.RpcError) as exception_context: - self._channel.unary_unary(_INVALID_CODE)(_REQUEST) - rpc_error = exception_context.exception - - self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN) - self.assertEqual(rpc_error.details(), _ABORT_DETAILS) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + pass + + +class _Object(object): + pass + + +do_not_leak_me = _Object() + + +def abort_unary_unary(request, servicer_context): + this_should_not_be_leaked = do_not_leak_me + servicer_context.abort( + grpc.StatusCode.INTERNAL, + _ABORT_DETAILS, + ) + raise Exception('This line should not be executed!') + + +def abort_with_status_unary_unary(request, servicer_context): + servicer_context.abort_with_status( + _Status( + code=grpc.StatusCode.INTERNAL, + details=_ABORT_DETAILS, + trailing_metadata=_ABORT_METADATA, + )) + raise Exception('This line should not be executed!') + + +def invalid_code_unary_unary(request, servicer_context): + servicer_context.abort( + 42, + _ABORT_DETAILS, + ) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _ABORT: + return grpc.unary_unary_rpc_method_handler(abort_unary_unary) + elif handler_call_details.method == _ABORT_WITH_STATUS: + return grpc.unary_unary_rpc_method_handler( + abort_with_status_unary_unary) + elif handler_call_details.method == _INVALID_CODE: + return grpc.stream_stream_rpc_method_handler( + invalid_code_unary_unary) + else: + return None + + +class AbortTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + port = self._server.add_insecure_port('[::]:0') + self._server.add_generic_rpc_handlers((_GenericHandler(),)) + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._channel.close() + self._server.stop(0) + + def test_abort(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_ABORT)(_REQUEST) + rpc_error = exception_context.exception + + self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) + self.assertEqual(rpc_error.details(), _ABORT_DETAILS) + + # This test 
ensures that abort() does not store the raised exception, which + # on Python 3 (via the `__traceback__` attribute) holds a reference to + # all local vars. Storing the raised exception can prevent GC and stop the + # grpc_call from being unref'ed, even after server shutdown. + @unittest.skip("https://github.com/grpc/grpc/issues/17927") + def test_abort_does_not_leak_local_vars(self): + global do_not_leak_me # pylint: disable=global-statement + weak_ref = weakref.ref(do_not_leak_me) + + # Servicer will abort() after creating a local ref to do_not_leak_me. + with self.assertRaises(grpc.RpcError): + self._channel.unary_unary(_ABORT)(_REQUEST) + + # Server may still have a stack frame reference to the exception even + # after client sees error, so ensure server has shutdown. + self._server.stop(None) + do_not_leak_me = None + self.assertIsNone(weak_ref()) + + def test_abort_with_status(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_ABORT_WITH_STATUS)(_REQUEST) + rpc_error = exception_context.exception + + self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) + self.assertEqual(rpc_error.details(), _ABORT_DETAILS) + self.assertEqual(rpc_error.trailing_metadata(), _ABORT_METADATA) + + def test_invalid_code(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_INVALID_CODE)(_REQUEST) + rpc_error = exception_context.exception + + self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN) + self.assertEqual(rpc_error.details(), _ABORT_DETAILS) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py index 7f82dfb2a3..a459ee6e19 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py @@ -1,118 +1,118 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of gRPC Python's application-layer API.""" - -import unittest -import logging - -import six - -import grpc - -from tests.unit import _from_grpc_import_star - - -class AllTest(unittest.TestCase): - - def testAll(self): - expected_grpc_code_elements = ( - 'FutureTimeoutError', - 'FutureCancelledError', - 'Future', - 'ChannelConnectivity', - 'Compression', - 'StatusCode', - 'Status', - 'RpcError', - 'RpcContext', - 'Call', - 'ChannelCredentials', - 'CallCredentials', - 'AuthMetadataContext', - 'AuthMetadataPluginCallback', - 'AuthMetadataPlugin', - 'ServerCertificateConfiguration', - 'ServerCredentials', - 'UnaryUnaryMultiCallable', - 'UnaryStreamMultiCallable', - 'StreamUnaryMultiCallable', - 'StreamStreamMultiCallable', - 'UnaryUnaryClientInterceptor', - 'UnaryStreamClientInterceptor', - 'StreamUnaryClientInterceptor', - 'StreamStreamClientInterceptor', - 'Channel', - 'ServicerContext', - 'RpcMethodHandler', - 'HandlerCallDetails', - 'GenericRpcHandler', - 'ServiceRpcHandler', - 'Server', - 'ServerInterceptor', - 'LocalConnectionType', - 'local_channel_credentials', - 'local_server_credentials', +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test of gRPC Python's application-layer API.""" + +import unittest +import logging + +import six + +import grpc + +from tests.unit import _from_grpc_import_star + + +class AllTest(unittest.TestCase): + + def testAll(self): + expected_grpc_code_elements = ( + 'FutureTimeoutError', + 'FutureCancelledError', + 'Future', + 'ChannelConnectivity', + 'Compression', + 'StatusCode', + 'Status', + 'RpcError', + 'RpcContext', + 'Call', + 'ChannelCredentials', + 'CallCredentials', + 'AuthMetadataContext', + 'AuthMetadataPluginCallback', + 'AuthMetadataPlugin', + 'ServerCertificateConfiguration', + 'ServerCredentials', + 'UnaryUnaryMultiCallable', + 'UnaryStreamMultiCallable', + 'StreamUnaryMultiCallable', + 'StreamStreamMultiCallable', + 'UnaryUnaryClientInterceptor', + 'UnaryStreamClientInterceptor', + 'StreamUnaryClientInterceptor', + 'StreamStreamClientInterceptor', + 'Channel', + 'ServicerContext', + 'RpcMethodHandler', + 'HandlerCallDetails', + 'GenericRpcHandler', + 'ServiceRpcHandler', + 'Server', + 'ServerInterceptor', + 'LocalConnectionType', + 'local_channel_credentials', + 'local_server_credentials', 'alts_channel_credentials', 'alts_server_credentials', - 'unary_unary_rpc_method_handler', - 'unary_stream_rpc_method_handler', - 'stream_unary_rpc_method_handler', - 'ClientCallDetails', - 'stream_stream_rpc_method_handler', - 'method_handlers_generic_handler', - 'ssl_channel_credentials', - 'metadata_call_credentials', - 'access_token_call_credentials', - 'composite_call_credentials', - 'composite_channel_credentials', - 'ssl_server_credentials', - 'ssl_server_certificate_configuration', - 'dynamic_ssl_server_credentials', - 'channel_ready_future', - 'insecure_channel', - 'secure_channel', - 'intercept_channel', - 'server', + 'unary_unary_rpc_method_handler', + 'unary_stream_rpc_method_handler', + 
'stream_unary_rpc_method_handler', + 'ClientCallDetails', + 'stream_stream_rpc_method_handler', + 'method_handlers_generic_handler', + 'ssl_channel_credentials', + 'metadata_call_credentials', + 'access_token_call_credentials', + 'composite_call_credentials', + 'composite_channel_credentials', + 'ssl_server_credentials', + 'ssl_server_certificate_configuration', + 'dynamic_ssl_server_credentials', + 'channel_ready_future', + 'insecure_channel', + 'secure_channel', + 'intercept_channel', + 'server', 'protos', 'services', 'protos_and_services', - ) - - six.assertCountEqual(self, expected_grpc_code_elements, - _from_grpc_import_star.GRPC_ELEMENTS) - - -class ChannelConnectivityTest(unittest.TestCase): - - def testChannelConnectivity(self): - self.assertSequenceEqual(( - grpc.ChannelConnectivity.IDLE, - grpc.ChannelConnectivity.CONNECTING, - grpc.ChannelConnectivity.READY, - grpc.ChannelConnectivity.TRANSIENT_FAILURE, - grpc.ChannelConnectivity.SHUTDOWN, - ), tuple(grpc.ChannelConnectivity)) - - -class ChannelTest(unittest.TestCase): - - def test_secure_channel(self): - channel_credentials = grpc.ssl_channel_credentials() - channel = grpc.secure_channel('google.com:443', channel_credentials) - channel.close() - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + ) + + six.assertCountEqual(self, expected_grpc_code_elements, + _from_grpc_import_star.GRPC_ELEMENTS) + + +class ChannelConnectivityTest(unittest.TestCase): + + def testChannelConnectivity(self): + self.assertSequenceEqual(( + grpc.ChannelConnectivity.IDLE, + grpc.ChannelConnectivity.CONNECTING, + grpc.ChannelConnectivity.READY, + grpc.ChannelConnectivity.TRANSIENT_FAILURE, + grpc.ChannelConnectivity.SHUTDOWN, + ), tuple(grpc.ChannelConnectivity)) + + +class ChannelTest(unittest.TestCase): + + def test_secure_channel(self): + channel_credentials = grpc.ssl_channel_credentials() + channel = grpc.secure_channel('google.com:443', channel_credentials) + channel.close() + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py index cba350e781..817c528237 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py @@ -1,193 +1,193 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests exposure of SSL auth context""" - -import pickle -import unittest -import logging - -import grpc -from grpc import _channel -from grpc.experimental import session_cache -import six - -from tests.unit import test_common -from tests.unit import resources - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x00\x00\x00' - -_UNARY_UNARY = '/test/UnaryUnary' - -_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' -_CLIENT_IDS = ( - b'*.test.google.fr', - b'waterzooi.test.google.be', - b'*.test.youtube.com', - b'192.168.1.3', -) -_ID = 'id' -_ID_KEY = 'id_key' -_AUTH_CTX = 'auth_ctx' - -_PRIVATE_KEY = resources.private_key() -_CERTIFICATE_CHAIN = resources.certificate_chain() -_TEST_ROOT_CERTIFICATES = resources.test_root_certificates() -_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),) -_PROPERTY_OPTIONS = (( - 'grpc.ssl_target_name_override', - _SERVER_HOST_OVERRIDE, -),) - - -def handle_unary_unary(request, servicer_context): - return pickle.dumps({ - _ID: servicer_context.peer_identities(), - _ID_KEY: servicer_context.peer_identity_key(), - _AUTH_CTX: servicer_context.auth_context() - }) - - -class AuthContextTest(unittest.TestCase): - - def testInsecure(self): - handler = grpc.method_handlers_generic_handler('test', { - 'UnaryUnary': +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests exposure of SSL auth context""" + +import pickle +import unittest +import logging + +import grpc +from grpc import _channel +from grpc.experimental import session_cache +import six + +from tests.unit import test_common +from tests.unit import resources + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_UNARY_UNARY = '/test/UnaryUnary' + +_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' +_CLIENT_IDS = ( + b'*.test.google.fr', + b'waterzooi.test.google.be', + b'*.test.youtube.com', + b'192.168.1.3', +) +_ID = 'id' +_ID_KEY = 'id_key' +_AUTH_CTX = 'auth_ctx' + +_PRIVATE_KEY = resources.private_key() +_CERTIFICATE_CHAIN = resources.certificate_chain() +_TEST_ROOT_CERTIFICATES = resources.test_root_certificates() +_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),) +_PROPERTY_OPTIONS = (( + 'grpc.ssl_target_name_override', + _SERVER_HOST_OVERRIDE, +),) + + +def handle_unary_unary(request, servicer_context): + return pickle.dumps({ + _ID: servicer_context.peer_identities(), + _ID_KEY: servicer_context.peer_identity_key(), + _AUTH_CTX: servicer_context.auth_context() + }) + + +class AuthContextTest(unittest.TestCase): + + def testInsecure(self): + handler = grpc.method_handlers_generic_handler('test', { + 'UnaryUnary': grpc.unary_unary_rpc_method_handler(handle_unary_unary) - }) - server = test_common.test_server() - server.add_generic_rpc_handlers((handler,)) - port = server.add_insecure_port('[::]:0') - server.start() - - with grpc.insecure_channel('localhost:%d' % port) as channel: - response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) - server.stop(None) - - auth_data = pickle.loads(response) - self.assertIsNone(auth_data[_ID]) - self.assertIsNone(auth_data[_ID_KEY]) - self.assertDictEqual({}, auth_data[_AUTH_CTX]) - - def testSecureNoCert(self): - handler = grpc.method_handlers_generic_handler('test', { - 'UnaryUnary': + }) + server = test_common.test_server() + server.add_generic_rpc_handlers((handler,)) + port = server.add_insecure_port('[::]:0') + server.start() + + with grpc.insecure_channel('localhost:%d' % port) as channel: + response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) + server.stop(None) + + auth_data = pickle.loads(response) + self.assertIsNone(auth_data[_ID]) + self.assertIsNone(auth_data[_ID_KEY]) + self.assertDictEqual({}, auth_data[_AUTH_CTX]) + + def testSecureNoCert(self): + handler = grpc.method_handlers_generic_handler('test', { + 'UnaryUnary': grpc.unary_unary_rpc_method_handler(handle_unary_unary) - }) - server = test_common.test_server() - server.add_generic_rpc_handlers((handler,)) - server_cred = grpc.ssl_server_credentials(_SERVER_CERTS) - port = server.add_secure_port('[::]:0', server_cred) - server.start() - - channel_creds = grpc.ssl_channel_credentials( - root_certificates=_TEST_ROOT_CERTIFICATES) + }) + server = test_common.test_server() + server.add_generic_rpc_handlers((handler,)) + server_cred = grpc.ssl_server_credentials(_SERVER_CERTS) + port = server.add_secure_port('[::]:0', server_cred) + server.start() + + channel_creds = grpc.ssl_channel_credentials( + root_certificates=_TEST_ROOT_CERTIFICATES) channel = grpc.secure_channel('localhost:{}'.format(port), channel_creds, options=_PROPERTY_OPTIONS) - response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) - channel.close() - server.stop(None) - - auth_data = pickle.loads(response) - self.assertIsNone(auth_data[_ID]) - self.assertIsNone(auth_data[_ID_KEY]) + response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) + channel.close() + server.stop(None) + + auth_data = 
pickle.loads(response) + self.assertIsNone(auth_data[_ID]) + self.assertIsNone(auth_data[_ID_KEY]) self.assertDictEqual( { 'security_level': [b'TSI_PRIVACY_AND_INTEGRITY'], 'transport_security_type': [b'ssl'], 'ssl_session_reused': [b'false'], }, auth_data[_AUTH_CTX]) - - def testSecureClientCert(self): - handler = grpc.method_handlers_generic_handler('test', { - 'UnaryUnary': + + def testSecureClientCert(self): + handler = grpc.method_handlers_generic_handler('test', { + 'UnaryUnary': grpc.unary_unary_rpc_method_handler(handle_unary_unary) - }) - server = test_common.test_server() - server.add_generic_rpc_handlers((handler,)) - server_cred = grpc.ssl_server_credentials( - _SERVER_CERTS, - root_certificates=_TEST_ROOT_CERTIFICATES, - require_client_auth=True) - port = server.add_secure_port('[::]:0', server_cred) - server.start() - - channel_creds = grpc.ssl_channel_credentials( - root_certificates=_TEST_ROOT_CERTIFICATES, - private_key=_PRIVATE_KEY, - certificate_chain=_CERTIFICATE_CHAIN) + }) + server = test_common.test_server() + server.add_generic_rpc_handlers((handler,)) + server_cred = grpc.ssl_server_credentials( + _SERVER_CERTS, + root_certificates=_TEST_ROOT_CERTIFICATES, + require_client_auth=True) + port = server.add_secure_port('[::]:0', server_cred) + server.start() + + channel_creds = grpc.ssl_channel_credentials( + root_certificates=_TEST_ROOT_CERTIFICATES, + private_key=_PRIVATE_KEY, + certificate_chain=_CERTIFICATE_CHAIN) channel = grpc.secure_channel('localhost:{}'.format(port), channel_creds, options=_PROPERTY_OPTIONS) - - response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) - channel.close() - server.stop(None) - - auth_data = pickle.loads(response) - auth_ctx = auth_data[_AUTH_CTX] - six.assertCountEqual(self, _CLIENT_IDS, auth_data[_ID]) - self.assertEqual('x509_subject_alternative_name', auth_data[_ID_KEY]) - self.assertSequenceEqual([b'ssl'], auth_ctx['transport_security_type']) - self.assertSequenceEqual([b'*.test.google.com'], - auth_ctx['x509_common_name']) - - def _do_one_shot_client_rpc(self, channel_creds, channel_options, port, - expect_ssl_session_reused): + + response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) + channel.close() + server.stop(None) + + auth_data = pickle.loads(response) + auth_ctx = auth_data[_AUTH_CTX] + six.assertCountEqual(self, _CLIENT_IDS, auth_data[_ID]) + self.assertEqual('x509_subject_alternative_name', auth_data[_ID_KEY]) + self.assertSequenceEqual([b'ssl'], auth_ctx['transport_security_type']) + self.assertSequenceEqual([b'*.test.google.com'], + auth_ctx['x509_common_name']) + + def _do_one_shot_client_rpc(self, channel_creds, channel_options, port, + expect_ssl_session_reused): channel = grpc.secure_channel('localhost:{}'.format(port), channel_creds, options=channel_options) - response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) - auth_data = pickle.loads(response) - self.assertEqual(expect_ssl_session_reused, - auth_data[_AUTH_CTX]['ssl_session_reused']) - channel.close() - - def testSessionResumption(self): - # Set up a secure server - handler = grpc.method_handlers_generic_handler('test', { - 'UnaryUnary': + response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) + auth_data = pickle.loads(response) + self.assertEqual(expect_ssl_session_reused, + auth_data[_AUTH_CTX]['ssl_session_reused']) + channel.close() + + def testSessionResumption(self): + # Set up a secure server + handler = grpc.method_handlers_generic_handler('test', { + 'UnaryUnary': grpc.unary_unary_rpc_method_handler(handle_unary_unary) - }) - server = 
test_common.test_server() - server.add_generic_rpc_handlers((handler,)) - server_cred = grpc.ssl_server_credentials(_SERVER_CERTS) - port = server.add_secure_port('[::]:0', server_cred) - server.start() - - # Create a cache for TLS session tickets - cache = session_cache.ssl_session_cache_lru(1) - channel_creds = grpc.ssl_channel_credentials( - root_certificates=_TEST_ROOT_CERTIFICATES) - channel_options = _PROPERTY_OPTIONS + ( - ('grpc.ssl_session_cache', cache),) - - # Initial connection has no session to resume + }) + server = test_common.test_server() + server.add_generic_rpc_handlers((handler,)) + server_cred = grpc.ssl_server_credentials(_SERVER_CERTS) + port = server.add_secure_port('[::]:0', server_cred) + server.start() + + # Create a cache for TLS session tickets + cache = session_cache.ssl_session_cache_lru(1) + channel_creds = grpc.ssl_channel_credentials( + root_certificates=_TEST_ROOT_CERTIFICATES) + channel_options = _PROPERTY_OPTIONS + ( + ('grpc.ssl_session_cache', cache),) + + # Initial connection has no session to resume self._do_one_shot_client_rpc(channel_creds, channel_options, port, expect_ssl_session_reused=[b'false']) - - # Subsequent connections resume sessions + + # Subsequent connections resume sessions self._do_one_shot_client_rpc(channel_creds, channel_options, port, expect_ssl_session_reused=[b'true']) - server.stop(None) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + server.stop(None) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_test.py index cdeeb19ab8..d9df2add4f 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_test.py @@ -1,82 +1,82 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests of standard AuthMetadataPlugins.""" - -import collections -import threading -import unittest -import logging - -from grpc import _auth - - -class MockGoogleCreds(object): - - def get_access_token(self): - token = collections.namedtuple('MockAccessTokenInfo', - ('access_token', 'expires_in')) - token.access_token = 'token' - return token - - -class MockExceptionGoogleCreds(object): - - def get_access_token(self): - raise Exception() - - -class GoogleCallCredentialsTest(unittest.TestCase): - - def test_google_call_credentials_success(self): - callback_event = threading.Event() - - def mock_callback(metadata, error): - self.assertEqual(metadata, (('authorization', 'Bearer token'),)) - self.assertIsNone(error) - callback_event.set() - - call_creds = _auth.GoogleCallCredentials(MockGoogleCreds()) - call_creds(None, mock_callback) - self.assertTrue(callback_event.wait(1.0)) - - def test_google_call_credentials_error(self): - callback_event = threading.Event() - - def mock_callback(metadata, error): - self.assertIsNotNone(error) - callback_event.set() - - call_creds = _auth.GoogleCallCredentials(MockExceptionGoogleCreds()) - call_creds(None, mock_callback) - self.assertTrue(callback_event.wait(1.0)) - - -class AccessTokenAuthMetadataPluginTest(unittest.TestCase): - - def test_google_call_credentials_success(self): - callback_event = threading.Event() - - def mock_callback(metadata, error): - self.assertEqual(metadata, (('authorization', 'Bearer token'),)) - self.assertIsNone(error) - callback_event.set() - - metadata_plugin = _auth.AccessTokenAuthMetadataPlugin('token') - metadata_plugin(None, mock_callback) - self.assertTrue(callback_event.wait(1.0)) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests of standard AuthMetadataPlugins.""" + +import collections +import threading +import unittest +import logging + +from grpc import _auth + + +class MockGoogleCreds(object): + + def get_access_token(self): + token = collections.namedtuple('MockAccessTokenInfo', + ('access_token', 'expires_in')) + token.access_token = 'token' + return token + + +class MockExceptionGoogleCreds(object): + + def get_access_token(self): + raise Exception() + + +class GoogleCallCredentialsTest(unittest.TestCase): + + def test_google_call_credentials_success(self): + callback_event = threading.Event() + + def mock_callback(metadata, error): + self.assertEqual(metadata, (('authorization', 'Bearer token'),)) + self.assertIsNone(error) + callback_event.set() + + call_creds = _auth.GoogleCallCredentials(MockGoogleCreds()) + call_creds(None, mock_callback) + self.assertTrue(callback_event.wait(1.0)) + + def test_google_call_credentials_error(self): + callback_event = threading.Event() + + def mock_callback(metadata, error): + self.assertIsNotNone(error) + callback_event.set() + + call_creds = _auth.GoogleCallCredentials(MockExceptionGoogleCreds()) + call_creds(None, mock_callback) + self.assertTrue(callback_event.wait(1.0)) + + +class AccessTokenAuthMetadataPluginTest(unittest.TestCase): + + def test_google_call_credentials_success(self): + callback_event = threading.Event() + + def mock_callback(metadata, error): + self.assertEqual(metadata, (('authorization', 'Bearer token'),)) + self.assertIsNone(error) + callback_event.set() + + metadata_plugin = _auth.AccessTokenAuthMetadataPlugin('token') + metadata_plugin(None, mock_callback) + self.assertTrue(callback_event.wait(1.0)) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py index df5a18b978..2f2eea61db 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py @@ -1,65 +1,65 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""Tests of channel arguments on client/server side.""" - -from concurrent import futures -import unittest -import logging - -import grpc - - -class TestPointerWrapper(object): - - def __int__(self): - return 123456 - - -TEST_CHANNEL_ARGS = ( - ('arg1', b'bytes_val'), - ('arg2', 'str_val'), - ('arg3', 1), - (b'arg4', 'str_val'), - ('arg6', TestPointerWrapper()), -) - -INVALID_TEST_CHANNEL_ARGS = [ - { - 'foo': 'bar' - }, - (('key',),), - 'str', -] - - -class ChannelArgsTest(unittest.TestCase): - - def test_client(self): - grpc.insecure_channel('localhost:8080', options=TEST_CHANNEL_ARGS) - - def test_server(self): + +from concurrent import futures +import unittest +import logging + +import grpc + + +class TestPointerWrapper(object): + + def __int__(self): + return 123456 + + +TEST_CHANNEL_ARGS = ( + ('arg1', b'bytes_val'), + ('arg2', 'str_val'), + ('arg3', 1), + (b'arg4', 'str_val'), + ('arg6', TestPointerWrapper()), +) + +INVALID_TEST_CHANNEL_ARGS = [ + { + 'foo': 'bar' + }, + (('key',),), + 'str', +] + + +class ChannelArgsTest(unittest.TestCase): + + def test_client(self): + grpc.insecure_channel('localhost:8080', options=TEST_CHANNEL_ARGS) + + def test_server(self): grpc.server(futures.ThreadPoolExecutor(max_workers=1), options=TEST_CHANNEL_ARGS) - - def test_invalid_client_args(self): - for invalid_arg in INVALID_TEST_CHANNEL_ARGS: + + def test_invalid_client_args(self): + for invalid_arg in INVALID_TEST_CHANNEL_ARGS: self.assertRaises(ValueError, grpc.insecure_channel, 'localhost:8080', options=invalid_arg) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_close_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_close_test.py index 912738eb48..47f52b4890 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_close_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_close_test.py @@ -1,220 +1,220 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests server and client side compression.""" - -import itertools -import logging -import threading -import time -import unittest - -import grpc - -from tests.unit import test_common -from tests.unit.framework.common import test_constants - -_BEAT = 0.5 -_SOME_TIME = 5 -_MORE_TIME = 10 - -_STREAM_URI = 'Meffod' -_UNARY_URI = 'MeffodMan' - - -class _StreamingMethodHandler(grpc.RpcMethodHandler): - - request_streaming = True - response_streaming = True - request_deserializer = None - response_serializer = None - - def stream_stream(self, request_iterator, servicer_context): - for request in request_iterator: - yield request * 2 - - -class _UnaryMethodHandler(grpc.RpcMethodHandler): - - request_streaming = False - response_streaming = False - request_deserializer = None - response_serializer = None - - def unary_unary(self, request, servicer_context): - return request * 2 - - -_STREAMING_METHOD_HANDLER = _StreamingMethodHandler() -_UNARY_METHOD_HANDLER = _UnaryMethodHandler() - - -class _GenericHandler(grpc.GenericRpcHandler): - - def service(self, handler_call_details): - if handler_call_details.method == _STREAM_URI: - return _STREAMING_METHOD_HANDLER - else: - return _UNARY_METHOD_HANDLER - - -_GENERIC_HANDLER = _GenericHandler() - - -class _Pipe(object): - - def __init__(self, values): - self._condition = threading.Condition() - self._values = list(values) - self._open = True - - def __iter__(self): - return self - - def _next(self): - with self._condition: - while not self._values and self._open: - self._condition.wait() - if self._values: - return self._values.pop(0) - else: - raise StopIteration() - - def next(self): - return self._next() - - def __next__(self): - return self._next() - - def add(self, value): - with self._condition: - self._values.append(value) - self._condition.notify() - - def close(self): - with self._condition: - self._open = False - self._condition.notify() - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - self.close() - - -class ChannelCloseTest(unittest.TestCase): - - def setUp(self): - self._server = test_common.test_server( - max_workers=test_constants.THREAD_CONCURRENCY) - self._server.add_generic_rpc_handlers((_GENERIC_HANDLER,)) - self._port = self._server.add_insecure_port('[::]:0') - self._server.start() - - def tearDown(self): - self._server.stop(None) - - def test_close_immediately_after_call_invocation(self): - channel = grpc.insecure_channel('localhost:{}'.format(self._port)) - multi_callable = channel.stream_stream(_STREAM_URI) - request_iterator = _Pipe(()) - response_iterator = multi_callable(request_iterator) - channel.close() - request_iterator.close() - - self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) - - def test_close_while_call_active(self): - channel = grpc.insecure_channel('localhost:{}'.format(self._port)) - multi_callable = channel.stream_stream(_STREAM_URI) - request_iterator = _Pipe((b'abc',)) - response_iterator = multi_callable(request_iterator) - next(response_iterator) - channel.close() - request_iterator.close() - - self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) - - def test_context_manager_close_while_call_active(self): - with grpc.insecure_channel('localhost:{}'.format( - self._port)) as channel: # pylint: disable=bad-continuation - multi_callable = channel.stream_stream(_STREAM_URI) - request_iterator = _Pipe((b'abc',)) - response_iterator = multi_callable(request_iterator) - next(response_iterator) - request_iterator.close() - - 
self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) - - def test_context_manager_close_while_many_calls_active(self): - with grpc.insecure_channel('localhost:{}'.format( - self._port)) as channel: # pylint: disable=bad-continuation - multi_callable = channel.stream_stream(_STREAM_URI) - request_iterators = tuple( - _Pipe((b'abc',)) - for _ in range(test_constants.THREAD_CONCURRENCY)) - response_iterators = [] - for request_iterator in request_iterators: - response_iterator = multi_callable(request_iterator) - next(response_iterator) - response_iterators.append(response_iterator) - for request_iterator in request_iterators: - request_iterator.close() - - for response_iterator in response_iterators: - self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) - - def test_many_concurrent_closes(self): - channel = grpc.insecure_channel('localhost:{}'.format(self._port)) - multi_callable = channel.stream_stream(_STREAM_URI) - request_iterator = _Pipe((b'abc',)) - response_iterator = multi_callable(request_iterator) - next(response_iterator) - start = time.time() - end = start + _MORE_TIME - - def sleep_some_time_then_close(): - time.sleep(_SOME_TIME) - channel.close() - - for _ in range(test_constants.THREAD_CONCURRENCY): - close_thread = threading.Thread(target=sleep_some_time_then_close) - close_thread.start() - while True: - request_iterator.add(b'def') - time.sleep(_BEAT) - if end < time.time(): - break - request_iterator.close() - - self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) - - def test_exception_in_callback(self): - with grpc.insecure_channel('localhost:{}'.format( - self._port)) as channel: - stream_multi_callable = channel.stream_stream(_STREAM_URI) - endless_iterator = itertools.repeat(b'abc') - stream_response_iterator = stream_multi_callable(endless_iterator) - future = channel.unary_unary(_UNARY_URI).future(b'abc') - - def on_done_callback(future): - raise Exception("This should not cause a deadlock.") - - future.add_done_callback(on_done_callback) - future.result() - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests server and client side compression.""" + +import itertools +import logging +import threading +import time +import unittest + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_BEAT = 0.5 +_SOME_TIME = 5 +_MORE_TIME = 10 + +_STREAM_URI = 'Meffod' +_UNARY_URI = 'MeffodMan' + + +class _StreamingMethodHandler(grpc.RpcMethodHandler): + + request_streaming = True + response_streaming = True + request_deserializer = None + response_serializer = None + + def stream_stream(self, request_iterator, servicer_context): + for request in request_iterator: + yield request * 2 + + +class _UnaryMethodHandler(grpc.RpcMethodHandler): + + request_streaming = False + response_streaming = False + request_deserializer = None + response_serializer = None + + def unary_unary(self, request, servicer_context): + return request * 2 + + +_STREAMING_METHOD_HANDLER = _StreamingMethodHandler() +_UNARY_METHOD_HANDLER = _UnaryMethodHandler() + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _STREAM_URI: + return _STREAMING_METHOD_HANDLER + else: + return _UNARY_METHOD_HANDLER + + +_GENERIC_HANDLER = _GenericHandler() + + +class _Pipe(object): + + def __init__(self, values): + self._condition = threading.Condition() + self._values = list(values) + self._open = True + + def __iter__(self): + return self + + def _next(self): + with self._condition: + while not self._values and self._open: + self._condition.wait() + if self._values: + return self._values.pop(0) + else: + raise StopIteration() + + def next(self): + return self._next() + + def __next__(self): + return self._next() + + def add(self, value): + with self._condition: + self._values.append(value) + self._condition.notify() + + def close(self): + with self._condition: + self._open = False + self._condition.notify() + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + +class ChannelCloseTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server( + max_workers=test_constants.THREAD_CONCURRENCY) + self._server.add_generic_rpc_handlers((_GENERIC_HANDLER,)) + self._port = self._server.add_insecure_port('[::]:0') + self._server.start() + + def tearDown(self): + self._server.stop(None) + + def test_close_immediately_after_call_invocation(self): + channel = grpc.insecure_channel('localhost:{}'.format(self._port)) + multi_callable = channel.stream_stream(_STREAM_URI) + request_iterator = _Pipe(()) + response_iterator = multi_callable(request_iterator) + channel.close() + request_iterator.close() + + self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) + + def test_close_while_call_active(self): + channel = grpc.insecure_channel('localhost:{}'.format(self._port)) + multi_callable = channel.stream_stream(_STREAM_URI) + request_iterator = _Pipe((b'abc',)) + response_iterator = multi_callable(request_iterator) + next(response_iterator) + channel.close() + request_iterator.close() + + self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) + + def test_context_manager_close_while_call_active(self): + with grpc.insecure_channel('localhost:{}'.format( + self._port)) as channel: # pylint: disable=bad-continuation + multi_callable = channel.stream_stream(_STREAM_URI) + request_iterator = _Pipe((b'abc',)) + response_iterator = multi_callable(request_iterator) + next(response_iterator) + request_iterator.close() + + 
self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) + + def test_context_manager_close_while_many_calls_active(self): + with grpc.insecure_channel('localhost:{}'.format( + self._port)) as channel: # pylint: disable=bad-continuation + multi_callable = channel.stream_stream(_STREAM_URI) + request_iterators = tuple( + _Pipe((b'abc',)) + for _ in range(test_constants.THREAD_CONCURRENCY)) + response_iterators = [] + for request_iterator in request_iterators: + response_iterator = multi_callable(request_iterator) + next(response_iterator) + response_iterators.append(response_iterator) + for request_iterator in request_iterators: + request_iterator.close() + + for response_iterator in response_iterators: + self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) + + def test_many_concurrent_closes(self): + channel = grpc.insecure_channel('localhost:{}'.format(self._port)) + multi_callable = channel.stream_stream(_STREAM_URI) + request_iterator = _Pipe((b'abc',)) + response_iterator = multi_callable(request_iterator) + next(response_iterator) + start = time.time() + end = start + _MORE_TIME + + def sleep_some_time_then_close(): + time.sleep(_SOME_TIME) + channel.close() + + for _ in range(test_constants.THREAD_CONCURRENCY): + close_thread = threading.Thread(target=sleep_some_time_then_close) + close_thread.start() + while True: + request_iterator.add(b'def') + time.sleep(_BEAT) + if end < time.time(): + break + request_iterator.close() + + self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) + + def test_exception_in_callback(self): + with grpc.insecure_channel('localhost:{}'.format( + self._port)) as channel: + stream_multi_callable = channel.stream_stream(_STREAM_URI) + endless_iterator = itertools.repeat(b'abc') + stream_response_iterator = stream_multi_callable(endless_iterator) + future = channel.unary_unary(_UNARY_URI).future(b'abc') + + def on_done_callback(future): + raise Exception("This should not cause a deadlock.") + + future.add_done_callback(on_done_callback) + future.result() + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py index d1c2c2201e..d1b4c3c932 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py @@ -1,155 +1,155 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests of grpc._channel.Channel connectivity.""" - -import logging -import threading -import time -import unittest - -import grpc -from tests.unit.framework.common import test_constants -from tests.unit import thread_pool - - -def _ready_in_connectivities(connectivities): - return grpc.ChannelConnectivity.READY in connectivities - - -def _last_connectivity_is_not_ready(connectivities): - return connectivities[-1] is not grpc.ChannelConnectivity.READY - - -class _Callback(object): - - def __init__(self): - self._condition = threading.Condition() - self._connectivities = [] - - def update(self, connectivity): - with self._condition: - self._connectivities.append(connectivity) - self._condition.notify() - - def connectivities(self): - with self._condition: - return tuple(self._connectivities) - - def block_until_connectivities_satisfy(self, predicate): - with self._condition: - while True: - connectivities = tuple(self._connectivities) - if predicate(connectivities): - return connectivities - else: - self._condition.wait() - - -class ChannelConnectivityTest(unittest.TestCase): - - def test_lonely_channel_connectivity(self): - callback = _Callback() - - channel = grpc.insecure_channel('localhost:12345') - channel.subscribe(callback.update, try_to_connect=False) - first_connectivities = callback.block_until_connectivities_satisfy(bool) - channel.subscribe(callback.update, try_to_connect=True) - second_connectivities = callback.block_until_connectivities_satisfy( - lambda connectivities: 2 <= len(connectivities)) - # Wait for a connection that will never happen. - time.sleep(test_constants.SHORT_TIMEOUT) - third_connectivities = callback.connectivities() - channel.unsubscribe(callback.update) - fourth_connectivities = callback.connectivities() - channel.unsubscribe(callback.update) - fifth_connectivities = callback.connectivities() - - channel.close() - - self.assertSequenceEqual((grpc.ChannelConnectivity.IDLE,), - first_connectivities) - self.assertNotIn(grpc.ChannelConnectivity.READY, second_connectivities) - self.assertNotIn(grpc.ChannelConnectivity.READY, third_connectivities) - self.assertNotIn(grpc.ChannelConnectivity.READY, fourth_connectivities) - self.assertNotIn(grpc.ChannelConnectivity.READY, fifth_connectivities) - - def test_immediately_connectable_channel_connectivity(self): - recording_thread_pool = thread_pool.RecordingThreadPool( - max_workers=None) +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests of grpc._channel.Channel connectivity.""" + +import logging +import threading +import time +import unittest + +import grpc +from tests.unit.framework.common import test_constants +from tests.unit import thread_pool + + +def _ready_in_connectivities(connectivities): + return grpc.ChannelConnectivity.READY in connectivities + + +def _last_connectivity_is_not_ready(connectivities): + return connectivities[-1] is not grpc.ChannelConnectivity.READY + + +class _Callback(object): + + def __init__(self): + self._condition = threading.Condition() + self._connectivities = [] + + def update(self, connectivity): + with self._condition: + self._connectivities.append(connectivity) + self._condition.notify() + + def connectivities(self): + with self._condition: + return tuple(self._connectivities) + + def block_until_connectivities_satisfy(self, predicate): + with self._condition: + while True: + connectivities = tuple(self._connectivities) + if predicate(connectivities): + return connectivities + else: + self._condition.wait() + + +class ChannelConnectivityTest(unittest.TestCase): + + def test_lonely_channel_connectivity(self): + callback = _Callback() + + channel = grpc.insecure_channel('localhost:12345') + channel.subscribe(callback.update, try_to_connect=False) + first_connectivities = callback.block_until_connectivities_satisfy(bool) + channel.subscribe(callback.update, try_to_connect=True) + second_connectivities = callback.block_until_connectivities_satisfy( + lambda connectivities: 2 <= len(connectivities)) + # Wait for a connection that will never happen. + time.sleep(test_constants.SHORT_TIMEOUT) + third_connectivities = callback.connectivities() + channel.unsubscribe(callback.update) + fourth_connectivities = callback.connectivities() + channel.unsubscribe(callback.update) + fifth_connectivities = callback.connectivities() + + channel.close() + + self.assertSequenceEqual((grpc.ChannelConnectivity.IDLE,), + first_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.READY, second_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.READY, third_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.READY, fourth_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.READY, fifth_connectivities) + + def test_immediately_connectable_channel_connectivity(self): + recording_thread_pool = thread_pool.RecordingThreadPool( + max_workers=None) server = grpc.server(recording_thread_pool, options=(('grpc.so_reuseport', 0),)) - port = server.add_insecure_port('[::]:0') - server.start() - first_callback = _Callback() - second_callback = _Callback() - - channel = grpc.insecure_channel('localhost:{}'.format(port)) - channel.subscribe(first_callback.update, try_to_connect=False) - first_connectivities = first_callback.block_until_connectivities_satisfy( - bool) - # Wait for a connection that will never happen because try_to_connect=True - # has not yet been passed. - time.sleep(test_constants.SHORT_TIMEOUT) - second_connectivities = first_callback.connectivities() - channel.subscribe(second_callback.update, try_to_connect=True) - third_connectivities = first_callback.block_until_connectivities_satisfy( - lambda connectivities: 2 <= len(connectivities)) - fourth_connectivities = second_callback.block_until_connectivities_satisfy( - bool) - # Wait for a connection that will happen (or may already have happened). 
- first_callback.block_until_connectivities_satisfy( - _ready_in_connectivities) - second_callback.block_until_connectivities_satisfy( - _ready_in_connectivities) - channel.close() - server.stop(None) - - self.assertSequenceEqual((grpc.ChannelConnectivity.IDLE,), - first_connectivities) - self.assertSequenceEqual((grpc.ChannelConnectivity.IDLE,), - second_connectivities) - self.assertNotIn(grpc.ChannelConnectivity.TRANSIENT_FAILURE, - third_connectivities) - self.assertNotIn(grpc.ChannelConnectivity.SHUTDOWN, - third_connectivities) - self.assertNotIn(grpc.ChannelConnectivity.TRANSIENT_FAILURE, - fourth_connectivities) - self.assertNotIn(grpc.ChannelConnectivity.SHUTDOWN, - fourth_connectivities) - self.assertFalse(recording_thread_pool.was_used()) - - def test_reachable_then_unreachable_channel_connectivity(self): - recording_thread_pool = thread_pool.RecordingThreadPool( - max_workers=None) + port = server.add_insecure_port('[::]:0') + server.start() + first_callback = _Callback() + second_callback = _Callback() + + channel = grpc.insecure_channel('localhost:{}'.format(port)) + channel.subscribe(first_callback.update, try_to_connect=False) + first_connectivities = first_callback.block_until_connectivities_satisfy( + bool) + # Wait for a connection that will never happen because try_to_connect=True + # has not yet been passed. + time.sleep(test_constants.SHORT_TIMEOUT) + second_connectivities = first_callback.connectivities() + channel.subscribe(second_callback.update, try_to_connect=True) + third_connectivities = first_callback.block_until_connectivities_satisfy( + lambda connectivities: 2 <= len(connectivities)) + fourth_connectivities = second_callback.block_until_connectivities_satisfy( + bool) + # Wait for a connection that will happen (or may already have happened). + first_callback.block_until_connectivities_satisfy( + _ready_in_connectivities) + second_callback.block_until_connectivities_satisfy( + _ready_in_connectivities) + channel.close() + server.stop(None) + + self.assertSequenceEqual((grpc.ChannelConnectivity.IDLE,), + first_connectivities) + self.assertSequenceEqual((grpc.ChannelConnectivity.IDLE,), + second_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.TRANSIENT_FAILURE, + third_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.SHUTDOWN, + third_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.TRANSIENT_FAILURE, + fourth_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.SHUTDOWN, + fourth_connectivities) + self.assertFalse(recording_thread_pool.was_used()) + + def test_reachable_then_unreachable_channel_connectivity(self): + recording_thread_pool = thread_pool.RecordingThreadPool( + max_workers=None) server = grpc.server(recording_thread_pool, options=(('grpc.so_reuseport', 0),)) - port = server.add_insecure_port('[::]:0') - server.start() - callback = _Callback() - - channel = grpc.insecure_channel('localhost:{}'.format(port)) - channel.subscribe(callback.update, try_to_connect=True) - callback.block_until_connectivities_satisfy(_ready_in_connectivities) - # Now take down the server and confirm that channel readiness is repudiated. 
- server.stop(None) - callback.block_until_connectivities_satisfy( - _last_connectivity_is_not_ready) - channel.unsubscribe(callback.update) - channel.close() - self.assertFalse(recording_thread_pool.was_used()) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + port = server.add_insecure_port('[::]:0') + server.start() + callback = _Callback() + + channel = grpc.insecure_channel('localhost:{}'.format(port)) + channel.subscribe(callback.update, try_to_connect=True) + callback.block_until_connectivities_satisfy(_ready_in_connectivities) + # Now take down the server and confirm that channel readiness is repudiated. + server.stop(None) + callback.block_until_connectivities_satisfy( + _last_connectivity_is_not_ready) + channel.unsubscribe(callback.update) + channel.close() + self.assertFalse(recording_thread_pool.was_used()) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py index 35c8fd7ff3..ca9ebc16fe 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py @@ -1,97 +1,97 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests of grpc.channel_ready_future.""" - -import threading -import unittest -import logging - -import grpc -from tests.unit.framework.common import test_constants -from tests.unit import thread_pool - - -class _Callback(object): - - def __init__(self): - self._condition = threading.Condition() - self._value = None - - def accept_value(self, value): - with self._condition: - self._value = value - self._condition.notify_all() - - def block_until_called(self): - with self._condition: - while self._value is None: - self._condition.wait() - return self._value - - -class ChannelReadyFutureTest(unittest.TestCase): - - def test_lonely_channel_connectivity(self): - channel = grpc.insecure_channel('localhost:12345') - callback = _Callback() - - ready_future = grpc.channel_ready_future(channel) - ready_future.add_done_callback(callback.accept_value) - with self.assertRaises(grpc.FutureTimeoutError): - ready_future.result(timeout=test_constants.SHORT_TIMEOUT) - self.assertFalse(ready_future.cancelled()) - self.assertFalse(ready_future.done()) - self.assertTrue(ready_future.running()) - ready_future.cancel() - value_passed_to_callback = callback.block_until_called() - self.assertIs(ready_future, value_passed_to_callback) - self.assertTrue(ready_future.cancelled()) - self.assertTrue(ready_future.done()) - self.assertFalse(ready_future.running()) - - channel.close() - - def test_immediately_connectable_channel_connectivity(self): - recording_thread_pool = thread_pool.RecordingThreadPool( - max_workers=None) +# Copyright 2015 gRPC authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests of grpc.channel_ready_future.""" + +import threading +import unittest +import logging + +import grpc +from tests.unit.framework.common import test_constants +from tests.unit import thread_pool + + +class _Callback(object): + + def __init__(self): + self._condition = threading.Condition() + self._value = None + + def accept_value(self, value): + with self._condition: + self._value = value + self._condition.notify_all() + + def block_until_called(self): + with self._condition: + while self._value is None: + self._condition.wait() + return self._value + + +class ChannelReadyFutureTest(unittest.TestCase): + + def test_lonely_channel_connectivity(self): + channel = grpc.insecure_channel('localhost:12345') + callback = _Callback() + + ready_future = grpc.channel_ready_future(channel) + ready_future.add_done_callback(callback.accept_value) + with self.assertRaises(grpc.FutureTimeoutError): + ready_future.result(timeout=test_constants.SHORT_TIMEOUT) + self.assertFalse(ready_future.cancelled()) + self.assertFalse(ready_future.done()) + self.assertTrue(ready_future.running()) + ready_future.cancel() + value_passed_to_callback = callback.block_until_called() + self.assertIs(ready_future, value_passed_to_callback) + self.assertTrue(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + + channel.close() + + def test_immediately_connectable_channel_connectivity(self): + recording_thread_pool = thread_pool.RecordingThreadPool( + max_workers=None) server = grpc.server(recording_thread_pool, options=(('grpc.so_reuseport', 0),)) - port = server.add_insecure_port('[::]:0') - server.start() - channel = grpc.insecure_channel('localhost:{}'.format(port)) - callback = _Callback() - - ready_future = grpc.channel_ready_future(channel) - ready_future.add_done_callback(callback.accept_value) - self.assertIsNone( - ready_future.result(timeout=test_constants.LONG_TIMEOUT)) - value_passed_to_callback = callback.block_until_called() - self.assertIs(ready_future, value_passed_to_callback) - self.assertFalse(ready_future.cancelled()) - self.assertTrue(ready_future.done()) - self.assertFalse(ready_future.running()) - # Cancellation after maturity has no effect. 
- ready_future.cancel() - self.assertFalse(ready_future.cancelled()) - self.assertTrue(ready_future.done()) - self.assertFalse(ready_future.running()) - self.assertFalse(recording_thread_pool.was_used()) - - channel.close() - server.stop(None) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + port = server.add_insecure_port('[::]:0') + server.start() + channel = grpc.insecure_channel('localhost:{}'.format(port)) + callback = _Callback() + + ready_future = grpc.channel_ready_future(channel) + ready_future.add_done_callback(callback.accept_value) + self.assertIsNone( + ready_future.result(timeout=test_constants.LONG_TIMEOUT)) + value_passed_to_callback = callback.block_until_called() + self.assertIs(ready_future, value_passed_to_callback) + self.assertFalse(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + # Cancellation after maturity has no effect. + ready_future.cancel() + self.assertFalse(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + self.assertFalse(recording_thread_pool.was_used()) + + channel.close() + server.stop(None) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py index c1de279a7d..bc58e1032c 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py @@ -1,382 +1,382 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests server and client side compression.""" - -import unittest - -import contextlib -from concurrent import futures -import functools -import itertools -import logging -import os - -import grpc -from grpc import _grpcio_metadata - -from tests.unit import test_common -from tests.unit.framework.common import test_constants -from tests.unit import _tcp_proxy - -_UNARY_UNARY = '/test/UnaryUnary' -_UNARY_STREAM = '/test/UnaryStream' -_STREAM_UNARY = '/test/StreamUnary' -_STREAM_STREAM = '/test/StreamStream' - -# Cut down on test time. -_STREAM_LENGTH = test_constants.STREAM_LENGTH // 16 - -_HOST = 'localhost' - -_REQUEST = b'\x00' * 100 -_COMPRESSION_RATIO_THRESHOLD = 0.05 -_COMPRESSION_METHODS = ( - None, - # Disabled for test tractability. 
- # grpc.Compression.NoCompression, - # grpc.Compression.Deflate, - grpc.Compression.Gzip, -) -_COMPRESSION_NAMES = { - None: 'Uncompressed', - grpc.Compression.NoCompression: 'NoCompression', - grpc.Compression.Deflate: 'DeflateCompression', - grpc.Compression.Gzip: 'GzipCompression', -} - -_TEST_OPTIONS = { - 'client_streaming': (True, False), - 'server_streaming': (True, False), - 'channel_compression': _COMPRESSION_METHODS, - 'multicallable_compression': _COMPRESSION_METHODS, - 'server_compression': _COMPRESSION_METHODS, - 'server_call_compression': _COMPRESSION_METHODS, -} - - -def _make_handle_unary_unary(pre_response_callback): - - def _handle_unary(request, servicer_context): - if pre_response_callback: - pre_response_callback(request, servicer_context) - return request - - return _handle_unary - - -def _make_handle_unary_stream(pre_response_callback): - - def _handle_unary_stream(request, servicer_context): - if pre_response_callback: - pre_response_callback(request, servicer_context) - for _ in range(_STREAM_LENGTH): - yield request - - return _handle_unary_stream - - -def _make_handle_stream_unary(pre_response_callback): - - def _handle_stream_unary(request_iterator, servicer_context): - if pre_response_callback: - pre_response_callback(request_iterator, servicer_context) - response = None - for request in request_iterator: - if not response: - response = request - return response - - return _handle_stream_unary - - -def _make_handle_stream_stream(pre_response_callback): - - def _handle_stream(request_iterator, servicer_context): - # TODO(issue:#6891) We should be able to remove this loop, - # and replace with return; yield - for request in request_iterator: - if pre_response_callback: - pre_response_callback(request, servicer_context) - yield request - - return _handle_stream - - -def set_call_compression(compression_method, request_or_iterator, - servicer_context): - del request_or_iterator - servicer_context.set_compression(compression_method) - - -def disable_next_compression(request, servicer_context): - del request - servicer_context.disable_next_message_compression() - - -def disable_first_compression(request, servicer_context): - if int(request.decode('ascii')) == 0: - servicer_context.disable_next_message_compression() - - -class _MethodHandler(grpc.RpcMethodHandler): - - def __init__(self, request_streaming, response_streaming, - pre_response_callback): - self.request_streaming = request_streaming - self.response_streaming = response_streaming - self.request_deserializer = None - self.response_serializer = None - self.unary_unary = None - self.unary_stream = None - self.stream_unary = None - self.stream_stream = None - - if self.request_streaming and self.response_streaming: - self.stream_stream = _make_handle_stream_stream( - pre_response_callback) - elif not self.request_streaming and not self.response_streaming: - self.unary_unary = _make_handle_unary_unary(pre_response_callback) - elif not self.request_streaming and self.response_streaming: - self.unary_stream = _make_handle_unary_stream(pre_response_callback) - else: - self.stream_unary = _make_handle_stream_unary(pre_response_callback) - - -class _GenericHandler(grpc.GenericRpcHandler): - - def __init__(self, pre_response_callback): - self._pre_response_callback = pre_response_callback - - def service(self, handler_call_details): - if handler_call_details.method == _UNARY_UNARY: - return _MethodHandler(False, False, self._pre_response_callback) - elif handler_call_details.method == _UNARY_STREAM: - return 
_MethodHandler(False, True, self._pre_response_callback) - elif handler_call_details.method == _STREAM_UNARY: - return _MethodHandler(True, False, self._pre_response_callback) - elif handler_call_details.method == _STREAM_STREAM: - return _MethodHandler(True, True, self._pre_response_callback) - else: - return None - - -@contextlib.contextmanager -def _instrumented_client_server_pair(channel_kwargs, server_kwargs, - server_handler): - server = grpc.server(futures.ThreadPoolExecutor(), **server_kwargs) - server.add_generic_rpc_handlers((server_handler,)) - server_port = server.add_insecure_port('{}:0'.format(_HOST)) - server.start() - with _tcp_proxy.TcpProxy(_HOST, _HOST, server_port) as proxy: - proxy_port = proxy.get_port() - with grpc.insecure_channel('{}:{}'.format(_HOST, proxy_port), - **channel_kwargs) as client_channel: - try: - yield client_channel, proxy, server - finally: - server.stop(None) - - -def _get_byte_counts(channel_kwargs, multicallable_kwargs, client_function, - server_kwargs, server_handler, message): - with _instrumented_client_server_pair(channel_kwargs, server_kwargs, - server_handler) as pipeline: - client_channel, proxy, server = pipeline - client_function(client_channel, multicallable_kwargs, message) - return proxy.get_byte_count() - - -def _get_compression_ratios(client_function, first_channel_kwargs, - first_multicallable_kwargs, first_server_kwargs, - first_server_handler, second_channel_kwargs, - second_multicallable_kwargs, second_server_kwargs, - second_server_handler, message): - try: - # This test requires the byte length of each connection to be deterministic. As - # it turns out, flow control puts bytes on the wire in a nondeterministic - # manner. We disable it here in order to measure compression ratios - # deterministically. - os.environ['GRPC_EXPERIMENTAL_DISABLE_FLOW_CONTROL'] = 'true' - first_bytes_sent, first_bytes_received = _get_byte_counts( - first_channel_kwargs, first_multicallable_kwargs, client_function, - first_server_kwargs, first_server_handler, message) - second_bytes_sent, second_bytes_received = _get_byte_counts( - second_channel_kwargs, second_multicallable_kwargs, client_function, - second_server_kwargs, second_server_handler, message) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests server and client side compression.""" + +import unittest + +import contextlib +from concurrent import futures +import functools +import itertools +import logging +import os + +import grpc +from grpc import _grpcio_metadata + +from tests.unit import test_common +from tests.unit.framework.common import test_constants +from tests.unit import _tcp_proxy + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + +# Cut down on test time. 
+_STREAM_LENGTH = test_constants.STREAM_LENGTH // 16 + +_HOST = 'localhost' + +_REQUEST = b'\x00' * 100 +_COMPRESSION_RATIO_THRESHOLD = 0.05 +_COMPRESSION_METHODS = ( + None, + # Disabled for test tractability. + # grpc.Compression.NoCompression, + # grpc.Compression.Deflate, + grpc.Compression.Gzip, +) +_COMPRESSION_NAMES = { + None: 'Uncompressed', + grpc.Compression.NoCompression: 'NoCompression', + grpc.Compression.Deflate: 'DeflateCompression', + grpc.Compression.Gzip: 'GzipCompression', +} + +_TEST_OPTIONS = { + 'client_streaming': (True, False), + 'server_streaming': (True, False), + 'channel_compression': _COMPRESSION_METHODS, + 'multicallable_compression': _COMPRESSION_METHODS, + 'server_compression': _COMPRESSION_METHODS, + 'server_call_compression': _COMPRESSION_METHODS, +} + + +def _make_handle_unary_unary(pre_response_callback): + + def _handle_unary(request, servicer_context): + if pre_response_callback: + pre_response_callback(request, servicer_context) + return request + + return _handle_unary + + +def _make_handle_unary_stream(pre_response_callback): + + def _handle_unary_stream(request, servicer_context): + if pre_response_callback: + pre_response_callback(request, servicer_context) + for _ in range(_STREAM_LENGTH): + yield request + + return _handle_unary_stream + + +def _make_handle_stream_unary(pre_response_callback): + + def _handle_stream_unary(request_iterator, servicer_context): + if pre_response_callback: + pre_response_callback(request_iterator, servicer_context) + response = None + for request in request_iterator: + if not response: + response = request + return response + + return _handle_stream_unary + + +def _make_handle_stream_stream(pre_response_callback): + + def _handle_stream(request_iterator, servicer_context): + # TODO(issue:#6891) We should be able to remove this loop, + # and replace with return; yield + for request in request_iterator: + if pre_response_callback: + pre_response_callback(request, servicer_context) + yield request + + return _handle_stream + + +def set_call_compression(compression_method, request_or_iterator, + servicer_context): + del request_or_iterator + servicer_context.set_compression(compression_method) + + +def disable_next_compression(request, servicer_context): + del request + servicer_context.disable_next_message_compression() + + +def disable_first_compression(request, servicer_context): + if int(request.decode('ascii')) == 0: + servicer_context.disable_next_message_compression() + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming, + pre_response_callback): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + + if self.request_streaming and self.response_streaming: + self.stream_stream = _make_handle_stream_stream( + pre_response_callback) + elif not self.request_streaming and not self.response_streaming: + self.unary_unary = _make_handle_unary_unary(pre_response_callback) + elif not self.request_streaming and self.response_streaming: + self.unary_stream = _make_handle_unary_stream(pre_response_callback) + else: + self.stream_unary = _make_handle_stream_unary(pre_response_callback) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, pre_response_callback): + self._pre_response_callback = pre_response_callback + + def 
service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(False, False, self._pre_response_callback) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(False, True, self._pre_response_callback) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(True, False, self._pre_response_callback) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(True, True, self._pre_response_callback) + else: + return None + + +@contextlib.contextmanager +def _instrumented_client_server_pair(channel_kwargs, server_kwargs, + server_handler): + server = grpc.server(futures.ThreadPoolExecutor(), **server_kwargs) + server.add_generic_rpc_handlers((server_handler,)) + server_port = server.add_insecure_port('{}:0'.format(_HOST)) + server.start() + with _tcp_proxy.TcpProxy(_HOST, _HOST, server_port) as proxy: + proxy_port = proxy.get_port() + with grpc.insecure_channel('{}:{}'.format(_HOST, proxy_port), + **channel_kwargs) as client_channel: + try: + yield client_channel, proxy, server + finally: + server.stop(None) + + +def _get_byte_counts(channel_kwargs, multicallable_kwargs, client_function, + server_kwargs, server_handler, message): + with _instrumented_client_server_pair(channel_kwargs, server_kwargs, + server_handler) as pipeline: + client_channel, proxy, server = pipeline + client_function(client_channel, multicallable_kwargs, message) + return proxy.get_byte_count() + + +def _get_compression_ratios(client_function, first_channel_kwargs, + first_multicallable_kwargs, first_server_kwargs, + first_server_handler, second_channel_kwargs, + second_multicallable_kwargs, second_server_kwargs, + second_server_handler, message): + try: + # This test requires the byte length of each connection to be deterministic. As + # it turns out, flow control puts bytes on the wire in a nondeterministic + # manner. We disable it here in order to measure compression ratios + # deterministically. 
+ os.environ['GRPC_EXPERIMENTAL_DISABLE_FLOW_CONTROL'] = 'true' + first_bytes_sent, first_bytes_received = _get_byte_counts( + first_channel_kwargs, first_multicallable_kwargs, client_function, + first_server_kwargs, first_server_handler, message) + second_bytes_sent, second_bytes_received = _get_byte_counts( + second_channel_kwargs, second_multicallable_kwargs, client_function, + second_server_kwargs, second_server_handler, message) return ((second_bytes_sent - first_bytes_sent) / float(first_bytes_sent), - (second_bytes_received - first_bytes_received) / - float(first_bytes_received)) - finally: - del os.environ['GRPC_EXPERIMENTAL_DISABLE_FLOW_CONTROL'] - - -def _unary_unary_client(channel, multicallable_kwargs, message): - multi_callable = channel.unary_unary(_UNARY_UNARY) - response = multi_callable(message, **multicallable_kwargs) - if response != message: - raise RuntimeError("Request '{}' != Response '{}'".format( - message, response)) - - -def _unary_stream_client(channel, multicallable_kwargs, message): - multi_callable = channel.unary_stream(_UNARY_STREAM) - response_iterator = multi_callable(message, **multicallable_kwargs) - for response in response_iterator: - if response != message: - raise RuntimeError("Request '{}' != Response '{}'".format( - message, response)) - - -def _stream_unary_client(channel, multicallable_kwargs, message): - multi_callable = channel.stream_unary(_STREAM_UNARY) - requests = (_REQUEST for _ in range(_STREAM_LENGTH)) - response = multi_callable(requests, **multicallable_kwargs) - if response != message: - raise RuntimeError("Request '{}' != Response '{}'".format( - message, response)) - - -def _stream_stream_client(channel, multicallable_kwargs, message): - multi_callable = channel.stream_stream(_STREAM_STREAM) - request_prefix = str(0).encode('ascii') * 100 - requests = ( - request_prefix + str(i).encode('ascii') for i in range(_STREAM_LENGTH)) - response_iterator = multi_callable(requests, **multicallable_kwargs) - for i, response in enumerate(response_iterator): - if int(response.decode('ascii')) != i: - raise RuntimeError("Request '{}' != Response '{}'".format( - i, response)) - - -class CompressionTest(unittest.TestCase): - - def assertCompressed(self, compression_ratio): - self.assertLess( - compression_ratio, - -1.0 * _COMPRESSION_RATIO_THRESHOLD, - msg='Actual compression ratio: {}'.format(compression_ratio)) - - def assertNotCompressed(self, compression_ratio): - self.assertGreaterEqual( - compression_ratio, - -1.0 * _COMPRESSION_RATIO_THRESHOLD, - msg='Actual compession ratio: {}'.format(compression_ratio)) - + (second_bytes_received - first_bytes_received) / + float(first_bytes_received)) + finally: + del os.environ['GRPC_EXPERIMENTAL_DISABLE_FLOW_CONTROL'] + + +def _unary_unary_client(channel, multicallable_kwargs, message): + multi_callable = channel.unary_unary(_UNARY_UNARY) + response = multi_callable(message, **multicallable_kwargs) + if response != message: + raise RuntimeError("Request '{}' != Response '{}'".format( + message, response)) + + +def _unary_stream_client(channel, multicallable_kwargs, message): + multi_callable = channel.unary_stream(_UNARY_STREAM) + response_iterator = multi_callable(message, **multicallable_kwargs) + for response in response_iterator: + if response != message: + raise RuntimeError("Request '{}' != Response '{}'".format( + message, response)) + + +def _stream_unary_client(channel, multicallable_kwargs, message): + multi_callable = channel.stream_unary(_STREAM_UNARY) + requests = (_REQUEST for _ in 
range(_STREAM_LENGTH)) + response = multi_callable(requests, **multicallable_kwargs) + if response != message: + raise RuntimeError("Request '{}' != Response '{}'".format( + message, response)) + + +def _stream_stream_client(channel, multicallable_kwargs, message): + multi_callable = channel.stream_stream(_STREAM_STREAM) + request_prefix = str(0).encode('ascii') * 100 + requests = ( + request_prefix + str(i).encode('ascii') for i in range(_STREAM_LENGTH)) + response_iterator = multi_callable(requests, **multicallable_kwargs) + for i, response in enumerate(response_iterator): + if int(response.decode('ascii')) != i: + raise RuntimeError("Request '{}' != Response '{}'".format( + i, response)) + + +class CompressionTest(unittest.TestCase): + + def assertCompressed(self, compression_ratio): + self.assertLess( + compression_ratio, + -1.0 * _COMPRESSION_RATIO_THRESHOLD, + msg='Actual compression ratio: {}'.format(compression_ratio)) + + def assertNotCompressed(self, compression_ratio): + self.assertGreaterEqual( + compression_ratio, + -1.0 * _COMPRESSION_RATIO_THRESHOLD, + msg='Actual compession ratio: {}'.format(compression_ratio)) + def assertConfigurationCompressed(self, client_streaming, server_streaming, channel_compression, multicallable_compression, server_compression, server_call_compression): - client_side_compressed = channel_compression or multicallable_compression - server_side_compressed = server_compression or server_call_compression - channel_kwargs = { - 'compression': channel_compression, - } if channel_compression else {} - multicallable_kwargs = { - 'compression': multicallable_compression, - } if multicallable_compression else {} - - client_function = None - if not client_streaming and not server_streaming: - client_function = _unary_unary_client - elif not client_streaming and server_streaming: - client_function = _unary_stream_client - elif client_streaming and not server_streaming: - client_function = _stream_unary_client - else: - client_function = _stream_stream_client - - server_kwargs = { - 'compression': server_compression, - } if server_compression else {} - server_handler = _GenericHandler( - functools.partial(set_call_compression, grpc.Compression.Gzip) - ) if server_call_compression else _GenericHandler(None) - sent_ratio, received_ratio = _get_compression_ratios( - client_function, {}, {}, {}, _GenericHandler(None), channel_kwargs, - multicallable_kwargs, server_kwargs, server_handler, _REQUEST) - - if client_side_compressed: - self.assertCompressed(sent_ratio) - else: - self.assertNotCompressed(sent_ratio) - - if server_side_compressed: - self.assertCompressed(received_ratio) - else: - self.assertNotCompressed(received_ratio) - - def testDisableNextCompressionStreaming(self): - server_kwargs = { - 'compression': grpc.Compression.Deflate, - } - _, received_ratio = _get_compression_ratios( - _stream_stream_client, {}, {}, {}, _GenericHandler(None), {}, {}, - server_kwargs, _GenericHandler(disable_next_compression), _REQUEST) - self.assertNotCompressed(received_ratio) - - def testDisableNextCompressionStreamingResets(self): - server_kwargs = { - 'compression': grpc.Compression.Deflate, - } - _, received_ratio = _get_compression_ratios( - _stream_stream_client, {}, {}, {}, _GenericHandler(None), {}, {}, - server_kwargs, _GenericHandler(disable_first_compression), _REQUEST) - self.assertCompressed(received_ratio) - - -def _get_compression_str(name, value): - return '{}{}'.format(name, _COMPRESSION_NAMES[value]) - - -def _get_compression_test_name(client_streaming, 
server_streaming, - channel_compression, multicallable_compression, - server_compression, server_call_compression): - client_arity = 'Stream' if client_streaming else 'Unary' - server_arity = 'Stream' if server_streaming else 'Unary' - arity = '{}{}'.format(client_arity, server_arity) - channel_compression_str = _get_compression_str('Channel', - channel_compression) - multicallable_compression_str = _get_compression_str( - 'Multicallable', multicallable_compression) - server_compression_str = _get_compression_str('Server', server_compression) - server_call_compression_str = _get_compression_str('ServerCall', - server_call_compression) + client_side_compressed = channel_compression or multicallable_compression + server_side_compressed = server_compression or server_call_compression + channel_kwargs = { + 'compression': channel_compression, + } if channel_compression else {} + multicallable_kwargs = { + 'compression': multicallable_compression, + } if multicallable_compression else {} + + client_function = None + if not client_streaming and not server_streaming: + client_function = _unary_unary_client + elif not client_streaming and server_streaming: + client_function = _unary_stream_client + elif client_streaming and not server_streaming: + client_function = _stream_unary_client + else: + client_function = _stream_stream_client + + server_kwargs = { + 'compression': server_compression, + } if server_compression else {} + server_handler = _GenericHandler( + functools.partial(set_call_compression, grpc.Compression.Gzip) + ) if server_call_compression else _GenericHandler(None) + sent_ratio, received_ratio = _get_compression_ratios( + client_function, {}, {}, {}, _GenericHandler(None), channel_kwargs, + multicallable_kwargs, server_kwargs, server_handler, _REQUEST) + + if client_side_compressed: + self.assertCompressed(sent_ratio) + else: + self.assertNotCompressed(sent_ratio) + + if server_side_compressed: + self.assertCompressed(received_ratio) + else: + self.assertNotCompressed(received_ratio) + + def testDisableNextCompressionStreaming(self): + server_kwargs = { + 'compression': grpc.Compression.Deflate, + } + _, received_ratio = _get_compression_ratios( + _stream_stream_client, {}, {}, {}, _GenericHandler(None), {}, {}, + server_kwargs, _GenericHandler(disable_next_compression), _REQUEST) + self.assertNotCompressed(received_ratio) + + def testDisableNextCompressionStreamingResets(self): + server_kwargs = { + 'compression': grpc.Compression.Deflate, + } + _, received_ratio = _get_compression_ratios( + _stream_stream_client, {}, {}, {}, _GenericHandler(None), {}, {}, + server_kwargs, _GenericHandler(disable_first_compression), _REQUEST) + self.assertCompressed(received_ratio) + + +def _get_compression_str(name, value): + return '{}{}'.format(name, _COMPRESSION_NAMES[value]) + + +def _get_compression_test_name(client_streaming, server_streaming, + channel_compression, multicallable_compression, + server_compression, server_call_compression): + client_arity = 'Stream' if client_streaming else 'Unary' + server_arity = 'Stream' if server_streaming else 'Unary' + arity = '{}{}'.format(client_arity, server_arity) + channel_compression_str = _get_compression_str('Channel', + channel_compression) + multicallable_compression_str = _get_compression_str( + 'Multicallable', multicallable_compression) + server_compression_str = _get_compression_str('Server', server_compression) + server_call_compression_str = _get_compression_str('ServerCall', + server_call_compression) return 
'test{}{}{}{}{}'.format(arity, channel_compression_str, multicallable_compression_str, server_compression_str, server_call_compression_str) - - -def _test_options(): - for test_parameters in itertools.product(*_TEST_OPTIONS.values()): - yield dict(zip(_TEST_OPTIONS.keys(), test_parameters)) - - -for options in _test_options(): - - def test_compression(**kwargs): - - def _test_compression(self): - self.assertConfigurationCompressed(**kwargs) - - return _test_compression - - setattr(CompressionTest, _get_compression_test_name(**options), - test_compression(**options)) - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + + +def _test_options(): + for test_parameters in itertools.product(*_TEST_OPTIONS.values()): + yield dict(zip(_TEST_OPTIONS.keys(), test_parameters)) + + +for options in _test_options(): + + def test_compression(**kwargs): + + def _test_compression(self): + self.assertConfigurationCompressed(**kwargs) + + return _test_compression + + setattr(CompressionTest, _get_compression_test_name(**options), + test_compression(**options)) + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_credentials_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_credentials_test.py index cf44ed464a..187a6f0388 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_credentials_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_credentials_test.py @@ -1,70 +1,70 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests of credentials.""" - -import unittest -import logging -import six - -import grpc - - -class CredentialsTest(unittest.TestCase): - - def test_call_credentials_composition(self): - first = grpc.access_token_call_credentials('abc') - second = grpc.access_token_call_credentials('def') - third = grpc.access_token_call_credentials('ghi') - - first_and_second = grpc.composite_call_credentials(first, second) - first_second_and_third = grpc.composite_call_credentials( - first, second, third) - - self.assertIsInstance(first_and_second, grpc.CallCredentials) - self.assertIsInstance(first_second_and_third, grpc.CallCredentials) - - def test_channel_credentials_composition(self): - first_call_credentials = grpc.access_token_call_credentials('abc') - second_call_credentials = grpc.access_token_call_credentials('def') - third_call_credentials = grpc.access_token_call_credentials('ghi') - channel_credentials = grpc.ssl_channel_credentials() - - channel_and_first = grpc.composite_channel_credentials( - channel_credentials, first_call_credentials) - channel_first_and_second = grpc.composite_channel_credentials( - channel_credentials, first_call_credentials, - second_call_credentials) - channel_first_second_and_third = grpc.composite_channel_credentials( - channel_credentials, first_call_credentials, - second_call_credentials, third_call_credentials) - - self.assertIsInstance(channel_and_first, grpc.ChannelCredentials) - self.assertIsInstance(channel_first_and_second, grpc.ChannelCredentials) - self.assertIsInstance(channel_first_second_and_third, - grpc.ChannelCredentials) - - @unittest.skipIf(six.PY2, 'only invalid in Python3') - def test_invalid_string_certificate(self): - self.assertRaises( - TypeError, - grpc.ssl_channel_credentials, - root_certificates='A Certificate', - private_key=None, - certificate_chain=None, - ) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests of credentials.""" + +import unittest +import logging +import six + +import grpc + + +class CredentialsTest(unittest.TestCase): + + def test_call_credentials_composition(self): + first = grpc.access_token_call_credentials('abc') + second = grpc.access_token_call_credentials('def') + third = grpc.access_token_call_credentials('ghi') + + first_and_second = grpc.composite_call_credentials(first, second) + first_second_and_third = grpc.composite_call_credentials( + first, second, third) + + self.assertIsInstance(first_and_second, grpc.CallCredentials) + self.assertIsInstance(first_second_and_third, grpc.CallCredentials) + + def test_channel_credentials_composition(self): + first_call_credentials = grpc.access_token_call_credentials('abc') + second_call_credentials = grpc.access_token_call_credentials('def') + third_call_credentials = grpc.access_token_call_credentials('ghi') + channel_credentials = grpc.ssl_channel_credentials() + + channel_and_first = grpc.composite_channel_credentials( + channel_credentials, first_call_credentials) + channel_first_and_second = grpc.composite_channel_credentials( + channel_credentials, first_call_credentials, + second_call_credentials) + channel_first_second_and_third = grpc.composite_channel_credentials( + channel_credentials, first_call_credentials, + second_call_credentials, third_call_credentials) + + self.assertIsInstance(channel_and_first, grpc.ChannelCredentials) + self.assertIsInstance(channel_first_and_second, grpc.ChannelCredentials) + self.assertIsInstance(channel_first_second_and_third, + grpc.ChannelCredentials) + + @unittest.skipIf(six.PY2, 'only invalid in Python3') + def test_invalid_string_certificate(self): + self.assertRaises( + TypeError, + grpc.ssl_channel_credentials, + root_certificates='A Certificate', + private_key=None, + certificate_chain=None, + ) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/__init__.py index dc985eebb4..5fb4f3c3cf 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py index 478f56ad2f..b279f3d07c 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py @@ -1,223 +1,223 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test making many calls and immediately cancelling most of them.""" - -import threading -import unittest - -from grpc._cython import cygrpc -from grpc.framework.foundation import logging_pool -from tests.unit.framework.common import test_constants -from tests.unit._cython import test_utilities - -_EMPTY_FLAGS = 0 -_EMPTY_METADATA = () - -_SERVER_SHUTDOWN_TAG = 'server_shutdown' -_REQUEST_CALL_TAG = 'request_call' -_RECEIVE_CLOSE_ON_SERVER_TAG = 'receive_close_on_server' -_RECEIVE_MESSAGE_TAG = 'receive_message' -_SERVER_COMPLETE_CALL_TAG = 'server_complete_call' - -_SUCCESS_CALL_FRACTION = 1.0 / 8.0 -_SUCCESSFUL_CALLS = int(test_constants.RPC_CONCURRENCY * _SUCCESS_CALL_FRACTION) -_UNSUCCESSFUL_CALLS = test_constants.RPC_CONCURRENCY - _SUCCESSFUL_CALLS - - -class _State(object): - - def __init__(self): - self.condition = threading.Condition() - self.handlers_released = False - self.parked_handlers = 0 - self.handled_rpcs = 0 - - -def _is_cancellation_event(event): - return (event.tag is _RECEIVE_CLOSE_ON_SERVER_TAG and - event.batch_operations[0].cancelled()) - - -class _Handler(object): - - def __init__(self, state, completion_queue, rpc_event): - self._state = state - self._lock = threading.Lock() - self._completion_queue = completion_queue - self._call = rpc_event.call - - def __call__(self): - with self._state.condition: - self._state.parked_handlers += 1 - if self._state.parked_handlers == test_constants.THREAD_CONCURRENCY: - self._state.condition.notify_all() - while not self._state.handlers_released: - self._state.condition.wait() - - with self._lock: - self._call.start_server_batch( - (cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),), - _RECEIVE_CLOSE_ON_SERVER_TAG) - self._call.start_server_batch( - (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), - _RECEIVE_MESSAGE_TAG) - first_event = self._completion_queue.poll() - if _is_cancellation_event(first_event): - self._completion_queue.poll() - else: - with self._lock: - operations = ( - cygrpc.SendInitialMetadataOperation(_EMPTY_METADATA, - _EMPTY_FLAGS), - cygrpc.SendMessageOperation(b'\x79\x57', _EMPTY_FLAGS), - cygrpc.SendStatusFromServerOperation( - _EMPTY_METADATA, cygrpc.StatusCode.ok, b'test details!', - _EMPTY_FLAGS), - ) - self._call.start_server_batch(operations, - _SERVER_COMPLETE_CALL_TAG) - self._completion_queue.poll() - self._completion_queue.poll() - - -def _serve(state, server, server_completion_queue, thread_pool): - for _ in range(test_constants.RPC_CONCURRENCY): - call_completion_queue = 
cygrpc.CompletionQueue() - server.request_call(call_completion_queue, server_completion_queue, - _REQUEST_CALL_TAG) - rpc_event = server_completion_queue.poll() - thread_pool.submit(_Handler(state, call_completion_queue, rpc_event)) - with state.condition: - state.handled_rpcs += 1 - if test_constants.RPC_CONCURRENCY <= state.handled_rpcs: - state.condition.notify_all() - server_completion_queue.poll() - - -class _QueueDriver(object): - - def __init__(self, condition, completion_queue, due): - self._condition = condition - self._completion_queue = completion_queue - self._due = due - self._events = [] - self._returned = False - - def start(self): - - def in_thread(): - while True: - event = self._completion_queue.poll() - with self._condition: - self._events.append(event) - self._due.remove(event.tag) - self._condition.notify_all() - if not self._due: - self._returned = True - return - - thread = threading.Thread(target=in_thread) - thread.start() - - def events(self, at_least): - with self._condition: - while len(self._events) < at_least: - self._condition.wait() - return tuple(self._events) - - -class CancelManyCallsTest(unittest.TestCase): - - def testCancelManyCalls(self): - server_thread_pool = logging_pool.pool( - test_constants.THREAD_CONCURRENCY) - - server_completion_queue = cygrpc.CompletionQueue() - server = cygrpc.Server([ - ( - b'grpc.so_reuseport', - 0, - ), - ]) - server.register_completion_queue(server_completion_queue) - port = server.add_http2_port(b'[::]:0') - server.start() - channel = cygrpc.Channel('localhost:{}'.format(port).encode(), None, - None) - - state = _State() - - server_thread_args = ( - state, - server, - server_completion_queue, - server_thread_pool, - ) - server_thread = threading.Thread(target=_serve, args=server_thread_args) - server_thread.start() - - client_condition = threading.Condition() - client_due = set() - - with client_condition: - client_calls = [] - for index in range(test_constants.RPC_CONCURRENCY): - tag = 'client_complete_call_{0:04d}_tag'.format(index) - client_call = channel.integrated_call( - _EMPTY_FLAGS, b'/twinkies', None, None, _EMPTY_METADATA, - None, (( - ( - cygrpc.SendInitialMetadataOperation( - _EMPTY_METADATA, _EMPTY_FLAGS), - cygrpc.SendMessageOperation(b'\x45\x56', - _EMPTY_FLAGS), - cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), - cygrpc.ReceiveInitialMetadataOperation( - _EMPTY_FLAGS), - cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), - cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), - ), - tag, - ),)) - client_due.add(tag) - client_calls.append(client_call) - +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test making many calls and immediately cancelling most of them.""" + +import threading +import unittest + +from grpc._cython import cygrpc +from grpc.framework.foundation import logging_pool +from tests.unit.framework.common import test_constants +from tests.unit._cython import test_utilities + +_EMPTY_FLAGS = 0 +_EMPTY_METADATA = () + +_SERVER_SHUTDOWN_TAG = 'server_shutdown' +_REQUEST_CALL_TAG = 'request_call' +_RECEIVE_CLOSE_ON_SERVER_TAG = 'receive_close_on_server' +_RECEIVE_MESSAGE_TAG = 'receive_message' +_SERVER_COMPLETE_CALL_TAG = 'server_complete_call' + +_SUCCESS_CALL_FRACTION = 1.0 / 8.0 +_SUCCESSFUL_CALLS = int(test_constants.RPC_CONCURRENCY * _SUCCESS_CALL_FRACTION) +_UNSUCCESSFUL_CALLS = test_constants.RPC_CONCURRENCY - _SUCCESSFUL_CALLS + + +class _State(object): + + def __init__(self): + self.condition = threading.Condition() + self.handlers_released = False + self.parked_handlers = 0 + self.handled_rpcs = 0 + + +def _is_cancellation_event(event): + return (event.tag is _RECEIVE_CLOSE_ON_SERVER_TAG and + event.batch_operations[0].cancelled()) + + +class _Handler(object): + + def __init__(self, state, completion_queue, rpc_event): + self._state = state + self._lock = threading.Lock() + self._completion_queue = completion_queue + self._call = rpc_event.call + + def __call__(self): + with self._state.condition: + self._state.parked_handlers += 1 + if self._state.parked_handlers == test_constants.THREAD_CONCURRENCY: + self._state.condition.notify_all() + while not self._state.handlers_released: + self._state.condition.wait() + + with self._lock: + self._call.start_server_batch( + (cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),), + _RECEIVE_CLOSE_ON_SERVER_TAG) + self._call.start_server_batch( + (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), + _RECEIVE_MESSAGE_TAG) + first_event = self._completion_queue.poll() + if _is_cancellation_event(first_event): + self._completion_queue.poll() + else: + with self._lock: + operations = ( + cygrpc.SendInitialMetadataOperation(_EMPTY_METADATA, + _EMPTY_FLAGS), + cygrpc.SendMessageOperation(b'\x79\x57', _EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + _EMPTY_METADATA, cygrpc.StatusCode.ok, b'test details!', + _EMPTY_FLAGS), + ) + self._call.start_server_batch(operations, + _SERVER_COMPLETE_CALL_TAG) + self._completion_queue.poll() + self._completion_queue.poll() + + +def _serve(state, server, server_completion_queue, thread_pool): + for _ in range(test_constants.RPC_CONCURRENCY): + call_completion_queue = cygrpc.CompletionQueue() + server.request_call(call_completion_queue, server_completion_queue, + _REQUEST_CALL_TAG) + rpc_event = server_completion_queue.poll() + thread_pool.submit(_Handler(state, call_completion_queue, rpc_event)) + with state.condition: + state.handled_rpcs += 1 + if test_constants.RPC_CONCURRENCY <= state.handled_rpcs: + state.condition.notify_all() + server_completion_queue.poll() + + +class _QueueDriver(object): + + def __init__(self, condition, completion_queue, due): + self._condition = condition + self._completion_queue = completion_queue + self._due = due + self._events = [] + self._returned = False + + def start(self): + + def in_thread(): + while True: + event = self._completion_queue.poll() + with self._condition: + self._events.append(event) + self._due.remove(event.tag) + self._condition.notify_all() + if not self._due: + self._returned = True + return + + thread = threading.Thread(target=in_thread) + thread.start() + + def events(self, at_least): + with self._condition: + while 
len(self._events) < at_least: + self._condition.wait() + return tuple(self._events) + + +class CancelManyCallsTest(unittest.TestCase): + + def testCancelManyCalls(self): + server_thread_pool = logging_pool.pool( + test_constants.THREAD_CONCURRENCY) + + server_completion_queue = cygrpc.CompletionQueue() + server = cygrpc.Server([ + ( + b'grpc.so_reuseport', + 0, + ), + ]) + server.register_completion_queue(server_completion_queue) + port = server.add_http2_port(b'[::]:0') + server.start() + channel = cygrpc.Channel('localhost:{}'.format(port).encode(), None, + None) + + state = _State() + + server_thread_args = ( + state, + server, + server_completion_queue, + server_thread_pool, + ) + server_thread = threading.Thread(target=_serve, args=server_thread_args) + server_thread.start() + + client_condition = threading.Condition() + client_due = set() + + with client_condition: + client_calls = [] + for index in range(test_constants.RPC_CONCURRENCY): + tag = 'client_complete_call_{0:04d}_tag'.format(index) + client_call = channel.integrated_call( + _EMPTY_FLAGS, b'/twinkies', None, None, _EMPTY_METADATA, + None, (( + ( + cygrpc.SendInitialMetadataOperation( + _EMPTY_METADATA, _EMPTY_FLAGS), + cygrpc.SendMessageOperation(b'\x45\x56', + _EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + cygrpc.ReceiveInitialMetadataOperation( + _EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ), + tag, + ),)) + client_due.add(tag) + client_calls.append(client_call) + client_events_future = test_utilities.SimpleFuture(lambda: tuple( channel.next_call_event() for _ in range(_SUCCESSFUL_CALLS))) - - with state.condition: - while True: - if state.parked_handlers < test_constants.THREAD_CONCURRENCY: - state.condition.wait() - elif state.handled_rpcs < test_constants.RPC_CONCURRENCY: - state.condition.wait() - else: - state.handlers_released = True - state.condition.notify_all() - break - - client_events_future.result() - with client_condition: - for client_call in client_calls: - client_call.cancel(cygrpc.StatusCode.cancelled, 'Cancelled!') - for _ in range(_UNSUCCESSFUL_CALLS): - channel.next_call_event() - - channel.close(cygrpc.StatusCode.unknown, 'Cancelled on channel close!') - with state.condition: - server.shutdown(server_completion_queue, _SERVER_SHUTDOWN_TAG) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + + with state.condition: + while True: + if state.parked_handlers < test_constants.THREAD_CONCURRENCY: + state.condition.wait() + elif state.handled_rpcs < test_constants.RPC_CONCURRENCY: + state.condition.wait() + else: + state.handlers_released = True + state.condition.notify_all() + break + + client_events_future.result() + with client_condition: + for client_call in client_calls: + client_call.cancel(cygrpc.StatusCode.cancelled, 'Cancelled!') + for _ in range(_UNSUCCESSFUL_CALLS): + channel.next_call_event() + + channel.close(cygrpc.StatusCode.unknown, 'Cancelled on channel close!') + with state.condition: + server.shutdown(server_completion_queue, _SERVER_SHUTDOWN_TAG) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_channel_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_channel_test.py index 67d352df6e..54f620523e 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_channel_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_channel_test.py @@ 
-1,70 +1,70 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time -import threading -import unittest - -from grpc._cython import cygrpc - -from tests.unit.framework.common import test_constants - - -def _channel(): - return cygrpc.Channel(b'localhost:54321', (), None) - - -def _connectivity_loop(channel): - for _ in range(100): - connectivity = channel.check_connectivity_state(True) - channel.watch_connectivity_state(connectivity, time.time() + 0.2) - - -def _create_loop_destroy(): - channel = _channel() - _connectivity_loop(channel) - channel.close(cygrpc.StatusCode.ok, 'Channel close!') - - -def _in_parallel(behavior, arguments): - threads = tuple( - threading.Thread(target=behavior, args=arguments) - for _ in range(test_constants.THREAD_CONCURRENCY)) - for thread in threads: - thread.start() - for thread in threads: - thread.join() - - -class ChannelTest(unittest.TestCase): - - def test_single_channel_lonely_connectivity(self): - channel = _channel() - _connectivity_loop(channel) - channel.close(cygrpc.StatusCode.ok, 'Channel close!') - - def test_multiple_channels_lonely_connectivity(self): - _in_parallel(_create_loop_destroy, ()) - - def test_negative_deadline_connectivity(self): - channel = _channel() - connectivity = channel.check_connectivity_state(True) - channel.watch_connectivity_state(connectivity, -3.14) - channel.close(cygrpc.StatusCode.ok, 'Channel close!') - # NOTE(lidiz) The negative timeout should not trigger SIGABRT. - # Bug report: https://github.com/grpc/grpc/issues/18244 - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import time +import threading +import unittest + +from grpc._cython import cygrpc + +from tests.unit.framework.common import test_constants + + +def _channel(): + return cygrpc.Channel(b'localhost:54321', (), None) + + +def _connectivity_loop(channel): + for _ in range(100): + connectivity = channel.check_connectivity_state(True) + channel.watch_connectivity_state(connectivity, time.time() + 0.2) + + +def _create_loop_destroy(): + channel = _channel() + _connectivity_loop(channel) + channel.close(cygrpc.StatusCode.ok, 'Channel close!') + + +def _in_parallel(behavior, arguments): + threads = tuple( + threading.Thread(target=behavior, args=arguments) + for _ in range(test_constants.THREAD_CONCURRENCY)) + for thread in threads: + thread.start() + for thread in threads: + thread.join() + + +class ChannelTest(unittest.TestCase): + + def test_single_channel_lonely_connectivity(self): + channel = _channel() + _connectivity_loop(channel) + channel.close(cygrpc.StatusCode.ok, 'Channel close!') + + def test_multiple_channels_lonely_connectivity(self): + _in_parallel(_create_loop_destroy, ()) + + def test_negative_deadline_connectivity(self): + channel = _channel() + connectivity = channel.check_connectivity_state(True) + channel.watch_connectivity_state(connectivity, -3.14) + channel.close(cygrpc.StatusCode.ok, 'Channel close!') + # NOTE(lidiz) The negative timeout should not trigger SIGABRT. + # Bug report: https://github.com/grpc/grpc/issues/18244 + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_common.py index 23813ba2ee..d8210f36f8 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_common.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_common.py @@ -1,123 +1,123 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Common utilities for tests of the Cython layer of gRPC Python.""" - -import collections -import threading - -from grpc._cython import cygrpc - -RPC_COUNT = 4000 - -EMPTY_FLAGS = 0 - -INVOCATION_METADATA = ( - ('client-md-key', 'client-md-key'), - ('client-md-key-bin', b'\x00\x01' * 3000), -) - -INITIAL_METADATA = ( - ('server-initial-md-key', 'server-initial-md-value'), - ('server-initial-md-key-bin', b'\x00\x02' * 3000), -) - -TRAILING_METADATA = ( - ('server-trailing-md-key', 'server-trailing-md-value'), - ('server-trailing-md-key-bin', b'\x00\x03' * 3000), -) - - -class QueueDriver(object): - - def __init__(self, condition, completion_queue): - self._condition = condition - self._completion_queue = completion_queue - self._due = collections.defaultdict(int) - self._events = collections.defaultdict(list) - - def add_due(self, tags): - if not self._due: - - def in_thread(): - while True: - event = self._completion_queue.poll() - with self._condition: - self._events[event.tag].append(event) - self._due[event.tag] -= 1 - self._condition.notify_all() - if self._due[event.tag] <= 0: - self._due.pop(event.tag) - if not self._due: - return - - thread = threading.Thread(target=in_thread) - thread.start() - for tag in tags: - self._due[tag] += 1 - - def event_with_tag(self, tag): - with self._condition: - while True: - if self._events[tag]: - return self._events[tag].pop(0) - else: - self._condition.wait() - - -def execute_many_times(behavior): - return tuple(behavior() for _ in range(RPC_COUNT)) - - -class OperationResult( - collections.namedtuple('OperationResult', ( - 'start_batch_result', - 'completion_type', - 'success', - ))): - pass - - -SUCCESSFUL_OPERATION_RESULT = OperationResult( - cygrpc.CallError.ok, cygrpc.CompletionType.operation_complete, True) - - -class RpcTest(object): - - def setUp(self): - self.server_completion_queue = cygrpc.CompletionQueue() - self.server = cygrpc.Server([(b'grpc.so_reuseport', 0)]) - self.server.register_completion_queue(self.server_completion_queue) - port = self.server.add_http2_port(b'[::]:0') - self.server.start() - self.channel = cygrpc.Channel('localhost:{}'.format(port).encode(), [], - None) - - self._server_shutdown_tag = 'server_shutdown_tag' - self.server_condition = threading.Condition() - self.server_driver = QueueDriver(self.server_condition, - self.server_completion_queue) - with self.server_condition: - self.server_driver.add_due({ - self._server_shutdown_tag, - }) - - self.client_condition = threading.Condition() - self.client_completion_queue = cygrpc.CompletionQueue() - self.client_driver = QueueDriver(self.client_condition, - self.client_completion_queue) - - def tearDown(self): - self.server.shutdown(self.server_completion_queue, - self._server_shutdown_tag) - self.server.cancel_all_calls() +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Common utilities for tests of the Cython layer of gRPC Python.""" + +import collections +import threading + +from grpc._cython import cygrpc + +RPC_COUNT = 4000 + +EMPTY_FLAGS = 0 + +INVOCATION_METADATA = ( + ('client-md-key', 'client-md-key'), + ('client-md-key-bin', b'\x00\x01' * 3000), +) + +INITIAL_METADATA = ( + ('server-initial-md-key', 'server-initial-md-value'), + ('server-initial-md-key-bin', b'\x00\x02' * 3000), +) + +TRAILING_METADATA = ( + ('server-trailing-md-key', 'server-trailing-md-value'), + ('server-trailing-md-key-bin', b'\x00\x03' * 3000), +) + + +class QueueDriver(object): + + def __init__(self, condition, completion_queue): + self._condition = condition + self._completion_queue = completion_queue + self._due = collections.defaultdict(int) + self._events = collections.defaultdict(list) + + def add_due(self, tags): + if not self._due: + + def in_thread(): + while True: + event = self._completion_queue.poll() + with self._condition: + self._events[event.tag].append(event) + self._due[event.tag] -= 1 + self._condition.notify_all() + if self._due[event.tag] <= 0: + self._due.pop(event.tag) + if not self._due: + return + + thread = threading.Thread(target=in_thread) + thread.start() + for tag in tags: + self._due[tag] += 1 + + def event_with_tag(self, tag): + with self._condition: + while True: + if self._events[tag]: + return self._events[tag].pop(0) + else: + self._condition.wait() + + +def execute_many_times(behavior): + return tuple(behavior() for _ in range(RPC_COUNT)) + + +class OperationResult( + collections.namedtuple('OperationResult', ( + 'start_batch_result', + 'completion_type', + 'success', + ))): + pass + + +SUCCESSFUL_OPERATION_RESULT = OperationResult( + cygrpc.CallError.ok, cygrpc.CompletionType.operation_complete, True) + + +class RpcTest(object): + + def setUp(self): + self.server_completion_queue = cygrpc.CompletionQueue() + self.server = cygrpc.Server([(b'grpc.so_reuseport', 0)]) + self.server.register_completion_queue(self.server_completion_queue) + port = self.server.add_http2_port(b'[::]:0') + self.server.start() + self.channel = cygrpc.Channel('localhost:{}'.format(port).encode(), [], + None) + + self._server_shutdown_tag = 'server_shutdown_tag' + self.server_condition = threading.Condition() + self.server_driver = QueueDriver(self.server_condition, + self.server_completion_queue) + with self.server_condition: + self.server_driver.add_due({ + self._server_shutdown_tag, + }) + + self.client_condition = threading.Condition() + self.client_completion_queue = cygrpc.CompletionQueue() + self.client_driver = QueueDriver(self.client_condition, + self.client_completion_queue) + + def tearDown(self): + self.server.shutdown(self.server_completion_queue, + self._server_shutdown_tag) + self.server.cancel_all_calls() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_fork_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_fork_test.py index 37a0bf1d92..5a5dedd5f2 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_fork_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_fork_test.py @@ -1,72 +1,72 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import threading -import unittest - -from grpc._cython import cygrpc - - -def _get_number_active_threads(): - return cygrpc._fork_state.active_thread_count._num_active_threads - - -@unittest.skipIf(os.name == 'nt', 'Posix-specific tests') -class ForkPosixTester(unittest.TestCase): - - def setUp(self): - self._saved_fork_support_flag = cygrpc._GRPC_ENABLE_FORK_SUPPORT - cygrpc._GRPC_ENABLE_FORK_SUPPORT = True - - def testForkManagedThread(self): - - def cb(): - self.assertEqual(1, _get_number_active_threads()) - - thread = cygrpc.ForkManagedThread(cb) - thread.start() - thread.join() - self.assertEqual(0, _get_number_active_threads()) - - def testForkManagedThreadThrowsException(self): - - def cb(): - self.assertEqual(1, _get_number_active_threads()) - raise Exception("expected exception") - - thread = cygrpc.ForkManagedThread(cb) - thread.start() - thread.join() - self.assertEqual(0, _get_number_active_threads()) - - def tearDown(self): - cygrpc._GRPC_ENABLE_FORK_SUPPORT = self._saved_fork_support_flag - - -@unittest.skipUnless(os.name == 'nt', 'Windows-specific tests') -class ForkWindowsTester(unittest.TestCase): - - def testForkManagedThreadIsNoOp(self): - - def cb(): - pass - - thread = cygrpc.ForkManagedThread(cb) - thread.start() - thread.join() - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import threading +import unittest + +from grpc._cython import cygrpc + + +def _get_number_active_threads(): + return cygrpc._fork_state.active_thread_count._num_active_threads + + +@unittest.skipIf(os.name == 'nt', 'Posix-specific tests') +class ForkPosixTester(unittest.TestCase): + + def setUp(self): + self._saved_fork_support_flag = cygrpc._GRPC_ENABLE_FORK_SUPPORT + cygrpc._GRPC_ENABLE_FORK_SUPPORT = True + + def testForkManagedThread(self): + + def cb(): + self.assertEqual(1, _get_number_active_threads()) + + thread = cygrpc.ForkManagedThread(cb) + thread.start() + thread.join() + self.assertEqual(0, _get_number_active_threads()) + + def testForkManagedThreadThrowsException(self): + + def cb(): + self.assertEqual(1, _get_number_active_threads()) + raise Exception("expected exception") + + thread = cygrpc.ForkManagedThread(cb) + thread.start() + thread.join() + self.assertEqual(0, _get_number_active_threads()) + + def tearDown(self): + cygrpc._GRPC_ENABLE_FORK_SUPPORT = self._saved_fork_support_flag + + +@unittest.skipUnless(os.name == 'nt', 'Windows-specific tests') +class ForkWindowsTester(unittest.TestCase): + + def testForkManagedThreadIsNoOp(self): + + def cb(): + pass + + thread = cygrpc.ForkManagedThread(cb) + thread.start() + thread.join() + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py index b09c9d61a9..144a2fcae3 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py @@ -1,132 +1,132 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test a corner-case at the level of the Cython API.""" - -import threading -import unittest - -from grpc._cython import cygrpc - -from tests.unit._cython import _common -from tests.unit._cython import test_utilities - - -class Test(_common.RpcTest, unittest.TestCase): - - def _do_rpcs(self): - server_call_condition = threading.Condition() - server_call_completion_queue = cygrpc.CompletionQueue() - server_call_driver = _common.QueueDriver(server_call_condition, - server_call_completion_queue) - - server_request_call_tag = 'server_request_call_tag' - server_send_initial_metadata_tag = 'server_send_initial_metadata_tag' - server_complete_rpc_tag = 'server_complete_rpc_tag' - - with self.server_condition: - server_request_call_start_batch_result = self.server.request_call( - server_call_completion_queue, self.server_completion_queue, - server_request_call_tag) - self.server_driver.add_due({ - server_request_call_tag, - }) - - client_receive_initial_metadata_tag = 'client_receive_initial_metadata_tag' - client_complete_rpc_tag = 'client_complete_rpc_tag' - client_call = self.channel.integrated_call( - _common.EMPTY_FLAGS, b'/twinkies', None, None, - _common.INVOCATION_METADATA, None, [( - [ - cygrpc.ReceiveInitialMetadataOperation(_common.EMPTY_FLAGS), - ], - client_receive_initial_metadata_tag, - )]) - client_call.operate([ - cygrpc.SendInitialMetadataOperation(_common.INVOCATION_METADATA, - _common.EMPTY_FLAGS), - cygrpc.SendCloseFromClientOperation(_common.EMPTY_FLAGS), - cygrpc.ReceiveStatusOnClientOperation(_common.EMPTY_FLAGS), - ], client_complete_rpc_tag) - +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test a corner-case at the level of the Cython API.""" + +import threading +import unittest + +from grpc._cython import cygrpc + +from tests.unit._cython import _common +from tests.unit._cython import test_utilities + + +class Test(_common.RpcTest, unittest.TestCase): + + def _do_rpcs(self): + server_call_condition = threading.Condition() + server_call_completion_queue = cygrpc.CompletionQueue() + server_call_driver = _common.QueueDriver(server_call_condition, + server_call_completion_queue) + + server_request_call_tag = 'server_request_call_tag' + server_send_initial_metadata_tag = 'server_send_initial_metadata_tag' + server_complete_rpc_tag = 'server_complete_rpc_tag' + + with self.server_condition: + server_request_call_start_batch_result = self.server.request_call( + server_call_completion_queue, self.server_completion_queue, + server_request_call_tag) + self.server_driver.add_due({ + server_request_call_tag, + }) + + client_receive_initial_metadata_tag = 'client_receive_initial_metadata_tag' + client_complete_rpc_tag = 'client_complete_rpc_tag' + client_call = self.channel.integrated_call( + _common.EMPTY_FLAGS, b'/twinkies', None, None, + _common.INVOCATION_METADATA, None, [( + [ + cygrpc.ReceiveInitialMetadataOperation(_common.EMPTY_FLAGS), + ], + client_receive_initial_metadata_tag, + )]) + client_call.operate([ + cygrpc.SendInitialMetadataOperation(_common.INVOCATION_METADATA, + _common.EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation(_common.EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_common.EMPTY_FLAGS), + ], client_complete_rpc_tag) + client_events_future = test_utilities.SimpleFuture(lambda: [ self.channel.next_call_event(), self.channel.next_call_event(), ]) - - server_request_call_event = self.server_driver.event_with_tag( - server_request_call_tag) - - with server_call_condition: - server_send_initial_metadata_start_batch_result = ( - server_request_call_event.call.start_server_batch([ - cygrpc.SendInitialMetadataOperation( - _common.INITIAL_METADATA, _common.EMPTY_FLAGS), - ], server_send_initial_metadata_tag)) - server_call_driver.add_due({ - server_send_initial_metadata_tag, - }) - server_send_initial_metadata_event = server_call_driver.event_with_tag( - server_send_initial_metadata_tag) - - with server_call_condition: - server_complete_rpc_start_batch_result = ( - server_request_call_event.call.start_server_batch([ - cygrpc.ReceiveCloseOnServerOperation(_common.EMPTY_FLAGS), - cygrpc.SendStatusFromServerOperation( - _common.TRAILING_METADATA, cygrpc.StatusCode.ok, - b'test details', _common.EMPTY_FLAGS), - ], server_complete_rpc_tag)) - server_call_driver.add_due({ - server_complete_rpc_tag, - }) - server_complete_rpc_event = server_call_driver.event_with_tag( - server_complete_rpc_tag) - - client_events = client_events_future.result() - if client_events[0].tag is client_receive_initial_metadata_tag: - client_receive_initial_metadata_event = client_events[0] - client_complete_rpc_event = client_events[1] - else: - client_complete_rpc_event = client_events[0] - client_receive_initial_metadata_event = client_events[1] - - return ( - _common.OperationResult(server_request_call_start_batch_result, - server_request_call_event.completion_type, - server_request_call_event.success), - _common.OperationResult( - cygrpc.CallError.ok, - client_receive_initial_metadata_event.completion_type, - client_receive_initial_metadata_event.success), - _common.OperationResult(cygrpc.CallError.ok, - client_complete_rpc_event.completion_type, - 
client_complete_rpc_event.success), - _common.OperationResult( - server_send_initial_metadata_start_batch_result, - server_send_initial_metadata_event.completion_type, - server_send_initial_metadata_event.success), - _common.OperationResult(server_complete_rpc_start_batch_result, - server_complete_rpc_event.completion_type, - server_complete_rpc_event.success), - ) - - def test_rpcs(self): + + server_request_call_event = self.server_driver.event_with_tag( + server_request_call_tag) + + with server_call_condition: + server_send_initial_metadata_start_batch_result = ( + server_request_call_event.call.start_server_batch([ + cygrpc.SendInitialMetadataOperation( + _common.INITIAL_METADATA, _common.EMPTY_FLAGS), + ], server_send_initial_metadata_tag)) + server_call_driver.add_due({ + server_send_initial_metadata_tag, + }) + server_send_initial_metadata_event = server_call_driver.event_with_tag( + server_send_initial_metadata_tag) + + with server_call_condition: + server_complete_rpc_start_batch_result = ( + server_request_call_event.call.start_server_batch([ + cygrpc.ReceiveCloseOnServerOperation(_common.EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + _common.TRAILING_METADATA, cygrpc.StatusCode.ok, + b'test details', _common.EMPTY_FLAGS), + ], server_complete_rpc_tag)) + server_call_driver.add_due({ + server_complete_rpc_tag, + }) + server_complete_rpc_event = server_call_driver.event_with_tag( + server_complete_rpc_tag) + + client_events = client_events_future.result() + if client_events[0].tag is client_receive_initial_metadata_tag: + client_receive_initial_metadata_event = client_events[0] + client_complete_rpc_event = client_events[1] + else: + client_complete_rpc_event = client_events[0] + client_receive_initial_metadata_event = client_events[1] + + return ( + _common.OperationResult(server_request_call_start_batch_result, + server_request_call_event.completion_type, + server_request_call_event.success), + _common.OperationResult( + cygrpc.CallError.ok, + client_receive_initial_metadata_event.completion_type, + client_receive_initial_metadata_event.success), + _common.OperationResult(cygrpc.CallError.ok, + client_complete_rpc_event.completion_type, + client_complete_rpc_event.success), + _common.OperationResult( + server_send_initial_metadata_start_batch_result, + server_send_initial_metadata_event.completion_type, + server_send_initial_metadata_event.success), + _common.OperationResult(server_complete_rpc_start_batch_result, + server_complete_rpc_event.completion_type, + server_complete_rpc_event.success), + ) + + def test_rpcs(self): expecteds = [(_common.SUCCESSFUL_OPERATION_RESULT,) * 5 ] * _common.RPC_COUNT - actuallys = _common.execute_many_times(self._do_rpcs) - self.assertSequenceEqual(expecteds, actuallys) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + actuallys = _common.execute_many_times(self._do_rpcs) + self.assertSequenceEqual(expecteds, actuallys) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py index c586c340a2..38964768db 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py @@ -1,126 +1,126 @@ -# Copyright 2017 
gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test a corner-case at the level of the Cython API.""" - -import threading -import unittest - -from grpc._cython import cygrpc - -from tests.unit._cython import _common -from tests.unit._cython import test_utilities - - -class Test(_common.RpcTest, unittest.TestCase): - - def _do_rpcs(self): - server_request_call_tag = 'server_request_call_tag' - server_send_initial_metadata_tag = 'server_send_initial_metadata_tag' - server_complete_rpc_tag = 'server_complete_rpc_tag' - - with self.server_condition: - server_request_call_start_batch_result = self.server.request_call( - self.server_completion_queue, self.server_completion_queue, - server_request_call_tag) - self.server_driver.add_due({ - server_request_call_tag, - }) - - client_receive_initial_metadata_tag = 'client_receive_initial_metadata_tag' - client_complete_rpc_tag = 'client_complete_rpc_tag' - client_call = self.channel.integrated_call( - _common.EMPTY_FLAGS, b'/twinkies', None, None, - _common.INVOCATION_METADATA, None, [ - ( - [ - cygrpc.SendInitialMetadataOperation( - _common.INVOCATION_METADATA, _common.EMPTY_FLAGS), - cygrpc.SendCloseFromClientOperation( - _common.EMPTY_FLAGS), - cygrpc.ReceiveStatusOnClientOperation( - _common.EMPTY_FLAGS), - ], - client_complete_rpc_tag, - ), - ]) - client_call.operate([ - cygrpc.ReceiveInitialMetadataOperation(_common.EMPTY_FLAGS), - ], client_receive_initial_metadata_tag) - +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test a corner-case at the level of the Cython API.""" + +import threading +import unittest + +from grpc._cython import cygrpc + +from tests.unit._cython import _common +from tests.unit._cython import test_utilities + + +class Test(_common.RpcTest, unittest.TestCase): + + def _do_rpcs(self): + server_request_call_tag = 'server_request_call_tag' + server_send_initial_metadata_tag = 'server_send_initial_metadata_tag' + server_complete_rpc_tag = 'server_complete_rpc_tag' + + with self.server_condition: + server_request_call_start_batch_result = self.server.request_call( + self.server_completion_queue, self.server_completion_queue, + server_request_call_tag) + self.server_driver.add_due({ + server_request_call_tag, + }) + + client_receive_initial_metadata_tag = 'client_receive_initial_metadata_tag' + client_complete_rpc_tag = 'client_complete_rpc_tag' + client_call = self.channel.integrated_call( + _common.EMPTY_FLAGS, b'/twinkies', None, None, + _common.INVOCATION_METADATA, None, [ + ( + [ + cygrpc.SendInitialMetadataOperation( + _common.INVOCATION_METADATA, _common.EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation( + _common.EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation( + _common.EMPTY_FLAGS), + ], + client_complete_rpc_tag, + ), + ]) + client_call.operate([ + cygrpc.ReceiveInitialMetadataOperation(_common.EMPTY_FLAGS), + ], client_receive_initial_metadata_tag) + client_events_future = test_utilities.SimpleFuture(lambda: [ self.channel.next_call_event(), self.channel.next_call_event(), ]) - server_request_call_event = self.server_driver.event_with_tag( - server_request_call_tag) - - with self.server_condition: - server_send_initial_metadata_start_batch_result = ( - server_request_call_event.call.start_server_batch([ - cygrpc.SendInitialMetadataOperation( - _common.INITIAL_METADATA, _common.EMPTY_FLAGS), - ], server_send_initial_metadata_tag)) - self.server_driver.add_due({ - server_send_initial_metadata_tag, - }) - server_send_initial_metadata_event = self.server_driver.event_with_tag( - server_send_initial_metadata_tag) - - with self.server_condition: - server_complete_rpc_start_batch_result = ( - server_request_call_event.call.start_server_batch([ - cygrpc.ReceiveCloseOnServerOperation(_common.EMPTY_FLAGS), - cygrpc.SendStatusFromServerOperation( - _common.TRAILING_METADATA, cygrpc.StatusCode.ok, - 'test details', _common.EMPTY_FLAGS), - ], server_complete_rpc_tag)) - self.server_driver.add_due({ - server_complete_rpc_tag, - }) - server_complete_rpc_event = self.server_driver.event_with_tag( - server_complete_rpc_tag) - - client_events = client_events_future.result() - client_receive_initial_metadata_event = client_events[0] - client_complete_rpc_event = client_events[1] - - return ( - _common.OperationResult(server_request_call_start_batch_result, - server_request_call_event.completion_type, - server_request_call_event.success), - _common.OperationResult( - cygrpc.CallError.ok, - client_receive_initial_metadata_event.completion_type, - client_receive_initial_metadata_event.success), - _common.OperationResult(cygrpc.CallError.ok, - client_complete_rpc_event.completion_type, - client_complete_rpc_event.success), - _common.OperationResult( - server_send_initial_metadata_start_batch_result, - server_send_initial_metadata_event.completion_type, - server_send_initial_metadata_event.success), - _common.OperationResult(server_complete_rpc_start_batch_result, - server_complete_rpc_event.completion_type, - server_complete_rpc_event.success), - ) - - def test_rpcs(self): + 
server_request_call_event = self.server_driver.event_with_tag( + server_request_call_tag) + + with self.server_condition: + server_send_initial_metadata_start_batch_result = ( + server_request_call_event.call.start_server_batch([ + cygrpc.SendInitialMetadataOperation( + _common.INITIAL_METADATA, _common.EMPTY_FLAGS), + ], server_send_initial_metadata_tag)) + self.server_driver.add_due({ + server_send_initial_metadata_tag, + }) + server_send_initial_metadata_event = self.server_driver.event_with_tag( + server_send_initial_metadata_tag) + + with self.server_condition: + server_complete_rpc_start_batch_result = ( + server_request_call_event.call.start_server_batch([ + cygrpc.ReceiveCloseOnServerOperation(_common.EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + _common.TRAILING_METADATA, cygrpc.StatusCode.ok, + 'test details', _common.EMPTY_FLAGS), + ], server_complete_rpc_tag)) + self.server_driver.add_due({ + server_complete_rpc_tag, + }) + server_complete_rpc_event = self.server_driver.event_with_tag( + server_complete_rpc_tag) + + client_events = client_events_future.result() + client_receive_initial_metadata_event = client_events[0] + client_complete_rpc_event = client_events[1] + + return ( + _common.OperationResult(server_request_call_start_batch_result, + server_request_call_event.completion_type, + server_request_call_event.success), + _common.OperationResult( + cygrpc.CallError.ok, + client_receive_initial_metadata_event.completion_type, + client_receive_initial_metadata_event.success), + _common.OperationResult(cygrpc.CallError.ok, + client_complete_rpc_event.completion_type, + client_complete_rpc_event.success), + _common.OperationResult( + server_send_initial_metadata_start_batch_result, + server_send_initial_metadata_event.completion_type, + server_send_initial_metadata_event.success), + _common.OperationResult(server_complete_rpc_start_batch_result, + server_complete_rpc_event.completion_type, + server_complete_rpc_event.success), + ) + + def test_rpcs(self): expecteds = [(_common.SUCCESSFUL_OPERATION_RESULT,) * 5 ] * _common.RPC_COUNT - actuallys = _common.execute_many_times(self._do_rpcs) - self.assertSequenceEqual(expecteds, actuallys) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + actuallys = _common.execute_many_times(self._do_rpcs) + self.assertSequenceEqual(expecteds, actuallys) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py index efd047c9ad..8a903bfaf9 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py @@ -1,240 +1,240 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test a corner-case at the level of the Cython API.""" - -import threading -import unittest - -from grpc._cython import cygrpc -from tests.unit._cython import test_utilities - -_EMPTY_FLAGS = 0 -_EMPTY_METADATA = () - - -class _ServerDriver(object): - - def __init__(self, completion_queue, shutdown_tag): - self._condition = threading.Condition() - self._completion_queue = completion_queue - self._shutdown_tag = shutdown_tag - self._events = [] - self._saw_shutdown_tag = False - - def start(self): - - def in_thread(): - while True: - event = self._completion_queue.poll() - with self._condition: - self._events.append(event) - self._condition.notify() - if event.tag is self._shutdown_tag: - self._saw_shutdown_tag = True - break - - thread = threading.Thread(target=in_thread) - thread.start() - - def done(self): - with self._condition: - return self._saw_shutdown_tag - - def first_event(self): - with self._condition: - while not self._events: - self._condition.wait() - return self._events[0] - - def events(self): - with self._condition: - while not self._saw_shutdown_tag: - self._condition.wait() - return tuple(self._events) - - -class _QueueDriver(object): - - def __init__(self, condition, completion_queue, due): - self._condition = condition - self._completion_queue = completion_queue - self._due = due - self._events = [] - self._returned = False - - def start(self): - - def in_thread(): - while True: - event = self._completion_queue.poll() - with self._condition: - self._events.append(event) - self._due.remove(event.tag) - self._condition.notify_all() - if not self._due: - self._returned = True - return - - thread = threading.Thread(target=in_thread) - thread.start() - - def done(self): - with self._condition: - return self._returned - - def event_with_tag(self, tag): - with self._condition: - while True: - for event in self._events: - if event.tag is tag: - return event - self._condition.wait() - - def events(self): - with self._condition: - while not self._returned: - self._condition.wait() - return tuple(self._events) - - -class ReadSomeButNotAllResponsesTest(unittest.TestCase): - - def testReadSomeButNotAllResponses(self): - server_completion_queue = cygrpc.CompletionQueue() - server = cygrpc.Server([( - b'grpc.so_reuseport', - 0, - )]) - server.register_completion_queue(server_completion_queue) - port = server.add_http2_port(b'[::]:0') - server.start() - channel = cygrpc.Channel('localhost:{}'.format(port).encode(), set(), - None) - - server_shutdown_tag = 'server_shutdown_tag' - server_driver = _ServerDriver(server_completion_queue, - server_shutdown_tag) - server_driver.start() - - client_condition = threading.Condition() - client_due = set() - - server_call_condition = threading.Condition() - server_send_initial_metadata_tag = 'server_send_initial_metadata_tag' - server_send_first_message_tag = 'server_send_first_message_tag' - server_send_second_message_tag = 'server_send_second_message_tag' - server_complete_rpc_tag = 'server_complete_rpc_tag' - server_call_due = set(( - server_send_initial_metadata_tag, - server_send_first_message_tag, - server_send_second_message_tag, - server_complete_rpc_tag, - )) - server_call_completion_queue = cygrpc.CompletionQueue() - server_call_driver = _QueueDriver(server_call_condition, - server_call_completion_queue, - server_call_due) - server_call_driver.start() - - server_rpc_tag = 'server_rpc_tag' - request_call_result = server.request_call(server_call_completion_queue, - server_completion_queue, - server_rpc_tag) - - 
client_receive_initial_metadata_tag = 'client_receive_initial_metadata_tag' - client_complete_rpc_tag = 'client_complete_rpc_tag' - client_call = channel.segregated_call( - _EMPTY_FLAGS, b'/twinkies', None, None, _EMPTY_METADATA, None, ( - ( - [ - cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS), - ], - client_receive_initial_metadata_tag, - ), - ( - [ - cygrpc.SendInitialMetadataOperation( - _EMPTY_METADATA, _EMPTY_FLAGS), - cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), - cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), - ], - client_complete_rpc_tag, - ), - )) - client_receive_initial_metadata_event_future = test_utilities.SimpleFuture( - client_call.next_event) - - server_rpc_event = server_driver.first_event() - - with server_call_condition: - server_send_initial_metadata_start_batch_result = ( - server_rpc_event.call.start_server_batch([ - cygrpc.SendInitialMetadataOperation(_EMPTY_METADATA, - _EMPTY_FLAGS), - ], server_send_initial_metadata_tag)) - server_send_first_message_start_batch_result = ( - server_rpc_event.call.start_server_batch([ - cygrpc.SendMessageOperation(b'\x07', _EMPTY_FLAGS), - ], server_send_first_message_tag)) - server_send_initial_metadata_event = server_call_driver.event_with_tag( - server_send_initial_metadata_tag) - server_send_first_message_event = server_call_driver.event_with_tag( - server_send_first_message_tag) - with server_call_condition: - server_send_second_message_start_batch_result = ( - server_rpc_event.call.start_server_batch([ - cygrpc.SendMessageOperation(b'\x07', _EMPTY_FLAGS), - ], server_send_second_message_tag)) - server_complete_rpc_start_batch_result = ( - server_rpc_event.call.start_server_batch([ - cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), - cygrpc.SendStatusFromServerOperation( - (), cygrpc.StatusCode.ok, b'test details', - _EMPTY_FLAGS), - ], server_complete_rpc_tag)) - server_send_second_message_event = server_call_driver.event_with_tag( - server_send_second_message_tag) - server_complete_rpc_event = server_call_driver.event_with_tag( - server_complete_rpc_tag) - server_call_driver.events() - - client_recieve_initial_metadata_event = client_receive_initial_metadata_event_future.result( - ) - - client_receive_first_message_tag = 'client_receive_first_message_tag' - client_call.operate([ - cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), - ], client_receive_first_message_tag) - client_receive_first_message_event = client_call.next_event() - - client_call_cancel_result = client_call.cancel( - cygrpc.StatusCode.cancelled, 'Cancelled during test!') - client_complete_rpc_event = client_call.next_event() - - channel.close(cygrpc.StatusCode.unknown, 'Channel closed!') - server.shutdown(server_completion_queue, server_shutdown_tag) - server.cancel_all_calls() - server_driver.events() - - self.assertEqual(cygrpc.CallError.ok, request_call_result) - self.assertEqual(cygrpc.CallError.ok, - server_send_initial_metadata_start_batch_result) - self.assertIs(server_rpc_tag, server_rpc_event.tag) - self.assertEqual(cygrpc.CompletionType.operation_complete, - server_rpc_event.completion_type) - self.assertIsInstance(server_rpc_event.call, cygrpc.Call) - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test a corner-case at the level of the Cython API.""" + +import threading +import unittest + +from grpc._cython import cygrpc +from tests.unit._cython import test_utilities + +_EMPTY_FLAGS = 0 +_EMPTY_METADATA = () + + +class _ServerDriver(object): + + def __init__(self, completion_queue, shutdown_tag): + self._condition = threading.Condition() + self._completion_queue = completion_queue + self._shutdown_tag = shutdown_tag + self._events = [] + self._saw_shutdown_tag = False + + def start(self): + + def in_thread(): + while True: + event = self._completion_queue.poll() + with self._condition: + self._events.append(event) + self._condition.notify() + if event.tag is self._shutdown_tag: + self._saw_shutdown_tag = True + break + + thread = threading.Thread(target=in_thread) + thread.start() + + def done(self): + with self._condition: + return self._saw_shutdown_tag + + def first_event(self): + with self._condition: + while not self._events: + self._condition.wait() + return self._events[0] + + def events(self): + with self._condition: + while not self._saw_shutdown_tag: + self._condition.wait() + return tuple(self._events) + + +class _QueueDriver(object): + + def __init__(self, condition, completion_queue, due): + self._condition = condition + self._completion_queue = completion_queue + self._due = due + self._events = [] + self._returned = False + + def start(self): + + def in_thread(): + while True: + event = self._completion_queue.poll() + with self._condition: + self._events.append(event) + self._due.remove(event.tag) + self._condition.notify_all() + if not self._due: + self._returned = True + return + + thread = threading.Thread(target=in_thread) + thread.start() + + def done(self): + with self._condition: + return self._returned + + def event_with_tag(self, tag): + with self._condition: + while True: + for event in self._events: + if event.tag is tag: + return event + self._condition.wait() + + def events(self): + with self._condition: + while not self._returned: + self._condition.wait() + return tuple(self._events) + + +class ReadSomeButNotAllResponsesTest(unittest.TestCase): + + def testReadSomeButNotAllResponses(self): + server_completion_queue = cygrpc.CompletionQueue() + server = cygrpc.Server([( + b'grpc.so_reuseport', + 0, + )]) + server.register_completion_queue(server_completion_queue) + port = server.add_http2_port(b'[::]:0') + server.start() + channel = cygrpc.Channel('localhost:{}'.format(port).encode(), set(), + None) + + server_shutdown_tag = 'server_shutdown_tag' + server_driver = _ServerDriver(server_completion_queue, + server_shutdown_tag) + server_driver.start() + + client_condition = threading.Condition() + client_due = set() + + server_call_condition = threading.Condition() + server_send_initial_metadata_tag = 'server_send_initial_metadata_tag' + server_send_first_message_tag = 'server_send_first_message_tag' + server_send_second_message_tag = 'server_send_second_message_tag' + server_complete_rpc_tag = 'server_complete_rpc_tag' + server_call_due = set(( + server_send_initial_metadata_tag, + server_send_first_message_tag, + server_send_second_message_tag, 
+ server_complete_rpc_tag, + )) + server_call_completion_queue = cygrpc.CompletionQueue() + server_call_driver = _QueueDriver(server_call_condition, + server_call_completion_queue, + server_call_due) + server_call_driver.start() + + server_rpc_tag = 'server_rpc_tag' + request_call_result = server.request_call(server_call_completion_queue, + server_completion_queue, + server_rpc_tag) + + client_receive_initial_metadata_tag = 'client_receive_initial_metadata_tag' + client_complete_rpc_tag = 'client_complete_rpc_tag' + client_call = channel.segregated_call( + _EMPTY_FLAGS, b'/twinkies', None, None, _EMPTY_METADATA, None, ( + ( + [ + cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS), + ], + client_receive_initial_metadata_tag, + ), + ( + [ + cygrpc.SendInitialMetadataOperation( + _EMPTY_METADATA, _EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ], + client_complete_rpc_tag, + ), + )) + client_receive_initial_metadata_event_future = test_utilities.SimpleFuture( + client_call.next_event) + + server_rpc_event = server_driver.first_event() + + with server_call_condition: + server_send_initial_metadata_start_batch_result = ( + server_rpc_event.call.start_server_batch([ + cygrpc.SendInitialMetadataOperation(_EMPTY_METADATA, + _EMPTY_FLAGS), + ], server_send_initial_metadata_tag)) + server_send_first_message_start_batch_result = ( + server_rpc_event.call.start_server_batch([ + cygrpc.SendMessageOperation(b'\x07', _EMPTY_FLAGS), + ], server_send_first_message_tag)) + server_send_initial_metadata_event = server_call_driver.event_with_tag( + server_send_initial_metadata_tag) + server_send_first_message_event = server_call_driver.event_with_tag( + server_send_first_message_tag) + with server_call_condition: + server_send_second_message_start_batch_result = ( + server_rpc_event.call.start_server_batch([ + cygrpc.SendMessageOperation(b'\x07', _EMPTY_FLAGS), + ], server_send_second_message_tag)) + server_complete_rpc_start_batch_result = ( + server_rpc_event.call.start_server_batch([ + cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + (), cygrpc.StatusCode.ok, b'test details', + _EMPTY_FLAGS), + ], server_complete_rpc_tag)) + server_send_second_message_event = server_call_driver.event_with_tag( + server_send_second_message_tag) + server_complete_rpc_event = server_call_driver.event_with_tag( + server_complete_rpc_tag) + server_call_driver.events() + + client_recieve_initial_metadata_event = client_receive_initial_metadata_event_future.result( + ) + + client_receive_first_message_tag = 'client_receive_first_message_tag' + client_call.operate([ + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + ], client_receive_first_message_tag) + client_receive_first_message_event = client_call.next_event() + + client_call_cancel_result = client_call.cancel( + cygrpc.StatusCode.cancelled, 'Cancelled during test!') + client_complete_rpc_event = client_call.next_event() + + channel.close(cygrpc.StatusCode.unknown, 'Channel closed!') + server.shutdown(server_completion_queue, server_shutdown_tag) + server.cancel_all_calls() + server_driver.events() + + self.assertEqual(cygrpc.CallError.ok, request_call_result) + self.assertEqual(cygrpc.CallError.ok, + server_send_initial_metadata_start_batch_result) + self.assertIs(server_rpc_tag, server_rpc_event.tag) + self.assertEqual(cygrpc.CompletionType.operation_complete, + server_rpc_event.completion_type) + self.assertIsInstance(server_rpc_event.call, cygrpc.Call) 
+ + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_server_test.py index f89fc49d20..bbd25457b3 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_server_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_server_test.py @@ -1,49 +1,49 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test servers at the level of the Cython API.""" - -import threading -import time -import unittest - -from grpc._cython import cygrpc - - -class Test(unittest.TestCase): - - def test_lonely_server(self): - server_call_completion_queue = cygrpc.CompletionQueue() - server_shutdown_completion_queue = cygrpc.CompletionQueue() - server = cygrpc.Server(None) - server.register_completion_queue(server_call_completion_queue) - server.register_completion_queue(server_shutdown_completion_queue) - port = server.add_http2_port(b'[::]:0') - server.start() - - server_request_call_tag = 'server_request_call_tag' - server_request_call_start_batch_result = server.request_call( - server_call_completion_queue, server_call_completion_queue, - server_request_call_tag) - - time.sleep(4) - - server_shutdown_tag = 'server_shutdown_tag' - server_shutdown_result = server.shutdown( - server_shutdown_completion_queue, server_shutdown_tag) - server_request_call_event = server_call_completion_queue.poll() - server_shutdown_event = server_shutdown_completion_queue.poll() - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test servers at the level of the Cython API.""" + +import threading +import time +import unittest + +from grpc._cython import cygrpc + + +class Test(unittest.TestCase): + + def test_lonely_server(self): + server_call_completion_queue = cygrpc.CompletionQueue() + server_shutdown_completion_queue = cygrpc.CompletionQueue() + server = cygrpc.Server(None) + server.register_completion_queue(server_call_completion_queue) + server.register_completion_queue(server_shutdown_completion_queue) + port = server.add_http2_port(b'[::]:0') + server.start() + + server_request_call_tag = 'server_request_call_tag' + server_request_call_start_batch_result = server.request_call( + server_call_completion_queue, server_call_completion_queue, + server_request_call_tag) + + time.sleep(4) + + server_shutdown_tag = 'server_shutdown_tag' + server_shutdown_result = server.shutdown( + server_shutdown_completion_queue, server_shutdown_tag) + server_request_call_event = server_call_completion_queue.poll() + server_shutdown_event = server_shutdown_completion_queue.poll() + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py index a5e0003c42..1182f83a42 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py @@ -1,335 +1,335 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import time -import threading -import unittest -import platform - -from grpc._cython import cygrpc -from tests.unit._cython import test_utilities -from tests.unit import test_common -from tests.unit import resources - -_SSL_HOST_OVERRIDE = b'foo.test.google.fr' -_CALL_CREDENTIALS_METADATA_KEY = 'call-creds-key' -_CALL_CREDENTIALS_METADATA_VALUE = 'call-creds-value' -_EMPTY_FLAGS = 0 - - -def _metadata_plugin(context, callback): - callback((( - _CALL_CREDENTIALS_METADATA_KEY, - _CALL_CREDENTIALS_METADATA_VALUE, - ),), cygrpc.StatusCode.ok, b'') - - -class TypeSmokeTest(unittest.TestCase): - - def testCompletionQueueUpDown(self): - completion_queue = cygrpc.CompletionQueue() - del completion_queue - - def testServerUpDown(self): - server = cygrpc.Server(set([ - ( - b'grpc.so_reuseport', - 0, - ), - ])) - del server - - def testChannelUpDown(self): - channel = cygrpc.Channel(b'[::]:0', None, None) - channel.close(cygrpc.StatusCode.cancelled, 'Test method anyway!') - - def test_metadata_plugin_call_credentials_up_down(self): - cygrpc.MetadataPluginCallCredentials(_metadata_plugin, - b'test plugin name!') - - def testServerStartNoExplicitShutdown(self): - server = cygrpc.Server([ - ( - b'grpc.so_reuseport', - 0, - ), - ]) - completion_queue = cygrpc.CompletionQueue() - server.register_completion_queue(completion_queue) - port = server.add_http2_port(b'[::]:0') - self.assertIsInstance(port, int) - server.start() - del server - - def testServerStartShutdown(self): - completion_queue = cygrpc.CompletionQueue() - server = cygrpc.Server([ - ( - b'grpc.so_reuseport', - 0, - ), - ]) - server.add_http2_port(b'[::]:0') - server.register_completion_queue(completion_queue) - server.start() - shutdown_tag = object() - server.shutdown(completion_queue, shutdown_tag) - event = completion_queue.poll() - self.assertEqual(cygrpc.CompletionType.operation_complete, - event.completion_type) - self.assertIs(shutdown_tag, event.tag) - del server - del completion_queue - - -class ServerClientMixin(object): - - def setUpMixin(self, server_credentials, client_credentials, host_override): - self.server_completion_queue = cygrpc.CompletionQueue() - self.server = cygrpc.Server([ - ( - b'grpc.so_reuseport', - 0, - ), - ]) - self.server.register_completion_queue(self.server_completion_queue) - if server_credentials: - self.port = self.server.add_http2_port(b'[::]:0', - server_credentials) - else: - self.port = self.server.add_http2_port(b'[::]:0') - self.server.start() - self.client_completion_queue = cygrpc.CompletionQueue() - if client_credentials: - client_channel_arguments = (( - cygrpc.ChannelArgKey.ssl_target_name_override, - host_override, - ),) +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import time +import threading +import unittest +import platform + +from grpc._cython import cygrpc +from tests.unit._cython import test_utilities +from tests.unit import test_common +from tests.unit import resources + +_SSL_HOST_OVERRIDE = b'foo.test.google.fr' +_CALL_CREDENTIALS_METADATA_KEY = 'call-creds-key' +_CALL_CREDENTIALS_METADATA_VALUE = 'call-creds-value' +_EMPTY_FLAGS = 0 + + +def _metadata_plugin(context, callback): + callback((( + _CALL_CREDENTIALS_METADATA_KEY, + _CALL_CREDENTIALS_METADATA_VALUE, + ),), cygrpc.StatusCode.ok, b'') + + +class TypeSmokeTest(unittest.TestCase): + + def testCompletionQueueUpDown(self): + completion_queue = cygrpc.CompletionQueue() + del completion_queue + + def testServerUpDown(self): + server = cygrpc.Server(set([ + ( + b'grpc.so_reuseport', + 0, + ), + ])) + del server + + def testChannelUpDown(self): + channel = cygrpc.Channel(b'[::]:0', None, None) + channel.close(cygrpc.StatusCode.cancelled, 'Test method anyway!') + + def test_metadata_plugin_call_credentials_up_down(self): + cygrpc.MetadataPluginCallCredentials(_metadata_plugin, + b'test plugin name!') + + def testServerStartNoExplicitShutdown(self): + server = cygrpc.Server([ + ( + b'grpc.so_reuseport', + 0, + ), + ]) + completion_queue = cygrpc.CompletionQueue() + server.register_completion_queue(completion_queue) + port = server.add_http2_port(b'[::]:0') + self.assertIsInstance(port, int) + server.start() + del server + + def testServerStartShutdown(self): + completion_queue = cygrpc.CompletionQueue() + server = cygrpc.Server([ + ( + b'grpc.so_reuseport', + 0, + ), + ]) + server.add_http2_port(b'[::]:0') + server.register_completion_queue(completion_queue) + server.start() + shutdown_tag = object() + server.shutdown(completion_queue, shutdown_tag) + event = completion_queue.poll() + self.assertEqual(cygrpc.CompletionType.operation_complete, + event.completion_type) + self.assertIs(shutdown_tag, event.tag) + del server + del completion_queue + + +class ServerClientMixin(object): + + def setUpMixin(self, server_credentials, client_credentials, host_override): + self.server_completion_queue = cygrpc.CompletionQueue() + self.server = cygrpc.Server([ + ( + b'grpc.so_reuseport', + 0, + ), + ]) + self.server.register_completion_queue(self.server_completion_queue) + if server_credentials: + self.port = self.server.add_http2_port(b'[::]:0', + server_credentials) + else: + self.port = self.server.add_http2_port(b'[::]:0') + self.server.start() + self.client_completion_queue = cygrpc.CompletionQueue() + if client_credentials: + client_channel_arguments = (( + cygrpc.ChannelArgKey.ssl_target_name_override, + host_override, + ),) self.client_channel = cygrpc.Channel( 'localhost:{}'.format(self.port).encode(), client_channel_arguments, client_credentials) - else: + else: self.client_channel = cygrpc.Channel( 'localhost:{}'.format(self.port).encode(), set(), None) - if host_override: - self.host_argument = None # default host - self.expected_host = host_override - else: - # arbitrary host name necessitating no further identification - self.host_argument = b'hostess' - self.expected_host = self.host_argument - - def tearDownMixin(self): - self.client_channel.close(cygrpc.StatusCode.ok, 'test being torn down!') - del self.client_channel - del self.server - del self.client_completion_queue - del self.server_completion_queue - - def _perform_queue_operations(self, operations, call, queue, deadline, - description): - """Perform the operations with given call, queue, and deadline. 
- - Invocation errors are reported with as an exception with `description` - in the message. Performs the operations asynchronously, returning a - future. - """ - - def performer(): - tag = object() - try: - call_result = call.start_client_batch(operations, tag) - self.assertEqual(cygrpc.CallError.ok, call_result) - event = queue.poll(deadline=deadline) - self.assertEqual(cygrpc.CompletionType.operation_complete, - event.completion_type) - self.assertTrue(event.success) - self.assertIs(tag, event.tag) - except Exception as error: - raise Exception("Error in '{}': {}".format( - description, error.message)) - return event - - return test_utilities.SimpleFuture(performer) - - def test_echo(self): - DEADLINE = time.time() + 5 - DEADLINE_TOLERANCE = 0.25 - CLIENT_METADATA_ASCII_KEY = 'key' - CLIENT_METADATA_ASCII_VALUE = 'val' - CLIENT_METADATA_BIN_KEY = 'key-bin' - CLIENT_METADATA_BIN_VALUE = b'\0' * 1000 - SERVER_INITIAL_METADATA_KEY = 'init_me_me_me' - SERVER_INITIAL_METADATA_VALUE = 'whodawha?' - SERVER_TRAILING_METADATA_KEY = 'california_is_in_a_drought' - SERVER_TRAILING_METADATA_VALUE = 'zomg it is' - SERVER_STATUS_CODE = cygrpc.StatusCode.ok - SERVER_STATUS_DETAILS = 'our work is never over' - REQUEST = b'in death a member of project mayhem has a name' - RESPONSE = b'his name is robert paulson' - METHOD = b'twinkies' - - server_request_tag = object() - request_call_result = self.server.request_call( - self.server_completion_queue, self.server_completion_queue, - server_request_tag) - - self.assertEqual(cygrpc.CallError.ok, request_call_result) - - client_call_tag = object() - client_initial_metadata = ( - ( - CLIENT_METADATA_ASCII_KEY, - CLIENT_METADATA_ASCII_VALUE, - ), - ( - CLIENT_METADATA_BIN_KEY, - CLIENT_METADATA_BIN_VALUE, - ), - ) - client_call = self.client_channel.integrated_call( - 0, METHOD, self.host_argument, DEADLINE, client_initial_metadata, - None, [ - ( - [ - cygrpc.SendInitialMetadataOperation( - client_initial_metadata, _EMPTY_FLAGS), - cygrpc.SendMessageOperation(REQUEST, _EMPTY_FLAGS), - cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), - cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS), - cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), - cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), - ], - client_call_tag, - ), - ]) - client_event_future = test_utilities.SimpleFuture( - self.client_channel.next_call_event) - - request_event = self.server_completion_queue.poll(deadline=DEADLINE) - self.assertEqual(cygrpc.CompletionType.operation_complete, - request_event.completion_type) - self.assertIsInstance(request_event.call, cygrpc.Call) - self.assertIs(server_request_tag, request_event.tag) - self.assertTrue( - test_common.metadata_transmitted(client_initial_metadata, - request_event.invocation_metadata)) - self.assertEqual(METHOD, request_event.call_details.method) - self.assertEqual(self.expected_host, request_event.call_details.host) + if host_override: + self.host_argument = None # default host + self.expected_host = host_override + else: + # arbitrary host name necessitating no further identification + self.host_argument = b'hostess' + self.expected_host = self.host_argument + + def tearDownMixin(self): + self.client_channel.close(cygrpc.StatusCode.ok, 'test being torn down!') + del self.client_channel + del self.server + del self.client_completion_queue + del self.server_completion_queue + + def _perform_queue_operations(self, operations, call, queue, deadline, + description): + """Perform the operations with given call, queue, and deadline. 
+ + Invocation errors are reported with as an exception with `description` + in the message. Performs the operations asynchronously, returning a + future. + """ + + def performer(): + tag = object() + try: + call_result = call.start_client_batch(operations, tag) + self.assertEqual(cygrpc.CallError.ok, call_result) + event = queue.poll(deadline=deadline) + self.assertEqual(cygrpc.CompletionType.operation_complete, + event.completion_type) + self.assertTrue(event.success) + self.assertIs(tag, event.tag) + except Exception as error: + raise Exception("Error in '{}': {}".format( + description, error.message)) + return event + + return test_utilities.SimpleFuture(performer) + + def test_echo(self): + DEADLINE = time.time() + 5 + DEADLINE_TOLERANCE = 0.25 + CLIENT_METADATA_ASCII_KEY = 'key' + CLIENT_METADATA_ASCII_VALUE = 'val' + CLIENT_METADATA_BIN_KEY = 'key-bin' + CLIENT_METADATA_BIN_VALUE = b'\0' * 1000 + SERVER_INITIAL_METADATA_KEY = 'init_me_me_me' + SERVER_INITIAL_METADATA_VALUE = 'whodawha?' + SERVER_TRAILING_METADATA_KEY = 'california_is_in_a_drought' + SERVER_TRAILING_METADATA_VALUE = 'zomg it is' + SERVER_STATUS_CODE = cygrpc.StatusCode.ok + SERVER_STATUS_DETAILS = 'our work is never over' + REQUEST = b'in death a member of project mayhem has a name' + RESPONSE = b'his name is robert paulson' + METHOD = b'twinkies' + + server_request_tag = object() + request_call_result = self.server.request_call( + self.server_completion_queue, self.server_completion_queue, + server_request_tag) + + self.assertEqual(cygrpc.CallError.ok, request_call_result) + + client_call_tag = object() + client_initial_metadata = ( + ( + CLIENT_METADATA_ASCII_KEY, + CLIENT_METADATA_ASCII_VALUE, + ), + ( + CLIENT_METADATA_BIN_KEY, + CLIENT_METADATA_BIN_VALUE, + ), + ) + client_call = self.client_channel.integrated_call( + 0, METHOD, self.host_argument, DEADLINE, client_initial_metadata, + None, [ + ( + [ + cygrpc.SendInitialMetadataOperation( + client_initial_metadata, _EMPTY_FLAGS), + cygrpc.SendMessageOperation(REQUEST, _EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ], + client_call_tag, + ), + ]) + client_event_future = test_utilities.SimpleFuture( + self.client_channel.next_call_event) + + request_event = self.server_completion_queue.poll(deadline=DEADLINE) + self.assertEqual(cygrpc.CompletionType.operation_complete, + request_event.completion_type) + self.assertIsInstance(request_event.call, cygrpc.Call) + self.assertIs(server_request_tag, request_event.tag) + self.assertTrue( + test_common.metadata_transmitted(client_initial_metadata, + request_event.invocation_metadata)) + self.assertEqual(METHOD, request_event.call_details.method) + self.assertEqual(self.expected_host, request_event.call_details.host) self.assertLess(abs(DEADLINE - request_event.call_details.deadline), DEADLINE_TOLERANCE) - - server_call_tag = object() - server_call = request_event.call - server_initial_metadata = (( - SERVER_INITIAL_METADATA_KEY, - SERVER_INITIAL_METADATA_VALUE, - ),) - server_trailing_metadata = (( - SERVER_TRAILING_METADATA_KEY, - SERVER_TRAILING_METADATA_VALUE, - ),) - server_start_batch_result = server_call.start_server_batch([ - cygrpc.SendInitialMetadataOperation(server_initial_metadata, - _EMPTY_FLAGS), - cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), - cygrpc.SendMessageOperation(RESPONSE, _EMPTY_FLAGS), - 
cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), - cygrpc.SendStatusFromServerOperation( - server_trailing_metadata, SERVER_STATUS_CODE, - SERVER_STATUS_DETAILS, _EMPTY_FLAGS) - ], server_call_tag) - self.assertEqual(cygrpc.CallError.ok, server_start_batch_result) - - server_event = self.server_completion_queue.poll(deadline=DEADLINE) - client_event = client_event_future.result() - - self.assertEqual(6, len(client_event.batch_operations)) - found_client_op_types = set() - for client_result in client_event.batch_operations: - # we expect each op type to be unique - self.assertNotIn(client_result.type(), found_client_op_types) - found_client_op_types.add(client_result.type()) - if client_result.type( - ) == cygrpc.OperationType.receive_initial_metadata: - self.assertTrue( - test_common.metadata_transmitted( - server_initial_metadata, - client_result.initial_metadata())) - elif client_result.type() == cygrpc.OperationType.receive_message: - self.assertEqual(RESPONSE, client_result.message()) - elif client_result.type( - ) == cygrpc.OperationType.receive_status_on_client: - self.assertTrue( - test_common.metadata_transmitted( - server_trailing_metadata, - client_result.trailing_metadata())) - self.assertEqual(SERVER_STATUS_DETAILS, client_result.details()) - self.assertEqual(SERVER_STATUS_CODE, client_result.code()) - self.assertEqual( - set([ - cygrpc.OperationType.send_initial_metadata, - cygrpc.OperationType.send_message, - cygrpc.OperationType.send_close_from_client, - cygrpc.OperationType.receive_initial_metadata, - cygrpc.OperationType.receive_message, - cygrpc.OperationType.receive_status_on_client - ]), found_client_op_types) - - self.assertEqual(5, len(server_event.batch_operations)) - found_server_op_types = set() - for server_result in server_event.batch_operations: - self.assertNotIn(server_result.type(), found_server_op_types) - found_server_op_types.add(server_result.type()) - if server_result.type() == cygrpc.OperationType.receive_message: - self.assertEqual(REQUEST, server_result.message()) - elif server_result.type( - ) == cygrpc.OperationType.receive_close_on_server: - self.assertFalse(server_result.cancelled()) - self.assertEqual( - set([ - cygrpc.OperationType.send_initial_metadata, - cygrpc.OperationType.receive_message, - cygrpc.OperationType.send_message, - cygrpc.OperationType.receive_close_on_server, - cygrpc.OperationType.send_status_from_server - ]), found_server_op_types) - - del client_call - del server_call - - def test_6522(self): - DEADLINE = time.time() + 5 - DEADLINE_TOLERANCE = 0.25 - METHOD = b'twinkies' - - empty_metadata = () - - # Prologue - server_request_tag = object() - self.server.request_call(self.server_completion_queue, - self.server_completion_queue, - server_request_tag) - client_call = self.client_channel.segregated_call( + + server_call_tag = object() + server_call = request_event.call + server_initial_metadata = (( + SERVER_INITIAL_METADATA_KEY, + SERVER_INITIAL_METADATA_VALUE, + ),) + server_trailing_metadata = (( + SERVER_TRAILING_METADATA_KEY, + SERVER_TRAILING_METADATA_VALUE, + ),) + server_start_batch_result = server_call.start_server_batch([ + cygrpc.SendInitialMetadataOperation(server_initial_metadata, + _EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + cygrpc.SendMessageOperation(RESPONSE, _EMPTY_FLAGS), + cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + server_trailing_metadata, SERVER_STATUS_CODE, + SERVER_STATUS_DETAILS, _EMPTY_FLAGS) + ], server_call_tag) + 
self.assertEqual(cygrpc.CallError.ok, server_start_batch_result) + + server_event = self.server_completion_queue.poll(deadline=DEADLINE) + client_event = client_event_future.result() + + self.assertEqual(6, len(client_event.batch_operations)) + found_client_op_types = set() + for client_result in client_event.batch_operations: + # we expect each op type to be unique + self.assertNotIn(client_result.type(), found_client_op_types) + found_client_op_types.add(client_result.type()) + if client_result.type( + ) == cygrpc.OperationType.receive_initial_metadata: + self.assertTrue( + test_common.metadata_transmitted( + server_initial_metadata, + client_result.initial_metadata())) + elif client_result.type() == cygrpc.OperationType.receive_message: + self.assertEqual(RESPONSE, client_result.message()) + elif client_result.type( + ) == cygrpc.OperationType.receive_status_on_client: + self.assertTrue( + test_common.metadata_transmitted( + server_trailing_metadata, + client_result.trailing_metadata())) + self.assertEqual(SERVER_STATUS_DETAILS, client_result.details()) + self.assertEqual(SERVER_STATUS_CODE, client_result.code()) + self.assertEqual( + set([ + cygrpc.OperationType.send_initial_metadata, + cygrpc.OperationType.send_message, + cygrpc.OperationType.send_close_from_client, + cygrpc.OperationType.receive_initial_metadata, + cygrpc.OperationType.receive_message, + cygrpc.OperationType.receive_status_on_client + ]), found_client_op_types) + + self.assertEqual(5, len(server_event.batch_operations)) + found_server_op_types = set() + for server_result in server_event.batch_operations: + self.assertNotIn(server_result.type(), found_server_op_types) + found_server_op_types.add(server_result.type()) + if server_result.type() == cygrpc.OperationType.receive_message: + self.assertEqual(REQUEST, server_result.message()) + elif server_result.type( + ) == cygrpc.OperationType.receive_close_on_server: + self.assertFalse(server_result.cancelled()) + self.assertEqual( + set([ + cygrpc.OperationType.send_initial_metadata, + cygrpc.OperationType.receive_message, + cygrpc.OperationType.send_message, + cygrpc.OperationType.receive_close_on_server, + cygrpc.OperationType.send_status_from_server + ]), found_server_op_types) + + del client_call + del server_call + + def test_6522(self): + DEADLINE = time.time() + 5 + DEADLINE_TOLERANCE = 0.25 + METHOD = b'twinkies' + + empty_metadata = () + + # Prologue + server_request_tag = object() + self.server.request_call(self.server_completion_queue, + self.server_completion_queue, + server_request_tag) + client_call = self.client_channel.segregated_call( 0, METHOD, self.host_argument, DEADLINE, None, None, ([( - [ - cygrpc.SendInitialMetadataOperation(empty_metadata, - _EMPTY_FLAGS), - cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS), - ], - object(), + [ + cygrpc.SendInitialMetadataOperation(empty_metadata, + _EMPTY_FLAGS), + cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS), + ], + object(), ), ( [ @@ -337,86 +337,86 @@ class ServerClientMixin(object): ], object(), )])) - - client_initial_metadata_event_future = test_utilities.SimpleFuture( - client_call.next_event) - - request_event = self.server_completion_queue.poll(deadline=DEADLINE) - server_call = request_event.call - - def perform_server_operations(operations, description): - return self._perform_queue_operations(operations, server_call, - self.server_completion_queue, - DEADLINE, description) - - server_event_future = perform_server_operations([ - cygrpc.SendInitialMetadataOperation(empty_metadata, 
_EMPTY_FLAGS), - ], "Server prologue") - - client_initial_metadata_event_future.result() # force completion - server_event_future.result() - - # Messaging - for _ in range(10): - client_call.operate([ - cygrpc.SendMessageOperation(b'', _EMPTY_FLAGS), - cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), - ], "Client message") - client_message_event_future = test_utilities.SimpleFuture( - client_call.next_event) - server_event_future = perform_server_operations([ - cygrpc.SendMessageOperation(b'', _EMPTY_FLAGS), - cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), - ], "Server receive") - - client_message_event_future.result() # force completion - server_event_future.result() - - # Epilogue - client_call.operate([ - cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), - ], "Client epilogue") - # One for ReceiveStatusOnClient, one for SendCloseFromClient. + + client_initial_metadata_event_future = test_utilities.SimpleFuture( + client_call.next_event) + + request_event = self.server_completion_queue.poll(deadline=DEADLINE) + server_call = request_event.call + + def perform_server_operations(operations, description): + return self._perform_queue_operations(operations, server_call, + self.server_completion_queue, + DEADLINE, description) + + server_event_future = perform_server_operations([ + cygrpc.SendInitialMetadataOperation(empty_metadata, _EMPTY_FLAGS), + ], "Server prologue") + + client_initial_metadata_event_future.result() # force completion + server_event_future.result() + + # Messaging + for _ in range(10): + client_call.operate([ + cygrpc.SendMessageOperation(b'', _EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + ], "Client message") + client_message_event_future = test_utilities.SimpleFuture( + client_call.next_event) + server_event_future = perform_server_operations([ + cygrpc.SendMessageOperation(b'', _EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + ], "Server receive") + + client_message_event_future.result() # force completion + server_event_future.result() + + # Epilogue + client_call.operate([ + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + ], "Client epilogue") + # One for ReceiveStatusOnClient, one for SendCloseFromClient. 
client_events_future = test_utilities.SimpleFuture(lambda: { client_call.next_event(), client_call.next_event(), }) - - server_event_future = perform_server_operations([ - cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), - cygrpc.SendStatusFromServerOperation( - empty_metadata, cygrpc.StatusCode.ok, b'', _EMPTY_FLAGS) - ], "Server epilogue") - - client_events_future.result() # force completion - server_event_future.result() - - -class InsecureServerInsecureClient(unittest.TestCase, ServerClientMixin): - - def setUp(self): - self.setUpMixin(None, None, None) - - def tearDown(self): - self.tearDownMixin() - - -class SecureServerSecureClient(unittest.TestCase, ServerClientMixin): - - def setUp(self): - server_credentials = cygrpc.server_credentials_ssl( - None, [ - cygrpc.SslPemKeyCertPair(resources.private_key(), - resources.certificate_chain()) - ], False) - client_credentials = cygrpc.SSLChannelCredentials( - resources.test_root_certificates(), None, None) - self.setUpMixin(server_credentials, client_credentials, - _SSL_HOST_OVERRIDE) - - def tearDown(self): - self.tearDownMixin() - - -if __name__ == '__main__': - unittest.main(verbosity=2) + + server_event_future = perform_server_operations([ + cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + empty_metadata, cygrpc.StatusCode.ok, b'', _EMPTY_FLAGS) + ], "Server epilogue") + + client_events_future.result() # force completion + server_event_future.result() + + +class InsecureServerInsecureClient(unittest.TestCase, ServerClientMixin): + + def setUp(self): + self.setUpMixin(None, None, None) + + def tearDown(self): + self.tearDownMixin() + + +class SecureServerSecureClient(unittest.TestCase, ServerClientMixin): + + def setUp(self): + server_credentials = cygrpc.server_credentials_ssl( + None, [ + cygrpc.SslPemKeyCertPair(resources.private_key(), + resources.certificate_chain()) + ], False) + client_credentials = cygrpc.SSLChannelCredentials( + resources.test_root_certificates(), None, None) + self.setUpMixin(server_credentials, client_credentials, + _SSL_HOST_OVERRIDE) + + def tearDown(self): + self.tearDownMixin() + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/test_utilities.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/test_utilities.py index 6e01588b0c..7d5eaaaa84 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/test_utilities.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/test_utilities.py @@ -1,52 +1,52 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import threading - -from grpc._cython import cygrpc - - -class SimpleFuture(object): - """A simple future mechanism.""" - - def __init__(self, function, *args, **kwargs): - - def wrapped_function(): - try: - self._result = function(*args, **kwargs) - except Exception as error: # pylint: disable=broad-except - self._error = error - - self._result = None - self._error = None - self._thread = threading.Thread(target=wrapped_function) - self._thread.start() - - def result(self): - """The resulting value of this future. - - Re-raises any exceptions. - """ - self._thread.join() - if self._error: - # TODO(atash): re-raise exceptions in a way that preserves tracebacks - raise self._error # pylint: disable=raising-bad-type - return self._result - - -class CompletionQueuePollFuture(SimpleFuture): - - def __init__(self, completion_queue, deadline): - super(CompletionQueuePollFuture, - self).__init__(lambda: completion_queue.poll(deadline=deadline)) +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import threading + +from grpc._cython import cygrpc + + +class SimpleFuture(object): + """A simple future mechanism.""" + + def __init__(self, function, *args, **kwargs): + + def wrapped_function(): + try: + self._result = function(*args, **kwargs) + except Exception as error: # pylint: disable=broad-except + self._error = error + + self._result = None + self._error = None + self._thread = threading.Thread(target=wrapped_function) + self._thread.start() + + def result(self): + """The resulting value of this future. + + Re-raises any exceptions. + """ + self._thread.join() + if self._error: + # TODO(atash): re-raise exceptions in a way that preserves tracebacks + raise self._error # pylint: disable=raising-bad-type + return self._result + + +class CompletionQueuePollFuture(SimpleFuture): + + def __init__(self, completion_queue, deadline): + super(CompletionQueuePollFuture, + self).__init__(lambda: completion_queue.poll(deadline=deadline)) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py index b8cad97712..43141255f1 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py @@ -1,63 +1,63 @@ -# Copyright 2019 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests for an actual dns resolution.""" - -import unittest -import logging -import six - -import grpc -from tests.unit import test_common -from tests.unit.framework.common import test_constants - -_METHOD = '/ANY/METHOD' -_REQUEST = b'\x00\x00\x00' -_RESPONSE = _REQUEST - - -class GenericHandler(grpc.GenericRpcHandler): - - def service(self, unused_handler_details): - return grpc.unary_unary_rpc_method_handler( - lambda request, unused_context: request, - ) - - -class DNSResolverTest(unittest.TestCase): - - def setUp(self): - self._server = test_common.test_server() - self._server.add_generic_rpc_handlers((GenericHandler(),)) - self._port = self._server.add_insecure_port('[::]:0') - self._server.start() - - def tearDown(self): - self._server.stop(None) - - def test_connect_loopback(self): - # NOTE(https://github.com/grpc/grpc/issues/18422) - # In short, Gevent + C-Ares = Segfault. The C-Ares driver is not - # supported by custom io manager like "gevent" or "libuv". +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for an actual dns resolution.""" + +import unittest +import logging +import six + +import grpc +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_METHOD = '/ANY/METHOD' +_REQUEST = b'\x00\x00\x00' +_RESPONSE = _REQUEST + + +class GenericHandler(grpc.GenericRpcHandler): + + def service(self, unused_handler_details): + return grpc.unary_unary_rpc_method_handler( + lambda request, unused_context: request, + ) + + +class DNSResolverTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + self._server.add_generic_rpc_handlers((GenericHandler(),)) + self._port = self._server.add_insecure_port('[::]:0') + self._server.start() + + def tearDown(self): + self._server.stop(None) + + def test_connect_loopback(self): + # NOTE(https://github.com/grpc/grpc/issues/18422) + # In short, Gevent + C-Ares = Segfault. The C-Ares driver is not + # supported by custom io manager like "gevent" or "libuv". with grpc.insecure_channel('loopback4.unittest.grpc.io:%d' % self._port) as channel: - self.assertEqual( - channel.unary_unary(_METHOD)( - _REQUEST, - timeout=test_constants.SHORT_TIMEOUT, - ), _RESPONSE) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + self.assertEqual( + channel.unary_unary(_METHOD)( + _REQUEST, + timeout=test_constants.SHORT_TIMEOUT, + ), _RESPONSE) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_empty_message_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_empty_message_test.py index 99f8e414a7..f27ea422d0 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_empty_message_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_empty_message_test.py @@ -1,124 +1,124 @@ -# Copyright 2016 gRPC authors. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest -import logging - -import grpc - -from tests.unit import test_common -from tests.unit.framework.common import test_constants - -_REQUEST = b'' -_RESPONSE = b'' - -_UNARY_UNARY = '/test/UnaryUnary' -_UNARY_STREAM = '/test/UnaryStream' -_STREAM_UNARY = '/test/StreamUnary' -_STREAM_STREAM = '/test/StreamStream' - - -def handle_unary_unary(request, servicer_context): - return _RESPONSE - - -def handle_unary_stream(request, servicer_context): - for _ in range(test_constants.STREAM_LENGTH): - yield _RESPONSE - - -def handle_stream_unary(request_iterator, servicer_context): - for request in request_iterator: - pass - return _RESPONSE - - -def handle_stream_stream(request_iterator, servicer_context): - for request in request_iterator: - yield _RESPONSE - - -class _MethodHandler(grpc.RpcMethodHandler): - - def __init__(self, request_streaming, response_streaming): - self.request_streaming = request_streaming - self.response_streaming = response_streaming - self.request_deserializer = None - self.response_serializer = None - self.unary_unary = None - self.unary_stream = None - self.stream_unary = None - self.stream_stream = None - if self.request_streaming and self.response_streaming: - self.stream_stream = handle_stream_stream - elif self.request_streaming: - self.stream_unary = handle_stream_unary - elif self.response_streaming: - self.unary_stream = handle_unary_stream - else: - self.unary_unary = handle_unary_unary - - -class _GenericHandler(grpc.GenericRpcHandler): - - def service(self, handler_call_details): - if handler_call_details.method == _UNARY_UNARY: - return _MethodHandler(False, False) - elif handler_call_details.method == _UNARY_STREAM: - return _MethodHandler(False, True) - elif handler_call_details.method == _STREAM_UNARY: - return _MethodHandler(True, False) - elif handler_call_details.method == _STREAM_STREAM: - return _MethodHandler(True, True) - else: - return None - - -class EmptyMessageTest(unittest.TestCase): - - def setUp(self): - self._server = test_common.test_server() - self._server.add_generic_rpc_handlers((_GenericHandler(),)) - port = self._server.add_insecure_port('[::]:0') - self._server.start() - self._channel = grpc.insecure_channel('localhost:%d' % port) - - def tearDown(self): - self._server.stop(0) - self._channel.close() - - def testUnaryUnary(self): - response = self._channel.unary_unary(_UNARY_UNARY)(_REQUEST) - self.assertEqual(_RESPONSE, response) - - def testUnaryStream(self): - response_iterator = self._channel.unary_stream(_UNARY_STREAM)(_REQUEST) - self.assertSequenceEqual([_RESPONSE] * test_constants.STREAM_LENGTH, - list(response_iterator)) - - def testStreamUnary(self): - response = self._channel.stream_unary(_STREAM_UNARY)(iter( - [_REQUEST] * test_constants.STREAM_LENGTH)) - self.assertEqual(_RESPONSE, response) - - def testStreamStream(self): - response_iterator = self._channel.stream_stream(_STREAM_STREAM)(iter( - [_REQUEST] * test_constants.STREAM_LENGTH)) - 
self.assertSequenceEqual([_RESPONSE] * test_constants.STREAM_LENGTH, - list(response_iterator)) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import logging + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_REQUEST = b'' +_RESPONSE = b'' + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + + +def handle_unary_unary(request, servicer_context): + return _RESPONSE + + +def handle_unary_stream(request, servicer_context): + for _ in range(test_constants.STREAM_LENGTH): + yield _RESPONSE + + +def handle_stream_unary(request_iterator, servicer_context): + for request in request_iterator: + pass + return _RESPONSE + + +def handle_stream_stream(request_iterator, servicer_context): + for request in request_iterator: + yield _RESPONSE + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + if self.request_streaming and self.response_streaming: + self.stream_stream = handle_stream_stream + elif self.request_streaming: + self.stream_unary = handle_stream_unary + elif self.response_streaming: + self.unary_stream = handle_unary_stream + else: + self.unary_unary = handle_unary_unary + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(False, False) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(False, True) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(True, False) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(True, True) + else: + return None + + +class EmptyMessageTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + self._server.add_generic_rpc_handlers((_GenericHandler(),)) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(0) + self._channel.close() + + def testUnaryUnary(self): + response = self._channel.unary_unary(_UNARY_UNARY)(_REQUEST) + self.assertEqual(_RESPONSE, response) + + def testUnaryStream(self): + response_iterator = self._channel.unary_stream(_UNARY_STREAM)(_REQUEST) + self.assertSequenceEqual([_RESPONSE] * test_constants.STREAM_LENGTH, + list(response_iterator)) + + def testStreamUnary(self): + response = self._channel.stream_unary(_STREAM_UNARY)(iter( + [_REQUEST] * test_constants.STREAM_LENGTH)) + 
self.assertEqual(_RESPONSE, response) + + def testStreamStream(self): + response_iterator = self._channel.stream_stream(_STREAM_STREAM)(iter( + [_REQUEST] * test_constants.STREAM_LENGTH)) + self.assertSequenceEqual([_RESPONSE] * test_constants.STREAM_LENGTH, + list(response_iterator)) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py index 1b04e87c35..e58007ad3e 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py @@ -1,87 +1,87 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests 'utf-8' encoded error message.""" - -import unittest -import weakref - -import grpc - -from tests.unit import test_common -from tests.unit.framework.common import test_constants - -_UNICODE_ERROR_MESSAGES = [ - b'\xe2\x80\x9d'.decode('utf-8'), - b'abc\x80\xd0\xaf'.decode('latin-1'), - b'\xc3\xa9'.decode('utf-8'), -] - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x00\x00\x00' - -_UNARY_UNARY = '/test/UnaryUnary' - - -class _MethodHandler(grpc.RpcMethodHandler): - - def __init__(self, request_streaming=None, response_streaming=None): - self.request_streaming = request_streaming - self.response_streaming = response_streaming - self.request_deserializer = None - self.response_serializer = None - self.unary_stream = None - self.stream_unary = None - self.stream_stream = None - - def unary_unary(self, request, servicer_context): - servicer_context.set_code(grpc.StatusCode.UNKNOWN) - servicer_context.set_details(request.decode('utf-8')) - return _RESPONSE - - -class _GenericHandler(grpc.GenericRpcHandler): - - def __init__(self, test): - self._test = test - - def service(self, handler_call_details): - return _MethodHandler() - - -class ErrorMessageEncodingTest(unittest.TestCase): - - def setUp(self): - self._server = test_common.test_server() +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests 'utf-8' encoded error message.""" + +import unittest +import weakref + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_UNICODE_ERROR_MESSAGES = [ + b'\xe2\x80\x9d'.decode('utf-8'), + b'abc\x80\xd0\xaf'.decode('latin-1'), + b'\xc3\xa9'.decode('utf-8'), +] + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_UNARY_UNARY = '/test/UnaryUnary' + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming=None, response_streaming=None): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + + def unary_unary(self, request, servicer_context): + servicer_context.set_code(grpc.StatusCode.UNKNOWN) + servicer_context.set_details(request.decode('utf-8')) + return _RESPONSE + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, test): + self._test = test + + def service(self, handler_call_details): + return _MethodHandler() + + +class ErrorMessageEncodingTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() self._server.add_generic_rpc_handlers( (_GenericHandler(weakref.proxy(self)),)) - port = self._server.add_insecure_port('[::]:0') - self._server.start() - self._channel = grpc.insecure_channel('localhost:%d' % port) - - def tearDown(self): - self._server.stop(0) - self._channel.close() - - def testMessageEncoding(self): - for message in _UNICODE_ERROR_MESSAGES: - multi_callable = self._channel.unary_unary(_UNARY_UNARY) - with self.assertRaises(grpc.RpcError) as cm: - multi_callable(message.encode('utf-8')) - - self.assertEqual(cm.exception.code(), grpc.StatusCode.UNKNOWN) - self.assertEqual(cm.exception.details(), message) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(0) + self._channel.close() + + def testMessageEncoding(self): + for message in _UNICODE_ERROR_MESSAGES: + multi_callable = self._channel.unary_unary(_UNARY_UNARY) + with self.assertRaises(grpc.RpcError) as cm: + multi_callable(message.encode('utf-8')) + + self.assertEqual(cm.exception.code(), grpc.StatusCode.UNKNOWN) + self.assertEqual(cm.exception.details(), message) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py index b57af76d87..48ea054d2d 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py @@ -1,236 +1,236 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Defines a number of module-scope gRPC scenarios to test clean exit.""" - -import argparse -import threading -import time -import logging - -import grpc - -from tests.unit.framework.common import test_constants - -WAIT_TIME = 1000 - -REQUEST = b'request' - -UNSTARTED_SERVER = 'unstarted_server' -RUNNING_SERVER = 'running_server' -POLL_CONNECTIVITY_NO_SERVER = 'poll_connectivity_no_server' -POLL_CONNECTIVITY = 'poll_connectivity' -IN_FLIGHT_UNARY_UNARY_CALL = 'in_flight_unary_unary_call' -IN_FLIGHT_UNARY_STREAM_CALL = 'in_flight_unary_stream_call' -IN_FLIGHT_STREAM_UNARY_CALL = 'in_flight_stream_unary_call' -IN_FLIGHT_STREAM_STREAM_CALL = 'in_flight_stream_stream_call' -IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL = 'in_flight_partial_unary_stream_call' -IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL = 'in_flight_partial_stream_unary_call' -IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL = 'in_flight_partial_stream_stream_call' - -UNARY_UNARY = b'/test/UnaryUnary' -UNARY_STREAM = b'/test/UnaryStream' -STREAM_UNARY = b'/test/StreamUnary' -STREAM_STREAM = b'/test/StreamStream' -PARTIAL_UNARY_STREAM = b'/test/PartialUnaryStream' -PARTIAL_STREAM_UNARY = b'/test/PartialStreamUnary' -PARTIAL_STREAM_STREAM = b'/test/PartialStreamStream' - -TEST_TO_METHOD = { - IN_FLIGHT_UNARY_UNARY_CALL: UNARY_UNARY, - IN_FLIGHT_UNARY_STREAM_CALL: UNARY_STREAM, - IN_FLIGHT_STREAM_UNARY_CALL: STREAM_UNARY, - IN_FLIGHT_STREAM_STREAM_CALL: STREAM_STREAM, - IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL: PARTIAL_UNARY_STREAM, - IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL: PARTIAL_STREAM_UNARY, - IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL: PARTIAL_STREAM_STREAM, -} - - -def hang_unary_unary(request, servicer_context): - time.sleep(WAIT_TIME) - - -def hang_unary_stream(request, servicer_context): - time.sleep(WAIT_TIME) - - -def hang_partial_unary_stream(request, servicer_context): - for _ in range(test_constants.STREAM_LENGTH // 2): - yield request - time.sleep(WAIT_TIME) - - -def hang_stream_unary(request_iterator, servicer_context): - time.sleep(WAIT_TIME) - - -def hang_partial_stream_unary(request_iterator, servicer_context): - for _ in range(test_constants.STREAM_LENGTH // 2): - next(request_iterator) - time.sleep(WAIT_TIME) - - -def hang_stream_stream(request_iterator, servicer_context): - time.sleep(WAIT_TIME) - - -def hang_partial_stream_stream(request_iterator, servicer_context): - for _ in range(test_constants.STREAM_LENGTH // 2): - yield next(request_iterator) #pylint: disable=stop-iteration-return - time.sleep(WAIT_TIME) - - -class MethodHandler(grpc.RpcMethodHandler): - - def __init__(self, request_streaming, response_streaming, partial_hang): - self.request_streaming = request_streaming - self.response_streaming = response_streaming - self.request_deserializer = None - self.response_serializer = None - self.unary_unary = None - self.unary_stream = None - self.stream_unary = None - self.stream_stream = None - if self.request_streaming and self.response_streaming: - if partial_hang: - self.stream_stream = hang_partial_stream_stream - else: - self.stream_stream = hang_stream_stream - elif self.request_streaming: - if partial_hang: - self.stream_unary = hang_partial_stream_unary - else: - self.stream_unary = hang_stream_unary - elif self.response_streaming: - if partial_hang: - self.unary_stream = hang_partial_unary_stream - else: - self.unary_stream = hang_unary_stream - else: - self.unary_unary = hang_unary_unary - - -class GenericHandler(grpc.GenericRpcHandler): - - def service(self, handler_call_details): - if handler_call_details.method == UNARY_UNARY: - 
return MethodHandler(False, False, False) - elif handler_call_details.method == UNARY_STREAM: - return MethodHandler(False, True, False) - elif handler_call_details.method == STREAM_UNARY: - return MethodHandler(True, False, False) - elif handler_call_details.method == STREAM_STREAM: - return MethodHandler(True, True, False) - elif handler_call_details.method == PARTIAL_UNARY_STREAM: - return MethodHandler(False, True, True) - elif handler_call_details.method == PARTIAL_STREAM_UNARY: - return MethodHandler(True, False, True) - elif handler_call_details.method == PARTIAL_STREAM_STREAM: - return MethodHandler(True, True, True) - else: - return None - - -# Traditional executors will not exit until all their -# current jobs complete. Because we submit jobs that will -# never finish, we don't want to block exit on these jobs. -class DaemonPool(object): - - def submit(self, fn, *args, **kwargs): - thread = threading.Thread(target=fn, args=args, kwargs=kwargs) - thread.daemon = True - thread.start() - - def shutdown(self, wait=True): - pass - - -def infinite_request_iterator(): - while True: - yield REQUEST - - -if __name__ == '__main__': - logging.basicConfig() - parser = argparse.ArgumentParser() - parser.add_argument('scenario', type=str) +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Defines a number of module-scope gRPC scenarios to test clean exit.""" + +import argparse +import threading +import time +import logging + +import grpc + +from tests.unit.framework.common import test_constants + +WAIT_TIME = 1000 + +REQUEST = b'request' + +UNSTARTED_SERVER = 'unstarted_server' +RUNNING_SERVER = 'running_server' +POLL_CONNECTIVITY_NO_SERVER = 'poll_connectivity_no_server' +POLL_CONNECTIVITY = 'poll_connectivity' +IN_FLIGHT_UNARY_UNARY_CALL = 'in_flight_unary_unary_call' +IN_FLIGHT_UNARY_STREAM_CALL = 'in_flight_unary_stream_call' +IN_FLIGHT_STREAM_UNARY_CALL = 'in_flight_stream_unary_call' +IN_FLIGHT_STREAM_STREAM_CALL = 'in_flight_stream_stream_call' +IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL = 'in_flight_partial_unary_stream_call' +IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL = 'in_flight_partial_stream_unary_call' +IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL = 'in_flight_partial_stream_stream_call' + +UNARY_UNARY = b'/test/UnaryUnary' +UNARY_STREAM = b'/test/UnaryStream' +STREAM_UNARY = b'/test/StreamUnary' +STREAM_STREAM = b'/test/StreamStream' +PARTIAL_UNARY_STREAM = b'/test/PartialUnaryStream' +PARTIAL_STREAM_UNARY = b'/test/PartialStreamUnary' +PARTIAL_STREAM_STREAM = b'/test/PartialStreamStream' + +TEST_TO_METHOD = { + IN_FLIGHT_UNARY_UNARY_CALL: UNARY_UNARY, + IN_FLIGHT_UNARY_STREAM_CALL: UNARY_STREAM, + IN_FLIGHT_STREAM_UNARY_CALL: STREAM_UNARY, + IN_FLIGHT_STREAM_STREAM_CALL: STREAM_STREAM, + IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL: PARTIAL_UNARY_STREAM, + IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL: PARTIAL_STREAM_UNARY, + IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL: PARTIAL_STREAM_STREAM, +} + + +def hang_unary_unary(request, servicer_context): + time.sleep(WAIT_TIME) + + +def hang_unary_stream(request, servicer_context): + time.sleep(WAIT_TIME) + + +def hang_partial_unary_stream(request, servicer_context): + for _ in range(test_constants.STREAM_LENGTH // 2): + yield request + time.sleep(WAIT_TIME) + + +def hang_stream_unary(request_iterator, servicer_context): + time.sleep(WAIT_TIME) + + +def hang_partial_stream_unary(request_iterator, servicer_context): + for _ in range(test_constants.STREAM_LENGTH // 2): + next(request_iterator) + time.sleep(WAIT_TIME) + + +def hang_stream_stream(request_iterator, servicer_context): + time.sleep(WAIT_TIME) + + +def hang_partial_stream_stream(request_iterator, servicer_context): + for _ in range(test_constants.STREAM_LENGTH // 2): + yield next(request_iterator) #pylint: disable=stop-iteration-return + time.sleep(WAIT_TIME) + + +class MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming, partial_hang): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + if self.request_streaming and self.response_streaming: + if partial_hang: + self.stream_stream = hang_partial_stream_stream + else: + self.stream_stream = hang_stream_stream + elif self.request_streaming: + if partial_hang: + self.stream_unary = hang_partial_stream_unary + else: + self.stream_unary = hang_stream_unary + elif self.response_streaming: + if partial_hang: + self.unary_stream = hang_partial_unary_stream + else: + self.unary_stream = hang_unary_stream + else: + self.unary_unary = hang_unary_unary + + +class GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == UNARY_UNARY: + 
return MethodHandler(False, False, False) + elif handler_call_details.method == UNARY_STREAM: + return MethodHandler(False, True, False) + elif handler_call_details.method == STREAM_UNARY: + return MethodHandler(True, False, False) + elif handler_call_details.method == STREAM_STREAM: + return MethodHandler(True, True, False) + elif handler_call_details.method == PARTIAL_UNARY_STREAM: + return MethodHandler(False, True, True) + elif handler_call_details.method == PARTIAL_STREAM_UNARY: + return MethodHandler(True, False, True) + elif handler_call_details.method == PARTIAL_STREAM_STREAM: + return MethodHandler(True, True, True) + else: + return None + + +# Traditional executors will not exit until all their +# current jobs complete. Because we submit jobs that will +# never finish, we don't want to block exit on these jobs. +class DaemonPool(object): + + def submit(self, fn, *args, **kwargs): + thread = threading.Thread(target=fn, args=args, kwargs=kwargs) + thread.daemon = True + thread.start() + + def shutdown(self, wait=True): + pass + + +def infinite_request_iterator(): + while True: + yield REQUEST + + +if __name__ == '__main__': + logging.basicConfig() + parser = argparse.ArgumentParser() + parser.add_argument('scenario', type=str) parser.add_argument('--wait_for_interrupt', dest='wait_for_interrupt', action='store_true') - args = parser.parse_args() - - if args.scenario == UNSTARTED_SERVER: - server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) - if args.wait_for_interrupt: - time.sleep(WAIT_TIME) - elif args.scenario == RUNNING_SERVER: - server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) - port = server.add_insecure_port('[::]:0') - server.start() - if args.wait_for_interrupt: - time.sleep(WAIT_TIME) - elif args.scenario == POLL_CONNECTIVITY_NO_SERVER: - channel = grpc.insecure_channel('localhost:12345') - - def connectivity_callback(connectivity): - pass - - channel.subscribe(connectivity_callback, try_to_connect=True) - if args.wait_for_interrupt: - time.sleep(WAIT_TIME) - elif args.scenario == POLL_CONNECTIVITY: - server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) - port = server.add_insecure_port('[::]:0') - server.start() - channel = grpc.insecure_channel('localhost:%d' % port) - - def connectivity_callback(connectivity): - pass - - channel.subscribe(connectivity_callback, try_to_connect=True) - if args.wait_for_interrupt: - time.sleep(WAIT_TIME) - - else: - handler = GenericHandler() - server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) - port = server.add_insecure_port('[::]:0') - server.add_generic_rpc_handlers((handler,)) - server.start() - channel = grpc.insecure_channel('localhost:%d' % port) - - method = TEST_TO_METHOD[args.scenario] - - if args.scenario == IN_FLIGHT_UNARY_UNARY_CALL: - multi_callable = channel.unary_unary(method) - future = multi_callable.future(REQUEST) - result, call = multi_callable.with_call(REQUEST) - elif (args.scenario == IN_FLIGHT_UNARY_STREAM_CALL or - args.scenario == IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL): - multi_callable = channel.unary_stream(method) - response_iterator = multi_callable(REQUEST) - for response in response_iterator: - pass - elif (args.scenario == IN_FLIGHT_STREAM_UNARY_CALL or - args.scenario == IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL): - multi_callable = channel.stream_unary(method) - future = multi_callable.future(infinite_request_iterator()) - result, call = multi_callable.with_call( - iter([REQUEST] * test_constants.STREAM_LENGTH)) - elif 
(args.scenario == IN_FLIGHT_STREAM_STREAM_CALL or - args.scenario == IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL): - multi_callable = channel.stream_stream(method) - response_iterator = multi_callable(infinite_request_iterator()) - for response in response_iterator: - pass + args = parser.parse_args() + + if args.scenario == UNSTARTED_SERVER: + server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) + if args.wait_for_interrupt: + time.sleep(WAIT_TIME) + elif args.scenario == RUNNING_SERVER: + server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) + port = server.add_insecure_port('[::]:0') + server.start() + if args.wait_for_interrupt: + time.sleep(WAIT_TIME) + elif args.scenario == POLL_CONNECTIVITY_NO_SERVER: + channel = grpc.insecure_channel('localhost:12345') + + def connectivity_callback(connectivity): + pass + + channel.subscribe(connectivity_callback, try_to_connect=True) + if args.wait_for_interrupt: + time.sleep(WAIT_TIME) + elif args.scenario == POLL_CONNECTIVITY: + server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) + port = server.add_insecure_port('[::]:0') + server.start() + channel = grpc.insecure_channel('localhost:%d' % port) + + def connectivity_callback(connectivity): + pass + + channel.subscribe(connectivity_callback, try_to_connect=True) + if args.wait_for_interrupt: + time.sleep(WAIT_TIME) + + else: + handler = GenericHandler() + server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) + port = server.add_insecure_port('[::]:0') + server.add_generic_rpc_handlers((handler,)) + server.start() + channel = grpc.insecure_channel('localhost:%d' % port) + + method = TEST_TO_METHOD[args.scenario] + + if args.scenario == IN_FLIGHT_UNARY_UNARY_CALL: + multi_callable = channel.unary_unary(method) + future = multi_callable.future(REQUEST) + result, call = multi_callable.with_call(REQUEST) + elif (args.scenario == IN_FLIGHT_UNARY_STREAM_CALL or + args.scenario == IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL): + multi_callable = channel.unary_stream(method) + response_iterator = multi_callable(REQUEST) + for response in response_iterator: + pass + elif (args.scenario == IN_FLIGHT_STREAM_UNARY_CALL or + args.scenario == IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL): + multi_callable = channel.stream_unary(method) + future = multi_callable.future(infinite_request_iterator()) + result, call = multi_callable.with_call( + iter([REQUEST] * test_constants.STREAM_LENGTH)) + elif (args.scenario == IN_FLIGHT_STREAM_STREAM_CALL or + args.scenario == IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL): + multi_callable = channel.stream_stream(method) + response_iterator = multi_callable(infinite_request_iterator()) + for response in response_iterator: + pass diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py index 06b32af34e..4cf5ab63bd 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py @@ -1,65 +1,65 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests clean exit of server/client on Python Interpreter exit/sigint. - -The tests in this module spawn a subprocess for each test case, the -test is considered successful if it doesn't hang/timeout. -""" - -import atexit -import os -import signal -import six -import subprocess -import sys -import threading +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests clean exit of server/client on Python Interpreter exit/sigint. + +The tests in this module spawn a subprocess for each test case, the +test is considered successful if it doesn't hang/timeout. +""" + +import atexit +import os +import signal +import six +import subprocess +import sys +import threading import datetime -import time -import unittest -import logging - -from tests.unit import _exit_scenarios - -# SCENARIO_FILE = os.path.abspath( +import time +import unittest +import logging + +from tests.unit import _exit_scenarios + +# SCENARIO_FILE = os.path.abspath( # os.path.join(os.path.dirname(os.path.realpath(__file__)), # '_exit_scenarios.py')) -INTERPRETER = sys.executable -BASE_COMMAND = [INTERPRETER, '-m', 'tests.unit._exit_scenarios'] -BASE_SIGTERM_COMMAND = BASE_COMMAND + ['--wait_for_interrupt'] - +INTERPRETER = sys.executable +BASE_COMMAND = [INTERPRETER, '-m', 'tests.unit._exit_scenarios'] +BASE_SIGTERM_COMMAND = BASE_COMMAND + ['--wait_for_interrupt'] + INIT_TIME = datetime.timedelta(seconds=1) WAIT_CHECK_INTERVAL = datetime.timedelta(milliseconds=100) WAIT_CHECK_DEFAULT_TIMEOUT = datetime.timedelta(seconds=5) - -processes = [] -process_lock = threading.Lock() - - -# Make sure we attempt to clean up any -# processes we may have left running -def cleanup_processes(): - with process_lock: - for process in processes: - try: - process.kill() - except Exception: # pylint: disable=broad-except - pass - - -atexit.register(cleanup_processes) - - + +processes = [] +process_lock = threading.Lock() + + +# Make sure we attempt to clean up any +# processes we may have left running +def cleanup_processes(): + with process_lock: + for process in processes: + try: + process.kill() + except Exception: # pylint: disable=broad-except + pass + + +atexit.register(cleanup_processes) + + def _process_wait_with_timeout(process, timeout=WAIT_CHECK_DEFAULT_TIMEOUT): """A funciton to mimic 3.3+ only timeout argument in process.wait.""" deadline = datetime.datetime.now() + timeout @@ -69,193 +69,193 @@ def _process_wait_with_timeout(process, timeout=WAIT_CHECK_DEFAULT_TIMEOUT): raise RuntimeError('Process failed to exit within %s' % timeout) -def interrupt_and_wait(process): - with process_lock: - 
processes.append(process) +def interrupt_and_wait(process): + with process_lock: + processes.append(process) time.sleep(INIT_TIME.total_seconds()) - os.kill(process.pid, signal.SIGINT) + os.kill(process.pid, signal.SIGINT) _process_wait_with_timeout(process) - - -def wait(process): - with process_lock: - processes.append(process) + + +def wait(process): + with process_lock: + processes.append(process) _process_wait_with_timeout(process) - - + + # TODO(lidiz) enable exit tests once the root cause found. @unittest.skip('https://github.com/grpc/grpc/issues/23982') @unittest.skip('https://github.com/grpc/grpc/issues/23028') -class ExitTest(unittest.TestCase): - - def test_unstarted_server(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' +class ExitTest(unittest.TestCase): + + def test_unstarted_server(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' process = subprocess.Popen(BASE_COMMAND + [_exit_scenarios.UNSTARTED_SERVER], stdout=sys.stdout, stderr=sys.stderr, env=env) - wait(process) - - def test_unstarted_server_terminate(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' + wait(process) + + def test_unstarted_server_terminate(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' process = subprocess.Popen(BASE_SIGTERM_COMMAND + [_exit_scenarios.UNSTARTED_SERVER], stdout=sys.stdout, env=env) - interrupt_and_wait(process) - - def test_running_server(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' + interrupt_and_wait(process) + + def test_running_server(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' process = subprocess.Popen(BASE_COMMAND + [_exit_scenarios.RUNNING_SERVER], stdout=sys.stdout, stderr=sys.stderr, env=env) - wait(process) - - def test_running_server_terminate(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' + wait(process) + + def test_running_server_terminate(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' process = subprocess.Popen(BASE_SIGTERM_COMMAND + [_exit_scenarios.RUNNING_SERVER], stdout=sys.stdout, stderr=sys.stderr, env=env) - interrupt_and_wait(process) - - def test_poll_connectivity_no_server(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_COMMAND + [_exit_scenarios.POLL_CONNECTIVITY_NO_SERVER], - stdout=sys.stdout, - stderr=sys.stderr, - env=env) - wait(process) - - def test_poll_connectivity_no_server_terminate(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_SIGTERM_COMMAND + - [_exit_scenarios.POLL_CONNECTIVITY_NO_SERVER], - stdout=sys.stdout, - stderr=sys.stderr, - env=env) - interrupt_and_wait(process) - - def test_poll_connectivity(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' + interrupt_and_wait(process) + + def test_poll_connectivity_no_server(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_exit_scenarios.POLL_CONNECTIVITY_NO_SERVER], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + def test_poll_connectivity_no_server_terminate(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_SIGTERM_COMMAND + + [_exit_scenarios.POLL_CONNECTIVITY_NO_SERVER], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + def test_poll_connectivity(self): + 
env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' process = subprocess.Popen(BASE_COMMAND + [_exit_scenarios.POLL_CONNECTIVITY], stdout=sys.stdout, stderr=sys.stderr, env=env) - wait(process) - - def test_poll_connectivity_terminate(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' + wait(process) + + def test_poll_connectivity_terminate(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' process = subprocess.Popen(BASE_SIGTERM_COMMAND + [_exit_scenarios.POLL_CONNECTIVITY], stdout=sys.stdout, stderr=sys.stderr, env=env) - interrupt_and_wait(process) - - @unittest.skipIf(os.name == 'nt', - 'os.kill does not have required permission on Windows') - def test_in_flight_unary_unary_call(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_unary_unary_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' process = subprocess.Popen(BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_UNARY_UNARY_CALL], stdout=sys.stdout, stderr=sys.stderr, env=env) - interrupt_and_wait(process) - - @unittest.skipIf(os.name == 'nt', - 'os.kill does not have required permission on Windows') - def test_in_flight_unary_stream_call(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_UNARY_STREAM_CALL], - stdout=sys.stdout, - stderr=sys.stderr, - env=env) - interrupt_and_wait(process) - - @unittest.skipIf(os.name == 'nt', - 'os.kill does not have required permission on Windows') - def test_in_flight_stream_unary_call(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_STREAM_UNARY_CALL], - stdout=sys.stdout, - stderr=sys.stderr, - env=env) - interrupt_and_wait(process) - - @unittest.skipIf(os.name == 'nt', - 'os.kill does not have required permission on Windows') - def test_in_flight_stream_stream_call(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_STREAM_STREAM_CALL], - stdout=sys.stdout, - stderr=sys.stderr, - env=env) - interrupt_and_wait(process) - - @unittest.skipIf(os.name == 'nt', - 'os.kill does not have required permission on Windows') - def test_in_flight_partial_unary_stream_call(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_COMMAND + - [_exit_scenarios.IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL], - stdout=sys.stdout, - stderr=sys.stderr, - env=env) - interrupt_and_wait(process) - - @unittest.skipIf(os.name == 'nt', - 'os.kill does not have required permission on Windows') - def test_in_flight_partial_stream_unary_call(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_COMMAND + - [_exit_scenarios.IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL], - stdout=sys.stdout, - stderr=sys.stderr, - env=env) - interrupt_and_wait(process) - - @unittest.skipIf(os.name == 'nt', - 'os.kill does not have required permission on Windows') - def test_in_flight_partial_stream_stream_call(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_COMMAND + - [_exit_scenarios.IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL], - stdout=sys.stdout, - stderr=sys.stderr, - 
env=env) - interrupt_and_wait(process) - - -if __name__ == '__main__': + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_unary_stream_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_UNARY_STREAM_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_stream_unary_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_STREAM_UNARY_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_stream_stream_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_STREAM_STREAM_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_partial_unary_stream_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + + [_exit_scenarios.IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_partial_stream_unary_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + + [_exit_scenarios.IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_partial_stream_stream_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + + [_exit_scenarios.IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + +if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) - unittest.main(verbosity=2) + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_from_grpc_import_star.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_from_grpc_import_star.py index 022ec3aa2a..1ada25382d 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_from_grpc_import_star.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_from_grpc_import_star.py @@ -1,23 +1,23 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -_BEFORE_IMPORT = tuple(globals()) - -from grpc import * # pylint: disable=wildcard-import,unused-wildcard-import - -_AFTER_IMPORT = tuple(globals()) - -GRPC_ELEMENTS = tuple( - element for element in _AFTER_IMPORT - if element not in _BEFORE_IMPORT and element != '_BEFORE_IMPORT') +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +_BEFORE_IMPORT = tuple(globals()) + +from grpc import * # pylint: disable=wildcard-import,unused-wildcard-import + +_AFTER_IMPORT = tuple(globals()) + +GRPC_ELEMENTS = tuple( + element for element in _AFTER_IMPORT + if element not in _BEFORE_IMPORT and element != '_BEFORE_IMPORT') diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py index 3a2bed66c0..1c4890b97f 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py @@ -1,54 +1,54 @@ -# Copyright 2019 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests the gRPC Core shutdown path.""" - -import time -import threading -import unittest -import datetime - -import grpc - -_TIMEOUT_FOR_SEGFAULT = datetime.timedelta(seconds=10) - - -class GrpcShutdownTest(unittest.TestCase): - - def test_channel_close_with_connectivity_watcher(self): - """Originated by https://github.com/grpc/grpc/issues/20299. - - The grpc_shutdown happens synchronously, but there might be Core object - references left in Cython which might lead to ABORT or SIGSEGV. - """ - connection_failed = threading.Event() - - def on_state_change(state): - if state in (grpc.ChannelConnectivity.TRANSIENT_FAILURE, - grpc.ChannelConnectivity.SHUTDOWN): - connection_failed.set() - - # Connects to an void address, and subscribes state changes - channel = grpc.insecure_channel("0.1.1.1:12345") - channel.subscribe(on_state_change, True) - - deadline = datetime.datetime.now() + _TIMEOUT_FOR_SEGFAULT - - while datetime.datetime.now() < deadline: - time.sleep(0.1) - if connection_failed.is_set(): - channel.close() - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests the gRPC Core shutdown path.""" + +import time +import threading +import unittest +import datetime + +import grpc + +_TIMEOUT_FOR_SEGFAULT = datetime.timedelta(seconds=10) + + +class GrpcShutdownTest(unittest.TestCase): + + def test_channel_close_with_connectivity_watcher(self): + """Originated by https://github.com/grpc/grpc/issues/20299. + + The grpc_shutdown happens synchronously, but there might be Core object + references left in Cython which might lead to ABORT or SIGSEGV. + """ + connection_failed = threading.Event() + + def on_state_change(state): + if state in (grpc.ChannelConnectivity.TRANSIENT_FAILURE, + grpc.ChannelConnectivity.SHUTDOWN): + connection_failed.set() + + # Connects to an void address, and subscribes state changes + channel = grpc.insecure_channel("0.1.1.1:12345") + channel.subscribe(on_state_change, True) + + deadline = datetime.datetime.now() + _TIMEOUT_FOR_SEGFAULT + + while datetime.datetime.now() < deadline: + time.sleep(0.1) + if connection_failed.is_set(): + channel.close() + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py index 621a9efc70..619db7b3ff 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py @@ -1,356 +1,356 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of gRPC Python interceptors.""" - -import collections -import itertools -import threading -import unittest -import logging -import os -from concurrent import futures - -import grpc -from grpc.framework.foundation import logging_pool - -from tests.unit import test_common -from tests.unit.framework.common import test_constants -from tests.unit.framework.common import test_control - -_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2 -_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:] -_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3 -_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3] - -_EXCEPTION_REQUEST = b'\x09\x0a' - -_UNARY_UNARY = '/test/UnaryUnary' -_UNARY_STREAM = '/test/UnaryStream' -_STREAM_UNARY = '/test/StreamUnary' -_STREAM_STREAM = '/test/StreamStream' - - -class _ApplicationErrorStandin(Exception): - pass - - -class _Callback(object): - - def __init__(self): - self._condition = threading.Condition() - self._value = None - self._called = False - - def __call__(self, value): - with self._condition: - self._value = value - self._called = True - self._condition.notify_all() - - def value(self): - with self._condition: - while not self._called: - self._condition.wait() - return self._value - - -class _Handler(object): - - def __init__(self, control): - self._control = control - - def handle_unary_unary(self, request, servicer_context): - self._control.control() - if servicer_context is not None: - servicer_context.set_trailing_metadata((( - 'testkey', - 'testvalue', - ),)) - if request == _EXCEPTION_REQUEST: - raise _ApplicationErrorStandin() - return request - - def handle_unary_stream(self, request, servicer_context): - if request == _EXCEPTION_REQUEST: - raise _ApplicationErrorStandin() - for _ in range(test_constants.STREAM_LENGTH): - self._control.control() - yield request - self._control.control() - if servicer_context is not None: - servicer_context.set_trailing_metadata((( - 'testkey', - 'testvalue', - ),)) - - def handle_stream_unary(self, request_iterator, servicer_context): - if servicer_context is not None: - servicer_context.invocation_metadata() - self._control.control() - response_elements = [] - for request in request_iterator: - self._control.control() - response_elements.append(request) - self._control.control() - if servicer_context is not None: - servicer_context.set_trailing_metadata((( - 'testkey', - 'testvalue', - ),)) - if _EXCEPTION_REQUEST in response_elements: - raise _ApplicationErrorStandin() - return b''.join(response_elements) - - def handle_stream_stream(self, request_iterator, servicer_context): - self._control.control() - if servicer_context is not None: - servicer_context.set_trailing_metadata((( - 'testkey', - 'testvalue', - ),)) - for request in request_iterator: - if request == _EXCEPTION_REQUEST: - raise _ApplicationErrorStandin() - self._control.control() - yield request - self._control.control() - - -class _MethodHandler(grpc.RpcMethodHandler): - - def __init__(self, request_streaming, response_streaming, - request_deserializer, response_serializer, unary_unary, - unary_stream, stream_unary, stream_stream): - self.request_streaming = request_streaming - self.response_streaming = response_streaming - self.request_deserializer = request_deserializer - self.response_serializer = response_serializer - self.unary_unary = unary_unary - self.unary_stream = unary_stream - self.stream_unary = stream_unary - self.stream_stream = stream_stream - - -class 
_GenericHandler(grpc.GenericRpcHandler): - - def __init__(self, handler): - self._handler = handler - - def service(self, handler_call_details): - if handler_call_details.method == _UNARY_UNARY: - return _MethodHandler(False, False, None, None, - self._handler.handle_unary_unary, None, None, - None) - elif handler_call_details.method == _UNARY_STREAM: - return _MethodHandler(False, True, _DESERIALIZE_REQUEST, - _SERIALIZE_RESPONSE, None, - self._handler.handle_unary_stream, None, None) - elif handler_call_details.method == _STREAM_UNARY: - return _MethodHandler(True, False, _DESERIALIZE_REQUEST, - _SERIALIZE_RESPONSE, None, None, - self._handler.handle_stream_unary, None) - elif handler_call_details.method == _STREAM_STREAM: - return _MethodHandler(True, True, None, None, None, None, None, - self._handler.handle_stream_stream) - else: - return None - - -def _unary_unary_multi_callable(channel): - return channel.unary_unary(_UNARY_UNARY) - - -def _unary_stream_multi_callable(channel): +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test of gRPC Python interceptors.""" + +import collections +import itertools +import threading +import unittest +import logging +import os +from concurrent import futures + +import grpc +from grpc.framework.foundation import logging_pool + +from tests.unit import test_common +from tests.unit.framework.common import test_constants +from tests.unit.framework.common import test_control + +_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2 +_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:] +_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3 +_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3] + +_EXCEPTION_REQUEST = b'\x09\x0a' + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + + +class _ApplicationErrorStandin(Exception): + pass + + +class _Callback(object): + + def __init__(self): + self._condition = threading.Condition() + self._value = None + self._called = False + + def __call__(self, value): + with self._condition: + self._value = value + self._called = True + self._condition.notify_all() + + def value(self): + with self._condition: + while not self._called: + self._condition.wait() + return self._value + + +class _Handler(object): + + def __init__(self, control): + self._control = control + + def handle_unary_unary(self, request, servicer_context): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + if request == _EXCEPTION_REQUEST: + raise _ApplicationErrorStandin() + return request + + def handle_unary_stream(self, request, servicer_context): + if request == _EXCEPTION_REQUEST: + raise _ApplicationErrorStandin() + for _ in range(test_constants.STREAM_LENGTH): + self._control.control() + yield request + self._control.control() + if servicer_context is not None: + 
servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + + def handle_stream_unary(self, request_iterator, servicer_context): + if servicer_context is not None: + servicer_context.invocation_metadata() + self._control.control() + response_elements = [] + for request in request_iterator: + self._control.control() + response_elements.append(request) + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + if _EXCEPTION_REQUEST in response_elements: + raise _ApplicationErrorStandin() + return b''.join(response_elements) + + def handle_stream_stream(self, request_iterator, servicer_context): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + for request in request_iterator: + if request == _EXCEPTION_REQUEST: + raise _ApplicationErrorStandin() + self._control.control() + yield request + self._control.control() + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming, + request_deserializer, response_serializer, unary_unary, + unary_stream, stream_unary, stream_stream): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = request_deserializer + self.response_serializer = response_serializer + self.unary_unary = unary_unary + self.unary_stream = unary_stream + self.stream_unary = stream_unary + self.stream_stream = stream_stream + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, handler): + self._handler = handler + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(False, False, None, None, + self._handler.handle_unary_unary, None, None, + None) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(False, True, _DESERIALIZE_REQUEST, + _SERIALIZE_RESPONSE, None, + self._handler.handle_unary_stream, None, None) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(True, False, _DESERIALIZE_REQUEST, + _SERIALIZE_RESPONSE, None, None, + self._handler.handle_stream_unary, None) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(True, True, None, None, None, None, None, + self._handler.handle_stream_stream) + else: + return None + + +def _unary_unary_multi_callable(channel): + return channel.unary_unary(_UNARY_UNARY) + + +def _unary_stream_multi_callable(channel): return channel.unary_stream(_UNARY_STREAM, request_serializer=_SERIALIZE_REQUEST, response_deserializer=_DESERIALIZE_RESPONSE) - - -def _stream_unary_multi_callable(channel): + + +def _stream_unary_multi_callable(channel): return channel.stream_unary(_STREAM_UNARY, request_serializer=_SERIALIZE_REQUEST, response_deserializer=_DESERIALIZE_RESPONSE) - - -def _stream_stream_multi_callable(channel): - return channel.stream_stream(_STREAM_STREAM) - - -class _ClientCallDetails( - collections.namedtuple( - '_ClientCallDetails', - ('method', 'timeout', 'metadata', 'credentials')), - grpc.ClientCallDetails): - pass - - + + +def _stream_stream_multi_callable(channel): + return channel.stream_stream(_STREAM_STREAM) + + +class _ClientCallDetails( + collections.namedtuple( + '_ClientCallDetails', + ('method', 'timeout', 'metadata', 'credentials')), + grpc.ClientCallDetails): + pass + + class _GenericClientInterceptor(grpc.UnaryUnaryClientInterceptor, 
grpc.UnaryStreamClientInterceptor, grpc.StreamUnaryClientInterceptor, grpc.StreamStreamClientInterceptor): - - def __init__(self, interceptor_function): - self._fn = interceptor_function - - def intercept_unary_unary(self, continuation, client_call_details, request): - new_details, new_request_iterator, postprocess = self._fn( - client_call_details, iter((request,)), False, False) - response = continuation(new_details, next(new_request_iterator)) - return postprocess(response) if postprocess else response - - def intercept_unary_stream(self, continuation, client_call_details, - request): - new_details, new_request_iterator, postprocess = self._fn( - client_call_details, iter((request,)), False, True) - response_it = continuation(new_details, new_request_iterator) - return postprocess(response_it) if postprocess else response_it - - def intercept_stream_unary(self, continuation, client_call_details, - request_iterator): - new_details, new_request_iterator, postprocess = self._fn( - client_call_details, request_iterator, True, False) - response = continuation(new_details, next(new_request_iterator)) - return postprocess(response) if postprocess else response - - def intercept_stream_stream(self, continuation, client_call_details, - request_iterator): - new_details, new_request_iterator, postprocess = self._fn( - client_call_details, request_iterator, True, True) - response_it = continuation(new_details, new_request_iterator) - return postprocess(response_it) if postprocess else response_it - - + + def __init__(self, interceptor_function): + self._fn = interceptor_function + + def intercept_unary_unary(self, continuation, client_call_details, request): + new_details, new_request_iterator, postprocess = self._fn( + client_call_details, iter((request,)), False, False) + response = continuation(new_details, next(new_request_iterator)) + return postprocess(response) if postprocess else response + + def intercept_unary_stream(self, continuation, client_call_details, + request): + new_details, new_request_iterator, postprocess = self._fn( + client_call_details, iter((request,)), False, True) + response_it = continuation(new_details, new_request_iterator) + return postprocess(response_it) if postprocess else response_it + + def intercept_stream_unary(self, continuation, client_call_details, + request_iterator): + new_details, new_request_iterator, postprocess = self._fn( + client_call_details, request_iterator, True, False) + response = continuation(new_details, next(new_request_iterator)) + return postprocess(response) if postprocess else response + + def intercept_stream_stream(self, continuation, client_call_details, + request_iterator): + new_details, new_request_iterator, postprocess = self._fn( + client_call_details, request_iterator, True, True) + response_it = continuation(new_details, new_request_iterator) + return postprocess(response_it) if postprocess else response_it + + class _LoggingInterceptor(grpc.ServerInterceptor, grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor, grpc.StreamUnaryClientInterceptor, grpc.StreamStreamClientInterceptor): - - def __init__(self, tag, record): - self.tag = tag - self.record = record - - def intercept_service(self, continuation, handler_call_details): - self.record.append(self.tag + ':intercept_service') - return continuation(handler_call_details) - - def intercept_unary_unary(self, continuation, client_call_details, request): - self.record.append(self.tag + ':intercept_unary_unary') - result = continuation(client_call_details, request) 
- assert isinstance( - result, - grpc.Call), '{} ({}) is not an instance of grpc.Call'.format( - result, type(result)) - assert isinstance( - result, - grpc.Future), '{} ({}) is not an instance of grpc.Future'.format( - result, type(result)) - return result - - def intercept_unary_stream(self, continuation, client_call_details, - request): - self.record.append(self.tag + ':intercept_unary_stream') - return continuation(client_call_details, request) - - def intercept_stream_unary(self, continuation, client_call_details, - request_iterator): - self.record.append(self.tag + ':intercept_stream_unary') - result = continuation(client_call_details, request_iterator) - assert isinstance( - result, - grpc.Call), '{} is not an instance of grpc.Call'.format(result) - assert isinstance( - result, - grpc.Future), '{} is not an instance of grpc.Future'.format(result) - return result - - def intercept_stream_stream(self, continuation, client_call_details, - request_iterator): - self.record.append(self.tag + ':intercept_stream_stream') - return continuation(client_call_details, request_iterator) - - -class _DefectiveClientInterceptor(grpc.UnaryUnaryClientInterceptor): - - def intercept_unary_unary(self, ignored_continuation, - ignored_client_call_details, ignored_request): - raise test_control.Defect() - - -def _wrap_request_iterator_stream_interceptor(wrapper): - - def intercept_call(client_call_details, request_iterator, request_streaming, - ignored_response_streaming): - if request_streaming: - return client_call_details, wrapper(request_iterator), None - else: - return client_call_details, request_iterator, None - - return _GenericClientInterceptor(intercept_call) - - -def _append_request_header_interceptor(header, value): - - def intercept_call(client_call_details, request_iterator, - ignored_request_streaming, ignored_response_streaming): - metadata = [] - if client_call_details.metadata: - metadata = list(client_call_details.metadata) - metadata.append(( - header, - value, - )) - client_call_details = _ClientCallDetails( - client_call_details.method, client_call_details.timeout, metadata, - client_call_details.credentials) - return client_call_details, request_iterator, None - - return _GenericClientInterceptor(intercept_call) - - -class _GenericServerInterceptor(grpc.ServerInterceptor): - - def __init__(self, fn): - self._fn = fn - - def intercept_service(self, continuation, handler_call_details): - return self._fn(continuation, handler_call_details) - - -def _filter_server_interceptor(condition, interceptor): - - def intercept_service(continuation, handler_call_details): - if condition(handler_call_details): - return interceptor.intercept_service(continuation, - handler_call_details) - return continuation(handler_call_details) - - return _GenericServerInterceptor(intercept_service) - - -class InterceptorTest(unittest.TestCase): - - def setUp(self): - self._control = test_control.PauseFailControl() - self._handler = _Handler(self._control) - self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) - - self._record = [] - conditional_interceptor = _filter_server_interceptor( - lambda x: ('secret', '42') in x.invocation_metadata, - _LoggingInterceptor('s3', self._record)) - + + def __init__(self, tag, record): + self.tag = tag + self.record = record + + def intercept_service(self, continuation, handler_call_details): + self.record.append(self.tag + ':intercept_service') + return continuation(handler_call_details) + + def intercept_unary_unary(self, continuation, client_call_details, 
request): + self.record.append(self.tag + ':intercept_unary_unary') + result = continuation(client_call_details, request) + assert isinstance( + result, + grpc.Call), '{} ({}) is not an instance of grpc.Call'.format( + result, type(result)) + assert isinstance( + result, + grpc.Future), '{} ({}) is not an instance of grpc.Future'.format( + result, type(result)) + return result + + def intercept_unary_stream(self, continuation, client_call_details, + request): + self.record.append(self.tag + ':intercept_unary_stream') + return continuation(client_call_details, request) + + def intercept_stream_unary(self, continuation, client_call_details, + request_iterator): + self.record.append(self.tag + ':intercept_stream_unary') + result = continuation(client_call_details, request_iterator) + assert isinstance( + result, + grpc.Call), '{} is not an instance of grpc.Call'.format(result) + assert isinstance( + result, + grpc.Future), '{} is not an instance of grpc.Future'.format(result) + return result + + def intercept_stream_stream(self, continuation, client_call_details, + request_iterator): + self.record.append(self.tag + ':intercept_stream_stream') + return continuation(client_call_details, request_iterator) + + +class _DefectiveClientInterceptor(grpc.UnaryUnaryClientInterceptor): + + def intercept_unary_unary(self, ignored_continuation, + ignored_client_call_details, ignored_request): + raise test_control.Defect() + + +def _wrap_request_iterator_stream_interceptor(wrapper): + + def intercept_call(client_call_details, request_iterator, request_streaming, + ignored_response_streaming): + if request_streaming: + return client_call_details, wrapper(request_iterator), None + else: + return client_call_details, request_iterator, None + + return _GenericClientInterceptor(intercept_call) + + +def _append_request_header_interceptor(header, value): + + def intercept_call(client_call_details, request_iterator, + ignored_request_streaming, ignored_response_streaming): + metadata = [] + if client_call_details.metadata: + metadata = list(client_call_details.metadata) + metadata.append(( + header, + value, + )) + client_call_details = _ClientCallDetails( + client_call_details.method, client_call_details.timeout, metadata, + client_call_details.credentials) + return client_call_details, request_iterator, None + + return _GenericClientInterceptor(intercept_call) + + +class _GenericServerInterceptor(grpc.ServerInterceptor): + + def __init__(self, fn): + self._fn = fn + + def intercept_service(self, continuation, handler_call_details): + return self._fn(continuation, handler_call_details) + + +def _filter_server_interceptor(condition, interceptor): + + def intercept_service(continuation, handler_call_details): + if condition(handler_call_details): + return interceptor.intercept_service(continuation, + handler_call_details) + return continuation(handler_call_details) + + return _GenericServerInterceptor(intercept_service) + + +class InterceptorTest(unittest.TestCase): + + def setUp(self): + self._control = test_control.PauseFailControl() + self._handler = _Handler(self._control) + self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) + + self._record = [] + conditional_interceptor = _filter_server_interceptor( + lambda x: ('secret', '42') in x.invocation_metadata, + _LoggingInterceptor('s3', self._record)) + self._server = grpc.server(self._server_pool, options=(('grpc.so_reuseport', 0),), interceptors=( @@ -358,351 +358,351 @@ class InterceptorTest(unittest.TestCase): conditional_interceptor, 
_LoggingInterceptor('s2', self._record), )) - port = self._server.add_insecure_port('[::]:0') - self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),)) - self._server.start() - - self._channel = grpc.insecure_channel('localhost:%d' % port) - - def tearDown(self): - self._server.stop(None) - self._server_pool.shutdown(wait=True) - self._channel.close() - - def testTripleRequestMessagesClientInterceptor(self): - - def triple(request_iterator): - while True: - try: - item = next(request_iterator) - yield item - yield item - yield item - except StopIteration: - break - - interceptor = _wrap_request_iterator_stream_interceptor(triple) - channel = grpc.intercept_channel(self._channel, interceptor) - requests = tuple( - b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) - - multi_callable = _stream_stream_multi_callable(channel) - response_iterator = multi_callable( - iter(requests), - metadata=( - ('test', - 'InterceptedStreamRequestBlockingUnaryResponseWithCall'),)) - - responses = tuple(response_iterator) - self.assertEqual(len(responses), 3 * test_constants.STREAM_LENGTH) - - multi_callable = _stream_stream_multi_callable(self._channel) - response_iterator = multi_callable( - iter(requests), - metadata=( - ('test', - 'InterceptedStreamRequestBlockingUnaryResponseWithCall'),)) - - responses = tuple(response_iterator) - self.assertEqual(len(responses), test_constants.STREAM_LENGTH) - - def testDefectiveClientInterceptor(self): - interceptor = _DefectiveClientInterceptor() - defective_channel = grpc.intercept_channel(self._channel, interceptor) - - request = b'\x07\x08' - - multi_callable = _unary_unary_multi_callable(defective_channel) - call_future = multi_callable.future( - request, - metadata=(('test', - 'InterceptedUnaryRequestBlockingUnaryResponse'),)) - - self.assertIsNotNone(call_future.exception()) - self.assertEqual(call_future.code(), grpc.StatusCode.INTERNAL) - - def testInterceptedHeaderManipulationWithServerSideVerification(self): - request = b'\x07\x08' - + port = self._server.add_insecure_port('[::]:0') + self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),)) + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(None) + self._server_pool.shutdown(wait=True) + self._channel.close() + + def testTripleRequestMessagesClientInterceptor(self): + + def triple(request_iterator): + while True: + try: + item = next(request_iterator) + yield item + yield item + yield item + except StopIteration: + break + + interceptor = _wrap_request_iterator_stream_interceptor(triple) + channel = grpc.intercept_channel(self._channel, interceptor) + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + + multi_callable = _stream_stream_multi_callable(channel) + response_iterator = multi_callable( + iter(requests), + metadata=( + ('test', + 'InterceptedStreamRequestBlockingUnaryResponseWithCall'),)) + + responses = tuple(response_iterator) + self.assertEqual(len(responses), 3 * test_constants.STREAM_LENGTH) + + multi_callable = _stream_stream_multi_callable(self._channel) + response_iterator = multi_callable( + iter(requests), + metadata=( + ('test', + 'InterceptedStreamRequestBlockingUnaryResponseWithCall'),)) + + responses = tuple(response_iterator) + self.assertEqual(len(responses), test_constants.STREAM_LENGTH) + + def testDefectiveClientInterceptor(self): + interceptor = _DefectiveClientInterceptor() + defective_channel = grpc.intercept_channel(self._channel, 
interceptor) + + request = b'\x07\x08' + + multi_callable = _unary_unary_multi_callable(defective_channel) + call_future = multi_callable.future( + request, + metadata=(('test', + 'InterceptedUnaryRequestBlockingUnaryResponse'),)) + + self.assertIsNotNone(call_future.exception()) + self.assertEqual(call_future.code(), grpc.StatusCode.INTERNAL) + + def testInterceptedHeaderManipulationWithServerSideVerification(self): + request = b'\x07\x08' + channel = grpc.intercept_channel( self._channel, _append_request_header_interceptor('secret', '42')) channel = grpc.intercept_channel( channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - self._record[:] = [] - - multi_callable = _unary_unary_multi_callable(channel) - multi_callable.with_call( - request, - metadata=( - ('test', - 'InterceptedUnaryRequestBlockingUnaryResponseWithCall'),)) - - self.assertSequenceEqual(self._record, [ - 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', - 's1:intercept_service', 's3:intercept_service', - 's2:intercept_service' - ]) - - def testInterceptedUnaryRequestBlockingUnaryResponse(self): - request = b'\x07\x08' - - self._record[:] = [] - + + self._record[:] = [] + + multi_callable = _unary_unary_multi_callable(channel) + multi_callable.with_call( + request, + metadata=( + ('test', + 'InterceptedUnaryRequestBlockingUnaryResponseWithCall'),)) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', + 's1:intercept_service', 's3:intercept_service', + 's2:intercept_service' + ]) + + def testInterceptedUnaryRequestBlockingUnaryResponse(self): + request = b'\x07\x08' + + self._record[:] = [] + channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _unary_unary_multi_callable(channel) - multi_callable( - request, - metadata=(('test', - 'InterceptedUnaryRequestBlockingUnaryResponse'),)) - - self.assertSequenceEqual(self._record, [ - 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', - 's1:intercept_service', 's2:intercept_service' - ]) - - def testInterceptedUnaryRequestBlockingUnaryResponseWithError(self): - request = _EXCEPTION_REQUEST - - self._record[:] = [] - + + multi_callable = _unary_unary_multi_callable(channel) + multi_callable( + request, + metadata=(('test', + 'InterceptedUnaryRequestBlockingUnaryResponse'),)) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedUnaryRequestBlockingUnaryResponseWithError(self): + request = _EXCEPTION_REQUEST + + self._record[:] = [] + channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _unary_unary_multi_callable(channel) - with self.assertRaises(grpc.RpcError) as exception_context: - multi_callable( - request, - metadata=(('test', - 'InterceptedUnaryRequestBlockingUnaryResponse'),)) - exception = exception_context.exception - self.assertFalse(exception.cancelled()) - self.assertFalse(exception.running()) - self.assertTrue(exception.done()) - with self.assertRaises(grpc.RpcError): - exception.result() - self.assertIsInstance(exception.exception(), grpc.RpcError) - - def testInterceptedUnaryRequestBlockingUnaryResponseWithCall(self): - request = b'\x07\x08' - + + multi_callable = _unary_unary_multi_callable(channel) + with self.assertRaises(grpc.RpcError) as 
exception_context: + multi_callable( + request, + metadata=(('test', + 'InterceptedUnaryRequestBlockingUnaryResponse'),)) + exception = exception_context.exception + self.assertFalse(exception.cancelled()) + self.assertFalse(exception.running()) + self.assertTrue(exception.done()) + with self.assertRaises(grpc.RpcError): + exception.result() + self.assertIsInstance(exception.exception(), grpc.RpcError) + + def testInterceptedUnaryRequestBlockingUnaryResponseWithCall(self): + request = b'\x07\x08' + channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - self._record[:] = [] - - multi_callable = _unary_unary_multi_callable(channel) - multi_callable.with_call( - request, - metadata=( - ('test', - 'InterceptedUnaryRequestBlockingUnaryResponseWithCall'),)) - - self.assertSequenceEqual(self._record, [ - 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', - 's1:intercept_service', 's2:intercept_service' - ]) - - def testInterceptedUnaryRequestFutureUnaryResponse(self): - request = b'\x07\x08' - - self._record[:] = [] + + self._record[:] = [] + + multi_callable = _unary_unary_multi_callable(channel) + multi_callable.with_call( + request, + metadata=( + ('test', + 'InterceptedUnaryRequestBlockingUnaryResponseWithCall'),)) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedUnaryRequestFutureUnaryResponse(self): + request = b'\x07\x08' + + self._record[:] = [] channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _unary_unary_multi_callable(channel) - response_future = multi_callable.future( - request, - metadata=(('test', 'InterceptedUnaryRequestFutureUnaryResponse'),)) - response_future.result() - - self.assertSequenceEqual(self._record, [ - 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', - 's1:intercept_service', 's2:intercept_service' - ]) - - def testInterceptedUnaryRequestStreamResponse(self): - request = b'\x37\x58' - - self._record[:] = [] + + multi_callable = _unary_unary_multi_callable(channel) + response_future = multi_callable.future( + request, + metadata=(('test', 'InterceptedUnaryRequestFutureUnaryResponse'),)) + response_future.result() + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedUnaryRequestStreamResponse(self): + request = b'\x37\x58' + + self._record[:] = [] channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _unary_stream_multi_callable(channel) - response_iterator = multi_callable( - request, - metadata=(('test', 'InterceptedUnaryRequestStreamResponse'),)) - tuple(response_iterator) - - self.assertSequenceEqual(self._record, [ - 'c1:intercept_unary_stream', 'c2:intercept_unary_stream', - 's1:intercept_service', 's2:intercept_service' - ]) - - def testInterceptedUnaryRequestStreamResponseWithError(self): - request = _EXCEPTION_REQUEST - - self._record[:] = [] + + multi_callable = _unary_stream_multi_callable(channel) + response_iterator = multi_callable( + request, + metadata=(('test', 'InterceptedUnaryRequestStreamResponse'),)) + tuple(response_iterator) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_unary_stream', 
'c2:intercept_unary_stream', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedUnaryRequestStreamResponseWithError(self): + request = _EXCEPTION_REQUEST + + self._record[:] = [] channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _unary_stream_multi_callable(channel) - response_iterator = multi_callable( - request, - metadata=(('test', 'InterceptedUnaryRequestStreamResponse'),)) - with self.assertRaises(grpc.RpcError) as exception_context: - tuple(response_iterator) - exception = exception_context.exception - self.assertFalse(exception.cancelled()) - self.assertFalse(exception.running()) - self.assertTrue(exception.done()) - with self.assertRaises(grpc.RpcError): - exception.result() - self.assertIsInstance(exception.exception(), grpc.RpcError) - - def testInterceptedStreamRequestBlockingUnaryResponse(self): - requests = tuple( - b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) - request_iterator = iter(requests) - - self._record[:] = [] + + multi_callable = _unary_stream_multi_callable(channel) + response_iterator = multi_callable( + request, + metadata=(('test', 'InterceptedUnaryRequestStreamResponse'),)) + with self.assertRaises(grpc.RpcError) as exception_context: + tuple(response_iterator) + exception = exception_context.exception + self.assertFalse(exception.cancelled()) + self.assertFalse(exception.running()) + self.assertTrue(exception.done()) + with self.assertRaises(grpc.RpcError): + exception.result() + self.assertIsInstance(exception.exception(), grpc.RpcError) + + def testInterceptedStreamRequestBlockingUnaryResponse(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _stream_unary_multi_callable(channel) - multi_callable( - request_iterator, - metadata=(('test', - 'InterceptedStreamRequestBlockingUnaryResponse'),)) - - self.assertSequenceEqual(self._record, [ - 'c1:intercept_stream_unary', 'c2:intercept_stream_unary', - 's1:intercept_service', 's2:intercept_service' - ]) - - def testInterceptedStreamRequestBlockingUnaryResponseWithCall(self): - requests = tuple( - b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) - request_iterator = iter(requests) - - self._record[:] = [] + + multi_callable = _stream_unary_multi_callable(channel) + multi_callable( + request_iterator, + metadata=(('test', + 'InterceptedStreamRequestBlockingUnaryResponse'),)) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_stream_unary', 'c2:intercept_stream_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedStreamRequestBlockingUnaryResponseWithCall(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _stream_unary_multi_callable(channel) - multi_callable.with_call( - request_iterator, - metadata=( - ('test', - 'InterceptedStreamRequestBlockingUnaryResponseWithCall'),)) - - self.assertSequenceEqual(self._record, [ - 'c1:intercept_stream_unary', 'c2:intercept_stream_unary', - 's1:intercept_service', 's2:intercept_service' - ]) - - def 
testInterceptedStreamRequestFutureUnaryResponse(self): - requests = tuple( - b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) - request_iterator = iter(requests) - - self._record[:] = [] + + multi_callable = _stream_unary_multi_callable(channel) + multi_callable.with_call( + request_iterator, + metadata=( + ('test', + 'InterceptedStreamRequestBlockingUnaryResponseWithCall'),)) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_stream_unary', 'c2:intercept_stream_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedStreamRequestFutureUnaryResponse(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _stream_unary_multi_callable(channel) - response_future = multi_callable.future( - request_iterator, - metadata=(('test', 'InterceptedStreamRequestFutureUnaryResponse'),)) - response_future.result() - - self.assertSequenceEqual(self._record, [ - 'c1:intercept_stream_unary', 'c2:intercept_stream_unary', - 's1:intercept_service', 's2:intercept_service' - ]) - - def testInterceptedStreamRequestFutureUnaryResponseWithError(self): - requests = tuple( - _EXCEPTION_REQUEST for _ in range(test_constants.STREAM_LENGTH)) - request_iterator = iter(requests) - - self._record[:] = [] + + multi_callable = _stream_unary_multi_callable(channel) + response_future = multi_callable.future( + request_iterator, + metadata=(('test', 'InterceptedStreamRequestFutureUnaryResponse'),)) + response_future.result() + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_stream_unary', 'c2:intercept_stream_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedStreamRequestFutureUnaryResponseWithError(self): + requests = tuple( + _EXCEPTION_REQUEST for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _stream_unary_multi_callable(channel) - response_future = multi_callable.future( - request_iterator, - metadata=(('test', 'InterceptedStreamRequestFutureUnaryResponse'),)) - with self.assertRaises(grpc.RpcError) as exception_context: - response_future.result() - exception = exception_context.exception - self.assertFalse(exception.cancelled()) - self.assertFalse(exception.running()) - self.assertTrue(exception.done()) - with self.assertRaises(grpc.RpcError): - exception.result() - self.assertIsInstance(exception.exception(), grpc.RpcError) - - def testInterceptedStreamRequestStreamResponse(self): - requests = tuple( - b'\x77\x58' for _ in range(test_constants.STREAM_LENGTH)) - request_iterator = iter(requests) - - self._record[:] = [] + + multi_callable = _stream_unary_multi_callable(channel) + response_future = multi_callable.future( + request_iterator, + metadata=(('test', 'InterceptedStreamRequestFutureUnaryResponse'),)) + with self.assertRaises(grpc.RpcError) as exception_context: + response_future.result() + exception = exception_context.exception + self.assertFalse(exception.cancelled()) + self.assertFalse(exception.running()) + self.assertTrue(exception.done()) + with self.assertRaises(grpc.RpcError): + exception.result() + self.assertIsInstance(exception.exception(), grpc.RpcError) + + def 
testInterceptedStreamRequestStreamResponse(self): + requests = tuple( + b'\x77\x58' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _stream_stream_multi_callable(channel) - response_iterator = multi_callable( - request_iterator, - metadata=(('test', 'InterceptedStreamRequestStreamResponse'),)) - tuple(response_iterator) - - self.assertSequenceEqual(self._record, [ - 'c1:intercept_stream_stream', 'c2:intercept_stream_stream', - 's1:intercept_service', 's2:intercept_service' - ]) - - def testInterceptedStreamRequestStreamResponseWithError(self): - requests = tuple( - _EXCEPTION_REQUEST for _ in range(test_constants.STREAM_LENGTH)) - request_iterator = iter(requests) - - self._record[:] = [] + + multi_callable = _stream_stream_multi_callable(channel) + response_iterator = multi_callable( + request_iterator, + metadata=(('test', 'InterceptedStreamRequestStreamResponse'),)) + tuple(response_iterator) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_stream_stream', 'c2:intercept_stream_stream', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedStreamRequestStreamResponseWithError(self): + requests = tuple( + _EXCEPTION_REQUEST for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] channel = grpc.intercept_channel( self._channel, _LoggingInterceptor('c1', self._record), _LoggingInterceptor('c2', self._record)) - - multi_callable = _stream_stream_multi_callable(channel) - response_iterator = multi_callable( - request_iterator, - metadata=(('test', 'InterceptedStreamRequestStreamResponse'),)) - with self.assertRaises(grpc.RpcError) as exception_context: - tuple(response_iterator) - exception = exception_context.exception - self.assertFalse(exception.cancelled()) - self.assertFalse(exception.running()) - self.assertTrue(exception.done()) - with self.assertRaises(grpc.RpcError): - exception.result() - self.assertIsInstance(exception.exception(), grpc.RpcError) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + + multi_callable = _stream_stream_multi_callable(channel) + response_iterator = multi_callable( + request_iterator, + metadata=(('test', 'InterceptedStreamRequestStreamResponse'),)) + with self.assertRaises(grpc.RpcError) as exception_context: + tuple(response_iterator) + exception = exception_context.exception + self.assertFalse(exception.cancelled()) + self.assertFalse(exception.running()) + self.assertTrue(exception.done()) + with self.assertRaises(grpc.RpcError): + exception.result() + self.assertIsInstance(exception.exception(), grpc.RpcError) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py index 70fbe81043..d1f1499d8c 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py @@ -1,140 +1,140 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
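Editorial aside (not part of the diff): the interceptor tests above exercise grpc.intercept_channel with per-RPC client interceptors. A minimal, hypothetical unary-unary client interceptor in the same spirit as the _LoggingInterceptor those tests record with (the class name and the record list are illustrative assumptions, not taken from the commit):

import grpc


class LoggingUnaryInterceptor(grpc.UnaryUnaryClientInterceptor):
    """Records the invoked method name, then delegates to the continuation."""

    def __init__(self, record):
        self._record = record

    def intercept_unary_unary(self, continuation, client_call_details, request):
        self._record.append(client_call_details.method)
        # The continuation performs (or further intercepts) the actual RPC.
        return continuation(client_call_details, request)


# Typical wiring, mirroring the tests above:
#   record = []
#   channel = grpc.intercept_channel(channel, LoggingUnaryInterceptor(record))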
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test of RPCs made against gRPC Python's application-layer API.""" - -import unittest -import logging - -import grpc - -from tests.unit.framework.common import test_constants - -_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2 -_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:] -_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3 -_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3] - -_UNARY_UNARY = '/test/UnaryUnary' -_UNARY_STREAM = '/test/UnaryStream' -_STREAM_UNARY = '/test/StreamUnary' -_STREAM_STREAM = '/test/StreamStream' - - -def _unary_unary_multi_callable(channel): - return channel.unary_unary(_UNARY_UNARY) - - -def _unary_stream_multi_callable(channel): +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test of RPCs made against gRPC Python's application-layer API.""" + +import unittest +import logging + +import grpc + +from tests.unit.framework.common import test_constants + +_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2 +_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:] +_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3 +_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3] + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + + +def _unary_unary_multi_callable(channel): + return channel.unary_unary(_UNARY_UNARY) + + +def _unary_stream_multi_callable(channel): return channel.unary_stream(_UNARY_STREAM, request_serializer=_SERIALIZE_REQUEST, response_deserializer=_DESERIALIZE_RESPONSE) - - -def _stream_unary_multi_callable(channel): + + +def _stream_unary_multi_callable(channel): return channel.stream_unary(_STREAM_UNARY, request_serializer=_SERIALIZE_REQUEST, response_deserializer=_DESERIALIZE_RESPONSE) - - -def _stream_stream_multi_callable(channel): - return channel.stream_stream(_STREAM_STREAM) - - -class InvalidMetadataTest(unittest.TestCase): - - def setUp(self): - self._channel = grpc.insecure_channel('localhost:8080') - self._unary_unary = _unary_unary_multi_callable(self._channel) - self._unary_stream = _unary_stream_multi_callable(self._channel) - self._stream_unary = _stream_unary_multi_callable(self._channel) - self._stream_stream = _stream_stream_multi_callable(self._channel) - - def tearDown(self): - self._channel.close() - - def testUnaryRequestBlockingUnaryResponse(self): - request = b'\x07\x08' - metadata = (('InVaLiD', 'UnaryRequestBlockingUnaryResponse'),) - expected_error_details = "metadata was 
invalid: %s" % metadata - with self.assertRaises(ValueError) as exception_context: - self._unary_unary(request, metadata=metadata) - self.assertIn(expected_error_details, str(exception_context.exception)) - - def testUnaryRequestBlockingUnaryResponseWithCall(self): - request = b'\x07\x08' - metadata = (('InVaLiD', 'UnaryRequestBlockingUnaryResponseWithCall'),) - expected_error_details = "metadata was invalid: %s" % metadata - with self.assertRaises(ValueError) as exception_context: - self._unary_unary.with_call(request, metadata=metadata) - self.assertIn(expected_error_details, str(exception_context.exception)) - - def testUnaryRequestFutureUnaryResponse(self): - request = b'\x07\x08' - metadata = (('InVaLiD', 'UnaryRequestFutureUnaryResponse'),) - expected_error_details = "metadata was invalid: %s" % metadata - with self.assertRaises(ValueError) as exception_context: - self._unary_unary.future(request, metadata=metadata) - - def testUnaryRequestStreamResponse(self): - request = b'\x37\x58' - metadata = (('InVaLiD', 'UnaryRequestStreamResponse'),) - expected_error_details = "metadata was invalid: %s" % metadata - with self.assertRaises(ValueError) as exception_context: - self._unary_stream(request, metadata=metadata) - self.assertIn(expected_error_details, str(exception_context.exception)) - - def testStreamRequestBlockingUnaryResponse(self): - request_iterator = ( - b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) - metadata = (('InVaLiD', 'StreamRequestBlockingUnaryResponse'),) - expected_error_details = "metadata was invalid: %s" % metadata - with self.assertRaises(ValueError) as exception_context: - self._stream_unary(request_iterator, metadata=metadata) - self.assertIn(expected_error_details, str(exception_context.exception)) - - def testStreamRequestBlockingUnaryResponseWithCall(self): - request_iterator = ( - b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) - metadata = (('InVaLiD', 'StreamRequestBlockingUnaryResponseWithCall'),) - expected_error_details = "metadata was invalid: %s" % metadata - multi_callable = _stream_unary_multi_callable(self._channel) - with self.assertRaises(ValueError) as exception_context: - multi_callable.with_call(request_iterator, metadata=metadata) - self.assertIn(expected_error_details, str(exception_context.exception)) - - def testStreamRequestFutureUnaryResponse(self): - request_iterator = ( - b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) - metadata = (('InVaLiD', 'StreamRequestFutureUnaryResponse'),) - expected_error_details = "metadata was invalid: %s" % metadata - with self.assertRaises(ValueError) as exception_context: - self._stream_unary.future(request_iterator, metadata=metadata) - self.assertIn(expected_error_details, str(exception_context.exception)) - - def testStreamRequestStreamResponse(self): - request_iterator = ( - b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) - metadata = (('InVaLiD', 'StreamRequestStreamResponse'),) - expected_error_details = "metadata was invalid: %s" % metadata - with self.assertRaises(ValueError) as exception_context: - self._stream_stream(request_iterator, metadata=metadata) - self.assertIn(expected_error_details, str(exception_context.exception)) - - def testInvalidMetadata(self): - self.assertRaises(TypeError, self._unary_unary, b'', metadata=42) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + + +def _stream_stream_multi_callable(channel): + return channel.stream_stream(_STREAM_STREAM) + + +class 
InvalidMetadataTest(unittest.TestCase): + + def setUp(self): + self._channel = grpc.insecure_channel('localhost:8080') + self._unary_unary = _unary_unary_multi_callable(self._channel) + self._unary_stream = _unary_stream_multi_callable(self._channel) + self._stream_unary = _stream_unary_multi_callable(self._channel) + self._stream_stream = _stream_stream_multi_callable(self._channel) + + def tearDown(self): + self._channel.close() + + def testUnaryRequestBlockingUnaryResponse(self): + request = b'\x07\x08' + metadata = (('InVaLiD', 'UnaryRequestBlockingUnaryResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._unary_unary(request, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testUnaryRequestBlockingUnaryResponseWithCall(self): + request = b'\x07\x08' + metadata = (('InVaLiD', 'UnaryRequestBlockingUnaryResponseWithCall'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._unary_unary.with_call(request, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testUnaryRequestFutureUnaryResponse(self): + request = b'\x07\x08' + metadata = (('InVaLiD', 'UnaryRequestFutureUnaryResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._unary_unary.future(request, metadata=metadata) + + def testUnaryRequestStreamResponse(self): + request = b'\x37\x58' + metadata = (('InVaLiD', 'UnaryRequestStreamResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._unary_stream(request, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testStreamRequestBlockingUnaryResponse(self): + request_iterator = ( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + metadata = (('InVaLiD', 'StreamRequestBlockingUnaryResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._stream_unary(request_iterator, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testStreamRequestBlockingUnaryResponseWithCall(self): + request_iterator = ( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + metadata = (('InVaLiD', 'StreamRequestBlockingUnaryResponseWithCall'),) + expected_error_details = "metadata was invalid: %s" % metadata + multi_callable = _stream_unary_multi_callable(self._channel) + with self.assertRaises(ValueError) as exception_context: + multi_callable.with_call(request_iterator, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testStreamRequestFutureUnaryResponse(self): + request_iterator = ( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + metadata = (('InVaLiD', 'StreamRequestFutureUnaryResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._stream_unary.future(request_iterator, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testStreamRequestStreamResponse(self): + request_iterator = ( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + metadata = (('InVaLiD', 
'StreamRequestStreamResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._stream_stream(request_iterator, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testInvalidMetadata(self): + self.assertRaises(TypeError, self._unary_unary, b'', metadata=42) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py index e926a77569..a0208b51df 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py @@ -1,266 +1,266 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest -import logging - -import grpc - -from tests.unit import test_common -from tests.unit.framework.common import test_constants -from tests.unit.framework.common import test_control - -_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2 -_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:] -_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3 -_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3] - -_UNARY_UNARY = '/test/UnaryUnary' -_UNARY_STREAM = '/test/UnaryStream' -_STREAM_UNARY = '/test/StreamUnary' -_STREAM_STREAM = '/test/StreamStream' -_DEFECTIVE_GENERIC_RPC_HANDLER = '/test/DefectiveGenericRpcHandler' - - -class _Handler(object): - - def __init__(self, control): - self._control = control - - def handle_unary_unary(self, request, servicer_context): - self._control.control() - if servicer_context is not None: - servicer_context.set_trailing_metadata((( - 'testkey', - 'testvalue', - ),)) - return request - - def handle_unary_stream(self, request, servicer_context): - for _ in range(test_constants.STREAM_LENGTH): - self._control.control() - yield request - self._control.control() - if servicer_context is not None: - servicer_context.set_trailing_metadata((( - 'testkey', - 'testvalue', - ),)) - - def handle_stream_unary(self, request_iterator, servicer_context): - if servicer_context is not None: - servicer_context.invocation_metadata() - self._control.control() - response_elements = [] - for request in request_iterator: - self._control.control() - response_elements.append(request) - self._control.control() - if servicer_context is not None: - servicer_context.set_trailing_metadata((( - 'testkey', - 'testvalue', - ),)) - return b''.join(response_elements) - - def handle_stream_stream(self, request_iterator, servicer_context): - self._control.control() - if servicer_context is not None: - servicer_context.set_trailing_metadata((( - 'testkey', - 'testvalue', - ),)) - for request in request_iterator: - self._control.control() - yield request - self._control.control() - - def 
defective_generic_rpc_handler(self): - raise test_control.Defect() - - -class _MethodHandler(grpc.RpcMethodHandler): - - def __init__(self, request_streaming, response_streaming, - request_deserializer, response_serializer, unary_unary, - unary_stream, stream_unary, stream_stream): - self.request_streaming = request_streaming - self.response_streaming = response_streaming - self.request_deserializer = request_deserializer - self.response_serializer = response_serializer - self.unary_unary = unary_unary - self.unary_stream = unary_stream - self.stream_unary = stream_unary - self.stream_stream = stream_stream - - -class _GenericHandler(grpc.GenericRpcHandler): - - def __init__(self, handler): - self._handler = handler - - def service(self, handler_call_details): - if handler_call_details.method == _UNARY_UNARY: - return _MethodHandler(False, False, None, None, - self._handler.handle_unary_unary, None, None, - None) - elif handler_call_details.method == _UNARY_STREAM: - return _MethodHandler(False, True, _DESERIALIZE_REQUEST, - _SERIALIZE_RESPONSE, None, - self._handler.handle_unary_stream, None, None) - elif handler_call_details.method == _STREAM_UNARY: - return _MethodHandler(True, False, _DESERIALIZE_REQUEST, - _SERIALIZE_RESPONSE, None, None, - self._handler.handle_stream_unary, None) - elif handler_call_details.method == _STREAM_STREAM: - return _MethodHandler(True, True, None, None, None, None, None, - self._handler.handle_stream_stream) - elif handler_call_details.method == _DEFECTIVE_GENERIC_RPC_HANDLER: - return self._handler.defective_generic_rpc_handler() - else: - return None - - -class FailAfterFewIterationsCounter(object): - - def __init__(self, high, bytestring): - self._current = 0 - self._high = high - self._bytestring = bytestring - - def __iter__(self): - return self - - def __next__(self): - if self._current >= self._high: - raise test_control.Defect() - else: - self._current += 1 - return self._bytestring - - next = __next__ - - -def _unary_unary_multi_callable(channel): - return channel.unary_unary(_UNARY_UNARY) - - -def _unary_stream_multi_callable(channel): +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
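Editorial aside (not part of the diff): the _GenericHandler.service() method restored in this hunk routes each call purely on handler_call_details.method. A minimal, hypothetical generic handler using the same dispatch pattern (the method path and echo behaviour are illustrative assumptions):

import grpc


class EchoGenericHandler(grpc.GenericRpcHandler):
    """Dispatches incoming RPCs by their fully qualified method name."""

    def service(self, handler_call_details):
        if handler_call_details.method == '/test/UnaryUnary':
            # Echo the raw request bytes back to the caller.
            return grpc.unary_unary_rpc_method_handler(
                lambda request, unused_context: request)
        # Returning None makes the server answer the call with UNIMPLEMENTED.
        return None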
+ +import unittest +import logging + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants +from tests.unit.framework.common import test_control + +_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2 +_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:] +_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3 +_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3] + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' +_DEFECTIVE_GENERIC_RPC_HANDLER = '/test/DefectiveGenericRpcHandler' + + +class _Handler(object): + + def __init__(self, control): + self._control = control + + def handle_unary_unary(self, request, servicer_context): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + return request + + def handle_unary_stream(self, request, servicer_context): + for _ in range(test_constants.STREAM_LENGTH): + self._control.control() + yield request + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + + def handle_stream_unary(self, request_iterator, servicer_context): + if servicer_context is not None: + servicer_context.invocation_metadata() + self._control.control() + response_elements = [] + for request in request_iterator: + self._control.control() + response_elements.append(request) + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + return b''.join(response_elements) + + def handle_stream_stream(self, request_iterator, servicer_context): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + for request in request_iterator: + self._control.control() + yield request + self._control.control() + + def defective_generic_rpc_handler(self): + raise test_control.Defect() + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming, + request_deserializer, response_serializer, unary_unary, + unary_stream, stream_unary, stream_stream): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = request_deserializer + self.response_serializer = response_serializer + self.unary_unary = unary_unary + self.unary_stream = unary_stream + self.stream_unary = stream_unary + self.stream_stream = stream_stream + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, handler): + self._handler = handler + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(False, False, None, None, + self._handler.handle_unary_unary, None, None, + None) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(False, True, _DESERIALIZE_REQUEST, + _SERIALIZE_RESPONSE, None, + self._handler.handle_unary_stream, None, None) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(True, False, _DESERIALIZE_REQUEST, + _SERIALIZE_RESPONSE, None, None, + self._handler.handle_stream_unary, None) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(True, True, None, None, None, None, None, + self._handler.handle_stream_stream) + elif 
handler_call_details.method == _DEFECTIVE_GENERIC_RPC_HANDLER: + return self._handler.defective_generic_rpc_handler() + else: + return None + + +class FailAfterFewIterationsCounter(object): + + def __init__(self, high, bytestring): + self._current = 0 + self._high = high + self._bytestring = bytestring + + def __iter__(self): + return self + + def __next__(self): + if self._current >= self._high: + raise test_control.Defect() + else: + self._current += 1 + return self._bytestring + + next = __next__ + + +def _unary_unary_multi_callable(channel): + return channel.unary_unary(_UNARY_UNARY) + + +def _unary_stream_multi_callable(channel): return channel.unary_stream(_UNARY_STREAM, request_serializer=_SERIALIZE_REQUEST, response_deserializer=_DESERIALIZE_RESPONSE) - - -def _stream_unary_multi_callable(channel): + + +def _stream_unary_multi_callable(channel): return channel.stream_unary(_STREAM_UNARY, request_serializer=_SERIALIZE_REQUEST, response_deserializer=_DESERIALIZE_RESPONSE) - - -def _stream_stream_multi_callable(channel): - return channel.stream_stream(_STREAM_STREAM) - - -def _defective_handler_multi_callable(channel): - return channel.unary_unary(_DEFECTIVE_GENERIC_RPC_HANDLER) - - -class InvocationDefectsTest(unittest.TestCase): + + +def _stream_stream_multi_callable(channel): + return channel.stream_stream(_STREAM_STREAM) + + +def _defective_handler_multi_callable(channel): + return channel.unary_unary(_DEFECTIVE_GENERIC_RPC_HANDLER) + + +class InvocationDefectsTest(unittest.TestCase): """Tests the handling of exception-raising user code on the client-side.""" - - def setUp(self): - self._control = test_control.PauseFailControl() - self._handler = _Handler(self._control) - - self._server = test_common.test_server() - port = self._server.add_insecure_port('[::]:0') - self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),)) - self._server.start() - - self._channel = grpc.insecure_channel('localhost:%d' % port) - - def tearDown(self): - self._server.stop(0) - self._channel.close() - - def testIterableStreamRequestBlockingUnaryResponse(self): + + def setUp(self): + self._control = test_control.PauseFailControl() + self._handler = _Handler(self._control) + + self._server = test_common.test_server() + port = self._server.add_insecure_port('[::]:0') + self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),)) + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(0) + self._channel.close() + + def testIterableStreamRequestBlockingUnaryResponse(self): requests = object() - multi_callable = _stream_unary_multi_callable(self._channel) - + multi_callable = _stream_unary_multi_callable(self._channel) + with self.assertRaises(grpc.RpcError) as exception_context: multi_callable( - requests, - metadata=(('test', - 'IterableStreamRequestBlockingUnaryResponse'),)) - + requests, + metadata=(('test', + 'IterableStreamRequestBlockingUnaryResponse'),)) + self.assertIs(grpc.StatusCode.UNKNOWN, exception_context.exception.code()) - def testIterableStreamRequestFutureUnaryResponse(self): + def testIterableStreamRequestFutureUnaryResponse(self): requests = object() - multi_callable = _stream_unary_multi_callable(self._channel) - response_future = multi_callable.future( - requests, - metadata=(('test', 'IterableStreamRequestFutureUnaryResponse'),)) - + multi_callable = _stream_unary_multi_callable(self._channel) + response_future = multi_callable.future( + requests, + metadata=(('test', 
'IterableStreamRequestFutureUnaryResponse'),)) + with self.assertRaises(grpc.RpcError) as exception_context: response_future.result() - + self.assertIs(grpc.StatusCode.UNKNOWN, exception_context.exception.code()) - def testIterableStreamRequestStreamResponse(self): + def testIterableStreamRequestStreamResponse(self): requests = object() - multi_callable = _stream_stream_multi_callable(self._channel) - response_iterator = multi_callable( - requests, - metadata=(('test', 'IterableStreamRequestStreamResponse'),)) - + multi_callable = _stream_stream_multi_callable(self._channel) + response_iterator = multi_callable( + requests, + metadata=(('test', 'IterableStreamRequestStreamResponse'),)) + with self.assertRaises(grpc.RpcError) as exception_context: - next(response_iterator) - + next(response_iterator) + self.assertIs(grpc.StatusCode.UNKNOWN, exception_context.exception.code()) - def testIteratorStreamRequestStreamResponse(self): - requests_iterator = FailAfterFewIterationsCounter( - test_constants.STREAM_LENGTH // 2, b'\x07\x08') - multi_callable = _stream_stream_multi_callable(self._channel) - response_iterator = multi_callable( - requests_iterator, - metadata=(('test', 'IteratorStreamRequestStreamResponse'),)) - + def testIteratorStreamRequestStreamResponse(self): + requests_iterator = FailAfterFewIterationsCounter( + test_constants.STREAM_LENGTH // 2, b'\x07\x08') + multi_callable = _stream_stream_multi_callable(self._channel) + response_iterator = multi_callable( + requests_iterator, + metadata=(('test', 'IteratorStreamRequestStreamResponse'),)) + with self.assertRaises(grpc.RpcError) as exception_context: - for _ in range(test_constants.STREAM_LENGTH // 2 + 1): - next(response_iterator) - + for _ in range(test_constants.STREAM_LENGTH // 2 + 1): + next(response_iterator) + self.assertIs(grpc.StatusCode.UNKNOWN, exception_context.exception.code()) - def testDefectiveGenericRpcHandlerUnaryResponse(self): - request = b'\x07\x08' - multi_callable = _defective_handler_multi_callable(self._channel) - - with self.assertRaises(grpc.RpcError) as exception_context: + def testDefectiveGenericRpcHandlerUnaryResponse(self): + request = b'\x07\x08' + multi_callable = _defective_handler_multi_callable(self._channel) + + with self.assertRaises(grpc.RpcError) as exception_context: multi_callable(request, metadata=(('test', 'DefectiveGenericRpcHandlerUnary'),)) - - self.assertIs(grpc.StatusCode.UNKNOWN, - exception_context.exception.code()) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py index b85353ab49..cd1f71dbee 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py @@ -1,77 +1,77 @@ -# Copyright 2019 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test of RPCs made using local credentials.""" - -import unittest -import os -from concurrent.futures import ThreadPoolExecutor -import grpc - - -class _GenericHandler(grpc.GenericRpcHandler): - - def service(self, handler_call_details): - return grpc.unary_unary_rpc_method_handler( - lambda request, unused_context: request) - - -class LocalCredentialsTest(unittest.TestCase): - - def _create_server(self): - server = grpc.server(ThreadPoolExecutor()) - server.add_generic_rpc_handlers((_GenericHandler(),)) - return server - - @unittest.skipIf(os.name == 'nt', - 'TODO(https://github.com/grpc/grpc/issues/20078)') - def test_local_tcp(self): - server_addr = 'localhost:{}' - channel_creds = grpc.local_channel_credentials( - grpc.LocalConnectionType.LOCAL_TCP) - server_creds = grpc.local_server_credentials( - grpc.LocalConnectionType.LOCAL_TCP) - - server = self._create_server() - port = server.add_secure_port(server_addr.format(0), server_creds) - server.start() - with grpc.secure_channel(server_addr.format(port), - channel_creds) as channel: +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
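Editorial aside (not part of the diff): the _local_credentials_test.py hunk here restores tests for gRPC's local credentials. A self-contained sketch of the same local-TCP handshake, assuming a grpcio release that provides grpc.local_channel_credentials and grpc.local_server_credentials (the echo handler and method path are illustrative):

from concurrent.futures import ThreadPoolExecutor

import grpc


class _EchoHandler(grpc.GenericRpcHandler):

    def service(self, handler_call_details):
        return grpc.unary_unary_rpc_method_handler(
            lambda request, unused_context: request)


def main():
    server_creds = grpc.local_server_credentials(
        grpc.LocalConnectionType.LOCAL_TCP)
    channel_creds = grpc.local_channel_credentials(
        grpc.LocalConnectionType.LOCAL_TCP)

    server = grpc.server(ThreadPoolExecutor())
    server.add_generic_rpc_handlers((_EchoHandler(),))
    port = server.add_secure_port('localhost:0', server_creds)
    server.start()

    with grpc.secure_channel('localhost:%d' % port, channel_creds) as channel:
        reply = channel.unary_unary('/test/Echo')(b'abc', wait_for_ready=True)
        assert reply == b'abc'
    server.stop(None)


if __name__ == '__main__':
    main()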
+"""Test of RPCs made using local credentials.""" + +import unittest +import os +from concurrent.futures import ThreadPoolExecutor +import grpc + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + return grpc.unary_unary_rpc_method_handler( + lambda request, unused_context: request) + + +class LocalCredentialsTest(unittest.TestCase): + + def _create_server(self): + server = grpc.server(ThreadPoolExecutor()) + server.add_generic_rpc_handlers((_GenericHandler(),)) + return server + + @unittest.skipIf(os.name == 'nt', + 'TODO(https://github.com/grpc/grpc/issues/20078)') + def test_local_tcp(self): + server_addr = 'localhost:{}' + channel_creds = grpc.local_channel_credentials( + grpc.LocalConnectionType.LOCAL_TCP) + server_creds = grpc.local_server_credentials( + grpc.LocalConnectionType.LOCAL_TCP) + + server = self._create_server() + port = server.add_secure_port(server_addr.format(0), server_creds) + server.start() + with grpc.secure_channel(server_addr.format(port), + channel_creds) as channel: self.assertEqual( b'abc', channel.unary_unary('/test/method')(b'abc', wait_for_ready=True)) - server.stop(None) - - @unittest.skipIf(os.name == 'nt', - 'Unix Domain Socket is not supported on Windows') - def test_uds(self): - server_addr = 'unix:/tmp/grpc_fullstack_test' - channel_creds = grpc.local_channel_credentials( - grpc.LocalConnectionType.UDS) - server_creds = grpc.local_server_credentials( - grpc.LocalConnectionType.UDS) - - server = self._create_server() - server.add_secure_port(server_addr, server_creds) - server.start() - with grpc.secure_channel(server_addr, channel_creds) as channel: + server.stop(None) + + @unittest.skipIf(os.name == 'nt', + 'Unix Domain Socket is not supported on Windows') + def test_uds(self): + server_addr = 'unix:/tmp/grpc_fullstack_test' + channel_creds = grpc.local_channel_credentials( + grpc.LocalConnectionType.UDS) + server_creds = grpc.local_server_credentials( + grpc.LocalConnectionType.UDS) + + server = self._create_server() + server.add_secure_port(server_addr, server_creds) + server.start() + with grpc.secure_channel(server_addr, channel_creds) as channel: self.assertEqual( b'abc', channel.unary_unary('/test/method')(b'abc', wait_for_ready=True)) - server.stop(None) - - -if __name__ == '__main__': - unittest.main() + server.stop(None) + + +if __name__ == '__main__': + unittest.main() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py index 8b83e2d784..1304bb5587 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py @@ -1,103 +1,103 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test of gRPC Python's interaction with the python logging module""" - -import unittest -import logging -import grpc -import os -import subprocess -import sys - -INTERPRETER = sys.executable - - -class LoggingTest(unittest.TestCase): - - def test_logger_not_occupied(self): - script = """if True: - import logging - - import grpc - - if len(logging.getLogger().handlers) != 0: - raise Exception('expected 0 logging handlers') - - """ - self._verifyScriptSucceeds(script) - - def test_handler_found(self): - script = """if True: - import logging - - import grpc - """ - out, err = self._verifyScriptSucceeds(script) - self.assertEqual(0, len(err), 'unexpected output to stderr') - - def test_can_configure_logger(self): - script = """if True: - import logging - import six - - import grpc - - - intended_stream = six.StringIO() - logging.basicConfig(stream=intended_stream) - - if len(logging.getLogger().handlers) != 1: - raise Exception('expected 1 logging handler') - - if logging.getLogger().handlers[0].stream is not intended_stream: - raise Exception('wrong handler stream') - - """ - self._verifyScriptSucceeds(script) - - def test_grpc_logger(self): - script = """if True: - import logging - - import grpc - - if "grpc" not in logging.Logger.manager.loggerDict: - raise Exception('grpc logger not found') - - root_logger = logging.getLogger("grpc") - if len(root_logger.handlers) != 1: - raise Exception('expected 1 root logger handler') - if not isinstance(root_logger.handlers[0], logging.NullHandler): - raise Exception('expected logging.NullHandler') - - """ - self._verifyScriptSucceeds(script) - - def _verifyScriptSucceeds(self, script): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
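Editorial aside (not part of the diff): the _logging_test.py hunk verifies that importing grpc leaves the root logger untouched and installs only a logging.NullHandler on the "grpc" logger. A minimal sketch of that contract, assuming a fresh interpreter in which nothing has configured logging yet (the test guarantees this by running each snippet in a subprocess):

import logging

import grpc  # importing grpc is the side effect being checked

# The import must not have added handlers to the root logger.
assert not logging.getLogger().handlers

# The library's own logger carries exactly one NullHandler, so gRPC stays
# silent until the application opts in.
grpc_logger = logging.getLogger('grpc')
assert len(grpc_logger.handlers) == 1
assert isinstance(grpc_logger.handlers[0], logging.NullHandler)

# Applications enable gRPC log output explicitly, for example:
logging.basicConfig(level=logging.DEBUG)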
+"""Test of gRPC Python's interaction with the python logging module""" + +import unittest +import logging +import grpc +import os +import subprocess +import sys + +INTERPRETER = sys.executable + + +class LoggingTest(unittest.TestCase): + + def test_logger_not_occupied(self): + script = """if True: + import logging + + import grpc + + if len(logging.getLogger().handlers) != 0: + raise Exception('expected 0 logging handlers') + + """ + self._verifyScriptSucceeds(script) + + def test_handler_found(self): + script = """if True: + import logging + + import grpc + """ + out, err = self._verifyScriptSucceeds(script) + self.assertEqual(0, len(err), 'unexpected output to stderr') + + def test_can_configure_logger(self): + script = """if True: + import logging + import six + + import grpc + + + intended_stream = six.StringIO() + logging.basicConfig(stream=intended_stream) + + if len(logging.getLogger().handlers) != 1: + raise Exception('expected 1 logging handler') + + if logging.getLogger().handlers[0].stream is not intended_stream: + raise Exception('wrong handler stream') + + """ + self._verifyScriptSucceeds(script) + + def test_grpc_logger(self): + script = """if True: + import logging + + import grpc + + if "grpc" not in logging.Logger.manager.loggerDict: + raise Exception('grpc logger not found') + + root_logger = logging.getLogger("grpc") + if len(root_logger.handlers) != 1: + raise Exception('expected 1 root logger handler') + if not isinstance(root_logger.handlers[0], logging.NullHandler): + raise Exception('expected logging.NullHandler') + + """ + self._verifyScriptSucceeds(script) + + def _verifyScriptSucceeds(self, script): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' process = subprocess.Popen([INTERPRETER, '-c', script], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) - out, err = process.communicate() - self.assertEqual( - 0, process.returncode, - 'process failed with exit code %d (stdout: %s, stderr: %s)' % - (process.returncode, out, err)) - return out, err - - -if __name__ == '__main__': - unittest.main(verbosity=2) + out, err = process.communicate() + self.assertEqual( + 0, process.returncode, + 'process failed with exit code %d (stdout: %s, stderr: %s)' % + (process.returncode, out, err)) + return out, err + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py index 801d15937d..5b06eb2bfe 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py @@ -1,213 +1,213 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
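Editorial aside (not part of the diff): _metadata_code_details_test.py, whose hunk begins here, drives application-provided metadata, status codes, and details end to end. A hypothetical client-side fragment showing how those pieces are read back from a successful unary call (the channel, method path, and client metadata key are assumptions; the comments echo the server-side constants defined in this hunk):

import grpc


def query_status(channel):
    multi_callable = channel.unary_unary('/test.TestService/UnaryUnary')
    response, call = multi_callable.with_call(
        b'\x46\x47\x48', metadata=(('client-md-key', 'client-md-value'),))
    # The returned Call exposes everything the servicer attached.
    print(call.initial_metadata())   # e.g. the server-initial-md-* pairs
    print(call.trailing_metadata())  # e.g. the server-trailing-md-* pairs
    print(call.code(), call.details())
    return response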
-"""Tests application-provided metadata, status code, and details.""" - -import threading -import unittest -import logging - -import grpc - -from tests.unit import test_common -from tests.unit.framework.common import test_constants -from tests.unit.framework.common import test_control - -_SERIALIZED_REQUEST = b'\x46\x47\x48' -_SERIALIZED_RESPONSE = b'\x49\x50\x51' - -_REQUEST_SERIALIZER = lambda unused_request: _SERIALIZED_REQUEST -_REQUEST_DESERIALIZER = lambda unused_serialized_request: object() -_RESPONSE_SERIALIZER = lambda unused_response: _SERIALIZED_RESPONSE -_RESPONSE_DESERIALIZER = lambda unused_serialized_response: object() - -_SERVICE = 'test.TestService' -_UNARY_UNARY = 'UnaryUnary' -_UNARY_STREAM = 'UnaryStream' -_STREAM_UNARY = 'StreamUnary' -_STREAM_STREAM = 'StreamStream' - -_CLIENT_METADATA = (('client-md-key', 'client-md-key'), ('client-md-key-bin', - b'\x00\x01')) - -_SERVER_INITIAL_METADATA = (('server-initial-md-key', - 'server-initial-md-value'), - ('server-initial-md-key-bin', b'\x00\x02')) - -_SERVER_TRAILING_METADATA = (('server-trailing-md-key', - 'server-trailing-md-value'), - ('server-trailing-md-key-bin', b'\x00\x03')) - -_NON_OK_CODE = grpc.StatusCode.NOT_FOUND -_DETAILS = 'Test details!' - -# calling abort should always fail an RPC, even for "invalid" codes -_ABORT_CODES = (_NON_OK_CODE, 3, grpc.StatusCode.OK) -_EXPECTED_CLIENT_CODES = (_NON_OK_CODE, grpc.StatusCode.UNKNOWN, - grpc.StatusCode.UNKNOWN) -_EXPECTED_DETAILS = (_DETAILS, _DETAILS, '') - - -class _Servicer(object): - - def __init__(self): - self._lock = threading.Lock() - self._abort_call = False - self._code = None - self._details = None - self._exception = False - self._return_none = False - self._received_client_metadata = None - - def unary_unary(self, request, context): - with self._lock: - self._received_client_metadata = context.invocation_metadata() - context.send_initial_metadata(_SERVER_INITIAL_METADATA) - context.set_trailing_metadata(_SERVER_TRAILING_METADATA) - if self._abort_call: - context.abort(self._code, self._details) - else: - if self._code is not None: - context.set_code(self._code) - if self._details is not None: - context.set_details(self._details) - if self._exception: - raise test_control.Defect() - else: - return None if self._return_none else object() - - def unary_stream(self, request, context): - with self._lock: - self._received_client_metadata = context.invocation_metadata() - context.send_initial_metadata(_SERVER_INITIAL_METADATA) - context.set_trailing_metadata(_SERVER_TRAILING_METADATA) - if self._abort_call: - context.abort(self._code, self._details) - else: - if self._code is not None: - context.set_code(self._code) - if self._details is not None: - context.set_details(self._details) - for _ in range(test_constants.STREAM_LENGTH // 2): - yield _SERIALIZED_RESPONSE - if self._exception: - raise test_control.Defect() - - def stream_unary(self, request_iterator, context): - with self._lock: - self._received_client_metadata = context.invocation_metadata() - context.send_initial_metadata(_SERVER_INITIAL_METADATA) - context.set_trailing_metadata(_SERVER_TRAILING_METADATA) - # TODO(https://github.com/grpc/grpc/issues/6891): just ignore the - # request iterator. 
- list(request_iterator) - if self._abort_call: - context.abort(self._code, self._details) - else: - if self._code is not None: - context.set_code(self._code) - if self._details is not None: - context.set_details(self._details) - if self._exception: - raise test_control.Defect() - else: - return None if self._return_none else _SERIALIZED_RESPONSE - - def stream_stream(self, request_iterator, context): - with self._lock: - self._received_client_metadata = context.invocation_metadata() - context.send_initial_metadata(_SERVER_INITIAL_METADATA) - context.set_trailing_metadata(_SERVER_TRAILING_METADATA) - # TODO(https://github.com/grpc/grpc/issues/6891): just ignore the - # request iterator. - list(request_iterator) - if self._abort_call: - context.abort(self._code, self._details) - else: - if self._code is not None: - context.set_code(self._code) - if self._details is not None: - context.set_details(self._details) - for _ in range(test_constants.STREAM_LENGTH // 3): - yield object() - if self._exception: - raise test_control.Defect() - - def set_abort_call(self): - with self._lock: - self._abort_call = True - - def set_code(self, code): - with self._lock: - self._code = code - - def set_details(self, details): - with self._lock: - self._details = details - - def set_exception(self): - with self._lock: - self._exception = True - - def set_return_none(self): - with self._lock: - self._return_none = True - - def received_client_metadata(self): - with self._lock: - return self._received_client_metadata - - -def _generic_handler(servicer): - method_handlers = { - _UNARY_UNARY: +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests application-provided metadata, status code, and details.""" + +import threading +import unittest +import logging + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants +from tests.unit.framework.common import test_control + +_SERIALIZED_REQUEST = b'\x46\x47\x48' +_SERIALIZED_RESPONSE = b'\x49\x50\x51' + +_REQUEST_SERIALIZER = lambda unused_request: _SERIALIZED_REQUEST +_REQUEST_DESERIALIZER = lambda unused_serialized_request: object() +_RESPONSE_SERIALIZER = lambda unused_response: _SERIALIZED_RESPONSE +_RESPONSE_DESERIALIZER = lambda unused_serialized_response: object() + +_SERVICE = 'test.TestService' +_UNARY_UNARY = 'UnaryUnary' +_UNARY_STREAM = 'UnaryStream' +_STREAM_UNARY = 'StreamUnary' +_STREAM_STREAM = 'StreamStream' + +_CLIENT_METADATA = (('client-md-key', 'client-md-key'), ('client-md-key-bin', + b'\x00\x01')) + +_SERVER_INITIAL_METADATA = (('server-initial-md-key', + 'server-initial-md-value'), + ('server-initial-md-key-bin', b'\x00\x02')) + +_SERVER_TRAILING_METADATA = (('server-trailing-md-key', + 'server-trailing-md-value'), + ('server-trailing-md-key-bin', b'\x00\x03')) + +_NON_OK_CODE = grpc.StatusCode.NOT_FOUND +_DETAILS = 'Test details!' 
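Editorial aside (not part of the diff): the _Servicer in this hunk can fail an RPC either by calling context.abort() or by recording a status with context.set_code()/set_details(), and the _ABORT_CODES comment just below notes that abort() always fails the call, even for "invalid" codes. A hypothetical pair of handlers contrasting the two styles (the bodies are illustrative, not taken from the commit):

import grpc


def handle_with_abort(request, context):
    # abort() raises inside the servicer and always terminates the RPC; per
    # the test below, an out-of-range or OK code still surfaces on the client
    # as a failed call (StatusCode.UNKNOWN).
    context.abort(grpc.StatusCode.NOT_FOUND, 'Test details!')


def handle_with_set_code(request, context):
    # set_code()/set_details() only record the status; the handler still
    # returns, and the client receives that status instead of the response.
    context.set_code(grpc.StatusCode.NOT_FOUND)
    context.set_details('Test details!')
    return b''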
+ +# calling abort should always fail an RPC, even for "invalid" codes +_ABORT_CODES = (_NON_OK_CODE, 3, grpc.StatusCode.OK) +_EXPECTED_CLIENT_CODES = (_NON_OK_CODE, grpc.StatusCode.UNKNOWN, + grpc.StatusCode.UNKNOWN) +_EXPECTED_DETAILS = (_DETAILS, _DETAILS, '') + + +class _Servicer(object): + + def __init__(self): + self._lock = threading.Lock() + self._abort_call = False + self._code = None + self._details = None + self._exception = False + self._return_none = False + self._received_client_metadata = None + + def unary_unary(self, request, context): + with self._lock: + self._received_client_metadata = context.invocation_metadata() + context.send_initial_metadata(_SERVER_INITIAL_METADATA) + context.set_trailing_metadata(_SERVER_TRAILING_METADATA) + if self._abort_call: + context.abort(self._code, self._details) + else: + if self._code is not None: + context.set_code(self._code) + if self._details is not None: + context.set_details(self._details) + if self._exception: + raise test_control.Defect() + else: + return None if self._return_none else object() + + def unary_stream(self, request, context): + with self._lock: + self._received_client_metadata = context.invocation_metadata() + context.send_initial_metadata(_SERVER_INITIAL_METADATA) + context.set_trailing_metadata(_SERVER_TRAILING_METADATA) + if self._abort_call: + context.abort(self._code, self._details) + else: + if self._code is not None: + context.set_code(self._code) + if self._details is not None: + context.set_details(self._details) + for _ in range(test_constants.STREAM_LENGTH // 2): + yield _SERIALIZED_RESPONSE + if self._exception: + raise test_control.Defect() + + def stream_unary(self, request_iterator, context): + with self._lock: + self._received_client_metadata = context.invocation_metadata() + context.send_initial_metadata(_SERVER_INITIAL_METADATA) + context.set_trailing_metadata(_SERVER_TRAILING_METADATA) + # TODO(https://github.com/grpc/grpc/issues/6891): just ignore the + # request iterator. + list(request_iterator) + if self._abort_call: + context.abort(self._code, self._details) + else: + if self._code is not None: + context.set_code(self._code) + if self._details is not None: + context.set_details(self._details) + if self._exception: + raise test_control.Defect() + else: + return None if self._return_none else _SERIALIZED_RESPONSE + + def stream_stream(self, request_iterator, context): + with self._lock: + self._received_client_metadata = context.invocation_metadata() + context.send_initial_metadata(_SERVER_INITIAL_METADATA) + context.set_trailing_metadata(_SERVER_TRAILING_METADATA) + # TODO(https://github.com/grpc/grpc/issues/6891): just ignore the + # request iterator. 
+ list(request_iterator) + if self._abort_call: + context.abort(self._code, self._details) + else: + if self._code is not None: + context.set_code(self._code) + if self._details is not None: + context.set_details(self._details) + for _ in range(test_constants.STREAM_LENGTH // 3): + yield object() + if self._exception: + raise test_control.Defect() + + def set_abort_call(self): + with self._lock: + self._abort_call = True + + def set_code(self, code): + with self._lock: + self._code = code + + def set_details(self, details): + with self._lock: + self._details = details + + def set_exception(self): + with self._lock: + self._exception = True + + def set_return_none(self): + with self._lock: + self._return_none = True + + def received_client_metadata(self): + with self._lock: + return self._received_client_metadata + + +def _generic_handler(servicer): + method_handlers = { + _UNARY_UNARY: grpc.unary_unary_rpc_method_handler( servicer.unary_unary, request_deserializer=_REQUEST_DESERIALIZER, response_serializer=_RESPONSE_SERIALIZER), - _UNARY_STREAM: + _UNARY_STREAM: grpc.unary_stream_rpc_method_handler(servicer.unary_stream), - _STREAM_UNARY: + _STREAM_UNARY: grpc.stream_unary_rpc_method_handler(servicer.stream_unary), - _STREAM_STREAM: + _STREAM_STREAM: grpc.stream_stream_rpc_method_handler( servicer.stream_stream, request_deserializer=_REQUEST_DESERIALIZER, response_serializer=_RESPONSE_SERIALIZER), - } - return grpc.method_handlers_generic_handler(_SERVICE, method_handlers) - - -class MetadataCodeDetailsTest(unittest.TestCase): - - def setUp(self): - self._servicer = _Servicer() - self._server = test_common.test_server() - self._server.add_generic_rpc_handlers( - (_generic_handler(self._servicer),)) - port = self._server.add_insecure_port('[::]:0') - self._server.start() - - self._channel = grpc.insecure_channel('localhost:{}'.format(port)) - self._unary_unary = self._channel.unary_unary( - '/'.join(( - '', - _SERVICE, - _UNARY_UNARY, - )), - request_serializer=_REQUEST_SERIALIZER, - response_deserializer=_RESPONSE_DESERIALIZER, - ) + } + return grpc.method_handlers_generic_handler(_SERVICE, method_handlers) + + +class MetadataCodeDetailsTest(unittest.TestCase): + + def setUp(self): + self._servicer = _Servicer() + self._server = test_common.test_server() + self._server.add_generic_rpc_handlers( + (_generic_handler(self._servicer),)) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + + self._channel = grpc.insecure_channel('localhost:{}'.format(port)) + self._unary_unary = self._channel.unary_unary( + '/'.join(( + '', + _SERVICE, + _UNARY_UNARY, + )), + request_serializer=_REQUEST_SERIALIZER, + response_deserializer=_RESPONSE_DESERIALIZER, + ) self._unary_stream = self._channel.unary_stream( '/'.join(( '', @@ -220,444 +220,444 @@ class MetadataCodeDetailsTest(unittest.TestCase): _SERVICE, _STREAM_UNARY, )),) - self._stream_stream = self._channel.stream_stream( - '/'.join(( - '', - _SERVICE, - _STREAM_STREAM, - )), - request_serializer=_REQUEST_SERIALIZER, - response_deserializer=_RESPONSE_DESERIALIZER, - ) - - def tearDown(self): - self._server.stop(None) - self._channel.close() - - def testSuccessfulUnaryUnary(self): - self._servicer.set_details(_DETAILS) - - unused_response, call = self._unary_unary.with_call( - object(), metadata=_CLIENT_METADATA) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, - 
call.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_TRAILING_METADATA, - call.trailing_metadata())) - self.assertIs(grpc.StatusCode.OK, call.code()) - - def testSuccessfulUnaryStream(self): - self._servicer.set_details(_DETAILS) - + self._stream_stream = self._channel.stream_stream( + '/'.join(( + '', + _SERVICE, + _STREAM_STREAM, + )), + request_serializer=_REQUEST_SERIALIZER, + response_deserializer=_RESPONSE_DESERIALIZER, + ) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + + def testSuccessfulUnaryUnary(self): + self._servicer.set_details(_DETAILS) + + unused_response, call = self._unary_unary.with_call( + object(), metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + call.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_TRAILING_METADATA, + call.trailing_metadata())) + self.assertIs(grpc.StatusCode.OK, call.code()) + + def testSuccessfulUnaryStream(self): + self._servicer.set_details(_DETAILS) + response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST, metadata=_CLIENT_METADATA) - received_initial_metadata = response_iterator_call.initial_metadata() - list(response_iterator_call) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, - received_initial_metadata)) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - response_iterator_call.trailing_metadata())) - self.assertIs(grpc.StatusCode.OK, response_iterator_call.code()) - - def testSuccessfulStreamUnary(self): - self._servicer.set_details(_DETAILS) - - unused_response, call = self._stream_unary.with_call( - iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH), - metadata=_CLIENT_METADATA) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, - call.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_TRAILING_METADATA, - call.trailing_metadata())) - self.assertIs(grpc.StatusCode.OK, call.code()) - - def testSuccessfulStreamStream(self): - self._servicer.set_details(_DETAILS) - + received_initial_metadata = response_iterator_call.initial_metadata() + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(grpc.StatusCode.OK, response_iterator_call.code()) + + def testSuccessfulStreamUnary(self): + self._servicer.set_details(_DETAILS) + + unused_response, call = self._stream_unary.with_call( + iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + call.initial_metadata())) + self.assertTrue( + 
test_common.metadata_transmitted(_SERVER_TRAILING_METADATA, + call.trailing_metadata())) + self.assertIs(grpc.StatusCode.OK, call.code()) + + def testSuccessfulStreamStream(self): + self._servicer.set_details(_DETAILS) + response_iterator_call = self._stream_stream(iter( [object()] * test_constants.STREAM_LENGTH), metadata=_CLIENT_METADATA) - received_initial_metadata = response_iterator_call.initial_metadata() - list(response_iterator_call) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, - received_initial_metadata)) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - response_iterator_call.trailing_metadata())) - self.assertIs(grpc.StatusCode.OK, response_iterator_call.code()) - - def testAbortedUnaryUnary(self): - test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, - _EXPECTED_DETAILS) - for abort_code, expected_code, expected_details in test_cases: - self._servicer.set_code(abort_code) - self._servicer.set_details(_DETAILS) - self._servicer.set_abort_call() - - with self.assertRaises(grpc.RpcError) as exception_context: - self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, - self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_INITIAL_METADATA, - exception_context.exception.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - exception_context.exception.trailing_metadata())) - self.assertIs(expected_code, exception_context.exception.code()) - self.assertEqual(expected_details, - exception_context.exception.details()) - - def testAbortedUnaryStream(self): - test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, - _EXPECTED_DETAILS) - for abort_code, expected_code, expected_details in test_cases: - self._servicer.set_code(abort_code) - self._servicer.set_details(_DETAILS) - self._servicer.set_abort_call() - - response_iterator_call = self._unary_stream( - _SERIALIZED_REQUEST, metadata=_CLIENT_METADATA) - received_initial_metadata = \ - response_iterator_call.initial_metadata() - with self.assertRaises(grpc.RpcError): - self.assertEqual(len(list(response_iterator_call)), 0) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, - self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, - received_initial_metadata)) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - response_iterator_call.trailing_metadata())) - self.assertIs(expected_code, response_iterator_call.code()) - self.assertEqual(expected_details, response_iterator_call.details()) - - def testAbortedStreamUnary(self): - test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, - _EXPECTED_DETAILS) - for abort_code, expected_code, expected_details in test_cases: - self._servicer.set_code(abort_code) - self._servicer.set_details(_DETAILS) - self._servicer.set_abort_call() - - with self.assertRaises(grpc.RpcError) as exception_context: + received_initial_metadata = response_iterator_call.initial_metadata() + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + 
test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(grpc.StatusCode.OK, response_iterator_call.code()) + + def testAbortedUnaryUnary(self): + test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, + _EXPECTED_DETAILS) + for abort_code, expected_code, expected_details in test_cases: + self._servicer.set_code(abort_code) + self._servicer.set_details(_DETAILS) + self._servicer.set_abort_call() + + with self.assertRaises(grpc.RpcError) as exception_context: + self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, + self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(expected_code, exception_context.exception.code()) + self.assertEqual(expected_details, + exception_context.exception.details()) + + def testAbortedUnaryStream(self): + test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, + _EXPECTED_DETAILS) + for abort_code, expected_code, expected_details in test_cases: + self._servicer.set_code(abort_code) + self._servicer.set_details(_DETAILS) + self._servicer.set_abort_call() + + response_iterator_call = self._unary_stream( + _SERIALIZED_REQUEST, metadata=_CLIENT_METADATA) + received_initial_metadata = \ + response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError): + self.assertEqual(len(list(response_iterator_call)), 0) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, + self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(expected_code, response_iterator_call.code()) + self.assertEqual(expected_details, response_iterator_call.details()) + + def testAbortedStreamUnary(self): + test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, + _EXPECTED_DETAILS) + for abort_code, expected_code, expected_details in test_cases: + self._servicer.set_code(abort_code) + self._servicer.set_details(_DETAILS) + self._servicer.set_abort_call() + + with self.assertRaises(grpc.RpcError) as exception_context: self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH), metadata=_CLIENT_METADATA) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, - self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_INITIAL_METADATA, - exception_context.exception.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - exception_context.exception.trailing_metadata())) - self.assertIs(expected_code, exception_context.exception.code()) - self.assertEqual(expected_details, - exception_context.exception.details()) - - def testAbortedStreamStream(self): - test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, - _EXPECTED_DETAILS) - for abort_code, expected_code, expected_details in test_cases: - self._servicer.set_code(abort_code) - 
self._servicer.set_details(_DETAILS) - self._servicer.set_abort_call() - - response_iterator_call = self._stream_stream( - iter([object()] * test_constants.STREAM_LENGTH), - metadata=_CLIENT_METADATA) - received_initial_metadata = \ - response_iterator_call.initial_metadata() - with self.assertRaises(grpc.RpcError): - self.assertEqual(len(list(response_iterator_call)), 0) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, - self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, - received_initial_metadata)) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - response_iterator_call.trailing_metadata())) - self.assertIs(expected_code, response_iterator_call.code()) - self.assertEqual(expected_details, response_iterator_call.details()) - - def testCustomCodeUnaryUnary(self): - self._servicer.set_code(_NON_OK_CODE) - self._servicer.set_details(_DETAILS) - - with self.assertRaises(grpc.RpcError) as exception_context: - self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_INITIAL_METADATA, - exception_context.exception.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - exception_context.exception.trailing_metadata())) - self.assertIs(_NON_OK_CODE, exception_context.exception.code()) - self.assertEqual(_DETAILS, exception_context.exception.details()) - - def testCustomCodeUnaryStream(self): - self._servicer.set_code(_NON_OK_CODE) - self._servicer.set_details(_DETAILS) - + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, + self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(expected_code, exception_context.exception.code()) + self.assertEqual(expected_details, + exception_context.exception.details()) + + def testAbortedStreamStream(self): + test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, + _EXPECTED_DETAILS) + for abort_code, expected_code, expected_details in test_cases: + self._servicer.set_code(abort_code) + self._servicer.set_details(_DETAILS) + self._servicer.set_abort_call() + + response_iterator_call = self._stream_stream( + iter([object()] * test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + received_initial_metadata = \ + response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError): + self.assertEqual(len(list(response_iterator_call)), 0) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, + self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(expected_code, response_iterator_call.code()) + self.assertEqual(expected_details, response_iterator_call.details()) + + def testCustomCodeUnaryUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + + with 
self.assertRaises(grpc.RpcError) as exception_context: + self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeUnaryStream(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST, metadata=_CLIENT_METADATA) - received_initial_metadata = response_iterator_call.initial_metadata() - with self.assertRaises(grpc.RpcError): - list(response_iterator_call) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, - received_initial_metadata)) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - response_iterator_call.trailing_metadata())) - self.assertIs(_NON_OK_CODE, response_iterator_call.code()) - self.assertEqual(_DETAILS, response_iterator_call.details()) - - def testCustomCodeStreamUnary(self): - self._servicer.set_code(_NON_OK_CODE) - self._servicer.set_details(_DETAILS) - - with self.assertRaises(grpc.RpcError) as exception_context: + received_initial_metadata = response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError): + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(_NON_OK_CODE, response_iterator_call.code()) + self.assertEqual(_DETAILS, response_iterator_call.details()) + + def testCustomCodeStreamUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + + with self.assertRaises(grpc.RpcError) as exception_context: self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH), metadata=_CLIENT_METADATA) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_INITIAL_METADATA, - exception_context.exception.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - exception_context.exception.trailing_metadata())) - self.assertIs(_NON_OK_CODE, exception_context.exception.code()) - self.assertEqual(_DETAILS, exception_context.exception.details()) - - def testCustomCodeStreamStream(self): - self._servicer.set_code(_NON_OK_CODE) - self._servicer.set_details(_DETAILS) - + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + 
test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeStreamStream(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + response_iterator_call = self._stream_stream(iter( [object()] * test_constants.STREAM_LENGTH), metadata=_CLIENT_METADATA) - received_initial_metadata = response_iterator_call.initial_metadata() - with self.assertRaises(grpc.RpcError) as exception_context: - list(response_iterator_call) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, - received_initial_metadata)) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - exception_context.exception.trailing_metadata())) - self.assertIs(_NON_OK_CODE, exception_context.exception.code()) - self.assertEqual(_DETAILS, exception_context.exception.details()) - - def testCustomCodeExceptionUnaryUnary(self): - self._servicer.set_code(_NON_OK_CODE) - self._servicer.set_details(_DETAILS) - self._servicer.set_exception() - - with self.assertRaises(grpc.RpcError) as exception_context: - self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_INITIAL_METADATA, - exception_context.exception.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - exception_context.exception.trailing_metadata())) - self.assertIs(_NON_OK_CODE, exception_context.exception.code()) - self.assertEqual(_DETAILS, exception_context.exception.details()) - - def testCustomCodeExceptionUnaryStream(self): - self._servicer.set_code(_NON_OK_CODE) - self._servicer.set_details(_DETAILS) - self._servicer.set_exception() - + received_initial_metadata = response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError) as exception_context: + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeExceptionUnaryUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_exception() + + with self.assertRaises(grpc.RpcError) as exception_context: + self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, 
exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeExceptionUnaryStream(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_exception() + response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST, metadata=_CLIENT_METADATA) - received_initial_metadata = response_iterator_call.initial_metadata() - with self.assertRaises(grpc.RpcError): - list(response_iterator_call) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, - received_initial_metadata)) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - response_iterator_call.trailing_metadata())) - self.assertIs(_NON_OK_CODE, response_iterator_call.code()) - self.assertEqual(_DETAILS, response_iterator_call.details()) - - def testCustomCodeExceptionStreamUnary(self): - self._servicer.set_code(_NON_OK_CODE) - self._servicer.set_details(_DETAILS) - self._servicer.set_exception() - - with self.assertRaises(grpc.RpcError) as exception_context: + received_initial_metadata = response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError): + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(_NON_OK_CODE, response_iterator_call.code()) + self.assertEqual(_DETAILS, response_iterator_call.details()) + + def testCustomCodeExceptionStreamUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_exception() + + with self.assertRaises(grpc.RpcError) as exception_context: self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH), metadata=_CLIENT_METADATA) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_INITIAL_METADATA, - exception_context.exception.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - exception_context.exception.trailing_metadata())) - self.assertIs(_NON_OK_CODE, exception_context.exception.code()) - self.assertEqual(_DETAILS, exception_context.exception.details()) - - def testCustomCodeExceptionStreamStream(self): - self._servicer.set_code(_NON_OK_CODE) - self._servicer.set_details(_DETAILS) - self._servicer.set_exception() - + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeExceptionStreamStream(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + 
self._servicer.set_exception() + response_iterator_call = self._stream_stream(iter( [object()] * test_constants.STREAM_LENGTH), metadata=_CLIENT_METADATA) - received_initial_metadata = response_iterator_call.initial_metadata() - with self.assertRaises(grpc.RpcError): - list(response_iterator_call) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, - received_initial_metadata)) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - response_iterator_call.trailing_metadata())) - self.assertIs(_NON_OK_CODE, response_iterator_call.code()) - self.assertEqual(_DETAILS, response_iterator_call.details()) - - def testCustomCodeReturnNoneUnaryUnary(self): - self._servicer.set_code(_NON_OK_CODE) - self._servicer.set_details(_DETAILS) - self._servicer.set_return_none() - - with self.assertRaises(grpc.RpcError) as exception_context: - self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_INITIAL_METADATA, - exception_context.exception.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - exception_context.exception.trailing_metadata())) - self.assertIs(_NON_OK_CODE, exception_context.exception.code()) - self.assertEqual(_DETAILS, exception_context.exception.details()) - - def testCustomCodeReturnNoneStreamUnary(self): - self._servicer.set_code(_NON_OK_CODE) - self._servicer.set_details(_DETAILS) - self._servicer.set_return_none() - - with self.assertRaises(grpc.RpcError) as exception_context: + received_initial_metadata = response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError): + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(_NON_OK_CODE, response_iterator_call.code()) + self.assertEqual(_DETAILS, response_iterator_call.details()) + + def testCustomCodeReturnNoneUnaryUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_return_none() + + with self.assertRaises(grpc.RpcError) as exception_context: + self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeReturnNoneStreamUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_return_none() + + with self.assertRaises(grpc.RpcError) as exception_context: 
self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH), metadata=_CLIENT_METADATA) - - self.assertTrue( - test_common.metadata_transmitted( - _CLIENT_METADATA, self._servicer.received_client_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_INITIAL_METADATA, - exception_context.exception.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted( - _SERVER_TRAILING_METADATA, - exception_context.exception.trailing_metadata())) - self.assertIs(_NON_OK_CODE, exception_context.exception.code()) - self.assertEqual(_DETAILS, exception_context.exception.details()) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py index 9347970e49..e2b36b1c70 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py @@ -1,211 +1,211 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests metadata flags feature by testing wait-for-ready semantics""" - -import time -import weakref -import unittest -import threading +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
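# Illustrative sketch, not taken from the patch above: the server-side status
# pattern that _metadata_code_details_test.py exercises. A servicer may attach
# initial/trailing metadata and a non-OK status via the context, or terminate
# the RPC immediately with abort(); the client then reads the same values from
# the call object or the raised RpcError. The class name `_FailingServicer`
# and the metadata keys are placeholders; only the grpc calls already shown in
# the tests above are assumed.
import grpc

class _FailingServicer(object):

    def unary_unary(self, request, context):
        # Metadata can be sent before the response and with the final status.
        context.send_initial_metadata((('server-initial-md-key', 'value'),))
        context.set_trailing_metadata((('server-trailing-md-key', 'value'),))
        # Either set the status explicitly and still return a response...
        context.set_code(grpc.StatusCode.NOT_FOUND)
        context.set_details('Test details!')
        return b''
        # ...or end the call at once; abort() raises, so nothing after it runs:
        # context.abort(grpc.StatusCode.NOT_FOUND, 'Test details!')

# Client side (sketch): with_call() returns (response, call) on success, and
# raises an RpcError exposing the same accessors on failure.
# try:
#     response, call = multi_callable.with_call(request, metadata=client_md)
#     call.code(); call.details(); call.initial_metadata(); call.trailing_metadata()
# except grpc.RpcError as error:
#     error.code(); error.details(); error.trailing_metadata()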
+"""Tests metadata flags feature by testing wait-for-ready semantics""" + +import time +import weakref +import unittest +import threading import logging -import socket -from six.moves import queue - -import grpc - -from tests.unit import test_common -from tests.unit.framework.common import test_constants -import tests.unit.framework.common +import socket +from six.moves import queue + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants +import tests.unit.framework.common from tests.unit.framework.common import get_socket - -_UNARY_UNARY = '/test/UnaryUnary' -_UNARY_STREAM = '/test/UnaryStream' -_STREAM_UNARY = '/test/StreamUnary' -_STREAM_STREAM = '/test/StreamStream' - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x00\x00\x00' - - -def handle_unary_unary(test, request, servicer_context): - return _RESPONSE - - -def handle_unary_stream(test, request, servicer_context): - for _ in range(test_constants.STREAM_LENGTH): - yield _RESPONSE - - -def handle_stream_unary(test, request_iterator, servicer_context): - for _ in request_iterator: - pass - return _RESPONSE - - -def handle_stream_stream(test, request_iterator, servicer_context): - for _ in request_iterator: - yield _RESPONSE - - -class _MethodHandler(grpc.RpcMethodHandler): - - def __init__(self, test, request_streaming, response_streaming): - self.request_streaming = request_streaming - self.response_streaming = response_streaming - self.request_deserializer = None - self.response_serializer = None - self.unary_unary = None - self.unary_stream = None - self.stream_unary = None - self.stream_stream = None - if self.request_streaming and self.response_streaming: + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + + +def handle_unary_unary(test, request, servicer_context): + return _RESPONSE + + +def handle_unary_stream(test, request, servicer_context): + for _ in range(test_constants.STREAM_LENGTH): + yield _RESPONSE + + +def handle_stream_unary(test, request_iterator, servicer_context): + for _ in request_iterator: + pass + return _RESPONSE + + +def handle_stream_stream(test, request_iterator, servicer_context): + for _ in request_iterator: + yield _RESPONSE + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, test, request_streaming, response_streaming): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + if self.request_streaming and self.response_streaming: self.stream_stream = lambda req, ctx: handle_stream_stream( test, req, ctx) - elif self.request_streaming: + elif self.request_streaming: self.stream_unary = lambda req, ctx: handle_stream_unary( test, req, ctx) - elif self.response_streaming: + elif self.response_streaming: self.unary_stream = lambda req, ctx: handle_unary_stream( test, req, ctx) - else: + else: self.unary_unary = lambda req, ctx: handle_unary_unary( test, req, ctx) - - -class _GenericHandler(grpc.GenericRpcHandler): - - def __init__(self, test): - self._test = test - - def service(self, handler_call_details): - if handler_call_details.method == _UNARY_UNARY: - return _MethodHandler(self._test, False, False) - elif handler_call_details.method == _UNARY_STREAM: - return 
_MethodHandler(self._test, False, True) - elif handler_call_details.method == _STREAM_UNARY: - return _MethodHandler(self._test, True, False) - elif handler_call_details.method == _STREAM_STREAM: - return _MethodHandler(self._test, True, True) - else: - return None - - -def create_dummy_channel(): - """Creating dummy channels is a workaround for retries""" + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, test): + self._test = test + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(self._test, False, False) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(self._test, False, True) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(self._test, True, False) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(self._test, True, True) + else: + return None + + +def create_dummy_channel(): + """Creating dummy channels is a workaround for retries""" host, port, sock = get_socket(sock_options=(socket.SO_REUSEADDR,)) sock.close() return grpc.insecure_channel('{}:{}'.format(host, port)) - - -def perform_unary_unary_call(channel, wait_for_ready=None): - channel.unary_unary(_UNARY_UNARY).__call__( - _REQUEST, - timeout=test_constants.LONG_TIMEOUT, - wait_for_ready=wait_for_ready) - - -def perform_unary_unary_with_call(channel, wait_for_ready=None): - channel.unary_unary(_UNARY_UNARY).with_call( - _REQUEST, - timeout=test_constants.LONG_TIMEOUT, - wait_for_ready=wait_for_ready) - - -def perform_unary_unary_future(channel, wait_for_ready=None): - channel.unary_unary(_UNARY_UNARY).future( - _REQUEST, - timeout=test_constants.LONG_TIMEOUT, - wait_for_ready=wait_for_ready).result( - timeout=test_constants.LONG_TIMEOUT) - - -def perform_unary_stream_call(channel, wait_for_ready=None): - response_iterator = channel.unary_stream(_UNARY_STREAM).__call__( - _REQUEST, - timeout=test_constants.LONG_TIMEOUT, - wait_for_ready=wait_for_ready) - for _ in response_iterator: - pass - - -def perform_stream_unary_call(channel, wait_for_ready=None): - channel.stream_unary(_STREAM_UNARY).__call__( - iter([_REQUEST] * test_constants.STREAM_LENGTH), - timeout=test_constants.LONG_TIMEOUT, - wait_for_ready=wait_for_ready) - - -def perform_stream_unary_with_call(channel, wait_for_ready=None): - channel.stream_unary(_STREAM_UNARY).with_call( - iter([_REQUEST] * test_constants.STREAM_LENGTH), - timeout=test_constants.LONG_TIMEOUT, - wait_for_ready=wait_for_ready) - - -def perform_stream_unary_future(channel, wait_for_ready=None): - channel.stream_unary(_STREAM_UNARY).future( - iter([_REQUEST] * test_constants.STREAM_LENGTH), - timeout=test_constants.LONG_TIMEOUT, - wait_for_ready=wait_for_ready).result( - timeout=test_constants.LONG_TIMEOUT) - - -def perform_stream_stream_call(channel, wait_for_ready=None): - response_iterator = channel.stream_stream(_STREAM_STREAM).__call__( - iter([_REQUEST] * test_constants.STREAM_LENGTH), - timeout=test_constants.LONG_TIMEOUT, - wait_for_ready=wait_for_ready) - for _ in response_iterator: - pass - - -_ALL_CALL_CASES = [ - perform_unary_unary_call, perform_unary_unary_with_call, - perform_unary_unary_future, perform_unary_stream_call, - perform_stream_unary_call, perform_stream_unary_with_call, - perform_stream_unary_future, perform_stream_stream_call -] - - -class MetadataFlagsTest(unittest.TestCase): - - def check_connection_does_failfast(self, fn, channel, wait_for_ready=None): - try: - fn(channel, wait_for_ready) - 
self.fail("The Call should fail") - except BaseException as e: # pylint: disable=broad-except - self.assertIs(grpc.StatusCode.UNAVAILABLE, e.code()) - - def test_call_wait_for_ready_default(self): - for perform_call in _ALL_CALL_CASES: - with create_dummy_channel() as channel: - self.check_connection_does_failfast(perform_call, channel) - - def test_call_wait_for_ready_disabled(self): - for perform_call in _ALL_CALL_CASES: - with create_dummy_channel() as channel: + + +def perform_unary_unary_call(channel, wait_for_ready=None): + channel.unary_unary(_UNARY_UNARY).__call__( + _REQUEST, + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + + +def perform_unary_unary_with_call(channel, wait_for_ready=None): + channel.unary_unary(_UNARY_UNARY).with_call( + _REQUEST, + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + + +def perform_unary_unary_future(channel, wait_for_ready=None): + channel.unary_unary(_UNARY_UNARY).future( + _REQUEST, + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready).result( + timeout=test_constants.LONG_TIMEOUT) + + +def perform_unary_stream_call(channel, wait_for_ready=None): + response_iterator = channel.unary_stream(_UNARY_STREAM).__call__( + _REQUEST, + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + for _ in response_iterator: + pass + + +def perform_stream_unary_call(channel, wait_for_ready=None): + channel.stream_unary(_STREAM_UNARY).__call__( + iter([_REQUEST] * test_constants.STREAM_LENGTH), + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + + +def perform_stream_unary_with_call(channel, wait_for_ready=None): + channel.stream_unary(_STREAM_UNARY).with_call( + iter([_REQUEST] * test_constants.STREAM_LENGTH), + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + + +def perform_stream_unary_future(channel, wait_for_ready=None): + channel.stream_unary(_STREAM_UNARY).future( + iter([_REQUEST] * test_constants.STREAM_LENGTH), + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready).result( + timeout=test_constants.LONG_TIMEOUT) + + +def perform_stream_stream_call(channel, wait_for_ready=None): + response_iterator = channel.stream_stream(_STREAM_STREAM).__call__( + iter([_REQUEST] * test_constants.STREAM_LENGTH), + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + for _ in response_iterator: + pass + + +_ALL_CALL_CASES = [ + perform_unary_unary_call, perform_unary_unary_with_call, + perform_unary_unary_future, perform_unary_stream_call, + perform_stream_unary_call, perform_stream_unary_with_call, + perform_stream_unary_future, perform_stream_stream_call +] + + +class MetadataFlagsTest(unittest.TestCase): + + def check_connection_does_failfast(self, fn, channel, wait_for_ready=None): + try: + fn(channel, wait_for_ready) + self.fail("The Call should fail") + except BaseException as e: # pylint: disable=broad-except + self.assertIs(grpc.StatusCode.UNAVAILABLE, e.code()) + + def test_call_wait_for_ready_default(self): + for perform_call in _ALL_CALL_CASES: + with create_dummy_channel() as channel: + self.check_connection_does_failfast(perform_call, channel) + + def test_call_wait_for_ready_disabled(self): + for perform_call in _ALL_CALL_CASES: + with create_dummy_channel() as channel: self.check_connection_does_failfast(perform_call, channel, wait_for_ready=False) - - def test_call_wait_for_ready_enabled(self): - # To test the wait mechanism, Python thread is required to make - # client set up first without 
handling them case by case. - # Also, Python thread don't pass the unhandled exceptions to - # main thread. So, it need another method to store the - # exceptions and raise them again in main thread. - unhandled_exceptions = queue.Queue() - + + def test_call_wait_for_ready_enabled(self): + # To test the wait mechanism, Python thread is required to make + # client set up first without handling them case by case. + # Also, Python thread don't pass the unhandled exceptions to + # main thread. So, it need another method to store the + # exceptions and raise them again in main thread. + unhandled_exceptions = queue.Queue() + # We just need an unused TCP port host, port, sock = get_socket(sock_options=(socket.SO_REUSEADDR,)) sock.close() @@ -226,9 +226,9 @@ class MetadataFlagsTest(unittest.TestCase): # If the call failed, the thread would be destroyed. The # channel object can be collected before calling the # callback, which will result in a deadlock. - wg.done() + wg.done() unhandled_exceptions.put(e, True) - + test_threads = [] for perform_call in _ALL_CALL_CASES: test_thread = threading.Thread(target=test_call, @@ -237,24 +237,24 @@ class MetadataFlagsTest(unittest.TestCase): test_thread.exception = None test_thread.start() test_threads.append(test_thread) - + # Start the server after the connections are waiting wg.wait() server = test_common.test_server(reuse_port=True) server.add_generic_rpc_handlers((_GenericHandler(weakref.proxy(self)),)) server.add_insecure_port(addr) server.start() - + for test_thread in test_threads: test_thread.join() - + # Stop the server to make test end properly server.stop(0) - + if not unhandled_exceptions.empty(): raise unhandled_exceptions.get(True) - - -if __name__ == '__main__': + + +if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) - unittest.main(verbosity=2) + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py index 1de9b7fd86..3e7717b04c 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py @@ -1,242 +1,242 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
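# Illustrative sketch, not taken from the patch above: the wait-for-ready
# semantics that _metadata_flags_test.py checks. Against an address with no
# listening server, an RPC fails fast unless wait_for_ready=True is passed, in
# which case it blocks until the channel becomes ready or the timeout expires.
# The address and timeout below are placeholders chosen for the sketch.
import grpc

channel = grpc.insecure_channel('localhost:50051')  # assume nothing listens here
multi_callable = channel.unary_unary('/test/UnaryUnary')
try:
    # Default behaviour (wait_for_ready unset or False): the call fails fast
    # once the channel observes that no connection can be established.
    multi_callable(b'\x00\x00\x00', timeout=1, wait_for_ready=False)
except grpc.RpcError as error:
    print(error.code())  # expected StatusCode.UNAVAILABLE, mirroring the test above
channel.close()

# With wait_for_ready=True the same call would instead wait for a server to
# appear, and only fail once the deadline elapses:
# multi_callable(b'\x00\x00\x00', timeout=1, wait_for_ready=True)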
-"""Tests server and client side metadata API.""" - -import unittest -import weakref -import logging - -import grpc -from grpc import _channel - -from tests.unit import test_common -from tests.unit.framework.common import test_constants - -_CHANNEL_ARGS = (('grpc.primary_user_agent', 'primary-agent'), - ('grpc.secondary_user_agent', 'secondary-agent')) - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x00\x00\x00' - -_UNARY_UNARY = '/test/UnaryUnary' -_UNARY_STREAM = '/test/UnaryStream' -_STREAM_UNARY = '/test/StreamUnary' -_STREAM_STREAM = '/test/StreamStream' - -_INVOCATION_METADATA = ( - ( - b'invocation-md-key', - u'invocation-md-value', - ), - ( - u'invocation-md-key-bin', - b'\x00\x01', - ), -) -_EXPECTED_INVOCATION_METADATA = ( - ( - 'invocation-md-key', - 'invocation-md-value', - ), - ( - 'invocation-md-key-bin', - b'\x00\x01', - ), -) - -_INITIAL_METADATA = ((b'initial-md-key', u'initial-md-value'), - (u'initial-md-key-bin', b'\x00\x02')) -_EXPECTED_INITIAL_METADATA = ( - ( - 'initial-md-key', - 'initial-md-value', - ), - ( - 'initial-md-key-bin', - b'\x00\x02', - ), -) - -_TRAILING_METADATA = ( - ( - 'server-trailing-md-key', - 'server-trailing-md-value', - ), - ( - 'server-trailing-md-key-bin', - b'\x00\x03', - ), -) -_EXPECTED_TRAILING_METADATA = _TRAILING_METADATA - - -def _user_agent(metadata): - for key, val in metadata: - if key == 'user-agent': - return val - raise KeyError('No user agent!') - - -def validate_client_metadata(test, servicer_context): - invocation_metadata = servicer_context.invocation_metadata() - test.assertTrue( - test_common.metadata_transmitted(_EXPECTED_INVOCATION_METADATA, - invocation_metadata)) - user_agent = _user_agent(invocation_metadata) - test.assertTrue( - user_agent.startswith('primary-agent ' + _channel._USER_AGENT)) - test.assertTrue(user_agent.endswith('secondary-agent')) - - -def handle_unary_unary(test, request, servicer_context): - validate_client_metadata(test, servicer_context) - servicer_context.send_initial_metadata(_INITIAL_METADATA) - servicer_context.set_trailing_metadata(_TRAILING_METADATA) - return _RESPONSE - - -def handle_unary_stream(test, request, servicer_context): - validate_client_metadata(test, servicer_context) - servicer_context.send_initial_metadata(_INITIAL_METADATA) - servicer_context.set_trailing_metadata(_TRAILING_METADATA) - for _ in range(test_constants.STREAM_LENGTH): - yield _RESPONSE - - -def handle_stream_unary(test, request_iterator, servicer_context): - validate_client_metadata(test, servicer_context) - servicer_context.send_initial_metadata(_INITIAL_METADATA) - servicer_context.set_trailing_metadata(_TRAILING_METADATA) - # TODO(issue:#6891) We should be able to remove this loop - for request in request_iterator: - pass - return _RESPONSE - - -def handle_stream_stream(test, request_iterator, servicer_context): - validate_client_metadata(test, servicer_context) - servicer_context.send_initial_metadata(_INITIAL_METADATA) - servicer_context.set_trailing_metadata(_TRAILING_METADATA) - # TODO(issue:#6891) We should be able to remove this loop, - # and replace with return; yield - for request in request_iterator: - yield _RESPONSE - - -class _MethodHandler(grpc.RpcMethodHandler): - - def __init__(self, test, request_streaming, response_streaming): - self.request_streaming = request_streaming - self.response_streaming = response_streaming - self.request_deserializer = None - self.response_serializer = None - self.unary_unary = None - self.unary_stream = None - self.stream_unary = None - self.stream_stream = None - 
if self.request_streaming and self.response_streaming: - self.stream_stream = lambda x, y: handle_stream_stream(test, x, y) - elif self.request_streaming: - self.stream_unary = lambda x, y: handle_stream_unary(test, x, y) - elif self.response_streaming: - self.unary_stream = lambda x, y: handle_unary_stream(test, x, y) - else: - self.unary_unary = lambda x, y: handle_unary_unary(test, x, y) - - -class _GenericHandler(grpc.GenericRpcHandler): - - def __init__(self, test): - self._test = test - - def service(self, handler_call_details): - if handler_call_details.method == _UNARY_UNARY: - return _MethodHandler(self._test, False, False) - elif handler_call_details.method == _UNARY_STREAM: - return _MethodHandler(self._test, False, True) - elif handler_call_details.method == _STREAM_UNARY: - return _MethodHandler(self._test, True, False) - elif handler_call_details.method == _STREAM_STREAM: - return _MethodHandler(self._test, True, True) - else: - return None - - -class MetadataTest(unittest.TestCase): - - def setUp(self): - self._server = test_common.test_server() +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests server and client side metadata API.""" + +import unittest +import weakref +import logging + +import grpc +from grpc import _channel + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_CHANNEL_ARGS = (('grpc.primary_user_agent', 'primary-agent'), + ('grpc.secondary_user_agent', 'secondary-agent')) + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + +_INVOCATION_METADATA = ( + ( + b'invocation-md-key', + u'invocation-md-value', + ), + ( + u'invocation-md-key-bin', + b'\x00\x01', + ), +) +_EXPECTED_INVOCATION_METADATA = ( + ( + 'invocation-md-key', + 'invocation-md-value', + ), + ( + 'invocation-md-key-bin', + b'\x00\x01', + ), +) + +_INITIAL_METADATA = ((b'initial-md-key', u'initial-md-value'), + (u'initial-md-key-bin', b'\x00\x02')) +_EXPECTED_INITIAL_METADATA = ( + ( + 'initial-md-key', + 'initial-md-value', + ), + ( + 'initial-md-key-bin', + b'\x00\x02', + ), +) + +_TRAILING_METADATA = ( + ( + 'server-trailing-md-key', + 'server-trailing-md-value', + ), + ( + 'server-trailing-md-key-bin', + b'\x00\x03', + ), +) +_EXPECTED_TRAILING_METADATA = _TRAILING_METADATA + + +def _user_agent(metadata): + for key, val in metadata: + if key == 'user-agent': + return val + raise KeyError('No user agent!') + + +def validate_client_metadata(test, servicer_context): + invocation_metadata = servicer_context.invocation_metadata() + test.assertTrue( + test_common.metadata_transmitted(_EXPECTED_INVOCATION_METADATA, + invocation_metadata)) + user_agent = _user_agent(invocation_metadata) + test.assertTrue( + user_agent.startswith('primary-agent ' + _channel._USER_AGENT)) + test.assertTrue(user_agent.endswith('secondary-agent')) + + +def handle_unary_unary(test, 
request, servicer_context): + validate_client_metadata(test, servicer_context) + servicer_context.send_initial_metadata(_INITIAL_METADATA) + servicer_context.set_trailing_metadata(_TRAILING_METADATA) + return _RESPONSE + + +def handle_unary_stream(test, request, servicer_context): + validate_client_metadata(test, servicer_context) + servicer_context.send_initial_metadata(_INITIAL_METADATA) + servicer_context.set_trailing_metadata(_TRAILING_METADATA) + for _ in range(test_constants.STREAM_LENGTH): + yield _RESPONSE + + +def handle_stream_unary(test, request_iterator, servicer_context): + validate_client_metadata(test, servicer_context) + servicer_context.send_initial_metadata(_INITIAL_METADATA) + servicer_context.set_trailing_metadata(_TRAILING_METADATA) + # TODO(issue:#6891) We should be able to remove this loop + for request in request_iterator: + pass + return _RESPONSE + + +def handle_stream_stream(test, request_iterator, servicer_context): + validate_client_metadata(test, servicer_context) + servicer_context.send_initial_metadata(_INITIAL_METADATA) + servicer_context.set_trailing_metadata(_TRAILING_METADATA) + # TODO(issue:#6891) We should be able to remove this loop, + # and replace with return; yield + for request in request_iterator: + yield _RESPONSE + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, test, request_streaming, response_streaming): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + if self.request_streaming and self.response_streaming: + self.stream_stream = lambda x, y: handle_stream_stream(test, x, y) + elif self.request_streaming: + self.stream_unary = lambda x, y: handle_stream_unary(test, x, y) + elif self.response_streaming: + self.unary_stream = lambda x, y: handle_unary_stream(test, x, y) + else: + self.unary_unary = lambda x, y: handle_unary_unary(test, x, y) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, test): + self._test = test + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(self._test, False, False) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(self._test, False, True) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(self._test, True, False) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(self._test, True, True) + else: + return None + + +class MetadataTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() self._server.add_generic_rpc_handlers( (_GenericHandler(weakref.proxy(self)),)) - port = self._server.add_insecure_port('[::]:0') - self._server.start() + port = self._server.add_insecure_port('[::]:0') + self._server.start() self._channel = grpc.insecure_channel('localhost:%d' % port, options=_CHANNEL_ARGS) - - def tearDown(self): - self._server.stop(0) - self._channel.close() - - def testUnaryUnary(self): - multi_callable = self._channel.unary_unary(_UNARY_UNARY) - unused_response, call = multi_callable.with_call( - _REQUEST, metadata=_INVOCATION_METADATA) - self.assertTrue( - test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, - call.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, - call.trailing_metadata())) - - def 
testUnaryStream(self): - multi_callable = self._channel.unary_stream(_UNARY_STREAM) - call = multi_callable(_REQUEST, metadata=_INVOCATION_METADATA) - self.assertTrue( - test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, - call.initial_metadata())) - for _ in call: - pass - self.assertTrue( - test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, - call.trailing_metadata())) - - def testStreamUnary(self): - multi_callable = self._channel.stream_unary(_STREAM_UNARY) - unused_response, call = multi_callable.with_call( - iter([_REQUEST] * test_constants.STREAM_LENGTH), - metadata=_INVOCATION_METADATA) - self.assertTrue( - test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, - call.initial_metadata())) - self.assertTrue( - test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, - call.trailing_metadata())) - - def testStreamStream(self): - multi_callable = self._channel.stream_stream(_STREAM_STREAM) + + def tearDown(self): + self._server.stop(0) + self._channel.close() + + def testUnaryUnary(self): + multi_callable = self._channel.unary_unary(_UNARY_UNARY) + unused_response, call = multi_callable.with_call( + _REQUEST, metadata=_INVOCATION_METADATA) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, + call.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, + call.trailing_metadata())) + + def testUnaryStream(self): + multi_callable = self._channel.unary_stream(_UNARY_STREAM) + call = multi_callable(_REQUEST, metadata=_INVOCATION_METADATA) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, + call.initial_metadata())) + for _ in call: + pass + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, + call.trailing_metadata())) + + def testStreamUnary(self): + multi_callable = self._channel.stream_unary(_STREAM_UNARY) + unused_response, call = multi_callable.with_call( + iter([_REQUEST] * test_constants.STREAM_LENGTH), + metadata=_INVOCATION_METADATA) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, + call.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, + call.trailing_metadata())) + + def testStreamStream(self): + multi_callable = self._channel.stream_stream(_STREAM_STREAM) call = multi_callable(iter([_REQUEST] * test_constants.STREAM_LENGTH), metadata=_INVOCATION_METADATA) - self.assertTrue( - test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, - call.initial_metadata())) - for _ in call: - pass - self.assertTrue( - test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, - call.trailing_metadata())) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, + call.initial_metadata())) + for _ in call: + pass + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, + call.trailing_metadata())) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py index c3318bf7df..16feb4b1ff 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py @@ -1,69 +1,69 @@ -# Copyright 2017 gRPC authors. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests that a channel will reconnect if a connection is dropped""" - -import socket -import time -import logging -import unittest - -import grpc -from grpc.framework.foundation import logging_pool - -from tests.unit.framework.common import test_constants -from tests.unit.framework.common import bound_socket - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x00\x00\x01' - -_UNARY_UNARY = '/test/UnaryUnary' - - -def _handle_unary_unary(unused_request, unused_servicer_context): - return _RESPONSE - - -class ReconnectTest(unittest.TestCase): - - def test_reconnect(self): - server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) - handler = grpc.method_handlers_generic_handler('test', { - 'UnaryUnary': +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests that a channel will reconnect if a connection is dropped""" + +import socket +import time +import logging +import unittest + +import grpc +from grpc.framework.foundation import logging_pool + +from tests.unit.framework.common import test_constants +from tests.unit.framework.common import bound_socket + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x01' + +_UNARY_UNARY = '/test/UnaryUnary' + + +def _handle_unary_unary(unused_request, unused_servicer_context): + return _RESPONSE + + +class ReconnectTest(unittest.TestCase): + + def test_reconnect(self): + server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) + handler = grpc.method_handlers_generic_handler('test', { + 'UnaryUnary': grpc.unary_unary_rpc_method_handler(_handle_unary_unary) - }) - options = (('grpc.so_reuseport', 1),) - with bound_socket() as (host, port): - addr = '{}:{}'.format(host, port) - server = grpc.server(server_pool, (handler,), options=options) - server.add_insecure_port(addr) - server.start() - channel = grpc.insecure_channel(addr) - multi_callable = channel.unary_unary(_UNARY_UNARY) - self.assertEqual(_RESPONSE, multi_callable(_REQUEST)) - server.stop(None) - # By default, the channel connectivity is checked every 5s - # GRPC_CLIENT_CHANNEL_BACKUP_POLL_INTERVAL_MS can be set to change - # this. 
- time.sleep(5.1) - server = grpc.server(server_pool, (handler,), options=options) - server.add_insecure_port(addr) - server.start() - self.assertEqual(_RESPONSE, multi_callable(_REQUEST)) - server.stop(None) - channel.close() - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + }) + options = (('grpc.so_reuseport', 1),) + with bound_socket() as (host, port): + addr = '{}:{}'.format(host, port) + server = grpc.server(server_pool, (handler,), options=options) + server.add_insecure_port(addr) + server.start() + channel = grpc.insecure_channel(addr) + multi_callable = channel.unary_unary(_UNARY_UNARY) + self.assertEqual(_RESPONSE, multi_callable(_REQUEST)) + server.stop(None) + # By default, the channel connectivity is checked every 5s + # GRPC_CLIENT_CHANNEL_BACKUP_POLL_INTERVAL_MS can be set to change + # this. + time.sleep(5.1) + server = grpc.server(server_pool, (handler,), options=options) + server.add_insecure_port(addr) + server.start() + self.assertEqual(_RESPONSE, multi_callable(_REQUEST)) + server.stop(None) + channel.close() + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_resource_exhausted_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_resource_exhausted_test.py index cd3036dd6b..ecd2ccadbd 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_resource_exhausted_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_resource_exhausted_test.py @@ -1,259 +1,259 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests server responding with RESOURCE_EXHAUSTED.""" - -import threading -import unittest -import logging - -import grpc -from grpc import _channel -from grpc.framework.foundation import logging_pool - -from tests.unit import test_common -from tests.unit.framework.common import test_constants - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x00\x00\x00' - -_UNARY_UNARY = '/test/UnaryUnary' -_UNARY_STREAM = '/test/UnaryStream' -_STREAM_UNARY = '/test/StreamUnary' -_STREAM_STREAM = '/test/StreamStream' - - -class _TestTrigger(object): - - def __init__(self, total_call_count): - self._total_call_count = total_call_count - self._pending_calls = 0 - self._triggered = False - self._finish_condition = threading.Condition() - self._start_condition = threading.Condition() - - # Wait for all calls be blocked in their handler - def await_calls(self): - with self._start_condition: - while self._pending_calls < self._total_call_count: - self._start_condition.wait() - - # Block in a response handler and wait for a trigger - def await_trigger(self): - with self._start_condition: - self._pending_calls += 1 - self._start_condition.notify() - - with self._finish_condition: - if not self._triggered: - self._finish_condition.wait() - - # Finish all response handlers - def trigger(self): - with self._finish_condition: - self._triggered = True - self._finish_condition.notify_all() - - -def handle_unary_unary(trigger, request, servicer_context): - trigger.await_trigger() - return _RESPONSE - - -def handle_unary_stream(trigger, request, servicer_context): - trigger.await_trigger() - for _ in range(test_constants.STREAM_LENGTH): - yield _RESPONSE - - -def handle_stream_unary(trigger, request_iterator, servicer_context): - trigger.await_trigger() - # TODO(issue:#6891) We should be able to remove this loop - for request in request_iterator: - pass - return _RESPONSE - - -def handle_stream_stream(trigger, request_iterator, servicer_context): - trigger.await_trigger() - # TODO(issue:#6891) We should be able to remove this loop, - # and replace with return; yield - for request in request_iterator: - yield _RESPONSE - - -class _MethodHandler(grpc.RpcMethodHandler): - - def __init__(self, trigger, request_streaming, response_streaming): - self.request_streaming = request_streaming - self.response_streaming = response_streaming - self.request_deserializer = None - self.response_serializer = None - self.unary_unary = None - self.unary_stream = None - self.stream_unary = None - self.stream_stream = None - if self.request_streaming and self.response_streaming: - self.stream_stream = ( - lambda x, y: handle_stream_stream(trigger, x, y)) - elif self.request_streaming: - self.stream_unary = lambda x, y: handle_stream_unary(trigger, x, y) - elif self.response_streaming: - self.unary_stream = lambda x, y: handle_unary_stream(trigger, x, y) - else: - self.unary_unary = lambda x, y: handle_unary_unary(trigger, x, y) - - -class _GenericHandler(grpc.GenericRpcHandler): - - def __init__(self, trigger): - self._trigger = trigger - - def service(self, handler_call_details): - if handler_call_details.method == _UNARY_UNARY: - return _MethodHandler(self._trigger, False, False) - elif handler_call_details.method == _UNARY_STREAM: - return _MethodHandler(self._trigger, False, True) - elif handler_call_details.method == _STREAM_UNARY: - return _MethodHandler(self._trigger, True, False) - elif handler_call_details.method == _STREAM_STREAM: - return _MethodHandler(self._trigger, True, True) - else: - return None - - -class 
ResourceExhaustedTest(unittest.TestCase): - - def setUp(self): - self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) - self._trigger = _TestTrigger(test_constants.THREAD_CONCURRENCY) - self._server = grpc.server( - self._server_pool, - handlers=(_GenericHandler(self._trigger),), - options=(('grpc.so_reuseport', 0),), - maximum_concurrent_rpcs=test_constants.THREAD_CONCURRENCY) - port = self._server.add_insecure_port('[::]:0') - self._server.start() - self._channel = grpc.insecure_channel('localhost:%d' % port) - - def tearDown(self): - self._server.stop(0) - self._channel.close() - - def testUnaryUnary(self): - multi_callable = self._channel.unary_unary(_UNARY_UNARY) - futures = [] - for _ in range(test_constants.THREAD_CONCURRENCY): - futures.append(multi_callable.future(_REQUEST)) - - self._trigger.await_calls() - - with self.assertRaises(grpc.RpcError) as exception_context: - multi_callable(_REQUEST) - - self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, - exception_context.exception.code()) - - future_exception = multi_callable.future(_REQUEST) - self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, - future_exception.exception().code()) - - self._trigger.trigger() - for future in futures: - self.assertEqual(_RESPONSE, future.result()) - - # Ensure a new request can be handled - self.assertEqual(_RESPONSE, multi_callable(_REQUEST)) - - def testUnaryStream(self): - multi_callable = self._channel.unary_stream(_UNARY_STREAM) - calls = [] - for _ in range(test_constants.THREAD_CONCURRENCY): - calls.append(multi_callable(_REQUEST)) - - self._trigger.await_calls() - - with self.assertRaises(grpc.RpcError) as exception_context: - next(multi_callable(_REQUEST)) - - self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, - exception_context.exception.code()) - - self._trigger.trigger() - - for call in calls: - for response in call: - self.assertEqual(_RESPONSE, response) - - # Ensure a new request can be handled - new_call = multi_callable(_REQUEST) - for response in new_call: - self.assertEqual(_RESPONSE, response) - - def testStreamUnary(self): - multi_callable = self._channel.stream_unary(_STREAM_UNARY) - futures = [] - request = iter([_REQUEST] * test_constants.STREAM_LENGTH) - for _ in range(test_constants.THREAD_CONCURRENCY): - futures.append(multi_callable.future(request)) - - self._trigger.await_calls() - - with self.assertRaises(grpc.RpcError) as exception_context: - multi_callable(request) - - self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, - exception_context.exception.code()) - - future_exception = multi_callable.future(request) - self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, - future_exception.exception().code()) - - self._trigger.trigger() - - for future in futures: - self.assertEqual(_RESPONSE, future.result()) - - # Ensure a new request can be handled - self.assertEqual(_RESPONSE, multi_callable(request)) - - def testStreamStream(self): - multi_callable = self._channel.stream_stream(_STREAM_STREAM) - calls = [] - request = iter([_REQUEST] * test_constants.STREAM_LENGTH) - for _ in range(test_constants.THREAD_CONCURRENCY): - calls.append(multi_callable(request)) - - self._trigger.await_calls() - - with self.assertRaises(grpc.RpcError) as exception_context: - next(multi_callable(request)) - - self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, - exception_context.exception.code()) - - self._trigger.trigger() - - for call in calls: - for response in call: - self.assertEqual(_RESPONSE, response) - - # Ensure a new request can be handled - new_call = 
multi_callable(request) - for response in new_call: - self.assertEqual(_RESPONSE, response) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests server responding with RESOURCE_EXHAUSTED.""" + +import threading +import unittest +import logging + +import grpc +from grpc import _channel +from grpc.framework.foundation import logging_pool + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + + +class _TestTrigger(object): + + def __init__(self, total_call_count): + self._total_call_count = total_call_count + self._pending_calls = 0 + self._triggered = False + self._finish_condition = threading.Condition() + self._start_condition = threading.Condition() + + # Wait for all calls be blocked in their handler + def await_calls(self): + with self._start_condition: + while self._pending_calls < self._total_call_count: + self._start_condition.wait() + + # Block in a response handler and wait for a trigger + def await_trigger(self): + with self._start_condition: + self._pending_calls += 1 + self._start_condition.notify() + + with self._finish_condition: + if not self._triggered: + self._finish_condition.wait() + + # Finish all response handlers + def trigger(self): + with self._finish_condition: + self._triggered = True + self._finish_condition.notify_all() + + +def handle_unary_unary(trigger, request, servicer_context): + trigger.await_trigger() + return _RESPONSE + + +def handle_unary_stream(trigger, request, servicer_context): + trigger.await_trigger() + for _ in range(test_constants.STREAM_LENGTH): + yield _RESPONSE + + +def handle_stream_unary(trigger, request_iterator, servicer_context): + trigger.await_trigger() + # TODO(issue:#6891) We should be able to remove this loop + for request in request_iterator: + pass + return _RESPONSE + + +def handle_stream_stream(trigger, request_iterator, servicer_context): + trigger.await_trigger() + # TODO(issue:#6891) We should be able to remove this loop, + # and replace with return; yield + for request in request_iterator: + yield _RESPONSE + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, trigger, request_streaming, response_streaming): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + if self.request_streaming and self.response_streaming: + self.stream_stream = ( + lambda x, y: handle_stream_stream(trigger, x, y)) + elif self.request_streaming: + self.stream_unary = lambda x, y: handle_stream_unary(trigger, x, y) + elif self.response_streaming: + 
self.unary_stream = lambda x, y: handle_unary_stream(trigger, x, y) + else: + self.unary_unary = lambda x, y: handle_unary_unary(trigger, x, y) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, trigger): + self._trigger = trigger + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(self._trigger, False, False) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(self._trigger, False, True) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(self._trigger, True, False) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(self._trigger, True, True) + else: + return None + + +class ResourceExhaustedTest(unittest.TestCase): + + def setUp(self): + self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) + self._trigger = _TestTrigger(test_constants.THREAD_CONCURRENCY) + self._server = grpc.server( + self._server_pool, + handlers=(_GenericHandler(self._trigger),), + options=(('grpc.so_reuseport', 0),), + maximum_concurrent_rpcs=test_constants.THREAD_CONCURRENCY) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(0) + self._channel.close() + + def testUnaryUnary(self): + multi_callable = self._channel.unary_unary(_UNARY_UNARY) + futures = [] + for _ in range(test_constants.THREAD_CONCURRENCY): + futures.append(multi_callable.future(_REQUEST)) + + self._trigger.await_calls() + + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable(_REQUEST) + + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + exception_context.exception.code()) + + future_exception = multi_callable.future(_REQUEST) + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + future_exception.exception().code()) + + self._trigger.trigger() + for future in futures: + self.assertEqual(_RESPONSE, future.result()) + + # Ensure a new request can be handled + self.assertEqual(_RESPONSE, multi_callable(_REQUEST)) + + def testUnaryStream(self): + multi_callable = self._channel.unary_stream(_UNARY_STREAM) + calls = [] + for _ in range(test_constants.THREAD_CONCURRENCY): + calls.append(multi_callable(_REQUEST)) + + self._trigger.await_calls() + + with self.assertRaises(grpc.RpcError) as exception_context: + next(multi_callable(_REQUEST)) + + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + exception_context.exception.code()) + + self._trigger.trigger() + + for call in calls: + for response in call: + self.assertEqual(_RESPONSE, response) + + # Ensure a new request can be handled + new_call = multi_callable(_REQUEST) + for response in new_call: + self.assertEqual(_RESPONSE, response) + + def testStreamUnary(self): + multi_callable = self._channel.stream_unary(_STREAM_UNARY) + futures = [] + request = iter([_REQUEST] * test_constants.STREAM_LENGTH) + for _ in range(test_constants.THREAD_CONCURRENCY): + futures.append(multi_callable.future(request)) + + self._trigger.await_calls() + + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable(request) + + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + exception_context.exception.code()) + + future_exception = multi_callable.future(request) + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + future_exception.exception().code()) + + self._trigger.trigger() + + for future in futures: + self.assertEqual(_RESPONSE, future.result()) + 
+ # Ensure a new request can be handled + self.assertEqual(_RESPONSE, multi_callable(request)) + + def testStreamStream(self): + multi_callable = self._channel.stream_stream(_STREAM_STREAM) + calls = [] + request = iter([_REQUEST] * test_constants.STREAM_LENGTH) + for _ in range(test_constants.THREAD_CONCURRENCY): + calls.append(multi_callable(request)) + + self._trigger.await_calls() + + with self.assertRaises(grpc.RpcError) as exception_context: + next(multi_callable(request)) + + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + exception_context.exception.code()) + + self._trigger.trigger() + + for call in calls: + for response in call: + self.assertEqual(_RESPONSE, response) + + # Ensure a new request can be handled + new_call = multi_callable(request) + for response in new_call: + self.assertEqual(_RESPONSE, response) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py index fbbb6cf000..1d1fdba11e 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py @@ -1,97 +1,97 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Defines a number of module-scope gRPC scenarios to test server shutdown.""" - -import argparse -import os -import threading -import time -import logging - -import grpc -from tests.unit import test_common - -from concurrent import futures -from six.moves import queue - -WAIT_TIME = 1000 - -REQUEST = b'request' -RESPONSE = b'response' - -SERVER_RAISES_EXCEPTION = 'server_raises_exception' -SERVER_DEALLOCATED = 'server_deallocated' -SERVER_FORK_CAN_EXIT = 'server_fork_can_exit' - -FORK_EXIT = '/test/ForkExit' - - -def fork_and_exit(request, servicer_context): - pid = os.fork() - if pid == 0: - os._exit(0) - return RESPONSE - - -class GenericHandler(grpc.GenericRpcHandler): - - def service(self, handler_call_details): - if handler_call_details.method == FORK_EXIT: - return grpc.unary_unary_rpc_method_handler(fork_and_exit) - else: - return None - - -def run_server(port_queue): - server = test_common.test_server() - port = server.add_insecure_port('[::]:0') - port_queue.put(port) - server.add_generic_rpc_handlers((GenericHandler(),)) - server.start() - # threading.Event.wait() does not exhibit the bug identified in - # https://github.com/grpc/grpc/issues/17093, sleep instead - time.sleep(WAIT_TIME) - - -def run_test(args): - if args.scenario == SERVER_RAISES_EXCEPTION: - server = test_common.test_server() - server.start() - raise Exception() - elif args.scenario == SERVER_DEALLOCATED: - server = test_common.test_server() - server.start() - server.__del__() - while server._state.stage != grpc._server._ServerStage.STOPPED: - pass - elif args.scenario == SERVER_FORK_CAN_EXIT: - port_queue = queue.Queue() - thread = threading.Thread(target=run_server, args=(port_queue,)) - thread.daemon = True - thread.start() - port = port_queue.get() - channel = grpc.insecure_channel('localhost:%d' % port) - multi_callable = channel.unary_unary(FORK_EXIT) - result, call = multi_callable.with_call(REQUEST, wait_for_ready=True) - os.wait() - else: - raise ValueError('unknown test scenario') - - -if __name__ == '__main__': - logging.basicConfig() - parser = argparse.ArgumentParser() - parser.add_argument('scenario', type=str) - args = parser.parse_args() - run_test(args) +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Defines a number of module-scope gRPC scenarios to test server shutdown.""" + +import argparse +import os +import threading +import time +import logging + +import grpc +from tests.unit import test_common + +from concurrent import futures +from six.moves import queue + +WAIT_TIME = 1000 + +REQUEST = b'request' +RESPONSE = b'response' + +SERVER_RAISES_EXCEPTION = 'server_raises_exception' +SERVER_DEALLOCATED = 'server_deallocated' +SERVER_FORK_CAN_EXIT = 'server_fork_can_exit' + +FORK_EXIT = '/test/ForkExit' + + +def fork_and_exit(request, servicer_context): + pid = os.fork() + if pid == 0: + os._exit(0) + return RESPONSE + + +class GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == FORK_EXIT: + return grpc.unary_unary_rpc_method_handler(fork_and_exit) + else: + return None + + +def run_server(port_queue): + server = test_common.test_server() + port = server.add_insecure_port('[::]:0') + port_queue.put(port) + server.add_generic_rpc_handlers((GenericHandler(),)) + server.start() + # threading.Event.wait() does not exhibit the bug identified in + # https://github.com/grpc/grpc/issues/17093, sleep instead + time.sleep(WAIT_TIME) + + +def run_test(args): + if args.scenario == SERVER_RAISES_EXCEPTION: + server = test_common.test_server() + server.start() + raise Exception() + elif args.scenario == SERVER_DEALLOCATED: + server = test_common.test_server() + server.start() + server.__del__() + while server._state.stage != grpc._server._ServerStage.STOPPED: + pass + elif args.scenario == SERVER_FORK_CAN_EXIT: + port_queue = queue.Queue() + thread = threading.Thread(target=run_server, args=(port_queue,)) + thread.daemon = True + thread.start() + port = port_queue.get() + channel = grpc.insecure_channel('localhost:%d' % port) + multi_callable = channel.unary_unary(FORK_EXIT) + result, call = multi_callable.with_call(REQUEST, wait_for_ready=True) + os.wait() + else: + raise ValueError('unknown test scenario') + + +if __name__ == '__main__': + logging.basicConfig() + parser = argparse.ArgumentParser() + parser.add_argument('scenario', type=str) + args = parser.parse_args() + run_test(args) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_test.py index 0fe0340985..c1dc7585f8 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_test.py @@ -1,95 +1,95 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests clean shutdown of server on various interpreter exit conditions. - -The tests in this module spawn a subprocess for each test case, the -test is considered successful if it doesn't hang/timeout. 
-""" - -import atexit -import os -import subprocess -import sys -import threading -import unittest -import logging - -from tests.unit import _server_shutdown_scenarios - -INTERPRETER = sys.executable -BASE_COMMAND = [INTERPRETER, '-m', 'tests.unit._server_shutdown_scenarios'] - -processes = [] -process_lock = threading.Lock() - - -# Make sure we attempt to clean up any -# processes we may have left running -def cleanup_processes(): - with process_lock: - for process in processes: - try: - process.kill() - except Exception: # pylint: disable=broad-except - pass - - -atexit.register(cleanup_processes) - - -def wait(process): - with process_lock: - processes.append(process) - process.wait() - - -class ServerShutdown(unittest.TestCase): - - # Currently we shut down a server (if possible) after the Python server - # instance is garbage collected. This behavior may change in the future. - def test_deallocated_server_stops(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_COMMAND + [_server_shutdown_scenarios.SERVER_DEALLOCATED], - stdout=sys.stdout, - stderr=sys.stderr, - env=env) - wait(process) - - def test_server_exception_exits(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_COMMAND + [_server_shutdown_scenarios.SERVER_RAISES_EXCEPTION], - stdout=sys.stdout, - stderr=sys.stderr, - env=env) - wait(process) - - @unittest.skipIf(os.name == 'nt', 'fork not supported on windows') - def test_server_fork_can_exit(self): - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - process = subprocess.Popen( - BASE_COMMAND + [_server_shutdown_scenarios.SERVER_FORK_CAN_EXIT], - stdout=sys.stdout, - stderr=sys.stderr, - env=env) - wait(process) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests clean shutdown of server on various interpreter exit conditions. + +The tests in this module spawn a subprocess for each test case, the +test is considered successful if it doesn't hang/timeout. +""" + +import atexit +import os +import subprocess +import sys +import threading +import unittest +import logging + +from tests.unit import _server_shutdown_scenarios + +INTERPRETER = sys.executable +BASE_COMMAND = [INTERPRETER, '-m', 'tests.unit._server_shutdown_scenarios'] + +processes = [] +process_lock = threading.Lock() + + +# Make sure we attempt to clean up any +# processes we may have left running +def cleanup_processes(): + with process_lock: + for process in processes: + try: + process.kill() + except Exception: # pylint: disable=broad-except + pass + + +atexit.register(cleanup_processes) + + +def wait(process): + with process_lock: + processes.append(process) + process.wait() + + +class ServerShutdown(unittest.TestCase): + + # Currently we shut down a server (if possible) after the Python server + # instance is garbage collected. 
This behavior may change in the future. + def test_deallocated_server_stops(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_server_shutdown_scenarios.SERVER_DEALLOCATED], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + def test_server_exception_exits(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_server_shutdown_scenarios.SERVER_RAISES_EXCEPTION], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + @unittest.skipIf(os.name == 'nt', 'fork not supported on windows') + def test_server_fork_can_exit(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_server_shutdown_scenarios.SERVER_FORK_CAN_EXIT], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py index afe0294930..35d992a33d 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py @@ -1,166 +1,166 @@ -# Copyright 2017 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests server certificate rotation. - -Here we test various aspects of gRPC Python, and in some cases gRPC -Core by extension, support for server certificate rotation. - -* ServerSSLCertReloadTestWithClientAuth: test ability to rotate - server's SSL cert for use in future channels with clients while not - affecting any existing channel. The server requires client - authentication. - -* ServerSSLCertReloadTestWithoutClientAuth: like - ServerSSLCertReloadTestWithClientAuth except that the server does - not authenticate the client. - -* ServerSSLCertReloadTestCertConfigReuse: tests gRPC Python's ability - to deal with user's reuse of ServerCertificateConfiguration instances. 
-""" - -import abc -import collections -import os -import six -import threading -import unittest -import logging - -from concurrent import futures - -import grpc -from tests.unit import resources -from tests.unit import test_common -from tests.testing import _application_common -from tests.testing import _server_application -from tests.testing.proto import services_pb2_grpc - -CA_1_PEM = resources.cert_hier_1_root_ca_cert() -CA_2_PEM = resources.cert_hier_2_root_ca_cert() - -CLIENT_KEY_1_PEM = resources.cert_hier_1_client_1_key() -CLIENT_CERT_CHAIN_1_PEM = (resources.cert_hier_1_client_1_cert() + - resources.cert_hier_1_intermediate_ca_cert()) - -CLIENT_KEY_2_PEM = resources.cert_hier_2_client_1_key() -CLIENT_CERT_CHAIN_2_PEM = (resources.cert_hier_2_client_1_cert() + - resources.cert_hier_2_intermediate_ca_cert()) - -SERVER_KEY_1_PEM = resources.cert_hier_1_server_1_key() -SERVER_CERT_CHAIN_1_PEM = (resources.cert_hier_1_server_1_cert() + - resources.cert_hier_1_intermediate_ca_cert()) - -SERVER_KEY_2_PEM = resources.cert_hier_2_server_1_key() -SERVER_CERT_CHAIN_2_PEM = (resources.cert_hier_2_server_1_cert() + - resources.cert_hier_2_intermediate_ca_cert()) - -# for use with the CertConfigFetcher. Roughly a simple custom mock -# implementation -Call = collections.namedtuple('Call', ['did_raise', 'returned_cert_config']) - - -def _create_channel(port, credentials): - return grpc.secure_channel('localhost:{}'.format(port), credentials) - - -def _create_client_stub(channel, expect_success): - if expect_success: - # per Nathaniel: there's some robustness issue if we start - # using a channel without waiting for it to be actually ready - grpc.channel_ready_future(channel).result(timeout=10) - return services_pb2_grpc.FirstServiceStub(channel) - - -class CertConfigFetcher(object): - - def __init__(self): - self._lock = threading.Lock() - self._calls = [] - self._should_raise = False - self._cert_config = None - - def reset(self): - with self._lock: - self._calls = [] - self._should_raise = False - self._cert_config = None - - def configure(self, should_raise, cert_config): - assert not (should_raise and cert_config), ( - "should not specify both should_raise and a cert_config at the same time" - ) - with self._lock: - self._should_raise = should_raise - self._cert_config = cert_config - - def getCalls(self): - with self._lock: - return self._calls - - def __call__(self): - with self._lock: - if self._should_raise: - self._calls.append(Call(True, None)) - raise ValueError('just for fun, should not affect the test') - else: - self._calls.append(Call(False, self._cert_config)) - return self._cert_config - - -class _ServerSSLCertReloadTest( - six.with_metaclass(abc.ABCMeta, unittest.TestCase)): - - def __init__(self, *args, **kwargs): - super(_ServerSSLCertReloadTest, self).__init__(*args, **kwargs) - self.server = None - self.port = None - - @abc.abstractmethod - def require_client_auth(self): - raise NotImplementedError() - - def setUp(self): - self.server = test_common.test_server() - services_pb2_grpc.add_FirstServiceServicer_to_server( - _server_application.FirstServiceServicer(), self.server) - switch_cert_on_client_num = 10 - initial_cert_config = grpc.ssl_server_certificate_configuration( - [(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)], - root_certificates=CA_2_PEM) - self.cert_config_fetcher = CertConfigFetcher() - server_credentials = grpc.dynamic_ssl_server_credentials( - initial_cert_config, - self.cert_config_fetcher, - require_client_authentication=self.require_client_auth()) - 
self.port = self.server.add_secure_port('[::]:0', server_credentials) - self.server.start() - - def tearDown(self): - if self.server: - self.server.stop(None) - - def _perform_rpc(self, client_stub, expect_success): - # we don't care about the actual response of the rpc; only - # whether we can perform it or not, and if not, the status - # code must be UNAVAILABLE - request = _application_common.UNARY_UNARY_REQUEST - if expect_success: - response = client_stub.UnUn(request) - self.assertEqual(response, _application_common.UNARY_UNARY_RESPONSE) - else: - with self.assertRaises(grpc.RpcError) as exception_context: - client_stub.UnUn(request) +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests server certificate rotation. + +Here we test various aspects of gRPC Python, and in some cases gRPC +Core by extension, support for server certificate rotation. + +* ServerSSLCertReloadTestWithClientAuth: test ability to rotate + server's SSL cert for use in future channels with clients while not + affecting any existing channel. The server requires client + authentication. + +* ServerSSLCertReloadTestWithoutClientAuth: like + ServerSSLCertReloadTestWithClientAuth except that the server does + not authenticate the client. + +* ServerSSLCertReloadTestCertConfigReuse: tests gRPC Python's ability + to deal with user's reuse of ServerCertificateConfiguration instances. +""" + +import abc +import collections +import os +import six +import threading +import unittest +import logging + +from concurrent import futures + +import grpc +from tests.unit import resources +from tests.unit import test_common +from tests.testing import _application_common +from tests.testing import _server_application +from tests.testing.proto import services_pb2_grpc + +CA_1_PEM = resources.cert_hier_1_root_ca_cert() +CA_2_PEM = resources.cert_hier_2_root_ca_cert() + +CLIENT_KEY_1_PEM = resources.cert_hier_1_client_1_key() +CLIENT_CERT_CHAIN_1_PEM = (resources.cert_hier_1_client_1_cert() + + resources.cert_hier_1_intermediate_ca_cert()) + +CLIENT_KEY_2_PEM = resources.cert_hier_2_client_1_key() +CLIENT_CERT_CHAIN_2_PEM = (resources.cert_hier_2_client_1_cert() + + resources.cert_hier_2_intermediate_ca_cert()) + +SERVER_KEY_1_PEM = resources.cert_hier_1_server_1_key() +SERVER_CERT_CHAIN_1_PEM = (resources.cert_hier_1_server_1_cert() + + resources.cert_hier_1_intermediate_ca_cert()) + +SERVER_KEY_2_PEM = resources.cert_hier_2_server_1_key() +SERVER_CERT_CHAIN_2_PEM = (resources.cert_hier_2_server_1_cert() + + resources.cert_hier_2_intermediate_ca_cert()) + +# for use with the CertConfigFetcher. 
Roughly a simple custom mock +# implementation +Call = collections.namedtuple('Call', ['did_raise', 'returned_cert_config']) + + +def _create_channel(port, credentials): + return grpc.secure_channel('localhost:{}'.format(port), credentials) + + +def _create_client_stub(channel, expect_success): + if expect_success: + # per Nathaniel: there's some robustness issue if we start + # using a channel without waiting for it to be actually ready + grpc.channel_ready_future(channel).result(timeout=10) + return services_pb2_grpc.FirstServiceStub(channel) + + +class CertConfigFetcher(object): + + def __init__(self): + self._lock = threading.Lock() + self._calls = [] + self._should_raise = False + self._cert_config = None + + def reset(self): + with self._lock: + self._calls = [] + self._should_raise = False + self._cert_config = None + + def configure(self, should_raise, cert_config): + assert not (should_raise and cert_config), ( + "should not specify both should_raise and a cert_config at the same time" + ) + with self._lock: + self._should_raise = should_raise + self._cert_config = cert_config + + def getCalls(self): + with self._lock: + return self._calls + + def __call__(self): + with self._lock: + if self._should_raise: + self._calls.append(Call(True, None)) + raise ValueError('just for fun, should not affect the test') + else: + self._calls.append(Call(False, self._cert_config)) + return self._cert_config + + +class _ServerSSLCertReloadTest( + six.with_metaclass(abc.ABCMeta, unittest.TestCase)): + + def __init__(self, *args, **kwargs): + super(_ServerSSLCertReloadTest, self).__init__(*args, **kwargs) + self.server = None + self.port = None + + @abc.abstractmethod + def require_client_auth(self): + raise NotImplementedError() + + def setUp(self): + self.server = test_common.test_server() + services_pb2_grpc.add_FirstServiceServicer_to_server( + _server_application.FirstServiceServicer(), self.server) + switch_cert_on_client_num = 10 + initial_cert_config = grpc.ssl_server_certificate_configuration( + [(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)], + root_certificates=CA_2_PEM) + self.cert_config_fetcher = CertConfigFetcher() + server_credentials = grpc.dynamic_ssl_server_credentials( + initial_cert_config, + self.cert_config_fetcher, + require_client_authentication=self.require_client_auth()) + self.port = self.server.add_secure_port('[::]:0', server_credentials) + self.server.start() + + def tearDown(self): + if self.server: + self.server.stop(None) + + def _perform_rpc(self, client_stub, expect_success): + # we don't care about the actual response of the rpc; only + # whether we can perform it or not, and if not, the status + # code must be UNAVAILABLE + request = _application_common.UNARY_UNARY_REQUEST + if expect_success: + response = client_stub.UnUn(request) + self.assertEqual(response, _application_common.UNARY_UNARY_RESPONSE) + else: + with self.assertRaises(grpc.RpcError) as exception_context: + client_stub.UnUn(request) # If TLS 1.2 is used, then the client receives an alert message # before the handshake is complete, so the status is UNAVAILABLE. If # TLS 1.3 is used, then the client receives the alert message after @@ -169,343 +169,343 @@ class _ServerSSLCertReloadTest( # corresponding status code, so this yields an UNKNOWN status. 
self.assertTrue(exception_context.exception.code( ) in [grpc.StatusCode.UNAVAILABLE, grpc.StatusCode.UNKNOWN]) - - def _do_one_shot_client_rpc(self, - expect_success, - root_certificates=None, - private_key=None, - certificate_chain=None): - credentials = grpc.ssl_channel_credentials( - root_certificates=root_certificates, - private_key=private_key, - certificate_chain=certificate_chain) - with _create_channel(self.port, credentials) as client_channel: - client_stub = _create_client_stub(client_channel, expect_success) - self._perform_rpc(client_stub, expect_success) - - def _test(self): - # things should work... - self.cert_config_fetcher.configure(False, None) + + def _do_one_shot_client_rpc(self, + expect_success, + root_certificates=None, + private_key=None, + certificate_chain=None): + credentials = grpc.ssl_channel_credentials( + root_certificates=root_certificates, + private_key=private_key, + certificate_chain=certificate_chain) + with _create_channel(self.port, credentials) as client_channel: + client_stub = _create_client_stub(client_channel, expect_success) + self._perform_rpc(client_stub, expect_success) + + def _test(self): + # things should work... + self.cert_config_fetcher.configure(False, None) self._do_one_shot_client_rpc(True, root_certificates=CA_1_PEM, private_key=CLIENT_KEY_2_PEM, certificate_chain=CLIENT_CERT_CHAIN_2_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - self.assertIsNone(actual_calls[0].returned_cert_config) - - # client should reject server... - # fails because client trusts ca2 and so will reject server - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, None) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + # client should reject server... + # fails because client trusts ca2 and so will reject server + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) self._do_one_shot_client_rpc(False, root_certificates=CA_2_PEM, private_key=CLIENT_KEY_2_PEM, certificate_chain=CLIENT_CERT_CHAIN_2_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertGreaterEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - for i, call in enumerate(actual_calls): - self.assertFalse(call.did_raise, 'i= {}'.format(i)) - self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) - - # should work again... - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(True, None) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) + + # should work again... 
+ self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(True, None) self._do_one_shot_client_rpc(True, root_certificates=CA_1_PEM, private_key=CLIENT_KEY_2_PEM, certificate_chain=CLIENT_CERT_CHAIN_2_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 1) - self.assertTrue(actual_calls[0].did_raise) - self.assertIsNone(actual_calls[0].returned_cert_config) - - # if with_client_auth, then client should be rejected by - # server because client uses key/cert1, but server trusts ca2, - # so server will reject - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, None) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertTrue(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + # if with_client_auth, then client should be rejected by + # server because client uses key/cert1, but server trusts ca2, + # so server will reject + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) self._do_one_shot_client_rpc(not self.require_client_auth(), root_certificates=CA_1_PEM, private_key=CLIENT_KEY_1_PEM, certificate_chain=CLIENT_CERT_CHAIN_1_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertGreaterEqual(len(actual_calls), 1) - for i, call in enumerate(actual_calls): - self.assertFalse(call.did_raise, 'i= {}'.format(i)) - self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) - - # should work again... - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, None) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) + + # should work again... 
+ self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) self._do_one_shot_client_rpc(True, root_certificates=CA_1_PEM, private_key=CLIENT_KEY_2_PEM, certificate_chain=CLIENT_CERT_CHAIN_2_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - self.assertIsNone(actual_calls[0].returned_cert_config) - - # now create the "persistent" clients - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, None) - channel_A = _create_channel( - self.port, - grpc.ssl_channel_credentials( - root_certificates=CA_1_PEM, - private_key=CLIENT_KEY_2_PEM, - certificate_chain=CLIENT_CERT_CHAIN_2_PEM)) - persistent_client_stub_A = _create_client_stub(channel_A, True) - self._perform_rpc(persistent_client_stub_A, True) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - self.assertIsNone(actual_calls[0].returned_cert_config) - - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, None) - channel_B = _create_channel( - self.port, - grpc.ssl_channel_credentials( - root_certificates=CA_1_PEM, - private_key=CLIENT_KEY_2_PEM, - certificate_chain=CLIENT_CERT_CHAIN_2_PEM)) - persistent_client_stub_B = _create_client_stub(channel_B, True) - self._perform_rpc(persistent_client_stub_B, True) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - self.assertIsNone(actual_calls[0].returned_cert_config) - - # moment of truth!! client should reject server because the - # server switch cert... - cert_config = grpc.ssl_server_certificate_configuration( - [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)], - root_certificates=CA_1_PEM) - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, cert_config) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + # now create the "persistent" clients + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + channel_A = _create_channel( + self.port, + grpc.ssl_channel_credentials( + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM)) + persistent_client_stub_A = _create_client_stub(channel_A, True) + self._perform_rpc(persistent_client_stub_A, True) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + channel_B = _create_channel( + self.port, + grpc.ssl_channel_credentials( + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM)) + persistent_client_stub_B = _create_client_stub(channel_B, True) + self._perform_rpc(persistent_client_stub_B, True) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + # moment of truth!! client should reject server because the + # server switch cert... 
+ cert_config = grpc.ssl_server_certificate_configuration( + [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)], + root_certificates=CA_1_PEM) + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, cert_config) self._do_one_shot_client_rpc(False, root_certificates=CA_1_PEM, private_key=CLIENT_KEY_2_PEM, certificate_chain=CLIENT_CERT_CHAIN_2_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertGreaterEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - for i, call in enumerate(actual_calls): - self.assertFalse(call.did_raise, 'i= {}'.format(i)) - self.assertEqual(call.returned_cert_config, cert_config, - 'i= {}'.format(i)) - - # now should work again... - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, None) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertEqual(call.returned_cert_config, cert_config, + 'i= {}'.format(i)) + + # now should work again... + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) self._do_one_shot_client_rpc(True, root_certificates=CA_2_PEM, private_key=CLIENT_KEY_1_PEM, certificate_chain=CLIENT_CERT_CHAIN_1_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - self.assertIsNone(actual_calls[0].returned_cert_config) - - # client should be rejected by server if with_client_auth - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, None) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + # client should be rejected by server if with_client_auth + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) self._do_one_shot_client_rpc(not self.require_client_auth(), root_certificates=CA_2_PEM, private_key=CLIENT_KEY_2_PEM, certificate_chain=CLIENT_CERT_CHAIN_2_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertGreaterEqual(len(actual_calls), 1) - for i, call in enumerate(actual_calls): - self.assertFalse(call.did_raise, 'i= {}'.format(i)) - self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) - - # here client should reject server... - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, None) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) + + # here client should reject server... 
+ self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) self._do_one_shot_client_rpc(False, root_certificates=CA_1_PEM, private_key=CLIENT_KEY_2_PEM, certificate_chain=CLIENT_CERT_CHAIN_2_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertGreaterEqual(len(actual_calls), 1) - for i, call in enumerate(actual_calls): - self.assertFalse(call.did_raise, 'i= {}'.format(i)) - self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) - - # persistent clients should continue to work - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, None) - self._perform_rpc(persistent_client_stub_A, True) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 0) - - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, None) - self._perform_rpc(persistent_client_stub_B, True) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 0) - - channel_A.close() - channel_B.close() - - -class ServerSSLCertConfigFetcherParamsChecks(unittest.TestCase): - - def test_check_on_initial_config(self): - with self.assertRaises(TypeError): - grpc.dynamic_ssl_server_credentials(None, str) - with self.assertRaises(TypeError): - grpc.dynamic_ssl_server_credentials(1, str) - - def test_check_on_config_fetcher(self): - cert_config = grpc.ssl_server_certificate_configuration( - [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)], - root_certificates=CA_1_PEM) - with self.assertRaises(TypeError): - grpc.dynamic_ssl_server_credentials(cert_config, None) - with self.assertRaises(TypeError): - grpc.dynamic_ssl_server_credentials(cert_config, 1) - - -class ServerSSLCertReloadTestWithClientAuth(_ServerSSLCertReloadTest): - - def require_client_auth(self): - return True - - test = _ServerSSLCertReloadTest._test - - -class ServerSSLCertReloadTestWithoutClientAuth(_ServerSSLCertReloadTest): - - def require_client_auth(self): - return False - - test = _ServerSSLCertReloadTest._test - - -class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest): - """Ensures that `ServerCertificateConfiguration` instances can be reused. - - Because gRPC Core takes ownership of the - `grpc_ssl_server_certificate_config` encapsulated by - `ServerCertificateConfiguration`, this test reuses the same - `ServerCertificateConfiguration` instances multiple times to make sure - gRPC Python takes care of maintaining the validity of - `ServerCertificateConfiguration` instances, so that such instances can be - re-used by user application. 
- """ - - def require_client_auth(self): - return True - - def setUp(self): - self.server = test_common.test_server() - services_pb2_grpc.add_FirstServiceServicer_to_server( - _server_application.FirstServiceServicer(), self.server) - self.cert_config_A = grpc.ssl_server_certificate_configuration( - [(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)], - root_certificates=CA_2_PEM) - self.cert_config_B = grpc.ssl_server_certificate_configuration( - [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)], - root_certificates=CA_1_PEM) - self.cert_config_fetcher = CertConfigFetcher() - server_credentials = grpc.dynamic_ssl_server_credentials( - self.cert_config_A, - self.cert_config_fetcher, - require_client_authentication=True) - self.port = self.server.add_secure_port('[::]:0', server_credentials) - self.server.start() - - def test_cert_config_reuse(self): - - # succeed with A - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, self.cert_config_A) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) + + # persistent clients should continue to work + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + self._perform_rpc(persistent_client_stub_A, True) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 0) + + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + self._perform_rpc(persistent_client_stub_B, True) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 0) + + channel_A.close() + channel_B.close() + + +class ServerSSLCertConfigFetcherParamsChecks(unittest.TestCase): + + def test_check_on_initial_config(self): + with self.assertRaises(TypeError): + grpc.dynamic_ssl_server_credentials(None, str) + with self.assertRaises(TypeError): + grpc.dynamic_ssl_server_credentials(1, str) + + def test_check_on_config_fetcher(self): + cert_config = grpc.ssl_server_certificate_configuration( + [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)], + root_certificates=CA_1_PEM) + with self.assertRaises(TypeError): + grpc.dynamic_ssl_server_credentials(cert_config, None) + with self.assertRaises(TypeError): + grpc.dynamic_ssl_server_credentials(cert_config, 1) + + +class ServerSSLCertReloadTestWithClientAuth(_ServerSSLCertReloadTest): + + def require_client_auth(self): + return True + + test = _ServerSSLCertReloadTest._test + + +class ServerSSLCertReloadTestWithoutClientAuth(_ServerSSLCertReloadTest): + + def require_client_auth(self): + return False + + test = _ServerSSLCertReloadTest._test + + +class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest): + """Ensures that `ServerCertificateConfiguration` instances can be reused. + + Because gRPC Core takes ownership of the + `grpc_ssl_server_certificate_config` encapsulated by + `ServerCertificateConfiguration`, this test reuses the same + `ServerCertificateConfiguration` instances multiple times to make sure + gRPC Python takes care of maintaining the validity of + `ServerCertificateConfiguration` instances, so that such instances can be + re-used by user application. 
+ """ + + def require_client_auth(self): + return True + + def setUp(self): + self.server = test_common.test_server() + services_pb2_grpc.add_FirstServiceServicer_to_server( + _server_application.FirstServiceServicer(), self.server) + self.cert_config_A = grpc.ssl_server_certificate_configuration( + [(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)], + root_certificates=CA_2_PEM) + self.cert_config_B = grpc.ssl_server_certificate_configuration( + [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)], + root_certificates=CA_1_PEM) + self.cert_config_fetcher = CertConfigFetcher() + server_credentials = grpc.dynamic_ssl_server_credentials( + self.cert_config_A, + self.cert_config_fetcher, + require_client_authentication=True) + self.port = self.server.add_secure_port('[::]:0', server_credentials) + self.server.start() + + def test_cert_config_reuse(self): + + # succeed with A + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_A) self._do_one_shot_client_rpc(True, root_certificates=CA_1_PEM, private_key=CLIENT_KEY_2_PEM, certificate_chain=CLIENT_CERT_CHAIN_2_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - self.assertEqual(actual_calls[0].returned_cert_config, - self.cert_config_A) - - # fail with A - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, self.cert_config_A) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertEqual(actual_calls[0].returned_cert_config, + self.cert_config_A) + + # fail with A + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_A) self._do_one_shot_client_rpc(False, root_certificates=CA_2_PEM, private_key=CLIENT_KEY_1_PEM, certificate_chain=CLIENT_CERT_CHAIN_1_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertGreaterEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - for i, call in enumerate(actual_calls): - self.assertFalse(call.did_raise, 'i= {}'.format(i)) - self.assertEqual(call.returned_cert_config, self.cert_config_A, - 'i= {}'.format(i)) - - # succeed again with A - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, self.cert_config_A) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertEqual(call.returned_cert_config, self.cert_config_A, + 'i= {}'.format(i)) + + # succeed again with A + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_A) self._do_one_shot_client_rpc(True, root_certificates=CA_1_PEM, private_key=CLIENT_KEY_2_PEM, certificate_chain=CLIENT_CERT_CHAIN_2_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - self.assertEqual(actual_calls[0].returned_cert_config, - self.cert_config_A) - - # succeed with B - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, self.cert_config_B) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertEqual(actual_calls[0].returned_cert_config, + self.cert_config_A) + + # succeed with B + 
self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_B) self._do_one_shot_client_rpc(True, root_certificates=CA_2_PEM, private_key=CLIENT_KEY_1_PEM, certificate_chain=CLIENT_CERT_CHAIN_1_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - self.assertEqual(actual_calls[0].returned_cert_config, - self.cert_config_B) - - # fail with B - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, self.cert_config_B) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertEqual(actual_calls[0].returned_cert_config, + self.cert_config_B) + + # fail with B + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_B) self._do_one_shot_client_rpc(False, root_certificates=CA_1_PEM, private_key=CLIENT_KEY_2_PEM, certificate_chain=CLIENT_CERT_CHAIN_2_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertGreaterEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - for i, call in enumerate(actual_calls): - self.assertFalse(call.did_raise, 'i= {}'.format(i)) - self.assertEqual(call.returned_cert_config, self.cert_config_B, - 'i= {}'.format(i)) - - # succeed again with B - self.cert_config_fetcher.reset() - self.cert_config_fetcher.configure(False, self.cert_config_B) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertEqual(call.returned_cert_config, self.cert_config_B, + 'i= {}'.format(i)) + + # succeed again with B + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_B) self._do_one_shot_client_rpc(True, root_certificates=CA_2_PEM, private_key=CLIENT_KEY_1_PEM, certificate_chain=CLIENT_CERT_CHAIN_1_PEM) - actual_calls = self.cert_config_fetcher.getCalls() - self.assertEqual(len(actual_calls), 1) - self.assertFalse(actual_calls[0].did_raise) - self.assertEqual(actual_calls[0].returned_cert_config, - self.cert_config_B) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertEqual(actual_calls[0].returned_cert_config, + self.cert_config_B) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py index 4907f74d1f..3c519219d5 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py @@ -1,59 +1,59 @@ -# Copyright 2018 The gRPC Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from concurrent import futures -import unittest -import logging - -import grpc - +# Copyright 2018 The gRPC Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from concurrent import futures +import unittest +import logging + +import grpc + from tests.unit import resources - -class _ActualGenericRpcHandler(grpc.GenericRpcHandler): - - def service(self, handler_call_details): - return None - - -class ServerTest(unittest.TestCase): - - def test_not_a_generic_rpc_handler_at_construction(self): - with self.assertRaises(AttributeError) as exception_context: + +class _ActualGenericRpcHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + return None + + +class ServerTest(unittest.TestCase): + + def test_not_a_generic_rpc_handler_at_construction(self): + with self.assertRaises(AttributeError) as exception_context: grpc.server(futures.ThreadPoolExecutor(max_workers=5), handlers=[ _ActualGenericRpcHandler(), object(), ]) - self.assertIn('grpc.GenericRpcHandler', - str(exception_context.exception)) - - def test_not_a_generic_rpc_handler_after_construction(self): - server = grpc.server(futures.ThreadPoolExecutor(max_workers=5)) - with self.assertRaises(AttributeError) as exception_context: - server.add_generic_rpc_handlers([ - _ActualGenericRpcHandler(), - object(), - ]) - self.assertIn('grpc.GenericRpcHandler', - str(exception_context.exception)) - + self.assertIn('grpc.GenericRpcHandler', + str(exception_context.exception)) + + def test_not_a_generic_rpc_handler_after_construction(self): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=5)) + with self.assertRaises(AttributeError) as exception_context: + server.add_generic_rpc_handlers([ + _ActualGenericRpcHandler(), + object(), + ]) + self.assertIn('grpc.GenericRpcHandler', + str(exception_context.exception)) + def test_failed_port_binding_exception(self): server = grpc.server(None, options=(('grpc.so_reuseport', 0),)) port = server.add_insecure_port('localhost:0') bind_address = "localhost:%d" % port - + with self.assertRaises(RuntimeError): server.add_insecure_port(bind_address) @@ -64,6 +64,6 @@ class ServerTest(unittest.TestCase): server.add_secure_port(bind_address, server_credentials) -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py index 7f9fc1a9a5..3dd95ea8bf 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py @@ -1,91 +1,91 @@ -# Copyright 2019 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not 
use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import division - -import datetime -from concurrent import futures -import unittest -import time -import threading -import six - -import grpc -from tests.unit.framework.common import test_constants - -_WAIT_FOR_BLOCKING = datetime.timedelta(seconds=1) - - -def _block_on_waiting(server, termination_event, timeout=None): - server.start() - server.wait_for_termination(timeout=timeout) - termination_event.set() - - -class ServerWaitForTerminationTest(unittest.TestCase): - - def test_unblock_by_invoking_stop(self): - termination_event = threading.Event() - server = grpc.server(futures.ThreadPoolExecutor()) - +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import division + +import datetime +from concurrent import futures +import unittest +import time +import threading +import six + +import grpc +from tests.unit.framework.common import test_constants + +_WAIT_FOR_BLOCKING = datetime.timedelta(seconds=1) + + +def _block_on_waiting(server, termination_event, timeout=None): + server.start() + server.wait_for_termination(timeout=timeout) + termination_event.set() + + +class ServerWaitForTerminationTest(unittest.TestCase): + + def test_unblock_by_invoking_stop(self): + termination_event = threading.Event() + server = grpc.server(futures.ThreadPoolExecutor()) + wait_thread = threading.Thread(target=_block_on_waiting, args=( server, termination_event, )) - wait_thread.daemon = True - wait_thread.start() - time.sleep(_WAIT_FOR_BLOCKING.total_seconds()) - - server.stop(None) - termination_event.wait(timeout=test_constants.SHORT_TIMEOUT) - self.assertTrue(termination_event.is_set()) - - def test_unblock_by_del(self): - termination_event = threading.Event() - server = grpc.server(futures.ThreadPoolExecutor()) - + wait_thread.daemon = True + wait_thread.start() + time.sleep(_WAIT_FOR_BLOCKING.total_seconds()) + + server.stop(None) + termination_event.wait(timeout=test_constants.SHORT_TIMEOUT) + self.assertTrue(termination_event.is_set()) + + def test_unblock_by_del(self): + termination_event = threading.Event() + server = grpc.server(futures.ThreadPoolExecutor()) + wait_thread = threading.Thread(target=_block_on_waiting, args=( server, termination_event, )) - wait_thread.daemon = True - wait_thread.start() - time.sleep(_WAIT_FOR_BLOCKING.total_seconds()) - - # Invoke manually here, in Python 2 it will be invoked by GC sometime. 
- server.__del__() - termination_event.wait(timeout=test_constants.SHORT_TIMEOUT) - self.assertTrue(termination_event.is_set()) - - def test_unblock_by_timeout(self): - termination_event = threading.Event() - server = grpc.server(futures.ThreadPoolExecutor()) - + wait_thread.daemon = True + wait_thread.start() + time.sleep(_WAIT_FOR_BLOCKING.total_seconds()) + + # Invoke manually here, in Python 2 it will be invoked by GC sometime. + server.__del__() + termination_event.wait(timeout=test_constants.SHORT_TIMEOUT) + self.assertTrue(termination_event.is_set()) + + def test_unblock_by_timeout(self): + termination_event = threading.Event() + server = grpc.server(futures.ThreadPoolExecutor()) + wait_thread = threading.Thread(target=_block_on_waiting, args=( server, termination_event, test_constants.SHORT_TIMEOUT / 2, )) - wait_thread.daemon = True - wait_thread.start() - - termination_event.wait(timeout=test_constants.SHORT_TIMEOUT) - self.assertTrue(termination_event.is_set()) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + wait_thread.daemon = True + wait_thread.start() + + termination_event.wait(timeout=test_constants.SHORT_TIMEOUT) + self.assertTrue(termination_event.is_set()) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py index 95a0c87e89..9bff4d2af0 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py @@ -1,106 +1,106 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests experimental TLS Session Resumption API""" - -import pickle -import unittest -import logging - -import grpc -from grpc import _channel -from grpc.experimental import session_cache - -from tests.unit import test_common -from tests.unit import resources - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x00\x00\x00' - -_UNARY_UNARY = '/test/UnaryUnary' - -_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' -_ID = 'id' -_ID_KEY = 'id_key' -_AUTH_CTX = 'auth_ctx' - -_PRIVATE_KEY = resources.private_key() -_CERTIFICATE_CHAIN = resources.certificate_chain() -_TEST_ROOT_CERTIFICATES = resources.test_root_certificates() -_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),) -_PROPERTY_OPTIONS = (( - 'grpc.ssl_target_name_override', - _SERVER_HOST_OVERRIDE, -),) - - -def handle_unary_unary(request, servicer_context): - return pickle.dumps({ - _ID: servicer_context.peer_identities(), - _ID_KEY: servicer_context.peer_identity_key(), - _AUTH_CTX: servicer_context.auth_context() - }) - - -def start_secure_server(): +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests experimental TLS Session Resumption API""" + +import pickle +import unittest +import logging + +import grpc +from grpc import _channel +from grpc.experimental import session_cache + +from tests.unit import test_common +from tests.unit import resources + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_UNARY_UNARY = '/test/UnaryUnary' + +_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' +_ID = 'id' +_ID_KEY = 'id_key' +_AUTH_CTX = 'auth_ctx' + +_PRIVATE_KEY = resources.private_key() +_CERTIFICATE_CHAIN = resources.certificate_chain() +_TEST_ROOT_CERTIFICATES = resources.test_root_certificates() +_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),) +_PROPERTY_OPTIONS = (( + 'grpc.ssl_target_name_override', + _SERVER_HOST_OVERRIDE, +),) + + +def handle_unary_unary(request, servicer_context): + return pickle.dumps({ + _ID: servicer_context.peer_identities(), + _ID_KEY: servicer_context.peer_identity_key(), + _AUTH_CTX: servicer_context.auth_context() + }) + + +def start_secure_server(): handler = grpc.method_handlers_generic_handler( 'test', {'UnaryUnary': grpc.unary_unary_rpc_method_handler(handle_unary_unary)}) - server = test_common.test_server() - server.add_generic_rpc_handlers((handler,)) - server_cred = grpc.ssl_server_credentials(_SERVER_CERTS) - port = server.add_secure_port('[::]:0', server_cred) - server.start() - - return server, port - - -class SSLSessionCacheTest(unittest.TestCase): - - def _do_one_shot_client_rpc(self, channel_creds, channel_options, port, - expect_ssl_session_reused): + server = test_common.test_server() + server.add_generic_rpc_handlers((handler,)) + server_cred = grpc.ssl_server_credentials(_SERVER_CERTS) + port = server.add_secure_port('[::]:0', server_cred) + server.start() + + return server, port + + +class SSLSessionCacheTest(unittest.TestCase): + + def _do_one_shot_client_rpc(self, channel_creds, channel_options, port, + expect_ssl_session_reused): channel = grpc.secure_channel('localhost:{}'.format(port), channel_creds, options=channel_options) - response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) - auth_data = pickle.loads(response) - self.assertEqual(expect_ssl_session_reused, - auth_data[_AUTH_CTX]['ssl_session_reused']) - channel.close() - - def testSSLSessionCacheLRU(self): - server_1, port_1 = start_secure_server() - - cache = session_cache.ssl_session_cache_lru(1) - channel_creds = grpc.ssl_channel_credentials( - root_certificates=_TEST_ROOT_CERTIFICATES) - channel_options = _PROPERTY_OPTIONS + ( - ('grpc.ssl_session_cache', cache),) - - # Initial connection has no session to resume + response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) + auth_data = pickle.loads(response) + self.assertEqual(expect_ssl_session_reused, + auth_data[_AUTH_CTX]['ssl_session_reused']) + channel.close() + + def testSSLSessionCacheLRU(self): + server_1, port_1 = start_secure_server() + + cache = session_cache.ssl_session_cache_lru(1) + channel_creds = grpc.ssl_channel_credentials( + root_certificates=_TEST_ROOT_CERTIFICATES) + channel_options = _PROPERTY_OPTIONS + ( + ('grpc.ssl_session_cache', cache),) + + # Initial connection has no 
session to resume self._do_one_shot_client_rpc(channel_creds, channel_options, port_1, expect_ssl_session_reused=[b'false']) - - # Connection to server_1 resumes from initial session + + # Connection to server_1 resumes from initial session self._do_one_shot_client_rpc(channel_creds, channel_options, port_1, expect_ssl_session_reused=[b'true']) - - # Connection to a different server with the same name overwrites the cache entry - server_2, port_2 = start_secure_server() + + # Connection to a different server with the same name overwrites the cache entry + server_2, port_2 = start_secure_server() self._do_one_shot_client_rpc(channel_creds, channel_options, port_2, @@ -109,32 +109,32 @@ class SSLSessionCacheTest(unittest.TestCase): channel_options, port_2, expect_ssl_session_reused=[b'true']) - server_2.stop(None) - - # Connection to server_1 now falls back to full TLS handshake + server_2.stop(None) + + # Connection to server_1 now falls back to full TLS handshake self._do_one_shot_client_rpc(channel_creds, channel_options, port_1, expect_ssl_session_reused=[b'false']) - - # Re-creating server_1 causes old sessions to become invalid - server_1.stop(None) - server_1, port_1 = start_secure_server() - - # Old sessions should no longer be valid + + # Re-creating server_1 causes old sessions to become invalid + server_1.stop(None) + server_1, port_1 = start_secure_server() + + # Old sessions should no longer be valid self._do_one_shot_client_rpc(channel_creds, channel_options, port_1, expect_ssl_session_reused=[b'false']) - - # Resumption should work for subsequent connections + + # Resumption should work for subsequent connections self._do_one_shot_client_rpc(channel_creds, channel_options, port_1, expect_ssl_session_reused=[b'true']) - server_1.stop(None) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) + server_1.stop(None) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py index 9d8c17c851..0be1270749 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py @@ -1,119 +1,119 @@ -# Copyright 2019 the gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Client for testing responsiveness to signals.""" - -from __future__ import print_function - -import argparse -import functools -import logging -import signal -import sys - -import grpc - -SIGTERM_MESSAGE = "Handling sigterm!" - -UNARY_UNARY = "/test/Unary" -UNARY_STREAM = "/test/ServerStreaming" - -_MESSAGE = b'\x00\x00\x00' - -_ASSERTION_MESSAGE = "Control flow should never reach here." - -# NOTE(gnossen): We use a global variable here so that the signal handler can be -# installed before the RPC begins. If we do not do this, then we may receive the -# SIGINT before the signal handler is installed. 
I'm not happy with per-process -# global state, but the per-process global state that is signal handlers -# somewhat forces my hand. -per_process_rpc_future = None - - -def handle_sigint(unused_signum, unused_frame): - print(SIGTERM_MESSAGE) - if per_process_rpc_future is not None: - per_process_rpc_future.cancel() - sys.stderr.flush() - # This sys.exit(0) avoids an exception caused by the cancelled RPC. - sys.exit(0) - - -def main_unary(server_target): - """Initiate a unary RPC to be interrupted by a SIGINT.""" - global per_process_rpc_future # pylint: disable=global-statement - with grpc.insecure_channel(server_target) as channel: - multicallable = channel.unary_unary(UNARY_UNARY) - signal.signal(signal.SIGINT, handle_sigint) +# Copyright 2019 the gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Client for testing responsiveness to signals.""" + +from __future__ import print_function + +import argparse +import functools +import logging +import signal +import sys + +import grpc + +SIGTERM_MESSAGE = "Handling sigterm!" + +UNARY_UNARY = "/test/Unary" +UNARY_STREAM = "/test/ServerStreaming" + +_MESSAGE = b'\x00\x00\x00' + +_ASSERTION_MESSAGE = "Control flow should never reach here." + +# NOTE(gnossen): We use a global variable here so that the signal handler can be +# installed before the RPC begins. If we do not do this, then we may receive the +# SIGINT before the signal handler is installed. I'm not happy with per-process +# global state, but the per-process global state that is signal handlers +# somewhat forces my hand. +per_process_rpc_future = None + + +def handle_sigint(unused_signum, unused_frame): + print(SIGTERM_MESSAGE) + if per_process_rpc_future is not None: + per_process_rpc_future.cancel() + sys.stderr.flush() + # This sys.exit(0) avoids an exception caused by the cancelled RPC. 
+ sys.exit(0) + + +def main_unary(server_target): + """Initiate a unary RPC to be interrupted by a SIGINT.""" + global per_process_rpc_future # pylint: disable=global-statement + with grpc.insecure_channel(server_target) as channel: + multicallable = channel.unary_unary(UNARY_UNARY) + signal.signal(signal.SIGINT, handle_sigint) per_process_rpc_future = multicallable.future(_MESSAGE, wait_for_ready=True) - result = per_process_rpc_future.result() - assert False, _ASSERTION_MESSAGE - - -def main_streaming(server_target): - """Initiate a streaming RPC to be interrupted by a SIGINT.""" - global per_process_rpc_future # pylint: disable=global-statement - with grpc.insecure_channel(server_target) as channel: - signal.signal(signal.SIGINT, handle_sigint) - per_process_rpc_future = channel.unary_stream(UNARY_STREAM)( - _MESSAGE, wait_for_ready=True) - for result in per_process_rpc_future: - pass - assert False, _ASSERTION_MESSAGE - - -def main_unary_with_exception(server_target): - """Initiate a unary RPC with a signal handler that will raise.""" - channel = grpc.insecure_channel(server_target) - try: - channel.unary_unary(UNARY_UNARY)(_MESSAGE, wait_for_ready=True) - except KeyboardInterrupt: - sys.stderr.write("Running signal handler.\n") - sys.stderr.flush() - - # This call should not hang. - channel.close() - - -def main_streaming_with_exception(server_target): - """Initiate a streaming RPC with a signal handler that will raise.""" - channel = grpc.insecure_channel(server_target) - try: + result = per_process_rpc_future.result() + assert False, _ASSERTION_MESSAGE + + +def main_streaming(server_target): + """Initiate a streaming RPC to be interrupted by a SIGINT.""" + global per_process_rpc_future # pylint: disable=global-statement + with grpc.insecure_channel(server_target) as channel: + signal.signal(signal.SIGINT, handle_sigint) + per_process_rpc_future = channel.unary_stream(UNARY_STREAM)( + _MESSAGE, wait_for_ready=True) + for result in per_process_rpc_future: + pass + assert False, _ASSERTION_MESSAGE + + +def main_unary_with_exception(server_target): + """Initiate a unary RPC with a signal handler that will raise.""" + channel = grpc.insecure_channel(server_target) + try: + channel.unary_unary(UNARY_UNARY)(_MESSAGE, wait_for_ready=True) + except KeyboardInterrupt: + sys.stderr.write("Running signal handler.\n") + sys.stderr.flush() + + # This call should not hang. + channel.close() + + +def main_streaming_with_exception(server_target): + """Initiate a streaming RPC with a signal handler that will raise.""" + channel = grpc.insecure_channel(server_target) + try: for _ in channel.unary_stream(UNARY_STREAM)(_MESSAGE, wait_for_ready=True): - pass - except KeyboardInterrupt: - sys.stderr.write("Running signal handler.\n") - sys.stderr.flush() - - # This call should not hang. - channel.close() - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='Signal test client.') - parser.add_argument('server', help='Server target') - parser.add_argument('arity', help='Arity', choices=('unary', 'streaming')) + pass + except KeyboardInterrupt: + sys.stderr.write("Running signal handler.\n") + sys.stderr.flush() + + # This call should not hang. 
+ channel.close() + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Signal test client.') + parser.add_argument('server', help='Server target') + parser.add_argument('arity', help='Arity', choices=('unary', 'streaming')) parser.add_argument('--exception', help='Whether the signal throws an exception', action='store_true') - args = parser.parse_args() - if args.arity == 'unary' and not args.exception: - main_unary(args.server) - elif args.arity == 'streaming' and not args.exception: - main_streaming(args.server) - elif args.arity == 'unary' and args.exception: - main_unary_with_exception(args.server) - else: - main_streaming_with_exception(args.server) + args = parser.parse_args() + if args.arity == 'unary' and not args.exception: + main_unary(args.server) + elif args.arity == 'streaming' and not args.exception: + main_streaming(args.server) + elif args.arity == 'unary' and args.exception: + main_unary_with_exception(args.server) + else: + main_streaming_with_exception(args.server) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_handling_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_handling_test.py index 9de3fe0505..a05e42d5a3 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_handling_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_handling_test.py @@ -1,200 +1,200 @@ -# Copyright 2019 the gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Test of responsiveness to signals.""" - -import logging -import os -import signal -import subprocess -import tempfile -import threading -import unittest -import sys - -import grpc - -from tests.unit import test_common -from tests.unit import _signal_client - -_CLIENT_PATH = None -if sys.executable is not None: - _CLIENT_PATH = 'tests.unit._signal_client' -else: - # NOTE(rbellevi): For compatibility with internal testing. - if len(sys.argv) != 2: - raise RuntimeError("Must supply path to executable client.") - client_name = sys.argv[1].split("/")[-1] - del sys.argv[1] # For compatibility with test runner. 
- _CLIENT_PATH = os.path.realpath( - os.path.join(os.path.dirname(os.path.abspath(__file__)), client_name)) - -_HOST = 'localhost' - - -class _GenericHandler(grpc.GenericRpcHandler): - - def __init__(self): - self._connected_clients_lock = threading.RLock() - self._connected_clients_event = threading.Event() - self._connected_clients = 0 - - self._unary_unary_handler = grpc.unary_unary_rpc_method_handler( - self._handle_unary_unary) - self._unary_stream_handler = grpc.unary_stream_rpc_method_handler( - self._handle_unary_stream) - - def _on_client_connect(self): - with self._connected_clients_lock: - self._connected_clients += 1 - self._connected_clients_event.set() - - def _on_client_disconnect(self): - with self._connected_clients_lock: - self._connected_clients -= 1 - if self._connected_clients == 0: - self._connected_clients_event.clear() - - def await_connected_client(self): - """Blocks until a client connects to the server.""" - self._connected_clients_event.wait() - - def _handle_unary_unary(self, request, servicer_context): - """Handles a unary RPC. - - Blocks until the client disconnects and then echoes. - """ - stop_event = threading.Event() - - def on_rpc_end(): - self._on_client_disconnect() - stop_event.set() - - servicer_context.add_callback(on_rpc_end) - self._on_client_connect() - stop_event.wait() - return request - - def _handle_unary_stream(self, request, servicer_context): - """Handles a server streaming RPC. - - Blocks until the client disconnects and then echoes. - """ - stop_event = threading.Event() - - def on_rpc_end(): - self._on_client_disconnect() - stop_event.set() - - servicer_context.add_callback(on_rpc_end) - self._on_client_connect() - stop_event.wait() - yield request - - def service(self, handler_call_details): - if handler_call_details.method == _signal_client.UNARY_UNARY: - return self._unary_unary_handler - elif handler_call_details.method == _signal_client.UNARY_STREAM: - return self._unary_stream_handler - else: - return None - - -def _read_stream(stream): - stream.seek(0) - return stream.read() - - -def _start_client(args, stdout, stderr): - invocation = None - if sys.executable is not None: - invocation = (sys.executable, '-m', _CLIENT_PATH) + tuple(args) - else: - invocation = (_CLIENT_PATH,) + tuple(args) - env = os.environ.copy() - env['Y_PYTHON_ENTRY_POINT'] = ':main' - return subprocess.Popen(invocation, stdout=stdout, stderr=stderr, env=env) - - -class SignalHandlingTest(unittest.TestCase): - - def setUp(self): - self._server = test_common.test_server() - self._port = self._server.add_insecure_port('{}:0'.format(_HOST)) - self._handler = _GenericHandler() - self._server.add_generic_rpc_handlers((self._handler,)) - self._server.start() - - def tearDown(self): - self._server.stop(None) - - @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') - def testUnary(self): - """Tests that the server unary code path does not stall signal handlers.""" - server_target = '{}:{}'.format(_HOST, self._port) - with tempfile.TemporaryFile(mode='r') as client_stdout: - with tempfile.TemporaryFile(mode='r') as client_stderr: - client = _start_client((server_target, 'unary'), client_stdout, - client_stderr) - self._handler.await_connected_client() - client.send_signal(signal.SIGINT) - self.assertFalse(client.wait(), msg=_read_stream(client_stderr)) - client_stdout.seek(0) - self.assertIn(_signal_client.SIGTERM_MESSAGE, - client_stdout.read()) - - @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') - def testStreaming(self): - 
"""Tests that the server streaming code path does not stall signal handlers.""" - server_target = '{}:{}'.format(_HOST, self._port) - with tempfile.TemporaryFile(mode='r') as client_stdout: - with tempfile.TemporaryFile(mode='r') as client_stderr: - client = _start_client((server_target, 'streaming'), - client_stdout, client_stderr) - self._handler.await_connected_client() - client.send_signal(signal.SIGINT) - self.assertFalse(client.wait(), msg=_read_stream(client_stderr)) - client_stdout.seek(0) - self.assertIn(_signal_client.SIGTERM_MESSAGE, - client_stdout.read()) - - @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') - def testUnaryWithException(self): - server_target = '{}:{}'.format(_HOST, self._port) - with tempfile.TemporaryFile(mode='r') as client_stdout: - with tempfile.TemporaryFile(mode='r') as client_stderr: - client = _start_client(('--exception', server_target, 'unary'), - client_stdout, client_stderr) - self._handler.await_connected_client() - client.send_signal(signal.SIGINT) - client.wait() - self.assertEqual(0, client.returncode) - - @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') - def testStreamingHandlerWithException(self): - server_target = '{}:{}'.format(_HOST, self._port) - with tempfile.TemporaryFile(mode='r') as client_stdout: - with tempfile.TemporaryFile(mode='r') as client_stderr: - client = _start_client( - ('--exception', server_target, 'streaming'), client_stdout, - client_stderr) - self._handler.await_connected_client() - client.send_signal(signal.SIGINT) - client.wait() - print(_read_stream(client_stderr)) - self.assertEqual(0, client.returncode) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) +# Copyright 2019 the gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test of responsiveness to signals.""" + +import logging +import os +import signal +import subprocess +import tempfile +import threading +import unittest +import sys + +import grpc + +from tests.unit import test_common +from tests.unit import _signal_client + +_CLIENT_PATH = None +if sys.executable is not None: + _CLIENT_PATH = 'tests.unit._signal_client' +else: + # NOTE(rbellevi): For compatibility with internal testing. + if len(sys.argv) != 2: + raise RuntimeError("Must supply path to executable client.") + client_name = sys.argv[1].split("/")[-1] + del sys.argv[1] # For compatibility with test runner. 
+ _CLIENT_PATH = os.path.realpath( + os.path.join(os.path.dirname(os.path.abspath(__file__)), client_name)) + +_HOST = 'localhost' + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self): + self._connected_clients_lock = threading.RLock() + self._connected_clients_event = threading.Event() + self._connected_clients = 0 + + self._unary_unary_handler = grpc.unary_unary_rpc_method_handler( + self._handle_unary_unary) + self._unary_stream_handler = grpc.unary_stream_rpc_method_handler( + self._handle_unary_stream) + + def _on_client_connect(self): + with self._connected_clients_lock: + self._connected_clients += 1 + self._connected_clients_event.set() + + def _on_client_disconnect(self): + with self._connected_clients_lock: + self._connected_clients -= 1 + if self._connected_clients == 0: + self._connected_clients_event.clear() + + def await_connected_client(self): + """Blocks until a client connects to the server.""" + self._connected_clients_event.wait() + + def _handle_unary_unary(self, request, servicer_context): + """Handles a unary RPC. + + Blocks until the client disconnects and then echoes. + """ + stop_event = threading.Event() + + def on_rpc_end(): + self._on_client_disconnect() + stop_event.set() + + servicer_context.add_callback(on_rpc_end) + self._on_client_connect() + stop_event.wait() + return request + + def _handle_unary_stream(self, request, servicer_context): + """Handles a server streaming RPC. + + Blocks until the client disconnects and then echoes. + """ + stop_event = threading.Event() + + def on_rpc_end(): + self._on_client_disconnect() + stop_event.set() + + servicer_context.add_callback(on_rpc_end) + self._on_client_connect() + stop_event.wait() + yield request + + def service(self, handler_call_details): + if handler_call_details.method == _signal_client.UNARY_UNARY: + return self._unary_unary_handler + elif handler_call_details.method == _signal_client.UNARY_STREAM: + return self._unary_stream_handler + else: + return None + + +def _read_stream(stream): + stream.seek(0) + return stream.read() + + +def _start_client(args, stdout, stderr): + invocation = None + if sys.executable is not None: + invocation = (sys.executable, '-m', _CLIENT_PATH) + tuple(args) + else: + invocation = (_CLIENT_PATH,) + tuple(args) + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + return subprocess.Popen(invocation, stdout=stdout, stderr=stderr, env=env) + + +class SignalHandlingTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + self._port = self._server.add_insecure_port('{}:0'.format(_HOST)) + self._handler = _GenericHandler() + self._server.add_generic_rpc_handlers((self._handler,)) + self._server.start() + + def tearDown(self): + self._server.stop(None) + + @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') + def testUnary(self): + """Tests that the server unary code path does not stall signal handlers.""" + server_target = '{}:{}'.format(_HOST, self._port) + with tempfile.TemporaryFile(mode='r') as client_stdout: + with tempfile.TemporaryFile(mode='r') as client_stderr: + client = _start_client((server_target, 'unary'), client_stdout, + client_stderr) + self._handler.await_connected_client() + client.send_signal(signal.SIGINT) + self.assertFalse(client.wait(), msg=_read_stream(client_stderr)) + client_stdout.seek(0) + self.assertIn(_signal_client.SIGTERM_MESSAGE, + client_stdout.read()) + + @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') + def testStreaming(self): + 
"""Tests that the server streaming code path does not stall signal handlers.""" + server_target = '{}:{}'.format(_HOST, self._port) + with tempfile.TemporaryFile(mode='r') as client_stdout: + with tempfile.TemporaryFile(mode='r') as client_stderr: + client = _start_client((server_target, 'streaming'), + client_stdout, client_stderr) + self._handler.await_connected_client() + client.send_signal(signal.SIGINT) + self.assertFalse(client.wait(), msg=_read_stream(client_stderr)) + client_stdout.seek(0) + self.assertIn(_signal_client.SIGTERM_MESSAGE, + client_stdout.read()) + + @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') + def testUnaryWithException(self): + server_target = '{}:{}'.format(_HOST, self._port) + with tempfile.TemporaryFile(mode='r') as client_stdout: + with tempfile.TemporaryFile(mode='r') as client_stderr: + client = _start_client(('--exception', server_target, 'unary'), + client_stdout, client_stderr) + self._handler.await_connected_client() + client.send_signal(signal.SIGINT) + client.wait() + self.assertEqual(0, client.returncode) + + @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') + def testStreamingHandlerWithException(self): + server_target = '{}:{}'.format(_HOST, self._port) + with tempfile.TemporaryFile(mode='r') as client_stdout: + with tempfile.TemporaryFile(mode='r') as client_stderr: + client = _start_client( + ('--exception', server_target, 'streaming'), client_stdout, + client_stderr) + self._handler.await_connected_client() + client.send_signal(signal.SIGINT) + client.wait() + print(_read_stream(client_stderr)) + self.assertEqual(0, client.returncode) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_tcp_proxy.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_tcp_proxy.py index 9a620f7508..84dc0e2d6c 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_tcp_proxy.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_tcp_proxy.py @@ -1,141 +1,141 @@ -# Copyright 2019 the gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" Proxies a TCP connection between a single client-server pair. - -This proxy is not suitable for production, but should work well for cases in -which a test needs to spy on the bytes put on the wire between a server and -a client. 
-""" - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import datetime -import select -import socket -import threading - -from tests.unit.framework.common import get_socket - -_TCP_PROXY_BUFFER_SIZE = 1024 -_TCP_PROXY_TIMEOUT = datetime.timedelta(milliseconds=500) - - -def _init_proxy_socket(gateway_address, gateway_port): - proxy_socket = socket.create_connection((gateway_address, gateway_port)) - return proxy_socket - - -class TcpProxy(object): - """Proxies a TCP connection between one client and one server.""" - - def __init__(self, bind_address, gateway_address, gateway_port): - self._bind_address = bind_address - self._gateway_address = gateway_address - self._gateway_port = gateway_port - - self._byte_count_lock = threading.RLock() - self._sent_byte_count = 0 - self._received_byte_count = 0 - - self._stop_event = threading.Event() - - self._port = None - self._listen_socket = None - self._proxy_socket = None - - # The following three attributes are owned by the serving thread. - self._northbound_data = b"" - self._southbound_data = b"" - self._client_sockets = [] - - self._thread = threading.Thread(target=self._run_proxy) - - def start(self): - _, self._port, self._listen_socket = get_socket( - bind_address=self._bind_address) - self._proxy_socket = _init_proxy_socket(self._gateway_address, - self._gateway_port) - self._thread.start() - - def get_port(self): - return self._port - - def _handle_reads(self, sockets_to_read): - for socket_to_read in sockets_to_read: - if socket_to_read is self._listen_socket: - client_socket, client_address = socket_to_read.accept() - self._client_sockets.append(client_socket) - elif socket_to_read is self._proxy_socket: - data = socket_to_read.recv(_TCP_PROXY_BUFFER_SIZE) - with self._byte_count_lock: - self._received_byte_count += len(data) - self._northbound_data += data - elif socket_to_read in self._client_sockets: - data = socket_to_read.recv(_TCP_PROXY_BUFFER_SIZE) - if data: - with self._byte_count_lock: - self._sent_byte_count += len(data) - self._southbound_data += data - else: - self._client_sockets.remove(socket_to_read) - else: - raise RuntimeError('Unidentified socket appeared in read set.') - - def _handle_writes(self, sockets_to_write): - for socket_to_write in sockets_to_write: - if socket_to_write is self._proxy_socket: - if self._southbound_data: - self._proxy_socket.sendall(self._southbound_data) - self._southbound_data = b"" - elif socket_to_write in self._client_sockets: - if self._northbound_data: - socket_to_write.sendall(self._northbound_data) - self._northbound_data = b"" - - def _run_proxy(self): - while not self._stop_event.is_set(): - expected_reads = (self._listen_socket, self._proxy_socket) + tuple( - self._client_sockets) - expected_writes = expected_reads - sockets_to_read, sockets_to_write, _ = select.select( - expected_reads, expected_writes, (), - _TCP_PROXY_TIMEOUT.total_seconds()) - self._handle_reads(sockets_to_read) - self._handle_writes(sockets_to_write) - for client_socket in self._client_sockets: - client_socket.close() - - def stop(self): - self._stop_event.set() - self._thread.join() - self._listen_socket.close() - self._proxy_socket.close() - - def get_byte_count(self): - with self._byte_count_lock: - return self._sent_byte_count, self._received_byte_count - - def reset_byte_count(self): - with self._byte_count_lock: - self._byte_count = 0 - self._received_byte_count = 0 - - def __enter__(self): - self.start() - return self - - def 
__exit__(self, exc_type, exc_val, exc_tb): - self.stop() +# Copyright 2019 the gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" Proxies a TCP connection between a single client-server pair. + +This proxy is not suitable for production, but should work well for cases in +which a test needs to spy on the bytes put on the wire between a server and +a client. +""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import datetime +import select +import socket +import threading + +from tests.unit.framework.common import get_socket + +_TCP_PROXY_BUFFER_SIZE = 1024 +_TCP_PROXY_TIMEOUT = datetime.timedelta(milliseconds=500) + + +def _init_proxy_socket(gateway_address, gateway_port): + proxy_socket = socket.create_connection((gateway_address, gateway_port)) + return proxy_socket + + +class TcpProxy(object): + """Proxies a TCP connection between one client and one server.""" + + def __init__(self, bind_address, gateway_address, gateway_port): + self._bind_address = bind_address + self._gateway_address = gateway_address + self._gateway_port = gateway_port + + self._byte_count_lock = threading.RLock() + self._sent_byte_count = 0 + self._received_byte_count = 0 + + self._stop_event = threading.Event() + + self._port = None + self._listen_socket = None + self._proxy_socket = None + + # The following three attributes are owned by the serving thread. 
+ self._northbound_data = b"" + self._southbound_data = b"" + self._client_sockets = [] + + self._thread = threading.Thread(target=self._run_proxy) + + def start(self): + _, self._port, self._listen_socket = get_socket( + bind_address=self._bind_address) + self._proxy_socket = _init_proxy_socket(self._gateway_address, + self._gateway_port) + self._thread.start() + + def get_port(self): + return self._port + + def _handle_reads(self, sockets_to_read): + for socket_to_read in sockets_to_read: + if socket_to_read is self._listen_socket: + client_socket, client_address = socket_to_read.accept() + self._client_sockets.append(client_socket) + elif socket_to_read is self._proxy_socket: + data = socket_to_read.recv(_TCP_PROXY_BUFFER_SIZE) + with self._byte_count_lock: + self._received_byte_count += len(data) + self._northbound_data += data + elif socket_to_read in self._client_sockets: + data = socket_to_read.recv(_TCP_PROXY_BUFFER_SIZE) + if data: + with self._byte_count_lock: + self._sent_byte_count += len(data) + self._southbound_data += data + else: + self._client_sockets.remove(socket_to_read) + else: + raise RuntimeError('Unidentified socket appeared in read set.') + + def _handle_writes(self, sockets_to_write): + for socket_to_write in sockets_to_write: + if socket_to_write is self._proxy_socket: + if self._southbound_data: + self._proxy_socket.sendall(self._southbound_data) + self._southbound_data = b"" + elif socket_to_write in self._client_sockets: + if self._northbound_data: + socket_to_write.sendall(self._northbound_data) + self._northbound_data = b"" + + def _run_proxy(self): + while not self._stop_event.is_set(): + expected_reads = (self._listen_socket, self._proxy_socket) + tuple( + self._client_sockets) + expected_writes = expected_reads + sockets_to_read, sockets_to_write, _ = select.select( + expected_reads, expected_writes, (), + _TCP_PROXY_TIMEOUT.total_seconds()) + self._handle_reads(sockets_to_read) + self._handle_writes(sockets_to_write) + for client_socket in self._client_sockets: + client_socket.close() + + def stop(self): + self._stop_event.set() + self._thread.join() + self._listen_socket.close() + self._proxy_socket.close() + + def get_byte_count(self): + with self._byte_count_lock: + return self._sent_byte_count, self._received_byte_count + + def reset_byte_count(self): + with self._byte_count_lock: + self._byte_count = 0 + self._received_byte_count = 0 + + def __enter__(self): + self.start() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.stop() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_version_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_version_test.py index 699004330c..3d37b319e5 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_version_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_version_test.py @@ -1,30 +1,30 @@ -# Copyright 2018 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
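The `_tcp_proxy.py` hunks above remove and re-add the byte-spying `TcpProxy` helper unchanged. As a rough illustration of how such a helper can be driven (a sketch not taken from this change; it assumes a gRPC server is already listening on `server_port` and that the test package is importable), a test could route an insecure channel through the proxy and read its byte counters:

    import grpc

    from tests.unit._tcp_proxy import TcpProxy

    def proxied_byte_counts(server_port):
        # Bind the proxy on localhost and forward all traffic to the real server.
        with TcpProxy('localhost', 'localhost', server_port) as proxy:
            channel = grpc.insecure_channel('localhost:{}'.format(proxy.get_port()))
            # ... issue RPCs over `channel` here ...
            channel.close()
            # (sent, received) byte totals as counted by the proxy's serving thread.
            return proxy.get_byte_count()

The context-manager form relies on the `__enter__`/`__exit__` methods in the hunk, which call `start()` and `stop()` respectively; `get_byte_count()` returns the `(sent, received)` pair under the byte-count lock.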
-"""Test for grpc.__version__""" - -import unittest -import grpc -import logging -from grpc import _grpcio_metadata - - -class VersionTest(unittest.TestCase): - - def test_get_version(self): - self.assertEqual(grpc.__version__, _grpcio_metadata.__version__) - - -if __name__ == '__main__': - logging.basicConfig() - unittest.main(verbosity=2) +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test for grpc.__version__""" + +import unittest +import grpc +import logging +from grpc import _grpcio_metadata + + +class VersionTest(unittest.TestCase): + + def test_get_version(self): + self.assertEqual(grpc.__version__, _grpcio_metadata.__version__) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/__init__.py index dc985eebb4..5fb4f3c3cf 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py index 6981322561..a111d68764 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py @@ -1,354 +1,354 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests Face interface compliance of the gRPC Python Beta API.""" - -import threading -import unittest - -from grpc.beta import implementations -from grpc.beta import interfaces -from grpc.framework.common import cardinality -from grpc.framework.interfaces.face import utilities -from tests.unit import resources -from tests.unit.beta import test_utilities -from tests.unit.framework.common import test_constants - -_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' - -_PER_RPC_CREDENTIALS_METADATA_KEY = b'my-call-credentials-metadata-key' -_PER_RPC_CREDENTIALS_METADATA_VALUE = b'my-call-credentials-metadata-value' - -_GROUP = 'group' -_UNARY_UNARY = 'unary-unary' -_UNARY_STREAM = 'unary-stream' -_STREAM_UNARY = 'stream-unary' -_STREAM_STREAM = 'stream-stream' - -_REQUEST = b'abc' -_RESPONSE = b'123' - - -class _Servicer(object): - - def __init__(self): - self._condition = threading.Condition() - self._peer = None - self._serviced = False - - def unary_unary(self, request, context): - with self._condition: - self._request = request - self._peer = context.protocol_context().peer() - self._invocation_metadata = context.invocation_metadata() - context.protocol_context().disable_next_response_compression() - self._serviced = True - self._condition.notify_all() - return _RESPONSE - - def unary_stream(self, request, context): - with self._condition: - self._request = request - self._peer = context.protocol_context().peer() - self._invocation_metadata = context.invocation_metadata() - context.protocol_context().disable_next_response_compression() - self._serviced = True - self._condition.notify_all() - return - yield # pylint: disable=unreachable - - def stream_unary(self, request_iterator, context): - for request in request_iterator: - self._request = request - with self._condition: - self._peer = context.protocol_context().peer() - self._invocation_metadata = context.invocation_metadata() - context.protocol_context().disable_next_response_compression() - self._serviced = True - self._condition.notify_all() - return _RESPONSE - - def stream_stream(self, request_iterator, context): - for request in request_iterator: - with self._condition: - self._peer = context.protocol_context().peer() - context.protocol_context().disable_next_response_compression() - yield _RESPONSE - with self._condition: - self._invocation_metadata = context.invocation_metadata() - self._serviced = True - self._condition.notify_all() - - def peer(self): - with self._condition: - return self._peer - - def block_until_serviced(self): - with self._condition: - while not self._serviced: - self._condition.wait() - - -class _BlockingIterator(object): - - def __init__(self, upstream): - self._condition = threading.Condition() - self._upstream = upstream - self._allowed = [] - - def __iter__(self): - return self - - def __next__(self): - return self.next() - - def next(self): - with self._condition: - while True: - if self._allowed is None: - raise StopIteration() - elif self._allowed: - return self._allowed.pop(0) - else: - self._condition.wait() - - def allow(self): - with self._condition: - try: - 
self._allowed.append(next(self._upstream)) - except StopIteration: - self._allowed = None - self._condition.notify_all() - - -def _metadata_plugin(context, callback): +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests Face interface compliance of the gRPC Python Beta API.""" + +import threading +import unittest + +from grpc.beta import implementations +from grpc.beta import interfaces +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities +from tests.unit import resources +from tests.unit.beta import test_utilities +from tests.unit.framework.common import test_constants + +_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' + +_PER_RPC_CREDENTIALS_METADATA_KEY = b'my-call-credentials-metadata-key' +_PER_RPC_CREDENTIALS_METADATA_VALUE = b'my-call-credentials-metadata-value' + +_GROUP = 'group' +_UNARY_UNARY = 'unary-unary' +_UNARY_STREAM = 'unary-stream' +_STREAM_UNARY = 'stream-unary' +_STREAM_STREAM = 'stream-stream' + +_REQUEST = b'abc' +_RESPONSE = b'123' + + +class _Servicer(object): + + def __init__(self): + self._condition = threading.Condition() + self._peer = None + self._serviced = False + + def unary_unary(self, request, context): + with self._condition: + self._request = request + self._peer = context.protocol_context().peer() + self._invocation_metadata = context.invocation_metadata() + context.protocol_context().disable_next_response_compression() + self._serviced = True + self._condition.notify_all() + return _RESPONSE + + def unary_stream(self, request, context): + with self._condition: + self._request = request + self._peer = context.protocol_context().peer() + self._invocation_metadata = context.invocation_metadata() + context.protocol_context().disable_next_response_compression() + self._serviced = True + self._condition.notify_all() + return + yield # pylint: disable=unreachable + + def stream_unary(self, request_iterator, context): + for request in request_iterator: + self._request = request + with self._condition: + self._peer = context.protocol_context().peer() + self._invocation_metadata = context.invocation_metadata() + context.protocol_context().disable_next_response_compression() + self._serviced = True + self._condition.notify_all() + return _RESPONSE + + def stream_stream(self, request_iterator, context): + for request in request_iterator: + with self._condition: + self._peer = context.protocol_context().peer() + context.protocol_context().disable_next_response_compression() + yield _RESPONSE + with self._condition: + self._invocation_metadata = context.invocation_metadata() + self._serviced = True + self._condition.notify_all() + + def peer(self): + with self._condition: + return self._peer + + def block_until_serviced(self): + with self._condition: + while not self._serviced: + self._condition.wait() + + +class _BlockingIterator(object): + + def __init__(self, upstream): + self._condition = threading.Condition() + self._upstream = upstream + self._allowed = [] + + def __iter__(self): + 
return self + + def __next__(self): + return self.next() + + def next(self): + with self._condition: + while True: + if self._allowed is None: + raise StopIteration() + elif self._allowed: + return self._allowed.pop(0) + else: + self._condition.wait() + + def allow(self): + with self._condition: + try: + self._allowed.append(next(self._upstream)) + except StopIteration: + self._allowed = None + self._condition.notify_all() + + +def _metadata_plugin(context, callback): callback([ (_PER_RPC_CREDENTIALS_METADATA_KEY, _PER_RPC_CREDENTIALS_METADATA_VALUE) ], None) - - -class BetaFeaturesTest(unittest.TestCase): - - def setUp(self): - self._servicer = _Servicer() - method_implementations = { - (_GROUP, _UNARY_UNARY): + + +class BetaFeaturesTest(unittest.TestCase): + + def setUp(self): + self._servicer = _Servicer() + method_implementations = { + (_GROUP, _UNARY_UNARY): utilities.unary_unary_inline(self._servicer.unary_unary), - (_GROUP, _UNARY_STREAM): + (_GROUP, _UNARY_STREAM): utilities.unary_stream_inline(self._servicer.unary_stream), - (_GROUP, _STREAM_UNARY): + (_GROUP, _STREAM_UNARY): utilities.stream_unary_inline(self._servicer.stream_unary), - (_GROUP, _STREAM_STREAM): + (_GROUP, _STREAM_STREAM): utilities.stream_stream_inline(self._servicer.stream_stream), - } - - cardinalities = { - _UNARY_UNARY: cardinality.Cardinality.UNARY_UNARY, - _UNARY_STREAM: cardinality.Cardinality.UNARY_STREAM, - _STREAM_UNARY: cardinality.Cardinality.STREAM_UNARY, - _STREAM_STREAM: cardinality.Cardinality.STREAM_STREAM, - } - - server_options = implementations.server_options( - thread_pool_size=test_constants.POOL_SIZE) + } + + cardinalities = { + _UNARY_UNARY: cardinality.Cardinality.UNARY_UNARY, + _UNARY_STREAM: cardinality.Cardinality.UNARY_STREAM, + _STREAM_UNARY: cardinality.Cardinality.STREAM_UNARY, + _STREAM_STREAM: cardinality.Cardinality.STREAM_STREAM, + } + + server_options = implementations.server_options( + thread_pool_size=test_constants.POOL_SIZE) self._server = implementations.server(method_implementations, options=server_options) - server_credentials = implementations.ssl_server_credentials([ - ( - resources.private_key(), - resources.certificate_chain(), - ), - ]) - port = self._server.add_secure_port('[::]:0', server_credentials) - self._server.start() - self._channel_credentials = implementations.ssl_channel_credentials( - resources.test_root_certificates()) - self._call_credentials = implementations.metadata_call_credentials( - _metadata_plugin) - channel = test_utilities.not_really_secure_channel( - 'localhost', port, self._channel_credentials, _SERVER_HOST_OVERRIDE) - stub_options = implementations.stub_options( - thread_pool_size=test_constants.POOL_SIZE) + server_credentials = implementations.ssl_server_credentials([ + ( + resources.private_key(), + resources.certificate_chain(), + ), + ]) + port = self._server.add_secure_port('[::]:0', server_credentials) + self._server.start() + self._channel_credentials = implementations.ssl_channel_credentials( + resources.test_root_certificates()) + self._call_credentials = implementations.metadata_call_credentials( + _metadata_plugin) + channel = test_utilities.not_really_secure_channel( + 'localhost', port, self._channel_credentials, _SERVER_HOST_OVERRIDE) + stub_options = implementations.stub_options( + thread_pool_size=test_constants.POOL_SIZE) self._dynamic_stub = implementations.dynamic_stub(channel, _GROUP, cardinalities, options=stub_options) - - def tearDown(self): - self._dynamic_stub = None - 
self._server.stop(test_constants.SHORT_TIMEOUT).wait() - - def test_unary_unary(self): - call_options = interfaces.grpc_call_options( - disable_compression=True, credentials=self._call_credentials) + + def tearDown(self): + self._dynamic_stub = None + self._server.stop(test_constants.SHORT_TIMEOUT).wait() + + def test_unary_unary(self): + call_options = interfaces.grpc_call_options( + disable_compression=True, credentials=self._call_credentials) response = getattr(self._dynamic_stub, _UNARY_UNARY)(_REQUEST, test_constants.LONG_TIMEOUT, protocol_options=call_options) - self.assertEqual(_RESPONSE, response) - self.assertIsNotNone(self._servicer.peer()) - invocation_metadata = [ - (metadatum.key, metadatum.value) - for metadatum in self._servicer._invocation_metadata - ] - self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, - _PER_RPC_CREDENTIALS_METADATA_VALUE), - invocation_metadata) - - def test_unary_stream(self): - call_options = interfaces.grpc_call_options( - disable_compression=True, credentials=self._call_credentials) - response_iterator = getattr(self._dynamic_stub, _UNARY_STREAM)( - _REQUEST, - test_constants.LONG_TIMEOUT, - protocol_options=call_options) - self._servicer.block_until_serviced() - self.assertIsNotNone(self._servicer.peer()) - invocation_metadata = [ - (metadatum.key, metadatum.value) - for metadatum in self._servicer._invocation_metadata - ] - self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, - _PER_RPC_CREDENTIALS_METADATA_VALUE), - invocation_metadata) - - def test_stream_unary(self): - call_options = interfaces.grpc_call_options( - credentials=self._call_credentials) - request_iterator = _BlockingIterator(iter((_REQUEST,))) - response_future = getattr(self._dynamic_stub, _STREAM_UNARY).future( - request_iterator, - test_constants.LONG_TIMEOUT, - protocol_options=call_options) - response_future.protocol_context().disable_next_request_compression() - request_iterator.allow() - response_future.protocol_context().disable_next_request_compression() - request_iterator.allow() - self._servicer.block_until_serviced() - self.assertIsNotNone(self._servicer.peer()) - self.assertEqual(_RESPONSE, response_future.result()) - invocation_metadata = [ - (metadatum.key, metadatum.value) - for metadatum in self._servicer._invocation_metadata - ] - self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, - _PER_RPC_CREDENTIALS_METADATA_VALUE), - invocation_metadata) - - def test_stream_stream(self): - call_options = interfaces.grpc_call_options( - credentials=self._call_credentials) - request_iterator = _BlockingIterator(iter((_REQUEST,))) - response_iterator = getattr(self._dynamic_stub, _STREAM_STREAM)( - request_iterator, - test_constants.SHORT_TIMEOUT, - protocol_options=call_options) - response_iterator.protocol_context().disable_next_request_compression() - request_iterator.allow() - response = next(response_iterator) - response_iterator.protocol_context().disable_next_request_compression() - request_iterator.allow() - self._servicer.block_until_serviced() - self.assertIsNotNone(self._servicer.peer()) - self.assertEqual(_RESPONSE, response) - invocation_metadata = [ - (metadatum.key, metadatum.value) - for metadatum in self._servicer._invocation_metadata - ] - self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, - _PER_RPC_CREDENTIALS_METADATA_VALUE), - invocation_metadata) - - -class ContextManagementAndLifecycleTest(unittest.TestCase): - - def setUp(self): - self._servicer = _Servicer() - self._method_implementations = { - (_GROUP, _UNARY_UNARY): + self.assertEqual(_RESPONSE, response) 
+ self.assertIsNotNone(self._servicer.peer()) + invocation_metadata = [ + (metadatum.key, metadatum.value) + for metadatum in self._servicer._invocation_metadata + ] + self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, + _PER_RPC_CREDENTIALS_METADATA_VALUE), + invocation_metadata) + + def test_unary_stream(self): + call_options = interfaces.grpc_call_options( + disable_compression=True, credentials=self._call_credentials) + response_iterator = getattr(self._dynamic_stub, _UNARY_STREAM)( + _REQUEST, + test_constants.LONG_TIMEOUT, + protocol_options=call_options) + self._servicer.block_until_serviced() + self.assertIsNotNone(self._servicer.peer()) + invocation_metadata = [ + (metadatum.key, metadatum.value) + for metadatum in self._servicer._invocation_metadata + ] + self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, + _PER_RPC_CREDENTIALS_METADATA_VALUE), + invocation_metadata) + + def test_stream_unary(self): + call_options = interfaces.grpc_call_options( + credentials=self._call_credentials) + request_iterator = _BlockingIterator(iter((_REQUEST,))) + response_future = getattr(self._dynamic_stub, _STREAM_UNARY).future( + request_iterator, + test_constants.LONG_TIMEOUT, + protocol_options=call_options) + response_future.protocol_context().disable_next_request_compression() + request_iterator.allow() + response_future.protocol_context().disable_next_request_compression() + request_iterator.allow() + self._servicer.block_until_serviced() + self.assertIsNotNone(self._servicer.peer()) + self.assertEqual(_RESPONSE, response_future.result()) + invocation_metadata = [ + (metadatum.key, metadatum.value) + for metadatum in self._servicer._invocation_metadata + ] + self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, + _PER_RPC_CREDENTIALS_METADATA_VALUE), + invocation_metadata) + + def test_stream_stream(self): + call_options = interfaces.grpc_call_options( + credentials=self._call_credentials) + request_iterator = _BlockingIterator(iter((_REQUEST,))) + response_iterator = getattr(self._dynamic_stub, _STREAM_STREAM)( + request_iterator, + test_constants.SHORT_TIMEOUT, + protocol_options=call_options) + response_iterator.protocol_context().disable_next_request_compression() + request_iterator.allow() + response = next(response_iterator) + response_iterator.protocol_context().disable_next_request_compression() + request_iterator.allow() + self._servicer.block_until_serviced() + self.assertIsNotNone(self._servicer.peer()) + self.assertEqual(_RESPONSE, response) + invocation_metadata = [ + (metadatum.key, metadatum.value) + for metadatum in self._servicer._invocation_metadata + ] + self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, + _PER_RPC_CREDENTIALS_METADATA_VALUE), + invocation_metadata) + + +class ContextManagementAndLifecycleTest(unittest.TestCase): + + def setUp(self): + self._servicer = _Servicer() + self._method_implementations = { + (_GROUP, _UNARY_UNARY): utilities.unary_unary_inline(self._servicer.unary_unary), - (_GROUP, _UNARY_STREAM): + (_GROUP, _UNARY_STREAM): utilities.unary_stream_inline(self._servicer.unary_stream), - (_GROUP, _STREAM_UNARY): + (_GROUP, _STREAM_UNARY): utilities.stream_unary_inline(self._servicer.stream_unary), - (_GROUP, _STREAM_STREAM): + (_GROUP, _STREAM_STREAM): utilities.stream_stream_inline(self._servicer.stream_stream), - } - - self._cardinalities = { - _UNARY_UNARY: cardinality.Cardinality.UNARY_UNARY, - _UNARY_STREAM: cardinality.Cardinality.UNARY_STREAM, - _STREAM_UNARY: cardinality.Cardinality.STREAM_UNARY, - _STREAM_STREAM: 
cardinality.Cardinality.STREAM_STREAM, - } - - self._server_options = implementations.server_options( - thread_pool_size=test_constants.POOL_SIZE) - self._server_credentials = implementations.ssl_server_credentials([ - ( - resources.private_key(), - resources.certificate_chain(), - ), - ]) - self._channel_credentials = implementations.ssl_channel_credentials( - resources.test_root_certificates()) - self._stub_options = implementations.stub_options( - thread_pool_size=test_constants.POOL_SIZE) - - def test_stub_context(self): + } + + self._cardinalities = { + _UNARY_UNARY: cardinality.Cardinality.UNARY_UNARY, + _UNARY_STREAM: cardinality.Cardinality.UNARY_STREAM, + _STREAM_UNARY: cardinality.Cardinality.STREAM_UNARY, + _STREAM_STREAM: cardinality.Cardinality.STREAM_STREAM, + } + + self._server_options = implementations.server_options( + thread_pool_size=test_constants.POOL_SIZE) + self._server_credentials = implementations.ssl_server_credentials([ + ( + resources.private_key(), + resources.certificate_chain(), + ), + ]) + self._channel_credentials = implementations.ssl_channel_credentials( + resources.test_root_certificates()) + self._stub_options = implementations.stub_options( + thread_pool_size=test_constants.POOL_SIZE) + + def test_stub_context(self): server = implementations.server(self._method_implementations, options=self._server_options) - port = server.add_secure_port('[::]:0', self._server_credentials) - server.start() - - channel = test_utilities.not_really_secure_channel( - 'localhost', port, self._channel_credentials, _SERVER_HOST_OVERRIDE) + port = server.add_secure_port('[::]:0', self._server_credentials) + server.start() + + channel = test_utilities.not_really_secure_channel( + 'localhost', port, self._channel_credentials, _SERVER_HOST_OVERRIDE) dynamic_stub = implementations.dynamic_stub(channel, _GROUP, self._cardinalities, options=self._stub_options) - for _ in range(100): - with dynamic_stub: - pass - for _ in range(10): - with dynamic_stub: - call_options = interfaces.grpc_call_options( - disable_compression=True) + for _ in range(100): + with dynamic_stub: + pass + for _ in range(10): + with dynamic_stub: + call_options = interfaces.grpc_call_options( + disable_compression=True) response = getattr(dynamic_stub, _UNARY_UNARY)(_REQUEST, test_constants.LONG_TIMEOUT, protocol_options=call_options) - self.assertEqual(_RESPONSE, response) - self.assertIsNotNone(self._servicer.peer()) - - server.stop(test_constants.SHORT_TIMEOUT).wait() - - def test_server_lifecycle(self): - for _ in range(100): + self.assertEqual(_RESPONSE, response) + self.assertIsNotNone(self._servicer.peer()) + + server.stop(test_constants.SHORT_TIMEOUT).wait() + + def test_server_lifecycle(self): + for _ in range(100): server = implementations.server(self._method_implementations, options=self._server_options) - port = server.add_secure_port('[::]:0', self._server_credentials) - server.start() - server.stop(test_constants.SHORT_TIMEOUT).wait() - for _ in range(100): + port = server.add_secure_port('[::]:0', self._server_credentials) + server.start() + server.stop(test_constants.SHORT_TIMEOUT).wait() + for _ in range(100): server = implementations.server(self._method_implementations, options=self._server_options) - server.add_secure_port('[::]:0', self._server_credentials) - server.add_insecure_port('[::]:0') - with server: - server.stop(test_constants.SHORT_TIMEOUT) - server.stop(test_constants.SHORT_TIMEOUT) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + server.add_secure_port('[::]:0', 
self._server_credentials) + server.add_insecure_port('[::]:0') + with server: + server.stop(test_constants.SHORT_TIMEOUT) + server.stop(test_constants.SHORT_TIMEOUT) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_connectivity_channel_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_connectivity_channel_test.py index 4eb570f21b..1416902eab 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_connectivity_channel_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_connectivity_channel_test.py @@ -1,32 +1,32 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests of grpc.beta._connectivity_channel.""" - -import unittest - -from grpc.beta import interfaces - - -class ConnectivityStatesTest(unittest.TestCase): - - def testBetaConnectivityStates(self): - self.assertIsNotNone(interfaces.ChannelConnectivity.IDLE) - self.assertIsNotNone(interfaces.ChannelConnectivity.CONNECTING) - self.assertIsNotNone(interfaces.ChannelConnectivity.READY) - self.assertIsNotNone(interfaces.ChannelConnectivity.TRANSIENT_FAILURE) - self.assertIsNotNone(interfaces.ChannelConnectivity.FATAL_FAILURE) - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests of grpc.beta._connectivity_channel.""" + +import unittest + +from grpc.beta import interfaces + + +class ConnectivityStatesTest(unittest.TestCase): + + def testBetaConnectivityStates(self): + self.assertIsNotNone(interfaces.ChannelConnectivity.IDLE) + self.assertIsNotNone(interfaces.ChannelConnectivity.CONNECTING) + self.assertIsNotNone(interfaces.ChannelConnectivity.READY) + self.assertIsNotNone(interfaces.ChannelConnectivity.TRANSIENT_FAILURE) + self.assertIsNotNone(interfaces.ChannelConnectivity.FATAL_FAILURE) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py index ae60a0e4cd..75a615eeff 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py @@ -1,55 +1,55 @@ -# Copyright 2016 gRPC authors. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests the implementations module of the gRPC Python Beta API.""" - -import datetime -import unittest - -from oauth2client import client as oauth2client_client - -from grpc.beta import implementations -from tests.unit import resources - - -class ChannelCredentialsTest(unittest.TestCase): - - def test_runtime_provided_root_certificates(self): - channel_credentials = implementations.ssl_channel_credentials() - self.assertIsInstance(channel_credentials, - implementations.ChannelCredentials) - - def test_application_provided_root_certificates(self): - channel_credentials = implementations.ssl_channel_credentials( - resources.test_root_certificates()) - self.assertIsInstance(channel_credentials, - implementations.ChannelCredentials) - - -class CallCredentialsTest(unittest.TestCase): - - def test_google_call_credentials(self): - creds = oauth2client_client.GoogleCredentials( - 'token', 'client_id', 'secret', 'refresh_token', +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests the implementations module of the gRPC Python Beta API.""" + +import datetime +import unittest + +from oauth2client import client as oauth2client_client + +from grpc.beta import implementations +from tests.unit import resources + + +class ChannelCredentialsTest(unittest.TestCase): + + def test_runtime_provided_root_certificates(self): + channel_credentials = implementations.ssl_channel_credentials() + self.assertIsInstance(channel_credentials, + implementations.ChannelCredentials) + + def test_application_provided_root_certificates(self): + channel_credentials = implementations.ssl_channel_credentials( + resources.test_root_certificates()) + self.assertIsInstance(channel_credentials, + implementations.ChannelCredentials) + + +class CallCredentialsTest(unittest.TestCase): + + def test_google_call_credentials(self): + creds = oauth2client_client.GoogleCredentials( + 'token', 'client_id', 'secret', 'refresh_token', datetime.datetime(2008, 6, 24), 'https://refresh.uri.com/', 'user_agent') - call_creds = implementations.google_call_credentials(creds) - self.assertIsInstance(call_creds, implementations.CallCredentials) - - def test_access_token_call_credentials(self): - call_creds = implementations.access_token_call_credentials('token') - self.assertIsInstance(call_creds, implementations.CallCredentials) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + call_creds = implementations.google_call_credentials(creds) + self.assertIsInstance(call_creds, implementations.CallCredentials) + + def test_access_token_call_credentials(self): + call_creds = implementations.access_token_call_credentials('token') + self.assertIsInstance(call_creds, implementations.CallCredentials) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py index dbede08a38..837d2bbebf 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py @@ -1,59 +1,59 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests of RPC-method-not-found behavior.""" - -import unittest - -from grpc.beta import implementations -from grpc.beta import interfaces -from grpc.framework.interfaces.face import face -from tests.unit.framework.common import test_constants - - -class NotFoundTest(unittest.TestCase): - - def setUp(self): - self._server = implementations.server({}) - port = self._server.add_insecure_port('[::]:0') - channel = implementations.insecure_channel('localhost', port) - self._generic_stub = implementations.generic_stub(channel) - self._server.start() - - def tearDown(self): - self._server.stop(0).wait() - self._generic_stub = None - - def test_blocking_unary_unary_not_found(self): - with self.assertRaises(face.LocalError) as exception_assertion_context: +# Copyright 2015 gRPC authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests of RPC-method-not-found behavior.""" + +import unittest + +from grpc.beta import implementations +from grpc.beta import interfaces +from grpc.framework.interfaces.face import face +from tests.unit.framework.common import test_constants + + +class NotFoundTest(unittest.TestCase): + + def setUp(self): + self._server = implementations.server({}) + port = self._server.add_insecure_port('[::]:0') + channel = implementations.insecure_channel('localhost', port) + self._generic_stub = implementations.generic_stub(channel) + self._server.start() + + def tearDown(self): + self._server.stop(0).wait() + self._generic_stub = None + + def test_blocking_unary_unary_not_found(self): + with self.assertRaises(face.LocalError) as exception_assertion_context: self._generic_stub.blocking_unary_unary('groop', 'meffod', b'abc', test_constants.LONG_TIMEOUT, with_call=True) - self.assertIs(exception_assertion_context.exception.code, - interfaces.StatusCode.UNIMPLEMENTED) - - def test_future_stream_unary_not_found(self): - rpc_future = self._generic_stub.future_stream_unary( - 'grupe', 'mevvod', iter([b'def']), test_constants.LONG_TIMEOUT) - with self.assertRaises(face.LocalError) as exception_assertion_context: - rpc_future.result() - self.assertIs(exception_assertion_context.exception.code, - interfaces.StatusCode.UNIMPLEMENTED) - self.assertIs(rpc_future.exception().code, - interfaces.StatusCode.UNIMPLEMENTED) - - -if __name__ == '__main__': - unittest.main(verbosity=2) + self.assertIs(exception_assertion_context.exception.code, + interfaces.StatusCode.UNIMPLEMENTED) + + def test_future_stream_unary_not_found(self): + rpc_future = self._generic_stub.future_stream_unary( + 'grupe', 'mevvod', iter([b'def']), test_constants.LONG_TIMEOUT) + with self.assertRaises(face.LocalError) as exception_assertion_context: + rpc_future.result() + self.assertIs(exception_assertion_context.exception.code, + interfaces.StatusCode.UNIMPLEMENTED) + self.assertIs(rpc_future.exception().code, + interfaces.StatusCode.UNIMPLEMENTED) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_utilities_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_utilities_test.py index 2a59e7b2c8..e042262796 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_utilities_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_utilities_test.py @@ -1,93 +1,93 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""Tests of grpc.beta.utilities.""" - -import threading -import time -import unittest - -from grpc.beta import implementations -from grpc.beta import utilities -from grpc.framework.foundation import future -from tests.unit.framework.common import test_constants - - -class _Callback(object): - - def __init__(self): - self._condition = threading.Condition() - self._value = None - - def accept_value(self, value): - with self._condition: - self._value = value - self._condition.notify_all() - - def block_until_called(self): - with self._condition: - while self._value is None: - self._condition.wait() - return self._value - - -@unittest.skip('https://github.com/grpc/grpc/issues/16134') -class ChannelConnectivityTest(unittest.TestCase): - - def test_lonely_channel_connectivity(self): - channel = implementations.insecure_channel('localhost', 12345) - callback = _Callback() - - ready_future = utilities.channel_ready_future(channel) - ready_future.add_done_callback(callback.accept_value) - with self.assertRaises(future.TimeoutError): - ready_future.result(timeout=test_constants.SHORT_TIMEOUT) - self.assertFalse(ready_future.cancelled()) - self.assertFalse(ready_future.done()) - self.assertTrue(ready_future.running()) - ready_future.cancel() - value_passed_to_callback = callback.block_until_called() - self.assertIs(ready_future, value_passed_to_callback) - self.assertTrue(ready_future.cancelled()) - self.assertTrue(ready_future.done()) - self.assertFalse(ready_future.running()) - - def test_immediately_connectable_channel_connectivity(self): - server = implementations.server({}) - port = server.add_insecure_port('[::]:0') - server.start() - channel = implementations.insecure_channel('localhost', port) - callback = _Callback() - - try: - ready_future = utilities.channel_ready_future(channel) - ready_future.add_done_callback(callback.accept_value) - self.assertIsNone( - ready_future.result(timeout=test_constants.LONG_TIMEOUT)) - value_passed_to_callback = callback.block_until_called() - self.assertIs(ready_future, value_passed_to_callback) - self.assertFalse(ready_future.cancelled()) - self.assertTrue(ready_future.done()) - self.assertFalse(ready_future.running()) - # Cancellation after maturity has no effect. - ready_future.cancel() - self.assertFalse(ready_future.cancelled()) - self.assertTrue(ready_future.done()) - self.assertFalse(ready_future.running()) - finally: - ready_future.cancel() - server.stop(0) - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests of grpc.beta.utilities.""" + +import threading +import time +import unittest + +from grpc.beta import implementations +from grpc.beta import utilities +from grpc.framework.foundation import future +from tests.unit.framework.common import test_constants + + +class _Callback(object): + + def __init__(self): + self._condition = threading.Condition() + self._value = None + + def accept_value(self, value): + with self._condition: + self._value = value + self._condition.notify_all() + + def block_until_called(self): + with self._condition: + while self._value is None: + self._condition.wait() + return self._value + + +@unittest.skip('https://github.com/grpc/grpc/issues/16134') +class ChannelConnectivityTest(unittest.TestCase): + + def test_lonely_channel_connectivity(self): + channel = implementations.insecure_channel('localhost', 12345) + callback = _Callback() + + ready_future = utilities.channel_ready_future(channel) + ready_future.add_done_callback(callback.accept_value) + with self.assertRaises(future.TimeoutError): + ready_future.result(timeout=test_constants.SHORT_TIMEOUT) + self.assertFalse(ready_future.cancelled()) + self.assertFalse(ready_future.done()) + self.assertTrue(ready_future.running()) + ready_future.cancel() + value_passed_to_callback = callback.block_until_called() + self.assertIs(ready_future, value_passed_to_callback) + self.assertTrue(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + + def test_immediately_connectable_channel_connectivity(self): + server = implementations.server({}) + port = server.add_insecure_port('[::]:0') + server.start() + channel = implementations.insecure_channel('localhost', port) + callback = _Callback() + + try: + ready_future = utilities.channel_ready_future(channel) + ready_future.add_done_callback(callback.accept_value) + self.assertIsNone( + ready_future.result(timeout=test_constants.LONG_TIMEOUT)) + value_passed_to_callback = callback.block_until_called() + self.assertIs(ready_future, value_passed_to_callback) + self.assertFalse(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + # Cancellation after maturity has no effect. + ready_future.cancel() + self.assertFalse(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + finally: + ready_future.cancel() + server.stop(0) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/test_utilities.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/test_utilities.py index 4a32205b89..c8d920d35e 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/test_utilities.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/test_utilities.py @@ -1,40 +1,40 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Test-appropriate entry points into the gRPC Python Beta API.""" - -import grpc -from grpc.beta import implementations - - -def not_really_secure_channel(host, port, channel_credentials, - server_host_override): - """Creates an insecure Channel to a remote host. - - Args: - host: The name of the remote host to which to connect. - port: The port of the remote host to which to connect. - channel_credentials: The implementations.ChannelCredentials with which to - connect. - server_host_override: The target name used for SSL host name checking. - - Returns: - An implementations.Channel to the remote host through which RPCs may be - conducted. - """ - target = '%s:%d' % (host, port) - channel = grpc.secure_channel(target, channel_credentials, (( - 'grpc.ssl_target_name_override', - server_host_override, - ),)) - return implementations.Channel(channel) +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test-appropriate entry points into the gRPC Python Beta API.""" + +import grpc +from grpc.beta import implementations + + +def not_really_secure_channel(host, port, channel_credentials, + server_host_override): + """Creates an insecure Channel to a remote host. + + Args: + host: The name of the remote host to which to connect. + port: The port of the remote host to which to connect. + channel_credentials: The implementations.ChannelCredentials with which to + connect. + server_host_override: The target name used for SSL host name checking. + + Returns: + An implementations.Channel to the remote host through which RPCs may be + conducted. + """ + target = '%s:%d' % (host, port) + channel = grpc.secure_channel(target, channel_credentials, (( + 'grpc.ssl_target_name_override', + server_host_override, + ),)) + return implementations.Channel(channel) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/README.md b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/README.md index d25440d660..100b43c1aa 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/README.md +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/README.md @@ -1,15 +1,15 @@ -These are test keys *NOT* to be used in production. - -The `certificate_hierarchy_1` and `certificate_hierarchy_2` contain -two disjoint but similarly organized certificate hierarchies. Each -contains: - -* The respective root CA cert in `certs/ca.cert.pem` - -* The intermediate CA cert in - `intermediate/certs/intermediate.cert.pem`, signed by the root CA - -* A client cert and a server cert--both signed by the intermediate - CA--in `intermediate/certs/client.cert.pem` and - `intermediate/certs/localhost-1.cert.pem`; the corresponding keys - are in `intermediate/private` +These are test keys *NOT* to be used in production. + +The `certificate_hierarchy_1` and `certificate_hierarchy_2` contain +two disjoint but similarly organized certificate hierarchies. 
Each +contains: + +* The respective root CA cert in `certs/ca.cert.pem` + +* The intermediate CA cert in + `intermediate/certs/intermediate.cert.pem`, signed by the root CA + +* A client cert and a server cert--both signed by the intermediate + CA--in `intermediate/certs/client.cert.pem` and + `intermediate/certs/localhost-1.cert.pem`; the corresponding keys + are in `intermediate/private` diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem index 5f62638013..49d39cd8ed 100755 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem @@ -1,4 +1,4 @@ ------BEGIN CERTIFICATE----- +-----BEGIN CERTIFICATE----- MIIDWjCCAkKgAwIBAgIUWrP0VvHcy+LP6UuYNtiL9gBhD5owDQYJKoZIhvcNAQEL BQAwVjELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEPMA0GA1UEAwwGdGVzdGNhMB4XDTIw @@ -17,4 +17,4 @@ CVTtdJB4CYWpcNyXOdqefrbJW5QNljxgi6Fhvs7JJkBqdXIkWXtFk2eRgOIP2Eo9 /OHQHlYnwZFrk6sp4wPyR+A95S0toZBcyDVz7u+hOW0pGK3wviOe9lvRgj/H3Pwt bewb0l+MhRig0/DVHamyVxrDRbqInU1/GTNCwcZkXKYFWSf92U+kIcTth24Q1gcw eZiLl5FfrWokUNytFElXob0V0a5/kbhiLc3yWmvWqHTpqCALbVyF+rKJo2f5Kw== ------END CERTIFICATE----- +-----END CERTIFICATE----- diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key index 25cdade2fb..086462992c 100755 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key @@ -1,4 +1,4 @@ ------BEGIN PRIVATE KEY----- +-----BEGIN PRIVATE KEY----- MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDnE443EknxvxBq 6+hvn/t09hl8hx366EBYvZmVM/NC+7igXRAjiJiA/mIaCvL3MS0Iz5hBLxSGICU+ WproA3GCIFITIwcf/ETyWj/5xpgZ4AKrLrjQmmX8mhwUajfF3UvwMJrCOVqPp67t @@ -25,4 +25,4 @@ FRuGldlSOW1z/nSh8ViizSYE5H5HX1qkXEippvFRE88CgYB3Bfu3YQY60ITWIShv nNkdcbTT9eoP9suaRJjw92Ln+7ZpALYlQMKUZmJ/5uBmLs4RFwUTQruLOPL4yLTH awADWUzs3IRr1fwn9E+zM8JVyKCnUEM3w4N5UZskGO2klashAd30hWO+knRv/y0r uGIYs9Ek7YXlXIRVrzMwcsrt1w== ------END PRIVATE KEY----- +-----END PRIVATE KEY----- diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem index d37efbaafa..88244f856c 100755 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem @@ -1,4 +1,4 @@ ------BEGIN CERTIFICATE----- +-----BEGIN CERTIFICATE----- MIIDtDCCApygAwIBAgIUbJfTREJ6k6/+oInWhV1O1j3ZT0IwDQYJKoZIhvcNAQEL BQAwVjELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEPMA0GA1UEAwwGdGVzdGNhMB4XDTIw @@ -19,4 +19,4 @@ Jl4RK0tSkGQ3YNY4NzXwQP/vmUgfkw8VBAZ4Y4GKxppdATjffIW+srbAmdDruIRM wPeikgOoRrXf0LA1fi4TqxARzeRwenQpayNfGHTvVF9aJkl8HoaMunTAdG5pIVcr 9GKi/gEMpXUJbbVv3U5frX1Wo4CFo+rZWJ/LyCMeb0jciNLxSdMwj/E/ZuExlyeZ gc9ctPjSMvgSyXEKv6Vwobleeg88V2ZgzenziORoWj4KszG/lbQZvg== ------END CERTIFICATE----- +-----END CERTIFICATE----- diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/__init__.py index dc985eebb4..5fb4f3c3cf 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/__init__.py +++ 
b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py index 3006b859c4..8b58a0c46a 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py @@ -1,64 +1,64 @@ -# Copyright 2019 The gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import contextlib -import os -import socket +# Copyright 2019 The gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import contextlib +import os +import socket import errno - + _DEFAULT_SOCK_OPTIONS = (socket.SO_REUSEADDR, socket.SO_REUSEPORT) if os.name != 'nt' else ( socket.SO_REUSEADDR,) _UNRECOVERABLE_ERRNOS = (errno.EADDRINUSE, errno.ENOSR) - - -def get_socket(bind_address='localhost', + + +def get_socket(bind_address='localhost', port=0, - listen=True, + listen=True, sock_options=_DEFAULT_SOCK_OPTIONS): """Opens a socket. - - Useful for reserving a port for a system-under-test. - - Args: - bind_address: The host to which to bind. + + Useful for reserving a port for a system-under-test. + + Args: + bind_address: The host to which to bind. 
port: The port to which to bind. - listen: A boolean value indicating whether or not to listen on the socket. - sock_options: A sequence of socket options to apply to the socket. - - Returns: - A tuple containing: - - the address to which the socket is bound - - the port to which the socket is bound - - the socket object itself - """ - _sock_options = sock_options if sock_options else [] - if socket.has_ipv6: - address_families = (socket.AF_INET6, socket.AF_INET) - else: - address_families = (socket.AF_INET) - for address_family in address_families: - try: - sock = socket.socket(address_family, socket.SOCK_STREAM) - for sock_option in _sock_options: - sock.setsockopt(socket.SOL_SOCKET, sock_option, 1) + listen: A boolean value indicating whether or not to listen on the socket. + sock_options: A sequence of socket options to apply to the socket. + + Returns: + A tuple containing: + - the address to which the socket is bound + - the port to which the socket is bound + - the socket object itself + """ + _sock_options = sock_options if sock_options else [] + if socket.has_ipv6: + address_families = (socket.AF_INET6, socket.AF_INET) + else: + address_families = (socket.AF_INET) + for address_family in address_families: + try: + sock = socket.socket(address_family, socket.SOCK_STREAM) + for sock_option in _sock_options: + sock.setsockopt(socket.SOL_SOCKET, sock_option, 1) sock.bind((bind_address, port)) - if listen: - sock.listen(1) - return bind_address, sock.getsockname()[1], sock + if listen: + sock.listen(1) + return bind_address, sock.getsockname()[1], sock except OSError as os_error: - sock.close() + sock.close() if os_error.errno in _UNRECOVERABLE_ERRNOS: raise else: @@ -67,36 +67,36 @@ def get_socket(bind_address='localhost', # pointing to OSError. We need this catch to make it 2/3 agnostic. except socket.error: # pylint: disable=duplicate-except sock.close() - continue - raise RuntimeError("Failed to bind to {} with sock_options {}".format( - bind_address, sock_options)) - - -@contextlib.contextmanager -def bound_socket(bind_address='localhost', + continue + raise RuntimeError("Failed to bind to {} with sock_options {}".format( + bind_address, sock_options)) + + +@contextlib.contextmanager +def bound_socket(bind_address='localhost', port=0, - listen=True, + listen=True, sock_options=_DEFAULT_SOCK_OPTIONS): - """Opens a socket bound to an arbitrary port. - - Useful for reserving a port for a system-under-test. - - Args: - bind_address: The host to which to bind. + """Opens a socket bound to an arbitrary port. + + Useful for reserving a port for a system-under-test. + + Args: + bind_address: The host to which to bind. port: The port to which to bind. - listen: A boolean value indicating whether or not to listen on the socket. - sock_options: A sequence of socket options to apply to the socket. - - Yields: - A tuple containing: - - the address to which the socket is bound - - the port to which the socket is bound - """ + listen: A boolean value indicating whether or not to listen on the socket. + sock_options: A sequence of socket options to apply to the socket. 
+ + Yields: + A tuple containing: + - the address to which the socket is bound + - the port to which the socket is bound + """ host, port, sock = get_socket(bind_address=bind_address, port=port, listen=listen, sock_options=sock_options) - try: - yield host, port - finally: - sock.close() + try: + yield host, port + finally: + sock.close() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_constants.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_constants.py index 7e5a9d2c0f..2b9eb2e35b 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_constants.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_constants.py @@ -1,45 +1,45 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Constants shared among tests throughout RPC Framework.""" - -# Value for maximum duration in seconds that a test is allowed for its actual -# behavioral logic, excluding all time spent deliberately waiting in the test. -TIME_ALLOWANCE = 10 -# Value for maximum duration in seconds of RPCs that may time out as part of a -# test. -SHORT_TIMEOUT = 4 -# Absurdly large value for maximum duration in seconds for should-not-time-out -# RPCs made during tests. -LONG_TIMEOUT = 3000 -# Values to supply on construction of an object that will service RPCs; these -# should not be used as the actual timeout values of any RPCs made during tests. -DEFAULT_TIMEOUT = 300 -MAXIMUM_TIMEOUT = 3600 - -# The number of payloads to transmit in streaming tests. -STREAM_LENGTH = 200 - -# The size of payloads to transmit in tests. -PAYLOAD_SIZE = 256 * 1024 + 17 - -# The concurrency to use in tests of concurrent RPCs that will not create as -# many threads as RPCs. -RPC_CONCURRENCY = 200 - -# The concurrency to use in tests of concurrent RPCs that will create as many -# threads as RPCs. -THREAD_CONCURRENCY = 25 - -# The size of thread pools to use in tests. -POOL_SIZE = 10 +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Constants shared among tests throughout RPC Framework.""" + +# Value for maximum duration in seconds that a test is allowed for its actual +# behavioral logic, excluding all time spent deliberately waiting in the test. +TIME_ALLOWANCE = 10 +# Value for maximum duration in seconds of RPCs that may time out as part of a +# test. 
+SHORT_TIMEOUT = 4 +# Absurdly large value for maximum duration in seconds for should-not-time-out +# RPCs made during tests. +LONG_TIMEOUT = 3000 +# Values to supply on construction of an object that will service RPCs; these +# should not be used as the actual timeout values of any RPCs made during tests. +DEFAULT_TIMEOUT = 300 +MAXIMUM_TIMEOUT = 3600 + +# The number of payloads to transmit in streaming tests. +STREAM_LENGTH = 200 + +# The size of payloads to transmit in tests. +PAYLOAD_SIZE = 256 * 1024 + 17 + +# The concurrency to use in tests of concurrent RPCs that will not create as +# many threads as RPCs. +RPC_CONCURRENCY = 200 + +# The concurrency to use in tests of concurrent RPCs that will create as many +# threads as RPCs. +THREAD_CONCURRENCY = 25 + +# The size of thread pools to use in tests. +POOL_SIZE = 10 diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_control.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_control.py index ea88372dc0..6a422825cc 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_control.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_control.py @@ -1,97 +1,97 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Code for instructing systems under test to block or fail.""" - -import abc -import contextlib -import threading - -import six - - -class Defect(Exception): - """Simulates a programming defect raised into in a system under test. - - Use of a standard exception type is too easily misconstrued as an actual - defect in either the test infrastructure or the system under test. - """ - - -class Control(six.with_metaclass(abc.ABCMeta)): - """An object that accepts program control from a system under test. - - Systems under test passed a Control should call its control() method - frequently during execution. The control() method may block, raise an - exception, or do nothing, all according to the enclosing test's desire for - the system under test to simulate hanging, failing, or functioning. - """ - - @abc.abstractmethod - def control(self): - """Potentially does anything.""" - raise NotImplementedError() - - -class PauseFailControl(Control): - """A Control that can be used to pause or fail code under control. - - This object is only safe for use from two threads: one of the system under - test calling control and the other from the test system calling pause, - block_until_paused, and fail. 
- """ - - def __init__(self): - self._condition = threading.Condition() - self._pause = False - self._paused = False - self._fail = False - - def control(self): - with self._condition: - if self._fail: - raise Defect() - - while self._pause: - self._paused = True - self._condition.notify_all() - self._condition.wait() - self._paused = False - - @contextlib.contextmanager - def pause(self): - """Pauses code under control while controlling code is in context.""" - with self._condition: - self._pause = True - yield - with self._condition: - self._pause = False - self._condition.notify_all() - - def block_until_paused(self): - """Blocks controlling code until code under control is paused. - - May only be called within the context of a pause call. - """ - with self._condition: - while not self._paused: - self._condition.wait() - - @contextlib.contextmanager - def fail(self): - """Fails code under control while controlling code is in context.""" - with self._condition: - self._fail = True - yield - with self._condition: - self._fail = False +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Code for instructing systems under test to block or fail.""" + +import abc +import contextlib +import threading + +import six + + +class Defect(Exception): + """Simulates a programming defect raised into in a system under test. + + Use of a standard exception type is too easily misconstrued as an actual + defect in either the test infrastructure or the system under test. + """ + + +class Control(six.with_metaclass(abc.ABCMeta)): + """An object that accepts program control from a system under test. + + Systems under test passed a Control should call its control() method + frequently during execution. The control() method may block, raise an + exception, or do nothing, all according to the enclosing test's desire for + the system under test to simulate hanging, failing, or functioning. + """ + + @abc.abstractmethod + def control(self): + """Potentially does anything.""" + raise NotImplementedError() + + +class PauseFailControl(Control): + """A Control that can be used to pause or fail code under control. + + This object is only safe for use from two threads: one of the system under + test calling control and the other from the test system calling pause, + block_until_paused, and fail. + """ + + def __init__(self): + self._condition = threading.Condition() + self._pause = False + self._paused = False + self._fail = False + + def control(self): + with self._condition: + if self._fail: + raise Defect() + + while self._pause: + self._paused = True + self._condition.notify_all() + self._condition.wait() + self._paused = False + + @contextlib.contextmanager + def pause(self): + """Pauses code under control while controlling code is in context.""" + with self._condition: + self._pause = True + yield + with self._condition: + self._pause = False + self._condition.notify_all() + + def block_until_paused(self): + """Blocks controlling code until code under control is paused. 
+ + May only be called within the context of a pause call. + """ + with self._condition: + while not self._paused: + self._condition.wait() + + @contextlib.contextmanager + def fail(self): + """Fails code under control while controlling code is in context.""" + with self._condition: + self._fail = True + yield + with self._condition: + self._fail = False diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_coverage.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_coverage.py index 1652a7633c..f90a11963f 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_coverage.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_coverage.py @@ -1,101 +1,101 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Governs coverage for tests of RPCs throughout RPC Framework.""" - -import abc - -import six - -# This code is designed for use with the unittest module. -# pylint: disable=invalid-name - - -class Coverage(six.with_metaclass(abc.ABCMeta)): - """Specification of test coverage.""" - - @abc.abstractmethod - def testSuccessfulUnaryRequestUnaryResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testSuccessfulUnaryRequestStreamResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testSuccessfulStreamRequestUnaryResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testSuccessfulStreamRequestStreamResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testSequentialInvocations(self): - raise NotImplementedError() - - @abc.abstractmethod - def testParallelInvocations(self): - raise NotImplementedError() - - @abc.abstractmethod - def testWaitingForSomeButNotAllParallelInvocations(self): - raise NotImplementedError() - - @abc.abstractmethod - def testCancelledUnaryRequestUnaryResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testCancelledUnaryRequestStreamResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testCancelledStreamRequestUnaryResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testCancelledStreamRequestStreamResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testExpiredUnaryRequestUnaryResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testExpiredUnaryRequestStreamResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testExpiredStreamRequestUnaryResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testExpiredStreamRequestStreamResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testFailedUnaryRequestUnaryResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testFailedUnaryRequestStreamResponse(self): - raise NotImplementedError() - - @abc.abstractmethod - def testFailedStreamRequestUnaryResponse(self): - raise NotImplementedError() - - 
@abc.abstractmethod - def testFailedStreamRequestStreamResponse(self): - raise NotImplementedError() +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Governs coverage for tests of RPCs throughout RPC Framework.""" + +import abc + +import six + +# This code is designed for use with the unittest module. +# pylint: disable=invalid-name + + +class Coverage(six.with_metaclass(abc.ABCMeta)): + """Specification of test coverage.""" + + @abc.abstractmethod + def testSuccessfulUnaryRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testSuccessfulUnaryRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testSuccessfulStreamRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testSuccessfulStreamRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testSequentialInvocations(self): + raise NotImplementedError() + + @abc.abstractmethod + def testParallelInvocations(self): + raise NotImplementedError() + + @abc.abstractmethod + def testWaitingForSomeButNotAllParallelInvocations(self): + raise NotImplementedError() + + @abc.abstractmethod + def testCancelledUnaryRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testCancelledUnaryRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testCancelledStreamRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testCancelledStreamRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testExpiredUnaryRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testExpiredUnaryRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testExpiredStreamRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testExpiredStreamRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testFailedUnaryRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testFailedUnaryRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testFailedStreamRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testFailedStreamRequestStreamResponse(self): + raise NotImplementedError() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/__init__.py index dc985eebb4..5fb4f3c3cf 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2015 gRPC authors. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/_logging_pool_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/_logging_pool_test.py index 2a4c20ffc4..c4ea03177c 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/_logging_pool_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/_logging_pool_test.py @@ -1,73 +1,73 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Tests for grpc.framework.foundation.logging_pool.""" - -import threading -import unittest - -from grpc.framework.foundation import logging_pool - -_POOL_SIZE = 16 - - -class _CallableObject(object): - - def __init__(self): - self._lock = threading.Lock() - self._passed_values = [] - - def __call__(self, value): - with self._lock: - self._passed_values.append(value) - - def passed_values(self): - with self._lock: - return tuple(self._passed_values) - - -class LoggingPoolTest(unittest.TestCase): - - def testUpAndDown(self): - pool = logging_pool.pool(_POOL_SIZE) - pool.shutdown(wait=True) - - with logging_pool.pool(_POOL_SIZE) as pool: - self.assertIsNotNone(pool) - - def testTaskExecuted(self): - test_list = [] - - with logging_pool.pool(_POOL_SIZE) as pool: - pool.submit(lambda: test_list.append(object())).result() - - self.assertTrue(test_list) - - def testException(self): - with logging_pool.pool(_POOL_SIZE) as pool: - raised_exception = pool.submit(lambda: 1 / 0).exception() - - self.assertIsNotNone(raised_exception) - - def testCallableObjectExecuted(self): - callable_object = _CallableObject() - passed_object = object() - with logging_pool.pool(_POOL_SIZE) as pool: - future = pool.submit(callable_object, passed_object) - self.assertIsNone(future.result()) - self.assertSequenceEqual((passed_object,), - callable_object.passed_values()) - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for grpc.framework.foundation.logging_pool.""" + +import threading +import unittest + +from grpc.framework.foundation import logging_pool + +_POOL_SIZE = 16 + + +class _CallableObject(object): + + def __init__(self): + self._lock = threading.Lock() + self._passed_values = [] + + def __call__(self, value): + with self._lock: + self._passed_values.append(value) + + def passed_values(self): + with self._lock: + return tuple(self._passed_values) + + +class LoggingPoolTest(unittest.TestCase): + + def testUpAndDown(self): + pool = logging_pool.pool(_POOL_SIZE) + pool.shutdown(wait=True) + + with logging_pool.pool(_POOL_SIZE) as pool: + self.assertIsNotNone(pool) + + def testTaskExecuted(self): + test_list = [] + + with logging_pool.pool(_POOL_SIZE) as pool: + pool.submit(lambda: test_list.append(object())).result() + + self.assertTrue(test_list) + + def testException(self): + with logging_pool.pool(_POOL_SIZE) as pool: + raised_exception = pool.submit(lambda: 1 / 0).exception() + + self.assertIsNotNone(raised_exception) + + def testCallableObjectExecuted(self): + callable_object = _CallableObject() + passed_object = object() + with logging_pool.pool(_POOL_SIZE) as pool: + future = pool.submit(callable_object, passed_object) + self.assertIsNone(future.result()) + self.assertSequenceEqual((passed_object,), + callable_object.passed_values()) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/stream_testing.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/stream_testing.py index 9c3b526b50..dd5c5b3b03 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/stream_testing.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/stream_testing.py @@ -1,57 +1,57 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""Utilities for testing stream-related code.""" - -from grpc.framework.foundation import stream - - -class TestConsumer(stream.Consumer): - """A stream.Consumer instrumented for testing. - - Attributes: - calls: A sequence of value-termination pairs describing the history of calls - made on this object. 
- """ - - def __init__(self): - self.calls = [] - - def consume(self, value): - """See stream.Consumer.consume for specification.""" - self.calls.append((value, False)) - - def terminate(self): - """See stream.Consumer.terminate for specification.""" - self.calls.append((None, True)) - - def consume_and_terminate(self, value): - """See stream.Consumer.consume_and_terminate for specification.""" - self.calls.append((value, True)) - - def is_legal(self): - """Reports whether or not a legal sequence of calls has been made.""" - terminated = False - for value, terminal in self.calls: - if terminated: - return False - elif terminal: - terminated = True - elif value is None: - return False - else: # pylint: disable=useless-else-on-loop - return True - - def values(self): - """Returns the sequence of values that have been passed to this Consumer.""" - return [value for value, _ in self.calls if value] +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Utilities for testing stream-related code.""" + +from grpc.framework.foundation import stream + + +class TestConsumer(stream.Consumer): + """A stream.Consumer instrumented for testing. + + Attributes: + calls: A sequence of value-termination pairs describing the history of calls + made on this object. + """ + + def __init__(self): + self.calls = [] + + def consume(self, value): + """See stream.Consumer.consume for specification.""" + self.calls.append((value, False)) + + def terminate(self): + """See stream.Consumer.terminate for specification.""" + self.calls.append((None, True)) + + def consume_and_terminate(self, value): + """See stream.Consumer.consume_and_terminate for specification.""" + self.calls.append((value, True)) + + def is_legal(self): + """Reports whether or not a legal sequence of calls has been made.""" + terminated = False + for value, terminal in self.calls: + if terminated: + return False + elif terminal: + terminated = True + elif value is None: + return False + else: # pylint: disable=useless-else-on-loop + return True + + def values(self): + """Returns the sequence of values that have been passed to this Consumer.""" + return [value for value, _ in self.calls if value] diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/resources.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/resources.py index 1e6d40454a..6efd870fc8 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/resources.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/resources.py @@ -1,113 +1,113 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""Constants and functions for data used in testing.""" - -import os -import pkgutil - -_ROOT_CERTIFICATES_RESOURCE_PATH = 'credentials/ca.pem' -_PRIVATE_KEY_RESOURCE_PATH = 'credentials/server1.key' -_CERTIFICATE_CHAIN_RESOURCE_PATH = 'credentials/server1.pem' - - -def test_root_certificates(): - return pkgutil.get_data(__name__, _ROOT_CERTIFICATES_RESOURCE_PATH) - - -def private_key(): - return pkgutil.get_data(__name__, _PRIVATE_KEY_RESOURCE_PATH) - - -def certificate_chain(): - return pkgutil.get_data(__name__, _CERTIFICATE_CHAIN_RESOURCE_PATH) - - -def cert_hier_1_root_ca_cert(): - return pkgutil.get_data( - __name__, 'credentials/certificate_hierarchy_1/certs/ca.cert.pem') - - -def cert_hier_1_intermediate_ca_cert(): - return pkgutil.get_data( - __name__, - 'credentials/certificate_hierarchy_1/intermediate/certs/intermediate.cert.pem' - ) - - -def cert_hier_1_client_1_key(): - return pkgutil.get_data( - __name__, - 'credentials/certificate_hierarchy_1/intermediate/private/client.key.pem' - ) - - -def cert_hier_1_client_1_cert(): - return pkgutil.get_data( - __name__, - 'credentials/certificate_hierarchy_1/intermediate/certs/client.cert.pem' - ) - - -def cert_hier_1_server_1_key(): - return pkgutil.get_data( - __name__, - 'credentials/certificate_hierarchy_1/intermediate/private/localhost-1.key.pem' - ) - - -def cert_hier_1_server_1_cert(): - return pkgutil.get_data( - __name__, - 'credentials/certificate_hierarchy_1/intermediate/certs/localhost-1.cert.pem' - ) - - -def cert_hier_2_root_ca_cert(): - return pkgutil.get_data( - __name__, 'credentials/certificate_hierarchy_2/certs/ca.cert.pem') - - -def cert_hier_2_intermediate_ca_cert(): - return pkgutil.get_data( - __name__, - 'credentials/certificate_hierarchy_2/intermediate/certs/intermediate.cert.pem' - ) - - -def cert_hier_2_client_1_key(): - return pkgutil.get_data( - __name__, - 'credentials/certificate_hierarchy_2/intermediate/private/client.key.pem' - ) - - -def cert_hier_2_client_1_cert(): - return pkgutil.get_data( - __name__, - 'credentials/certificate_hierarchy_2/intermediate/certs/client.cert.pem' - ) - - -def cert_hier_2_server_1_key(): - return pkgutil.get_data( - __name__, - 'credentials/certificate_hierarchy_2/intermediate/private/localhost-1.key.pem' - ) - - -def cert_hier_2_server_1_cert(): - return pkgutil.get_data( - __name__, - 'credentials/certificate_hierarchy_2/intermediate/certs/localhost-1.cert.pem' - ) +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Constants and functions for data used in testing.""" + +import os +import pkgutil + +_ROOT_CERTIFICATES_RESOURCE_PATH = 'credentials/ca.pem' +_PRIVATE_KEY_RESOURCE_PATH = 'credentials/server1.key' +_CERTIFICATE_CHAIN_RESOURCE_PATH = 'credentials/server1.pem' + + +def test_root_certificates(): + return pkgutil.get_data(__name__, _ROOT_CERTIFICATES_RESOURCE_PATH) + + +def private_key(): + return pkgutil.get_data(__name__, _PRIVATE_KEY_RESOURCE_PATH) + + +def certificate_chain(): + return pkgutil.get_data(__name__, _CERTIFICATE_CHAIN_RESOURCE_PATH) + + +def cert_hier_1_root_ca_cert(): + return pkgutil.get_data( + __name__, 'credentials/certificate_hierarchy_1/certs/ca.cert.pem') + + +def cert_hier_1_intermediate_ca_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_1/intermediate/certs/intermediate.cert.pem' + ) + + +def cert_hier_1_client_1_key(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_1/intermediate/private/client.key.pem' + ) + + +def cert_hier_1_client_1_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_1/intermediate/certs/client.cert.pem' + ) + + +def cert_hier_1_server_1_key(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_1/intermediate/private/localhost-1.key.pem' + ) + + +def cert_hier_1_server_1_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_1/intermediate/certs/localhost-1.cert.pem' + ) + + +def cert_hier_2_root_ca_cert(): + return pkgutil.get_data( + __name__, 'credentials/certificate_hierarchy_2/certs/ca.cert.pem') + + +def cert_hier_2_intermediate_ca_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_2/intermediate/certs/intermediate.cert.pem' + ) + + +def cert_hier_2_client_1_key(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_2/intermediate/private/client.key.pem' + ) + + +def cert_hier_2_client_1_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_2/intermediate/certs/client.cert.pem' + ) + + +def cert_hier_2_server_1_key(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_2/intermediate/private/localhost-1.key.pem' + ) + + +def cert_hier_2_server_1_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_2/intermediate/certs/localhost-1.cert.pem' + ) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py index a916dac283..59ded0752f 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py @@ -1,134 +1,134 @@ -# Copyright 2015 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-"""Common code used throughout tests of gRPC.""" - -import collections -import threading - -from concurrent import futures -import grpc -import six - -INVOCATION_INITIAL_METADATA = ( - ('0', 'abc'), - ('1', 'def'), - ('2', 'ghi'), -) -SERVICE_INITIAL_METADATA = ( - ('3', 'jkl'), - ('4', 'mno'), - ('5', 'pqr'), -) -SERVICE_TERMINAL_METADATA = ( - ('6', 'stu'), - ('7', 'vwx'), - ('8', 'yza'), -) -DETAILS = 'test details' - - -def metadata_transmitted(original_metadata, transmitted_metadata): - """Judges whether or not metadata was acceptably transmitted. - - gRPC is allowed to insert key-value pairs into the metadata values given by - applications and to reorder key-value pairs with different keys but it is not - allowed to alter existing key-value pairs or to reorder key-value pairs with - the same key. - - Args: - original_metadata: A metadata value used in a test of gRPC. An iterable over - iterables of length 2. - transmitted_metadata: A metadata value corresponding to original_metadata - after having been transmitted via gRPC. An iterable over iterables of - length 2. - - Returns: - A boolean indicating whether transmitted_metadata accurately reflects - original_metadata after having been transmitted via gRPC. - """ - original = collections.defaultdict(list) - for key, value in original_metadata: - original[key].append(value) - transmitted = collections.defaultdict(list) - for key, value in transmitted_metadata: - transmitted[key].append(value) - - for key, values in six.iteritems(original): - transmitted_values = transmitted[key] - transmitted_iterator = iter(transmitted_values) - try: - for value in values: - while True: - transmitted_value = next(transmitted_iterator) - if value == transmitted_value: - break - except StopIteration: - return False - else: - return True - - -def test_secure_channel(target, channel_credentials, server_host_override): - """Creates an insecure Channel to a remote host. - - Args: - host: The name of the remote host to which to connect. - port: The port of the remote host to which to connect. - channel_credentials: The implementations.ChannelCredentials with which to - connect. - server_host_override: The target name used for SSL host name checking. - - Returns: - An implementations.Channel to the remote host through which RPCs may be - conducted. - """ - channel = grpc.secure_channel(target, channel_credentials, (( - 'grpc.ssl_target_name_override', - server_host_override, - ),)) - return channel - - -def test_server(max_workers=10, reuse_port=False): - """Creates an insecure grpc server. - - These servers have SO_REUSEPORT disabled to prevent cross-talk. - """ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Common code used throughout tests of gRPC.""" + +import collections +import threading + +from concurrent import futures +import grpc +import six + +INVOCATION_INITIAL_METADATA = ( + ('0', 'abc'), + ('1', 'def'), + ('2', 'ghi'), +) +SERVICE_INITIAL_METADATA = ( + ('3', 'jkl'), + ('4', 'mno'), + ('5', 'pqr'), +) +SERVICE_TERMINAL_METADATA = ( + ('6', 'stu'), + ('7', 'vwx'), + ('8', 'yza'), +) +DETAILS = 'test details' + + +def metadata_transmitted(original_metadata, transmitted_metadata): + """Judges whether or not metadata was acceptably transmitted. + + gRPC is allowed to insert key-value pairs into the metadata values given by + applications and to reorder key-value pairs with different keys but it is not + allowed to alter existing key-value pairs or to reorder key-value pairs with + the same key. + + Args: + original_metadata: A metadata value used in a test of gRPC. An iterable over + iterables of length 2. + transmitted_metadata: A metadata value corresponding to original_metadata + after having been transmitted via gRPC. An iterable over iterables of + length 2. + + Returns: + A boolean indicating whether transmitted_metadata accurately reflects + original_metadata after having been transmitted via gRPC. + """ + original = collections.defaultdict(list) + for key, value in original_metadata: + original[key].append(value) + transmitted = collections.defaultdict(list) + for key, value in transmitted_metadata: + transmitted[key].append(value) + + for key, values in six.iteritems(original): + transmitted_values = transmitted[key] + transmitted_iterator = iter(transmitted_values) + try: + for value in values: + while True: + transmitted_value = next(transmitted_iterator) + if value == transmitted_value: + break + except StopIteration: + return False + else: + return True + + +def test_secure_channel(target, channel_credentials, server_host_override): + """Creates an insecure Channel to a remote host. + + Args: + host: The name of the remote host to which to connect. + port: The port of the remote host to which to connect. + channel_credentials: The implementations.ChannelCredentials with which to + connect. + server_host_override: The target name used for SSL host name checking. + + Returns: + An implementations.Channel to the remote host through which RPCs may be + conducted. + """ + channel = grpc.secure_channel(target, channel_credentials, (( + 'grpc.ssl_target_name_override', + server_host_override, + ),)) + return channel + + +def test_server(max_workers=10, reuse_port=False): + """Creates an insecure grpc server. + + These servers have SO_REUSEPORT disabled to prevent cross-talk. 
+ """ return grpc.server(futures.ThreadPoolExecutor(max_workers=max_workers), options=(('grpc.so_reuseport', int(reuse_port)),)) - - -class WaitGroup(object): - - def __init__(self, n=0): - self.count = n - self.cv = threading.Condition() - - def add(self, n): - self.cv.acquire() - self.count += n - self.cv.release() - - def done(self): - self.cv.acquire() - self.count -= 1 - if self.count == 0: - self.cv.notify_all() - self.cv.release() - - def wait(self): - self.cv.acquire() - while self.count > 0: - self.cv.wait() - self.cv.release() + + +class WaitGroup(object): + + def __init__(self, n=0): + self.count = n + self.cv = threading.Condition() + + def add(self, n): + self.cv.acquire() + self.count += n + self.cv.release() + + def done(self): + self.cv.acquire() + self.count -= 1 + if self.count == 0: + self.cv.notify_all() + self.cv.release() + + def wait(self): + self.cv.acquire() + while self.count > 0: + self.cv.wait() + self.cv.release() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/thread_pool.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/thread_pool.py index c2d05c9cb6..094e203cd9 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/thread_pool.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/thread_pool.py @@ -1,34 +1,34 @@ -# Copyright 2016 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import threading -from concurrent import futures - - -class RecordingThreadPool(futures.ThreadPoolExecutor): - """A thread pool that records if used.""" - - def __init__(self, max_workers): - self._tp_executor = futures.ThreadPoolExecutor(max_workers=max_workers) - self._lock = threading.Lock() - self._was_used = False - - def submit(self, fn, *args, **kwargs): # pylint: disable=arguments-differ - with self._lock: - self._was_used = True - self._tp_executor.submit(fn, *args, **kwargs) - - def was_used(self): - with self._lock: - return self._was_used +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import threading +from concurrent import futures + + +class RecordingThreadPool(futures.ThreadPoolExecutor): + """A thread pool that records if used.""" + + def __init__(self, max_workers): + self._tp_executor = futures.ThreadPoolExecutor(max_workers=max_workers) + self._lock = threading.Lock() + self._was_used = False + + def submit(self, fn, *args, **kwargs): # pylint: disable=arguments-differ + with self._lock: + self._was_used = True + self._tp_executor.submit(fn, *args, **kwargs) + + def was_used(self): + with self._lock: + return self._was_used diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/__init__.py index b4533c806e..8ddd310696 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/__init__.py @@ -1,21 +1,21 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -from tests import _loader -from tests import _runner - -Loader = _loader.Loader -Runner = _runner.Runner +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from tests import _loader +from tests import _runner + +Loader = _loader.Loader +Runner = _runner.Runner diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/__init__.py index 690315b5ad..f4b321fc5b 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2019 The gRPC Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2019 The gRPC Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py index d69c738f4d..e74dec0739 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py @@ -1,27 +1,27 @@ -# Copyright 2019 gRPC authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -from tests._sanity import _sanity_test - - -class AioSanityTest(_sanity_test.SanityTest): - - TEST_PKG_MODULE_NAME = 'tests_aio' - TEST_PKG_PATH = 'tests_aio' - - -if __name__ == '__main__': - unittest.main(verbosity=2) +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +from tests._sanity import _sanity_test + + +class AioSanityTest(_sanity_test.SanityTest): + + TEST_PKG_MODULE_NAME = 'tests_aio' + TEST_PKG_PATH = 'tests_aio' + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py index 1d0ca1eb72..561298a626 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py @@ -1,46 +1,46 @@ -# Copyright 2019 The gRPC Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import asyncio -import logging -import unittest - -from grpc.experimental import aio - -from src.proto.grpc.testing import benchmark_service_pb2_grpc +# Copyright 2019 The gRPC Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import asyncio +import logging +import unittest + +from grpc.experimental import aio + +from src.proto.grpc.testing import benchmark_service_pb2_grpc from tests_aio.benchmark import benchmark_servicer - - -async def _start_async_server(): - server = aio.server() - + + +async def _start_async_server(): + server = aio.server() + port = server.add_insecure_port('localhost:%s' % 50051) servicer = benchmark_servicer.BenchmarkServicer() - benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server( - servicer, server) - - await server.start() + benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server( + servicer, server) + + await server.start() logging.info('Benchmark server started at :%d' % port) - await server.wait_for_termination() - - -def main(): - loop = asyncio.get_event_loop() - loop.create_task(_start_async_server()) - loop.run_forever() - - -if __name__ == '__main__': + await server.wait_for_termination() + + +def main(): + loop = asyncio.get_event_loop() + loop.create_task(_start_async_server()) + loop.run_forever() + + +if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) - main() + main() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/__init__.py index 690315b5ad..f4b321fc5b 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/__init__.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/__init__.py @@ -1,13 +1,13 @@ -# Copyright 2019 The gRPC Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2019 The gRPC Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
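[Editor's note, not part of the diff] The benchmark server hunk restored just above (tests_aio/benchmark/server.py) is the most compact use of the grpc.experimental.aio server API in this commit. The following is a reference sketch only, assuming grpcio with asyncio support is installed; the ephemeral port and the placeholder comment for servicer registration are illustrative assumptions and are not taken from the commit:

import asyncio

from grpc.experimental import aio


async def serve():
    # Build an asyncio-native server, bind an insecure port, and block until
    # termination -- the same sequence the benchmark server above follows.
    server = aio.server()
    port = server.add_insecure_port('localhost:0')  # 0 = let the OS pick a free port
    # A real service would be registered here, e.g.
    # benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server(servicer, server)
    await server.start()
    print('serving on port', port)
    await server.wait_for_termination()


if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(serve())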
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py index d74ee6cf90..ec5f2112da 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py @@ -1,26 +1,26 @@ -# Copyright 2019 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import logging import functools -import asyncio +import asyncio from typing import Callable -import unittest -from grpc.experimental import aio - +import unittest +from grpc.experimental import aio + __all__ = 'AioTestBase' - + _COROUTINE_FUNCTION_ALLOWLIST = ['setUp', 'tearDown'] @@ -45,13 +45,13 @@ def _get_default_loop(debug=True): # NOTE(gnossen) this test class can also be implemented with metaclass. -class AioTestBase(unittest.TestCase): +class AioTestBase(unittest.TestCase): # NOTE(lidi) We need to pick a loop for entire testing phase, otherwise it # will trigger create new loops in new threads, leads to deadlock. _TEST_LOOP = _get_default_loop() - - @property - def loop(self): + + @property + def loop(self): return self._TEST_LOOP def __getattribute__(self, name): diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py index f979f285d3..5e5081a38d 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py @@ -1,27 +1,27 @@ -# Copyright 2019 The gRPC Authors -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import asyncio import datetime - + import grpc -from grpc.experimental import aio +from grpc.experimental import aio from tests.unit import resources - + from src.proto.grpc.testing import empty_pb2, messages_pb2, test_pb2_grpc from tests_aio.unit import _constants - + _INITIAL_METADATA_KEY = "x-grpc-test-echo-initial" _TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin" @@ -48,14 +48,14 @@ async def _maybe_echo_status(request: messages_pb2.SimpleRequest, class TestServiceServicer(test_pb2_grpc.TestServiceServicer): - + async def UnaryCall(self, request, context): await _maybe_echo_metadata(context) await _maybe_echo_status(request, context) return messages_pb2.SimpleResponse( payload=messages_pb2.Payload(type=messages_pb2.COMPRESSABLE, body=b'\x00' * request.response_size)) - + async def EmptyCall(self, request, context): return empty_pb2.Empty() @@ -71,14 +71,14 @@ class TestServiceServicer(test_pb2_grpc.TestServiceServicer): payload=messages_pb2.Payload(type=request.response_type, body=b'\x00' * response_parameters.size)) - + # Next methods are extra ones that are registred programatically # when the sever is instantiated. They are not being provided by # the proto file. async def UnaryCallWithSleep(self, unused_request, unused_context): await asyncio.sleep(_constants.UNARY_CALL_WITH_SLEEP_VALUE) return messages_pb2.SimpleResponse() - + async def StreamingInputCall(self, request_async_iterator, unused_context): aggregate_size = 0 async for request in request_async_iterator: @@ -137,7 +137,7 @@ async def start_test_server(port=0, else: port = server.add_insecure_port('[::]:%d' % port) - await server.start() + await server.start() - # NOTE(lidizheng) returning the server to prevent it from deallocation - return 'localhost:%d' % port, server + # NOTE(lidizheng) returning the server to prevent it from deallocation + return 'localhost:%d' % port, server diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py index 0fdbdb2a2d..1961226fa6 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py @@ -1,31 +1,31 @@ -# Copyright 2019 The gRPC Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2019 The gRPC Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. """Tests behavior of the Call classes.""" -import asyncio -import logging -import unittest +import asyncio +import logging +import unittest import datetime - -import grpc + +import grpc from grpc.experimental import aio - + from src.proto.grpc.testing import messages_pb2, test_pb2_grpc from tests_aio.unit._test_base import AioTestBase -from tests_aio.unit._test_server import start_test_server +from tests_aio.unit._test_server import start_test_server from tests_aio.unit._constants import UNREACHABLE_TARGET - + _SHORT_TIMEOUT_S = datetime.timedelta(seconds=1).total_seconds() _NUM_STREAM_RESPONSES = 5 @@ -34,22 +34,22 @@ _REQUEST_PAYLOAD_SIZE = 7 _LOCAL_CANCEL_DETAILS_EXPECTATION = 'Locally cancelled by application!' _RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000) _INFINITE_INTERVAL_US = 2**31 - 1 - - + + class _MulticallableTestMixin(): - + async def setUp(self): address, self._server = await start_test_server() self._channel = aio.insecure_channel(address) self._stub = test_pb2_grpc.TestServiceStub(self._channel) - + async def tearDown(self): await self._channel.close() await self._server.stop(None) - - + + class TestUnaryUnaryCall(_MulticallableTestMixin, AioTestBase): - + async def test_call_to_string(self): call = self._stub.UnaryCall(messages_pb2.SimpleRequest()) @@ -63,47 +63,47 @@ class TestUnaryUnaryCall(_MulticallableTestMixin, AioTestBase): async def test_call_ok(self): call = self._stub.UnaryCall(messages_pb2.SimpleRequest()) - + self.assertFalse(call.done()) - + response = await call - + self.assertTrue(call.done()) self.assertIsInstance(response, messages_pb2.SimpleResponse) self.assertEqual(await call.code(), grpc.StatusCode.OK) - + # Response is cached at call object level, reentrance # returns again the same response response_retry = await call self.assertIs(response, response_retry) - + async def test_call_rpc_error(self): async with aio.insecure_channel(UNREACHABLE_TARGET) as channel: stub = test_pb2_grpc.TestServiceStub(channel) - + call = stub.UnaryCall(messages_pb2.SimpleRequest()) - + with self.assertRaises(aio.AioRpcError) as exception_context: await call - + self.assertEqual(grpc.StatusCode.UNAVAILABLE, exception_context.exception.code()) - + self.assertTrue(call.done()) self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code()) - + async def test_call_code_awaitable(self): call = self._stub.UnaryCall(messages_pb2.SimpleRequest()) self.assertEqual(await call.code(), grpc.StatusCode.OK) - + async def test_call_details_awaitable(self): call = self._stub.UnaryCall(messages_pb2.SimpleRequest()) self.assertEqual('', await call.details()) - + async def test_call_initial_metadata_awaitable(self): call = self._stub.UnaryCall(messages_pb2.SimpleRequest()) self.assertEqual(aio.Metadata(), await call.initial_metadata()) - + async def test_call_trailing_metadata_awaitable(self): call = self._stub.UnaryCall(messages_pb2.SimpleRequest()) self.assertEqual(aio.Metadata(), await call.trailing_metadata()) @@ -450,56 +450,56 @@ class TestUnaryStreamCall(_MulticallableTestMixin, AioTestBase): size=_RESPONSE_PAYLOAD_SIZE, interval_us=_RESPONSE_INTERVAL_US, )) - 
+ call = self._stub.StreamingOutputCall(request, timeout=_SHORT_TIMEOUT_S * 2) - + response = await call.read() self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body)) - + # Should be around the same as the timeout remained_time = call.time_remaining() self.assertGreater(remained_time, _SHORT_TIMEOUT_S * 3 / 2) self.assertLess(remained_time, _SHORT_TIMEOUT_S * 5 / 2) - + response = await call.read() self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body)) - + # Should be around the timeout minus a unit of wait time remained_time = call.time_remaining() self.assertGreater(remained_time, _SHORT_TIMEOUT_S / 2) self.assertLess(remained_time, _SHORT_TIMEOUT_S * 3 / 2) - + self.assertEqual(grpc.StatusCode.OK, await call.code()) - - + + class TestStreamUnaryCall(_MulticallableTestMixin, AioTestBase): - + async def test_cancel_stream_unary(self): call = self._stub.StreamingInputCall() - + # Prepares the request payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE) request = messages_pb2.StreamingInputCallRequest(payload=payload) - + # Sends out requests for _ in range(_NUM_STREAM_RESPONSES): await call.write(request) - + # Cancels the RPC self.assertFalse(call.done()) self.assertFalse(call.cancelled()) self.assertTrue(call.cancel()) self.assertTrue(call.cancelled()) - + await call.done_writing() - + with self.assertRaises(asyncio.CancelledError): await call - + async def test_early_cancel_stream_unary(self): call = self._stub.StreamingInputCall() - + # Cancels the RPC self.assertFalse(call.done()) self.assertFalse(call.cancelled()) @@ -809,6 +809,6 @@ class TestStreamStreamCall(_MulticallableTestMixin, AioTestBase): self.assertEqual(await call.code(), grpc.StatusCode.OK) -if __name__ == '__main__': +if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) - unittest.main(verbosity=2) + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py index 76027c433e..58cd555491 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py @@ -1,54 +1,54 @@ -# Copyright 2019 The gRPC Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Copyright 2019 The gRPC Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""Tests behavior of the grpc.aio.Channel class.""" -import logging +import logging import os -import unittest - -import grpc +import unittest + +import grpc from grpc.experimental import aio - + from src.proto.grpc.testing import messages_pb2, test_pb2_grpc -from tests.unit.framework.common import test_constants +from tests.unit.framework.common import test_constants from tests_aio.unit._constants import (UNARY_CALL_WITH_SLEEP_VALUE, UNREACHABLE_TARGET) from tests_aio.unit._test_base import AioTestBase -from tests_aio.unit._test_server import start_test_server - -_UNARY_CALL_METHOD = '/grpc.testing.TestService/UnaryCall' +from tests_aio.unit._test_server import start_test_server + +_UNARY_CALL_METHOD = '/grpc.testing.TestService/UnaryCall' _UNARY_CALL_METHOD_WITH_SLEEP = '/grpc.testing.TestService/UnaryCallWithSleep' _STREAMING_OUTPUT_CALL_METHOD = '/grpc.testing.TestService/StreamingOutputCall' - + _INVOCATION_METADATA = ( ('x-grpc-test-echo-initial', 'initial-md-value'), ('x-grpc-test-echo-trailing-bin', b'\x00\x02'), ) - + _NUM_STREAM_RESPONSES = 5 _REQUEST_PAYLOAD_SIZE = 7 _RESPONSE_PAYLOAD_SIZE = 42 - - + + class TestChannel(AioTestBase): - + async def setUp(self): self._server_target, self._server = await start_test_server() - + async def tearDown(self): await self._server.stop(None) - + async def test_async_context(self): async with aio.insecure_channel(self._server_target) as channel: hi = channel.unary_unary( @@ -56,17 +56,17 @@ class TestChannel(AioTestBase): request_serializer=messages_pb2.SimpleRequest.SerializeToString, response_deserializer=messages_pb2.SimpleResponse.FromString) await hi(messages_pb2.SimpleRequest()) - + async def test_unary_unary(self): async with aio.insecure_channel(self._server_target) as channel: - hi = channel.unary_unary( - _UNARY_CALL_METHOD, - request_serializer=messages_pb2.SimpleRequest.SerializeToString, - response_deserializer=messages_pb2.SimpleResponse.FromString) - response = await hi(messages_pb2.SimpleRequest()) - + hi = channel.unary_unary( + _UNARY_CALL_METHOD, + request_serializer=messages_pb2.SimpleRequest.SerializeToString, + response_deserializer=messages_pb2.SimpleResponse.FromString) + response = await hi(messages_pb2.SimpleRequest()) + self.assertIsInstance(response, messages_pb2.SimpleResponse) - + async def test_unary_call_times_out(self): async with aio.insecure_channel(self._server_target) as channel: hi = channel.unary_unary( @@ -74,11 +74,11 @@ class TestChannel(AioTestBase): request_serializer=messages_pb2.SimpleRequest.SerializeToString, response_deserializer=messages_pb2.SimpleResponse.FromString, ) - + with self.assertRaises(grpc.RpcError) as exception_context: await hi(messages_pb2.SimpleRequest(), timeout=UNARY_CALL_WITH_SLEEP_VALUE / 2) - + _, details = grpc.StatusCode.DEADLINE_EXCEEDED.value # pylint: disable=unused-variable self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, exception_context.exception.code()) @@ -87,7 +87,7 @@ class TestChannel(AioTestBase): self.assertIsNotNone(exception_context.exception.initial_metadata()) self.assertIsNotNone( exception_context.exception.trailing_metadata()) - + @unittest.skipIf(os.name == 'nt', 'TODO: https://github.com/grpc/grpc/issues/21658') async def test_unary_call_does_not_times_out(self): @@ -97,24 +97,24 @@ class TestChannel(AioTestBase): request_serializer=messages_pb2.SimpleRequest.SerializeToString, response_deserializer=messages_pb2.SimpleResponse.FromString, ) - + call = hi(messages_pb2.SimpleRequest(), timeout=UNARY_CALL_WITH_SLEEP_VALUE * 5) 
self.assertEqual(await call.code(), grpc.StatusCode.OK) - + async def test_unary_stream(self): channel = aio.insecure_channel(self._server_target) stub = test_pb2_grpc.TestServiceStub(channel) - + # Prepares the request request = messages_pb2.StreamingOutputCallRequest() for _ in range(_NUM_STREAM_RESPONSES): request.response_parameters.append( messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)) - + # Invokes the actual RPC call = stub.StreamingOutputCall(request) - + # Validates the responses response_cnt = 0 async for response in call: @@ -122,22 +122,22 @@ class TestChannel(AioTestBase): self.assertIs(type(response), messages_pb2.StreamingOutputCallResponse) self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body)) - + self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt) self.assertEqual(await call.code(), grpc.StatusCode.OK) await channel.close() - + async def test_stream_unary_using_write(self): channel = aio.insecure_channel(self._server_target) stub = test_pb2_grpc.TestServiceStub(channel) - + # Invokes the actual RPC call = stub.StreamingInputCall() - + # Prepares the request payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE) request = messages_pb2.StreamingInputCallRequest(payload=payload) - + # Sends out requests for _ in range(_NUM_STREAM_RESPONSES): await call.write(request) @@ -225,6 +225,6 @@ class TestChannel(AioTestBase): await channel.close() -if __name__ == '__main__': +if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) - unittest.main(verbosity=2) + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py index 8d5eed6972..b9183a22c7 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py @@ -1,33 +1,33 @@ -# Copyright 2019 The gRPC Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import logging -import unittest - +# Copyright 2019 The gRPC Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import logging +import unittest + class TestInit(unittest.TestCase): - + def test_grpc(self): import grpc # pylint: disable=wrong-import-position channel = grpc.aio.insecure_channel('dummy') self.assertIsInstance(channel, grpc.aio.Channel) - + def test_grpc_dot_aio(self): import grpc.aio # pylint: disable=wrong-import-position channel = grpc.aio.insecure_channel('dummy') self.assertIsInstance(channel, grpc.aio.Channel) - -if __name__ == '__main__': + +if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) - unittest.main(verbosity=2) + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py index 6e528faf37..61d1edd523 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py @@ -1,34 +1,34 @@ -# Copyright 2019 The gRPC Authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import asyncio +# Copyright 2019 The gRPC Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import asyncio import gc -import logging +import logging import socket import time -import unittest - -import grpc -from grpc.experimental import aio +import unittest + +import grpc +from grpc.experimental import aio from tests.unit import resources from tests.unit.framework.common import test_constants -from tests_aio.unit._test_base import AioTestBase - -_SIMPLE_UNARY_UNARY = '/test/SimpleUnaryUnary' -_BLOCK_FOREVER = '/test/BlockForever' -_BLOCK_BRIEFLY = '/test/BlockBriefly' +from tests_aio.unit._test_base import AioTestBase + +_SIMPLE_UNARY_UNARY = '/test/SimpleUnaryUnary' +_BLOCK_FOREVER = '/test/BlockForever' +_BLOCK_BRIEFLY = '/test/BlockBriefly' _UNARY_STREAM_ASYNC_GEN = '/test/UnaryStreamAsyncGen' _UNARY_STREAM_READER_WRITER = '/test/UnaryStreamReaderWriter' _UNARY_STREAM_EVILLY_MIXED = '/test/UnaryStreamEvillyMixed' @@ -42,17 +42,17 @@ _UNIMPLEMENTED_METHOD = '/test/UnimplementedMethod' _ERROR_IN_STREAM_STREAM = '/test/ErrorInStreamStream' _ERROR_WITHOUT_RAISE_IN_UNARY_UNARY = '/test/ErrorWithoutRaiseInUnaryUnary' _ERROR_WITHOUT_RAISE_IN_STREAM_STREAM = '/test/ErrorWithoutRaiseInStreamStream' - -_REQUEST = b'\x00\x00\x00' -_RESPONSE = b'\x01\x01\x01' + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x01\x01\x01' _NUM_STREAM_REQUESTS = 3 _NUM_STREAM_RESPONSES = 5 - - -class _GenericHandler(grpc.GenericRpcHandler): - - def __init__(self): - self._called = asyncio.get_event_loop().create_future() + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self): + self._called = asyncio.get_event_loop().create_future() self._routing_table = { _SIMPLE_UNARY_UNARY: grpc.unary_unary_rpc_method_handler(self._unary_unary), @@ -97,18 +97,18 @@ class _GenericHandler(grpc.GenericRpcHandler): grpc.stream_stream_rpc_method_handler( self._error_without_raise_in_stream_stream), } - - @staticmethod - async def _unary_unary(unused_request, unused_context): - return _RESPONSE - - async def _block_forever(self, unused_request, unused_context): - await asyncio.get_event_loop().create_future() - + + @staticmethod + async def _unary_unary(unused_request, unused_context): + return _RESPONSE + + async def _block_forever(self, unused_request, unused_context): + await asyncio.get_event_loop().create_future() + async def _block_briefly(self, unused_request, unused_context): - await asyncio.sleep(test_constants.SHORT_TIMEOUT / 2) - return _RESPONSE - + await asyncio.sleep(test_constants.SHORT_TIMEOUT / 2) + return _RESPONSE + async def _unary_stream_async_gen(self, unused_request, unused_context): for _ in range(_NUM_STREAM_RESPONSES): yield _RESPONSE @@ -188,164 +188,164 @@ class _GenericHandler(grpc.GenericRpcHandler): assert _REQUEST == request context.set_code(grpc.StatusCode.INTERNAL) - def service(self, handler_details): - self._called.set_result(None) + def service(self, handler_details): + self._called.set_result(None) return self._routing_table.get(handler_details.method) - - async def wait_for_call(self): - await self._called - - -async def _start_test_server(): - server = aio.server() - port = server.add_insecure_port('[::]:0') - generic_handler = _GenericHandler() - server.add_generic_rpc_handlers((generic_handler,)) - await server.start() - return 'localhost:%d' % port, server, generic_handler - - -class TestServer(AioTestBase): - + + async def wait_for_call(self): + await self._called + + +async def _start_test_server(): + server = aio.server() + port = server.add_insecure_port('[::]:0') + generic_handler = _GenericHandler() + server.add_generic_rpc_handlers((generic_handler,)) + 
await server.start() + return 'localhost:%d' % port, server, generic_handler + + +class TestServer(AioTestBase): + async def setUp(self): addr, self._server, self._generic_handler = await _start_test_server() self._channel = aio.insecure_channel(addr) - + async def tearDown(self): await self._channel.close() await self._server.stop(None) - + async def test_unary_unary(self): unary_unary_call = self._channel.unary_unary(_SIMPLE_UNARY_UNARY) response = await unary_unary_call(_REQUEST) self.assertEqual(response, _RESPONSE) - + async def test_unary_stream_async_generator(self): unary_stream_call = self._channel.unary_stream(_UNARY_STREAM_ASYNC_GEN) call = unary_stream_call(_REQUEST) - + response_cnt = 0 async for response in call: response_cnt += 1 self.assertEqual(_RESPONSE, response) - + self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt) self.assertEqual(await call.code(), grpc.StatusCode.OK) - + async def test_unary_stream_reader_writer(self): unary_stream_call = self._channel.unary_stream( _UNARY_STREAM_READER_WRITER) call = unary_stream_call(_REQUEST) - + for _ in range(_NUM_STREAM_RESPONSES): response = await call.read() self.assertEqual(_RESPONSE, response) - + self.assertEqual(await call.code(), grpc.StatusCode.OK) - + async def test_unary_stream_evilly_mixed(self): unary_stream_call = self._channel.unary_stream( _UNARY_STREAM_EVILLY_MIXED) call = unary_stream_call(_REQUEST) - + # Uses reader API self.assertEqual(_RESPONSE, await call.read()) - + # Uses async generator API, mixed! with self.assertRaises(aio.UsageError): async for response in call: self.assertEqual(_RESPONSE, response) - + async def test_stream_unary_async_generator(self): stream_unary_call = self._channel.stream_unary(_STREAM_UNARY_ASYNC_GEN) call = stream_unary_call() - + for _ in range(_NUM_STREAM_REQUESTS): await call.write(_REQUEST) await call.done_writing() - + response = await call self.assertEqual(_RESPONSE, response) self.assertEqual(await call.code(), grpc.StatusCode.OK) - + async def test_stream_unary_reader_writer(self): stream_unary_call = self._channel.stream_unary( _STREAM_UNARY_READER_WRITER) call = stream_unary_call() - + for _ in range(_NUM_STREAM_REQUESTS): await call.write(_REQUEST) await call.done_writing() - + response = await call self.assertEqual(_RESPONSE, response) self.assertEqual(await call.code(), grpc.StatusCode.OK) - + async def test_stream_unary_evilly_mixed(self): stream_unary_call = self._channel.stream_unary( _STREAM_UNARY_EVILLY_MIXED) call = stream_unary_call() - + for _ in range(_NUM_STREAM_REQUESTS): await call.write(_REQUEST) await call.done_writing() - + response = await call self.assertEqual(_RESPONSE, response) self.assertEqual(await call.code(), grpc.StatusCode.OK) - + async def test_stream_stream_async_generator(self): stream_stream_call = self._channel.stream_stream( _STREAM_STREAM_ASYNC_GEN) call = stream_stream_call() - + for _ in range(_NUM_STREAM_REQUESTS): await call.write(_REQUEST) await call.done_writing() - + for _ in range(_NUM_STREAM_RESPONSES): response = await call.read() self.assertEqual(_RESPONSE, response) - + self.assertEqual(await call.code(), grpc.StatusCode.OK) - + async def test_stream_stream_reader_writer(self): stream_stream_call = self._channel.stream_stream( _STREAM_STREAM_READER_WRITER) call = stream_stream_call() - + for _ in range(_NUM_STREAM_REQUESTS): await call.write(_REQUEST) await call.done_writing() - + for _ in range(_NUM_STREAM_RESPONSES): response = await call.read() self.assertEqual(_RESPONSE, response) - + self.assertEqual(await 
call.code(), grpc.StatusCode.OK) - + async def test_stream_stream_evilly_mixed(self): stream_stream_call = self._channel.stream_stream( _STREAM_STREAM_EVILLY_MIXED) call = stream_stream_call() - + for _ in range(_NUM_STREAM_REQUESTS): await call.write(_REQUEST) await call.done_writing() - + for _ in range(_NUM_STREAM_RESPONSES): response = await call.read() self.assertEqual(_RESPONSE, response) - + self.assertEqual(await call.code(), grpc.StatusCode.OK) - + async def test_shutdown(self): await self._server.stop(None) # Ensures no SIGSEGV triggered, and ends within timeout. - + async def test_shutdown_after_call(self): await self._channel.unary_unary(_SIMPLE_UNARY_UNARY)(_REQUEST) - + await self._server.stop(None) async def test_graceful_shutdown_success(self): @@ -410,24 +410,24 @@ class TestServer(AioTestBase): async def test_shutdown_before_call(self): await self._server.stop(None) - + # Ensures the server is cleaned up at this point. # Some proper exception should be raised. with self.assertRaises(aio.AioRpcError): await self._channel.unary_unary(_SIMPLE_UNARY_UNARY)(_REQUEST) - + async def test_unimplemented(self): call = self._channel.unary_unary(_UNIMPLEMENTED_METHOD) with self.assertRaises(aio.AioRpcError) as exception_context: await call(_REQUEST) rpc_error = exception_context.exception self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code()) - + async def test_shutdown_during_stream_stream(self): stream_stream_call = self._channel.stream_stream( _STREAM_STREAM_ASYNC_GEN) call = stream_stream_call() - + # Don't half close the RPC yet, keep it alive. await call.write(_REQUEST) await self._server.stop(None) @@ -481,6 +481,6 @@ class TestServer(AioTestBase): server.add_secure_port(bind_address, server_credentials) -if __name__ == '__main__': +if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) - unittest.main(verbosity=2) + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/ya.make b/contrib/libs/grpc/src/python/grpcio_tests/ya.make index f87a6aac0c..b0642eae34 100644 --- a/contrib/libs/grpc/src/python/grpcio_tests/ya.make +++ b/contrib/libs/grpc/src/python/grpcio_tests/ya.make @@ -1,141 +1,141 @@ -PY3TEST() - +PY3TEST() + LICENSE(Apache-2.0) - + LICENSE_TEXTS(.yandex_meta/licenses.list.txt) PEERDIR( contrib/libs/grpc/python ) -NO_LINT() - -PY_SRCS( - TOP_LEVEL - # tests/_sanity/__init__.py - # tests/testing/proto/__init__.py - # tests/testing/__init__.py - # tests/testing/_application_common.py - # tests/testing/_application_testing_common.py - # tests/testing/_client_application.py - # tests/testing/_client_test.py - # tests/testing/_server_application.py - # tests/testing/_server_test.py - # tests/testing/_time_test.py - tests/unit/__init__.py - tests/unit/_cython/__init__.py - tests/unit/_cython/_common.py - tests/unit/_cython/test_utilities.py - tests/unit/_exit_scenarios.py - tests/unit/_from_grpc_import_star.py +NO_LINT() + +PY_SRCS( + TOP_LEVEL + # tests/_sanity/__init__.py + # tests/testing/proto/__init__.py + # tests/testing/__init__.py + # tests/testing/_application_common.py + # tests/testing/_application_testing_common.py + # tests/testing/_client_application.py + # tests/testing/_client_test.py + # tests/testing/_server_application.py + # tests/testing/_server_test.py + # tests/testing/_time_test.py + tests/unit/__init__.py + tests/unit/_cython/__init__.py + tests/unit/_cython/_common.py + tests/unit/_cython/test_utilities.py + tests/unit/_exit_scenarios.py + tests/unit/_from_grpc_import_star.py 
tests/unit/_rpc_test_helpers.py - tests/unit/_server_shutdown_scenarios.py - tests/unit/_signal_client.py - tests/unit/_tcp_proxy.py - tests/unit/beta/__init__.py - tests/unit/beta/test_utilities.py - tests/unit/framework/__init__.py - tests/unit/framework/common/__init__.py - tests/unit/framework/common/test_constants.py - tests/unit/framework/common/test_control.py - tests/unit/framework/common/test_coverage.py - tests/unit/framework/foundation/__init__.py - tests/unit/resources.py - tests/unit/test_common.py - tests/unit/thread_pool.py - # protofiles - # tests/interop/__init__.py - # tests/interop/_intraop_test_case.py - # tests/interop/client.py - # tests/interop/methods.py - # tests/interop/resources.py - # tests/interop/server.py - # tests/interop/service.py - # protofiles - # tests/fork/__init__.py - # tests/fork/client.py - # tests/fork/methods.py - # protofiles - # tests/__init__.py - # tests/_loader.py - # tests/_result.py - # tests/_runner.py -) - -TEST_SRCS( - # coverage - # tests/_sanity/_sanity_test.py - tests/unit/_api_test.py - tests/unit/_abort_test.py - # CRASH - # tests/unit/_auth_context_test.py - tests/unit/_auth_test.py - tests/unit/_channel_args_test.py - tests/unit/_channel_close_test.py - tests/unit/_channel_connectivity_test.py - tests/unit/_channel_ready_future_test.py - # FLAKY - # tests/unit/_compression_test.py + tests/unit/_server_shutdown_scenarios.py + tests/unit/_signal_client.py + tests/unit/_tcp_proxy.py + tests/unit/beta/__init__.py + tests/unit/beta/test_utilities.py + tests/unit/framework/__init__.py + tests/unit/framework/common/__init__.py + tests/unit/framework/common/test_constants.py + tests/unit/framework/common/test_control.py + tests/unit/framework/common/test_coverage.py + tests/unit/framework/foundation/__init__.py + tests/unit/resources.py + tests/unit/test_common.py + tests/unit/thread_pool.py + # protofiles + # tests/interop/__init__.py + # tests/interop/_intraop_test_case.py + # tests/interop/client.py + # tests/interop/methods.py + # tests/interop/resources.py + # tests/interop/server.py + # tests/interop/service.py + # protofiles + # tests/fork/__init__.py + # tests/fork/client.py + # tests/fork/methods.py + # protofiles + # tests/__init__.py + # tests/_loader.py + # tests/_result.py + # tests/_runner.py +) + +TEST_SRCS( + # coverage + # tests/_sanity/_sanity_test.py + tests/unit/_api_test.py + tests/unit/_abort_test.py + # CRASH + # tests/unit/_auth_context_test.py + tests/unit/_auth_test.py + tests/unit/_channel_args_test.py + tests/unit/_channel_close_test.py + tests/unit/_channel_connectivity_test.py + tests/unit/_channel_ready_future_test.py + # FLAKY + # tests/unit/_compression_test.py tests/unit/_contextvars_propagation_test.py - tests/unit/_credentials_test.py - tests/unit/_cython/_cancel_many_calls_test.py - tests/unit/_cython/_channel_test.py - tests/unit/_cython/_fork_test.py - tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py - tests/unit/_cython/_no_messages_single_server_completion_queue_test.py - tests/unit/_cython/_read_some_but_not_all_responses_test.py - tests/unit/_cython/_server_test.py - tests/unit/_cython/cygrpc_test.py - tests/unit/_dns_resolver_test.py + tests/unit/_credentials_test.py + tests/unit/_cython/_cancel_many_calls_test.py + tests/unit/_cython/_channel_test.py + tests/unit/_cython/_fork_test.py + tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py + tests/unit/_cython/_no_messages_single_server_completion_queue_test.py + 
tests/unit/_cython/_read_some_but_not_all_responses_test.py + tests/unit/_cython/_server_test.py + tests/unit/_cython/cygrpc_test.py + tests/unit/_dns_resolver_test.py tests/unit/_dynamic_stubs_test.py - tests/unit/_empty_message_test.py - tests/unit/_error_message_encoding_test.py - tests/unit/_exit_test.py - tests/unit/_grpc_shutdown_test.py - tests/unit/_interceptor_test.py - tests/unit/_invalid_metadata_test.py - tests/unit/_invocation_defects_test.py - tests/unit/_local_credentials_test.py - tests/unit/_logging_test.py - tests/unit/_metadata_code_details_test.py - tests/unit/_metadata_flags_test.py - tests/unit/_metadata_test.py - tests/unit/_reconnect_test.py - tests/unit/_resource_exhausted_test.py + tests/unit/_empty_message_test.py + tests/unit/_error_message_encoding_test.py + tests/unit/_exit_test.py + tests/unit/_grpc_shutdown_test.py + tests/unit/_interceptor_test.py + tests/unit/_invalid_metadata_test.py + tests/unit/_invocation_defects_test.py + tests/unit/_local_credentials_test.py + tests/unit/_logging_test.py + tests/unit/_metadata_code_details_test.py + tests/unit/_metadata_flags_test.py + tests/unit/_metadata_test.py + tests/unit/_reconnect_test.py + tests/unit/_resource_exhausted_test.py tests/unit/_rpc_part_1_test.py tests/unit/_rpc_part_2_test.py - tests/unit/_server_shutdown_test.py - # tests.testing - # tests/unit/_server_ssl_cert_config_test.py - tests/unit/_server_test.py - tests/unit/_server_wait_for_termination_test.py - # CRASH - # tests/unit/_session_cache_test.py - tests/unit/_signal_handling_test.py - tests/unit/_version_test.py - tests/unit/beta/_beta_features_test.py - tests/unit/beta/_connectivity_channel_test.py - # oauth2client - # tests/unit/beta/_implementations_test.py - tests/unit/beta/_not_found_test.py - tests/unit/beta/_utilities_test.py - tests/unit/framework/foundation/_logging_pool_test.py - tests/unit/framework/foundation/stream_testing.py - # protofiles - # tests/interop/_insecure_intraop_test.py - # tests/interop/_secure_intraop_test.py - # tests/fork/_fork_interop_test.py -) - -SIZE(MEDIUM) - -RESOURCE_FILES( - PREFIX contrib/libs/grpc/src/python/grpcio_tests/ - tests/unit/credentials/ca.pem - tests/unit/credentials/server1.key - tests/unit/credentials/server1.pem -) - -REQUIREMENTS(network:full) - -END() + tests/unit/_server_shutdown_test.py + # tests.testing + # tests/unit/_server_ssl_cert_config_test.py + tests/unit/_server_test.py + tests/unit/_server_wait_for_termination_test.py + # CRASH + # tests/unit/_session_cache_test.py + tests/unit/_signal_handling_test.py + tests/unit/_version_test.py + tests/unit/beta/_beta_features_test.py + tests/unit/beta/_connectivity_channel_test.py + # oauth2client + # tests/unit/beta/_implementations_test.py + tests/unit/beta/_not_found_test.py + tests/unit/beta/_utilities_test.py + tests/unit/framework/foundation/_logging_pool_test.py + tests/unit/framework/foundation/stream_testing.py + # protofiles + # tests/interop/_insecure_intraop_test.py + # tests/interop/_secure_intraop_test.py + # tests/fork/_fork_interop_test.py +) + +SIZE(MEDIUM) + +RESOURCE_FILES( + PREFIX contrib/libs/grpc/src/python/grpcio_tests/ + tests/unit/credentials/ca.pem + tests/unit/credentials/server1.key + tests/unit/credentials/server1.pem +) + +REQUIREMENTS(network:full) + +END() |
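As a closing illustration (again not part of the commit), the bare unary-unary client pattern that channel_test.py above exercises; the method path, message types, and serializer arguments come from the diffed tests, while the target 'localhost:50051' is only a placeholder for whatever address start_test_server() actually returns.

import asyncio

from grpc.experimental import aio

from src.proto.grpc.testing import messages_pb2

_UNARY_CALL_METHOD = '/grpc.testing.TestService/UnaryCall'


async def call_unary(target: str) -> messages_pb2.SimpleResponse:
    # Mirrors TestChannel.test_unary_unary: open a channel, build the
    # multicallable with explicit (de)serializers, then await the call.
    async with aio.insecure_channel(target) as channel:
        hi = channel.unary_unary(
            _UNARY_CALL_METHOD,
            request_serializer=messages_pb2.SimpleRequest.SerializeToString,
            response_deserializer=messages_pb2.SimpleResponse.FromString)
        return await hi(messages_pb2.SimpleRequest())


if __name__ == '__main__':
    print(asyncio.run(call_unary('localhost:50051')))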