author    heretic <heretic@yandex-team.ru>    2022-02-10 16:45:46 +0300
committer Daniil Cherednik <dcherednik@yandex-team.ru>    2022-02-10 16:45:46 +0300
commit    81eddc8c0b55990194e112b02d127b87d54164a9 (patch)
tree      9142afc54d335ea52910662635b898e79e192e49 /contrib/libs/grpc/src/python/grpcio_tests
parent    397cbe258b9e064f49c4ca575279f02f39fef76e (diff)
download  ydb-81eddc8c0b55990194e112b02d127b87d54164a9.tar.gz
Restoring authorship annotation for <heretic@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/libs/grpc/src/python/grpcio_tests')
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/.yandex_meta/licenses.list.txt | 144
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/commands.py | 110
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py | 2
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/setup.py | 46
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py | 78
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py | 4
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py | 8
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py | 20
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py | 6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py | 32
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py | 20
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py | 42
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py | 38
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py | 14
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py | 224
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py | 58
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py | 58
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py | 20
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py | 8
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py | 10
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py | 4
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py | 20
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py | 8
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py | 20
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py | 12
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py | 52
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py | 6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py | 10
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py | 4
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py | 4
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py | 6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py | 10
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py | 54
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py | 14
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py | 8
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py | 4
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py | 22
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_contextvars_propagation_test.py | 236
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py | 4
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py | 12
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py | 12
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py | 40
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py | 4
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dynamic_stubs_test.py | 238
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py | 4
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py | 6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py | 112
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py | 126
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py | 12
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py | 62
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py | 16
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py | 8
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py | 98
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py | 116
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py | 12
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py | 2
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_1_test.py | 464
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_2_test.py | 852
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_test_helpers.py | 834
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py | 136
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py | 42
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py | 32
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py | 68
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py | 14
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py | 70
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py | 4
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py | 10
-rwxr-xr-x  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem | 36
-rwxr-xr-x  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key | 52
-rwxr-xr-x  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem | 40
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py | 52
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py | 4
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_client.py | 310
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_servicer.py | 110
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py | 12
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker.py | 118
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker_servicer.py | 734
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/__init__.py | 26
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/channelz_servicer_test.py | 948
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/__init__.py | 26
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py | 564
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/__init__.py | 26
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/client.py | 122
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/local_interop_test.py | 268
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/methods.py | 912
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/server.py | 98
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/__init__.py | 26
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/reflection_servicer_test.py | 386
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/__init__.py | 26
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/grpc_status_test.py | 350
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_common.py | 198
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_constants.py | 32
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_metadata_test.py | 274
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py | 86
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py | 248
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/abort_test.py | 302
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/aio_rpc_error_test.py | 104
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/auth_context_test.py | 388
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py | 1584
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_argument_test.py | 352
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_ready_test.py | 138
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py | 384
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_stream_interceptor_test.py | 404
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_unary_interceptor_test.py | 1034
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_stream_interceptor_test.py | 790
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_unary_interceptor_test.py | 1398
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/close_channel_test.py | 276
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compatibility_test.py | 760
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compression_test.py | 392
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/connectivity_test.py | 224
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/context_peer_test.py | 130
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/done_callback_test.py | 248
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py | 24
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/metadata_test.py | 594
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/outside_init_test.py | 148
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/secure_call_test.py | 260
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_interceptor_test.py | 660
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py | 820
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/timeout_test.py | 356
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_connection_test.py | 318
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_ready_test.py | 292
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/__init__.py | 42
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client.py | 696
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/__init__.py | 26
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_leak_test.py | 196
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_simple_stubs_test.py | 830
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/ya.make | 24
127 files changed, 12562 insertions, 12562 deletions
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/.yandex_meta/licenses.list.txt b/contrib/libs/grpc/src/python/grpcio_tests/.yandex_meta/licenses.list.txt
index 47c501db06..e0080a7b1f 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/.yandex_meta/licenses.list.txt
+++ b/contrib/libs/grpc/src/python/grpcio_tests/.yandex_meta/licenses.list.txt
@@ -1,72 +1,72 @@
-====================Apache-2.0====================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-====================COPYRIGHT====================
- * Copyright 2015 gRPC authors.
-
-
-====================COPYRIGHT====================
- * Copyright 2016 gRPC authors.
-
-
-====================COPYRIGHT====================
- * Copyright 2017 gRPC authors.
-
-
-====================COPYRIGHT====================
- * Copyright 2018 gRPC authors.
-
-
-====================COPYRIGHT====================
- * Copyright 2020 gRPC authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2018 The gRPC Authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2019 The gRPC Authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2019 The gRPC authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2019 gRPC authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2019 the gRPC authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2020 The gRPC Authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2020 The gRPC authors.
-
-
-====================COPYRIGHT====================
-// Copyright 2018 The gRPC Authors
-
-
-====================COPYRIGHT====================
-// Copyright 2019 The gRPC Authors
-
-
-====================COPYRIGHT====================
-// Copyright 2020 The gRPC Authors
+====================Apache-2.0====================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+====================COPYRIGHT====================
+ * Copyright 2015 gRPC authors.
+
+
+====================COPYRIGHT====================
+ * Copyright 2016 gRPC authors.
+
+
+====================COPYRIGHT====================
+ * Copyright 2017 gRPC authors.
+
+
+====================COPYRIGHT====================
+ * Copyright 2018 gRPC authors.
+
+
+====================COPYRIGHT====================
+ * Copyright 2020 gRPC authors.
+
+
+====================COPYRIGHT====================
+# Copyright 2018 The gRPC Authors.
+
+
+====================COPYRIGHT====================
+# Copyright 2019 The gRPC Authors.
+
+
+====================COPYRIGHT====================
+# Copyright 2019 The gRPC authors.
+
+
+====================COPYRIGHT====================
+# Copyright 2019 gRPC authors.
+
+
+====================COPYRIGHT====================
+# Copyright 2019 the gRPC authors.
+
+
+====================COPYRIGHT====================
+# Copyright 2020 The gRPC Authors.
+
+
+====================COPYRIGHT====================
+# Copyright 2020 The gRPC authors.
+
+
+====================COPYRIGHT====================
+// Copyright 2018 The gRPC Authors
+
+
+====================COPYRIGHT====================
+// Copyright 2019 The gRPC Authors
+
+
+====================COPYRIGHT====================
+// Copyright 2020 The gRPC Authors
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/commands.py b/contrib/libs/grpc/src/python/grpcio_tests/commands.py
index 5fd84b9b2c..889b0bd9dc 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/commands.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/commands.py
@@ -106,37 +106,37 @@ class TestLite(setuptools.Command):
self.distribution.fetch_build_eggs(self.distribution.tests_require)
-class TestPy3Only(setuptools.Command):
- """Command to run tests for Python 3+ features.
-
- This does not include asyncio tests, which are housed in a separate
- directory.
- """
-
- description = 'run tests for py3+ features'
- user_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- pass
-
- def run(self):
- self._add_eggs_to_path()
- import tests
- loader = tests.Loader()
- loader.loadTestsFromNames(['tests_py3_only'])
- runner = tests.Runner()
- result = runner.run(loader.suite)
- if not result.wasSuccessful():
- sys.exit('Test failure')
-
- def _add_eggs_to_path(self):
- self.distribution.fetch_build_eggs(self.distribution.install_requires)
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
-
-
+class TestPy3Only(setuptools.Command):
+ """Command to run tests for Python 3+ features.
+
+ This does not include asyncio tests, which are housed in a separate
+ directory.
+ """
+
+ description = 'run tests for py3+ features'
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ self._add_eggs_to_path()
+ import tests
+ loader = tests.Loader()
+ loader.loadTestsFromNames(['tests_py3_only'])
+ runner = tests.Runner()
+ result = runner.run(loader.suite)
+ if not result.wasSuccessful():
+ sys.exit('Test failure')
+
+ def _add_eggs_to_path(self):
+ self.distribution.fetch_build_eggs(self.distribution.install_requires)
+ self.distribution.fetch_build_eggs(self.distribution.tests_require)
+
+
class TestAio(setuptools.Command):
"""Command to run aio tests without fetching or building anything."""
@@ -191,7 +191,7 @@ class TestGevent(setuptools.Command):
'unit._server_ssl_cert_config_test',
# TODO(https://github.com/grpc/grpc/issues/14901) enable this test
'protoc_plugin._python_plugin_test.PythonPluginTest',
- 'protoc_plugin._python_plugin_test.SimpleStubsPluginTest',
+ 'protoc_plugin._python_plugin_test.SimpleStubsPluginTest',
# Beta API is unsupported for gevent
'protoc_plugin.beta_python_plugin_test',
'unit.beta._beta_features_test',
@@ -220,17 +220,17 @@ class TestGevent(setuptools.Command):
'unit._cython._channel_test.ChannelTest.test_negative_deadline_connectivity',
# TODO(https://github.com/grpc/grpc/issues/15411) enable this test
'unit._local_credentials_test.LocalCredentialsTest',
- # TODO(https://github.com/grpc/grpc/issues/22020) LocalCredentials
- # aren't supported with custom io managers.
- 'unit._contextvars_propagation_test',
+ # TODO(https://github.com/grpc/grpc/issues/22020) LocalCredentials
+ # aren't supported with custom io managers.
+ 'unit._contextvars_propagation_test',
'testing._time_test.StrictRealTimeTest',
)
BANNED_WINDOWS_TESTS = (
# TODO(https://github.com/grpc/grpc/pull/15411) enable this test
- 'unit._dns_resolver_test.DNSResolverTest.test_connect_loopback',
- # TODO(https://github.com/grpc/grpc/pull/15411) enable this test
- 'unit._server_test.ServerTest.test_failed_port_binding_exception',
- )
+ 'unit._dns_resolver_test.DNSResolverTest.test_connect_loopback',
+ # TODO(https://github.com/grpc/grpc/pull/15411) enable this test
+ 'unit._server_test.ServerTest.test_failed_port_binding_exception',
+ )
description = 'run tests with gevent. Assumes grpc/gevent are installed'
user_options = []
@@ -269,18 +269,18 @@ class TestGevent(setuptools.Command):
class RunInterop(test.test):
description = 'run interop test client/server'
- user_options = [
- ('args=', None, 'pass-thru arguments for the client/server'),
- ('client', None, 'flag indicating to run the client'),
- ('server', None, 'flag indicating to run the server'),
- ('use-asyncio', None, 'flag indicating to run the asyncio stack')
- ]
+ user_options = [
+ ('args=', None, 'pass-thru arguments for the client/server'),
+ ('client', None, 'flag indicating to run the client'),
+ ('server', None, 'flag indicating to run the server'),
+ ('use-asyncio', None, 'flag indicating to run the asyncio stack')
+ ]
def initialize_options(self):
self.args = ''
self.client = False
self.server = False
- self.use_asyncio = False
+ self.use_asyncio = False
def finalize_options(self):
if self.client and self.server:
@@ -301,15 +301,15 @@ class RunInterop(test.test):
def run_server(self):
# We import here to ensure that our setuptools parent has had a chance to
# edit the Python system path.
- if self.use_asyncio:
- import asyncio
- from tests_aio.interop import server
- sys.argv[1:] = self.args.split()
- asyncio.get_event_loop().run_until_complete(server.serve())
- else:
- from tests.interop import server
- sys.argv[1:] = self.args.split()
- server.serve()
+ if self.use_asyncio:
+ import asyncio
+ from tests_aio.interop import server
+ sys.argv[1:] = self.args.split()
+ asyncio.get_event_loop().run_until_complete(server.serve())
+ else:
+ from tests.interop import server
+ sys.argv[1:] = self.args.split()
+ server.serve()
def run_client(self):
# We import here to ensure that our setuptools parent has had a chance to
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py b/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py
index 5f5aa21ddf..219b336a42 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py
@@ -14,4 +14,4 @@
# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_tests/grpc_version.py.template`!!!
-VERSION = '1.33.2'
+VERSION = '1.33.2'
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/setup.py b/contrib/libs/grpc/src/python/grpcio_tests/setup.py
index 8a55511d42..87cccda425 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/setup.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/setup.py
@@ -13,7 +13,7 @@
# limitations under the License.
"""A setup module for the gRPC Python package."""
-import multiprocessing
+import multiprocessing
import os
import os.path
import sys
@@ -38,16 +38,16 @@ PACKAGE_DIRECTORIES = {
}
INSTALL_REQUIRES = (
- 'coverage>=4.0', 'grpcio>={version}'.format(version=grpc_version.VERSION),
+ 'coverage>=4.0', 'grpcio>={version}'.format(version=grpc_version.VERSION),
'grpcio-channelz>={version}'.format(version=grpc_version.VERSION),
'grpcio-status>={version}'.format(version=grpc_version.VERSION),
'grpcio-tools>={version}'.format(version=grpc_version.VERSION),
'grpcio-health-checking>={version}'.format(version=grpc_version.VERSION),
- 'oauth2client>=1.4.7', 'protobuf>=3.6.0', 'six>=1.10',
- 'google-auth>=1.17.2', 'requests>=2.14.2')
+ 'oauth2client>=1.4.7', 'protobuf>=3.6.0', 'six>=1.10',
+ 'google-auth>=1.17.2', 'requests>=2.14.2')
if not PY3:
- INSTALL_REQUIRES += ('futures>=2.2.0', 'enum34>=1.0.4')
+ INSTALL_REQUIRES += ('futures>=2.2.0', 'enum34>=1.0.4')
COMMAND_CLASS = {
# Run `preprocess` *before* doing any packaging!
@@ -59,7 +59,7 @@ COMMAND_CLASS = {
'test_lite': commands.TestLite,
'test_gevent': commands.TestGevent,
'test_aio': commands.TestAio,
- 'test_py3_only': commands.TestPy3Only,
+ 'test_py3_only': commands.TestPy3Only,
}
PACKAGE_DATA = {
@@ -68,7 +68,7 @@ PACKAGE_DATA = {
'credentials/server1.key',
'credentials/server1.pem',
],
- 'tests.protoc_plugin.protos.invocation_testing': ['same.proto',],
+ 'tests.protoc_plugin.protos.invocation_testing': ['same.proto',],
'tests.protoc_plugin.protos.invocation_testing.split_messages': [
'messages.proto',
],
@@ -94,19 +94,19 @@ TESTS_REQUIRE = INSTALL_REQUIRES
PACKAGES = setuptools.find_packages('.')
-if __name__ == "__main__":
- multiprocessing.freeze_support()
- setuptools.setup(
- name='grpcio-tests',
- version=grpc_version.VERSION,
- license=LICENSE,
- packages=list(PACKAGES),
- package_dir=PACKAGE_DIRECTORIES,
- package_data=PACKAGE_DATA,
- install_requires=INSTALL_REQUIRES,
- cmdclass=COMMAND_CLASS,
- tests_require=TESTS_REQUIRE,
- test_suite=TEST_SUITE,
- test_loader=TEST_LOADER,
- test_runner=TEST_RUNNER,
- )
+if __name__ == "__main__":
+ multiprocessing.freeze_support()
+ setuptools.setup(
+ name='grpcio-tests',
+ version=grpc_version.VERSION,
+ license=LICENSE,
+ packages=list(PACKAGES),
+ package_dir=PACKAGE_DIRECTORIES,
+ package_data=PACKAGE_DATA,
+ install_requires=INSTALL_REQUIRES,
+ cmdclass=COMMAND_CLASS,
+ tests_require=TESTS_REQUIRE,
+ test_suite=TEST_SUITE,
+ test_loader=TEST_LOADER,
+ test_runner=TEST_RUNNER,
+ )
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py
index 3d8ffa5e17..389d5f4f96 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py
@@ -106,13 +106,13 @@ class CaseResult(
stderr = self.stderr if stderr is None else stderr
skip_reason = self.skip_reason if skip_reason is None else skip_reason
traceback = self.traceback if traceback is None else traceback
- return CaseResult(id=self.id,
- name=name,
- kind=kind,
- stdout=stdout,
- stderr=stderr,
- skip_reason=skip_reason,
- traceback=traceback)
+ return CaseResult(id=self.id,
+ name=name,
+ kind=kind,
+ stdout=stdout,
+ stderr=stderr,
+ skip_reason=skip_reason,
+ traceback=traceback)
class AugmentedResult(unittest.TestResult):
@@ -147,9 +147,9 @@ class AugmentedResult(unittest.TestResult):
"""See unittest.TestResult.startTest."""
super(AugmentedResult, self).startTest(test)
case_id = self.id_map(test)
- self.cases[case_id] = CaseResult(id=case_id,
- name=test.id(),
- kind=CaseResult.Kind.RUNNING)
+ self.cases[case_id] = CaseResult(id=case_id,
+ name=test.id(),
+ kind=CaseResult.Kind.RUNNING)
def addError(self, test, err):
"""See unittest.TestResult.addError."""
@@ -275,8 +275,8 @@ class TerminalResult(CoverageResult):
def startTestRun(self):
"""See unittest.TestResult.startTestRun."""
super(TerminalResult, self).startTestRun()
- self.out.write(_Colors.HEADER + 'Testing gRPC Python...\n' +
- _Colors.END)
+ self.out.write(_Colors.HEADER + 'Testing gRPC Python...\n' +
+ _Colors.END)
def stopTestRun(self):
"""See unittest.TestResult.stopTestRun."""
@@ -287,43 +287,43 @@ class TerminalResult(CoverageResult):
def addError(self, test, err):
"""See unittest.TestResult.addError."""
super(TerminalResult, self).addError(test, err)
- self.out.write(_Colors.FAIL + 'ERROR {}\n'.format(test.id()) +
- _Colors.END)
+ self.out.write(_Colors.FAIL + 'ERROR {}\n'.format(test.id()) +
+ _Colors.END)
self.out.flush()
def addFailure(self, test, err):
"""See unittest.TestResult.addFailure."""
super(TerminalResult, self).addFailure(test, err)
- self.out.write(_Colors.FAIL + 'FAILURE {}\n'.format(test.id()) +
- _Colors.END)
+ self.out.write(_Colors.FAIL + 'FAILURE {}\n'.format(test.id()) +
+ _Colors.END)
self.out.flush()
def addSuccess(self, test):
"""See unittest.TestResult.addSuccess."""
super(TerminalResult, self).addSuccess(test)
- self.out.write(_Colors.OK + 'SUCCESS {}\n'.format(test.id()) +
- _Colors.END)
+ self.out.write(_Colors.OK + 'SUCCESS {}\n'.format(test.id()) +
+ _Colors.END)
self.out.flush()
def addSkip(self, test, reason):
"""See unittest.TestResult.addSkip."""
super(TerminalResult, self).addSkip(test, reason)
- self.out.write(_Colors.INFO + 'SKIP {}\n'.format(test.id()) +
- _Colors.END)
+ self.out.write(_Colors.INFO + 'SKIP {}\n'.format(test.id()) +
+ _Colors.END)
self.out.flush()
def addExpectedFailure(self, test, err):
"""See unittest.TestResult.addExpectedFailure."""
super(TerminalResult, self).addExpectedFailure(test, err)
- self.out.write(_Colors.INFO + 'FAILURE_OK {}\n'.format(test.id()) +
- _Colors.END)
+ self.out.write(_Colors.INFO + 'FAILURE_OK {}\n'.format(test.id()) +
+ _Colors.END)
self.out.flush()
def addUnexpectedSuccess(self, test):
"""See unittest.TestResult.addUnexpectedSuccess."""
super(TerminalResult, self).addUnexpectedSuccess(test)
- self.out.write(_Colors.INFO + 'UNEXPECTED_OK {}\n'.format(test.id()) +
- _Colors.END)
+ self.out.write(_Colors.INFO + 'UNEXPECTED_OK {}\n'.format(test.id()) +
+ _Colors.END)
self.out.flush()
@@ -372,11 +372,11 @@ def summary(result):
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.SKIP))
expected_failures = list(
- result.augmented_results(lambda case_result: case_result.kind is
- CaseResult.Kind.EXPECTED_FAILURE))
+ result.augmented_results(lambda case_result: case_result.kind is
+ CaseResult.Kind.EXPECTED_FAILURE))
unexpected_successes = list(
- result.augmented_results(lambda case_result: case_result.kind is
- CaseResult.Kind.UNEXPECTED_SUCCESS))
+ result.augmented_results(lambda case_result: case_result.kind is
+ CaseResult.Kind.UNEXPECTED_SUCCESS))
running_names = [case.name for case in running]
finished_count = (len(failures) + len(errors) + len(successes) +
len(expected_failures) + len(unexpected_successes))
@@ -395,17 +395,17 @@ def summary(result):
expected_fail=len(expected_failures),
unexpected_successful=len(unexpected_successes),
interrupted=str(running_names)))
- tracebacks = '\n\n'.join([
- (_Colors.FAIL + '{test_name}' + _Colors.END + '\n' + _Colors.BOLD +
- 'traceback:' + _Colors.END + '\n' + '{traceback}\n' + _Colors.BOLD +
- 'stdout:' + _Colors.END + '\n' + '{stdout}\n' + _Colors.BOLD +
- 'stderr:' + _Colors.END + '\n' + '{stderr}\n').format(
- test_name=result.name,
- traceback=_traceback_string(*result.traceback),
- stdout=result.stdout,
- stderr=result.stderr)
- for result in itertools.chain(failures, errors)
- ])
+ tracebacks = '\n\n'.join([
+ (_Colors.FAIL + '{test_name}' + _Colors.END + '\n' + _Colors.BOLD +
+ 'traceback:' + _Colors.END + '\n' + '{traceback}\n' + _Colors.BOLD +
+ 'stdout:' + _Colors.END + '\n' + '{stdout}\n' + _Colors.BOLD +
+ 'stderr:' + _Colors.END + '\n' + '{stderr}\n').format(
+ test_name=result.name,
+ traceback=_traceback_string(*result.traceback),
+ stdout=result.stdout,
+ stderr=result.stderr)
+ for result in itertools.chain(failures, errors)
+ ])
notes = 'Unexpected successes: {}\n'.format(
[result.name for result in unexpected_successes])
return statistics + '\nErrors/Failures: \n' + tracebacks + '\n' + notes
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py
index d49c79e8aa..39da0399b0 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py
@@ -114,8 +114,8 @@ class AugmentedCase(collections.namedtuple('AugmentedCase', ['case', 'id'])):
return super(cls, AugmentedCase).__new__(cls, case, id)
-# NOTE(lidiz) This complex wrapper is not triggering setUpClass nor
-# tearDownClass. Do not use those methods, or fix this wrapper!
+# NOTE(lidiz) This complex wrapper is not triggering setUpClass nor
+# tearDownClass. Do not use those methods, or fix this wrapper!
class Runner(object):
def __init__(self, dedicated_threads=False):
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py
index fa29ba6e34..3aa92f37fb 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py
@@ -33,13 +33,13 @@ class SanityTest(unittest.TestCase):
loader = tests.Loader()
loader.loadTestsFromNames([self.TEST_PKG_MODULE_NAME])
test_suite_names = sorted({
- test_case_class.id().rsplit('.', 1)[0] for test_case_class in
- tests._loader.iterate_suite_cases(loader.suite)
+ test_case_class.id().rsplit('.', 1)[0] for test_case_class in
+ tests._loader.iterate_suite_cases(loader.suite)
})
tests_json_string = pkgutil.get_data(self.TEST_PKG_PATH, 'tests.json')
- tests_json = json.loads(
- tests_json_string.decode() if six.PY3 else tests_json_string)
+ tests_json = json.loads(
+ tests_json_string.decode() if six.PY3 else tests_json_string)
self.assertSequenceEqual(tests_json, test_suite_names)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py
index 45b5958035..784307ae00 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py
@@ -70,9 +70,9 @@ class _ChannelServerPair(object):
def __init__(self):
# Server will enable channelz service
- self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=3),
- options=_DISABLE_REUSE_PORT +
- _ENABLE_CHANNELZ)
+ self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=3),
+ options=_DISABLE_REUSE_PORT +
+ _ENABLE_CHANNELZ)
port = self.server.add_insecure_port('[::]:0')
self.server.add_generic_rpc_handlers((_GenericHandler(),))
self.server.start()
@@ -128,9 +128,9 @@ class ChannelzServicerTest(unittest.TestCase):
self._pairs = []
# This server is for Channelz info fetching only
# It self should not enable Channelz
- self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=3),
- options=_DISABLE_REUSE_PORT +
- _DISABLE_CHANNELZ)
+ self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=3),
+ options=_DISABLE_REUSE_PORT +
+ _DISABLE_CHANNELZ)
port = self._server.add_insecure_port('[::]:0')
channelz.add_channelz_servicer(self._server)
self._server.start()
@@ -264,8 +264,8 @@ class ChannelzServicerTest(unittest.TestCase):
self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0)
gsc_resp = self._channelz_stub.GetSubchannel(
channelz_pb2.GetSubchannelRequest(
- subchannel_id=gtc_resp.channel[i].subchannel_ref[0].
- subchannel_id))
+ subchannel_id=gtc_resp.channel[i].subchannel_ref[0].
+ subchannel_id))
self.assertEqual(gtc_resp.channel[i].data.calls_started,
gsc_resp.subchannel.data.calls_started)
self.assertEqual(gtc_resp.channel[i].data.calls_succeeded,
@@ -332,8 +332,8 @@ class ChannelzServicerTest(unittest.TestCase):
self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0)
gsc_resp = self._channelz_stub.GetSubchannel(
channelz_pb2.GetSubchannelRequest(
- subchannel_id=gtc_resp.channel[i].subchannel_ref[0].
- subchannel_id))
+ subchannel_id=gtc_resp.channel[i].subchannel_ref[0].
+ subchannel_id))
self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
gs_resp = self._channelz_stub.GetSocket(
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py
index b6ed81eaaf..e2eff257fa 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py
@@ -125,9 +125,9 @@ class ForkInteropTest(unittest.TestCase):
def _verifyTestCase(self, test_case):
script = _CLIENT_FORK_SCRIPT_TEMPLATE % (test_case.name, self._port)
- process = subprocess.Popen([sys.executable, '-c', script],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ process = subprocess.Popen([sys.executable, '-c', script],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
timer = threading.Timer(_SUBPROCESS_TIMEOUT_S, process.kill)
try:
timer.start()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py
index a086682149..852e6da4d6 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py
@@ -30,22 +30,22 @@ def _args():
raise argparse.ArgumentTypeError('Only true/false allowed')
parser = argparse.ArgumentParser()
- parser.add_argument('--server_host',
- default="localhost",
- type=str,
- help='the host to which to connect')
- parser.add_argument('--server_port',
- type=int,
- required=True,
- help='the port to which to connect')
- parser.add_argument('--test_case',
- default='large_unary',
- type=str,
- help='the test case to execute')
- parser.add_argument('--use_tls',
- default=False,
- type=parse_bool,
- help='require a secure connection')
+ parser.add_argument('--server_host',
+ default="localhost",
+ type=str,
+ help='the host to which to connect')
+ parser.add_argument('--server_port',
+ type=int,
+ required=True,
+ help='the port to which to connect')
+ parser.add_argument('--test_case',
+ default='large_unary',
+ type=str,
+ help='the test case to execute')
+ parser.add_argument('--use_tls',
+ default=False,
+ type=parse_bool,
+ help='require a secure connection')
return parser.parse_args()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py
index 040dbfa4c2..2123c69916 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py
@@ -298,8 +298,8 @@ def _ping_pong_with_child_processes_after_first_response(
request_payload_sizes):
request = messages_pb2.StreamingOutputCallRequest(
response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(
- size=response_size),),
+ response_parameters=(messages_pb2.ResponseParameters(
+ size=response_size),),
payload=messages_pb2.Payload(body=b'\x00' * payload_size))
pipe.add(request)
if first_message_received:
@@ -338,8 +338,8 @@ def _in_progress_bidi_continue_call(channel):
inherited_code = parent_bidi_call.code()
inherited_details = parent_bidi_call.details()
if inherited_code != grpc.StatusCode.CANCELLED:
- raise ValueError('Expected inherited code CANCELLED, got %s' %
- inherited_code)
+ raise ValueError('Expected inherited code CANCELLED, got %s' %
+ inherited_code)
if inherited_details != 'Channel closed due to fork':
raise ValueError(
'Expected inherited details Channel closed due to fork, got %s'
@@ -347,10 +347,10 @@ def _in_progress_bidi_continue_call(channel):
# Don't run child_target after closing the parent call, as the call may have
# received a status from the server before fork occurs.
- _ping_pong_with_child_processes_after_first_response(channel,
- None,
- child_target,
- run_after_close=False)
+ _ping_pong_with_child_processes_after_first_response(channel,
+ None,
+ child_target,
+ run_after_close=False)
def _in_progress_bidi_same_channel_async_call(channel):
@@ -446,6 +446,6 @@ class TestCase(enum.Enum):
elif self is TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL:
_in_progress_bidi_new_channel_blocking_call(channel, args)
else:
- raise NotImplementedError('Test case "%s" not implemented!' %
- self.name)
+ raise NotImplementedError('Test case "%s" not implemented!' %
+ self.name)
channel.close()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py
index 17efc38e0d..01345aaca0 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py
@@ -73,8 +73,8 @@ class BaseWatchTests(object):
request = health_pb2.HealthCheckRequest(service='')
response_queue = queue.Queue()
rendezvous = self._stub.Watch(request)
- thread = threading.Thread(target=_consume_responses,
- args=(rendezvous, response_queue))
+ thread = threading.Thread(target=_consume_responses,
+ args=(rendezvous, response_queue))
thread.start()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
@@ -92,8 +92,8 @@ class BaseWatchTests(object):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
response_queue = queue.Queue()
rendezvous = self._stub.Watch(request)
- thread = threading.Thread(target=_consume_responses,
- args=(rendezvous, response_queue))
+ thread = threading.Thread(target=_consume_responses,
+ args=(rendezvous, response_queue))
thread.start()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
@@ -120,8 +120,8 @@ class BaseWatchTests(object):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
response_queue = queue.Queue()
rendezvous = self._stub.Watch(request)
- thread = threading.Thread(target=_consume_responses,
- args=(rendezvous, response_queue))
+ thread = threading.Thread(target=_consume_responses,
+ args=(rendezvous, response_queue))
thread.start()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
@@ -143,10 +143,10 @@ class BaseWatchTests(object):
response_queue2 = queue.Queue()
rendezvous1 = self._stub.Watch(request)
rendezvous2 = self._stub.Watch(request)
- thread1 = threading.Thread(target=_consume_responses,
- args=(rendezvous1, response_queue1))
- thread2 = threading.Thread(target=_consume_responses,
- args=(rendezvous2, response_queue2))
+ thread1 = threading.Thread(target=_consume_responses,
+ args=(rendezvous1, response_queue1))
+ thread2 = threading.Thread(target=_consume_responses,
+ args=(rendezvous2, response_queue2))
thread1.start()
thread2.start()
@@ -182,8 +182,8 @@ class BaseWatchTests(object):
request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
response_queue = queue.Queue()
rendezvous = self._stub.Watch(request)
- thread = threading.Thread(target=_consume_responses,
- args=(rendezvous, response_queue))
+ thread = threading.Thread(target=_consume_responses,
+ args=(rendezvous, response_queue))
thread.start()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
@@ -197,8 +197,8 @@ class BaseWatchTests(object):
# Wait, if necessary, for serving thread to process client cancellation
timeout = time.time() + test_constants.TIME_ALLOWANCE
- while (time.time() < timeout and
- self._servicer._send_response_callbacks[_WATCH_SERVICE]):
+ while (time.time() < timeout and
+ self._servicer._send_response_callbacks[_WATCH_SERVICE]):
time.sleep(1)
self.assertFalse(
self._servicer._send_response_callbacks[_WATCH_SERVICE],
@@ -209,8 +209,8 @@ class BaseWatchTests(object):
request = health_pb2.HealthCheckRequest(service='')
response_queue = queue.Queue()
rendezvous = self._stub.Watch(request)
- thread = threading.Thread(target=_consume_responses,
- args=(rendezvous, response_queue))
+ thread = threading.Thread(target=_consume_responses,
+ args=(rendezvous, response_queue))
thread.start()
response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
@@ -234,9 +234,9 @@ class HealthServicerTest(BaseWatchTests.WatchTests):
def setUp(self):
self._thread_pool = thread_pool.RecordingThreadPool(max_workers=None)
- super(HealthServicerTest,
- self).start_server(non_blocking=True,
- thread_pool=self._thread_pool)
+ super(HealthServicerTest,
+ self).start_server(non_blocking=True,
+ thread_pool=self._thread_pool)
def test_check_empty_service(self):
request = health_pb2.HealthCheckRequest()
@@ -273,8 +273,8 @@ class HealthServicerTest(BaseWatchTests.WatchTests):
class HealthServicerBackwardsCompatibleWatchTest(BaseWatchTests.WatchTests):
def setUp(self):
- super(HealthServicerBackwardsCompatibleWatchTest,
- self).start_server(non_blocking=False, thread_pool=None)
+ super(HealthServicerBackwardsCompatibleWatchTest,
+ self).start_server(non_blocking=False, thread_pool=None)
if __name__ == '__main__':
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py
index 43d086c095..0753872b5e 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py
@@ -32,14 +32,14 @@ def _validate_payload_type_and_length(response, expected_type, expected_length):
def _expect_status_code(call, expected_code):
if call.code() != expected_code:
- raise ValueError('expected code %s, got %s' %
- (expected_code, call.code()))
+ raise ValueError('expected code %s, got %s' %
+ (expected_code, call.code()))
def _expect_status_details(call, expected_details):
if call.details() != expected_details:
- raise ValueError('expected message %s, got %s' %
- (expected_details, call.details()))
+ raise ValueError('expected message %s, got %s' %
+ (expected_details, call.details()))
def _validate_status_code_and_details(call, expected_code, expected_details):
@@ -102,9 +102,9 @@ def _max_streams(stub):
for _ in range(15):
futures.append(stub.UnaryCall.future(_SIMPLE_REQUEST))
for future in futures:
- _validate_payload_type_and_length(future.result(),
- messages_pb2.COMPRESSABLE,
- _RESPONSE_SIZE)
+ _validate_payload_type_and_length(future.result(),
+ messages_pb2.COMPRESSABLE,
+ _RESPONSE_SIZE)
def _run_test_case(test_case, stub):
@@ -126,18 +126,18 @@ def _run_test_case(test_case, stub):
def _args():
parser = argparse.ArgumentParser()
- parser.add_argument('--server_host',
- help='the host to which to connect',
- type=str,
- default="127.0.0.1")
- parser.add_argument('--server_port',
- help='the port to which to connect',
- type=int,
- default="8080")
- parser.add_argument('--test_case',
- help='the test case to execute',
- type=str,
- default="goaway")
+ parser.add_argument('--server_host',
+ help='the host to which to connect',
+ type=str,
+ default="127.0.0.1")
+ parser.add_argument('--server_port',
+ help='the port to which to connect',
+ type=int,
+ default="8080")
+ parser.add_argument('--test_case',
+ help='the test case to execute',
+ type=str,
+ default="goaway")
return parser.parse_args()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py
index 73d2e83395..bf1f1b118b 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py
@@ -38,13 +38,13 @@ class SecureIntraopTest(_intraop_test_case.IntraopTestCase, unittest.TestCase):
resources.certificate_chain())]))
self.server.start()
self.stub = test_pb2_grpc.TestServiceStub(
- grpc.secure_channel(
- 'localhost:{}'.format(port),
- grpc.ssl_channel_credentials(
- resources.test_root_certificates()), ((
- 'grpc.ssl_target_name_override',
- _SERVER_HOST_OVERRIDE,
- ),)))
+ grpc.secure_channel(
+ 'localhost:{}'.format(port),
+ grpc.ssl_channel_credentials(
+ resources.test_root_certificates()), ((
+ 'grpc.ssl_target_name_override',
+ _SERVER_HOST_OVERRIDE,
+ ),)))
def tearDown(self):
self.server.stop(None)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py
index 689054480f..4d35f7ca32 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py
@@ -25,135 +25,135 @@ from tests.interop import methods
from tests.interop import resources
-def parse_interop_client_args():
+def parse_interop_client_args():
parser = argparse.ArgumentParser()
- parser.add_argument('--server_host',
- default="localhost",
- type=str,
- help='the host to which to connect')
- parser.add_argument('--server_port',
- type=int,
- required=True,
- help='the port to which to connect')
- parser.add_argument('--test_case',
- default='large_unary',
- type=str,
- help='the test case to execute')
- parser.add_argument('--use_tls',
- default=False,
- type=resources.parse_bool,
- help='require a secure connection')
- parser.add_argument('--use_alts',
- default=False,
- type=resources.parse_bool,
- help='require an ALTS secure connection')
- parser.add_argument('--use_test_ca',
- default=False,
- type=resources.parse_bool,
- help='replace platform root CAs with ca.pem')
- parser.add_argument('--custom_credentials_type',
- choices=["compute_engine_channel_creds"],
- default=None,
- help='use google default credentials')
- parser.add_argument('--server_host_override',
- type=str,
- help='the server host to which to claim to connect')
- parser.add_argument('--oauth_scope',
- type=str,
- help='scope for OAuth tokens')
- parser.add_argument('--default_service_account',
- type=str,
- help='email address of the default service account')
- parser.add_argument(
- "--grpc_test_use_grpclb_with_child_policy",
- type=str,
- help=(
- "If non-empty, set a static service config on channels created by "
- + "grpc::CreateTestChannel, that configures the grpclb LB policy " +
- "with a child policy being the value of this flag (e.g. round_robin "
- + "or pick_first)."))
+ parser.add_argument('--server_host',
+ default="localhost",
+ type=str,
+ help='the host to which to connect')
+ parser.add_argument('--server_port',
+ type=int,
+ required=True,
+ help='the port to which to connect')
+ parser.add_argument('--test_case',
+ default='large_unary',
+ type=str,
+ help='the test case to execute')
+ parser.add_argument('--use_tls',
+ default=False,
+ type=resources.parse_bool,
+ help='require a secure connection')
+ parser.add_argument('--use_alts',
+ default=False,
+ type=resources.parse_bool,
+ help='require an ALTS secure connection')
+ parser.add_argument('--use_test_ca',
+ default=False,
+ type=resources.parse_bool,
+ help='replace platform root CAs with ca.pem')
+ parser.add_argument('--custom_credentials_type',
+ choices=["compute_engine_channel_creds"],
+ default=None,
+ help='use google default credentials')
+ parser.add_argument('--server_host_override',
+ type=str,
+ help='the server host to which to claim to connect')
+ parser.add_argument('--oauth_scope',
+ type=str,
+ help='scope for OAuth tokens')
+ parser.add_argument('--default_service_account',
+ type=str,
+ help='email address of the default service account')
+ parser.add_argument(
+ "--grpc_test_use_grpclb_with_child_policy",
+ type=str,
+ help=(
+ "If non-empty, set a static service config on channels created by "
+ + "grpc::CreateTestChannel, that configures the grpclb LB policy " +
+ "with a child policy being the value of this flag (e.g. round_robin "
+ + "or pick_first)."))
return parser.parse_args()
-def _create_call_credentials(args):
+def _create_call_credentials(args):
if args.test_case == 'oauth2_auth_token':
google_credentials, unused_project_id = google_auth.default(
scopes=[args.oauth_scope])
google_credentials.refresh(google_auth.transport.requests.Request())
- return grpc.access_token_call_credentials(google_credentials.token)
+ return grpc.access_token_call_credentials(google_credentials.token)
elif args.test_case == 'compute_engine_creds':
google_credentials, unused_project_id = google_auth.default(
scopes=[args.oauth_scope])
- return grpc.metadata_call_credentials(
+ return grpc.metadata_call_credentials(
google_auth.transport.grpc.AuthMetadataPlugin(
credentials=google_credentials,
request=google_auth.transport.requests.Request()))
elif args.test_case == 'jwt_token_creds':
google_credentials = google_auth_jwt.OnDemandCredentials.from_service_account_file(
os.environ[google_auth.environment_vars.CREDENTIALS])
- return grpc.metadata_call_credentials(
+ return grpc.metadata_call_credentials(
google_auth.transport.grpc.AuthMetadataPlugin(
credentials=google_credentials, request=None))
else:
- return None
-
-
-def get_secure_channel_parameters(args):
- call_credentials = _create_call_credentials(args)
-
- channel_opts = ()
- if args.grpc_test_use_grpclb_with_child_policy:
- channel_opts += ((
- "grpc.service_config",
- '{"loadBalancingConfig": [{"grpclb": {"childPolicy": [{"%s": {}}]}}]}'
- % args.grpc_test_use_grpclb_with_child_policy),)
- if args.custom_credentials_type is not None:
- if args.custom_credentials_type == "compute_engine_channel_creds":
- assert call_credentials is None
- google_credentials, unused_project_id = google_auth.default(
- scopes=[args.oauth_scope])
- call_creds = grpc.metadata_call_credentials(
- google_auth.transport.grpc.AuthMetadataPlugin(
- credentials=google_credentials,
- request=google_auth.transport.requests.Request()))
- channel_credentials = grpc.compute_engine_channel_credentials(
- call_creds)
- else:
- raise ValueError("Unknown credentials type '{}'".format(
- args.custom_credentials_type))
- elif args.use_tls:
- if args.use_test_ca:
- root_certificates = resources.test_root_certificates()
- else:
- root_certificates = None # will load default roots.
-
- channel_credentials = grpc.ssl_channel_credentials(root_certificates)
- if call_credentials is not None:
- channel_credentials = grpc.composite_channel_credentials(
- channel_credentials, call_credentials)
-
- if args.server_host_override:
- channel_opts += ((
- 'grpc.ssl_target_name_override',
- args.server_host_override,
- ),)
- elif args.use_alts:
- channel_credentials = grpc.alts_channel_credentials()
-
- return channel_credentials, channel_opts
-
-
-def _create_channel(args):
- target = '{}:{}'.format(args.server_host, args.server_port)
-
- if args.use_tls or args.use_alts or args.custom_credentials_type is not None:
- channel_credentials, options = get_secure_channel_parameters(args)
- return grpc.secure_channel(target, channel_credentials, options)
- else:
- return grpc.insecure_channel(target)
-
-
-def create_stub(channel, args):
+ return None
+
+
+def get_secure_channel_parameters(args):
+ call_credentials = _create_call_credentials(args)
+
+ channel_opts = ()
+ if args.grpc_test_use_grpclb_with_child_policy:
+ channel_opts += ((
+ "grpc.service_config",
+ '{"loadBalancingConfig": [{"grpclb": {"childPolicy": [{"%s": {}}]}}]}'
+ % args.grpc_test_use_grpclb_with_child_policy),)
+ if args.custom_credentials_type is not None:
+ if args.custom_credentials_type == "compute_engine_channel_creds":
+ assert call_credentials is None
+ google_credentials, unused_project_id = google_auth.default(
+ scopes=[args.oauth_scope])
+ call_creds = grpc.metadata_call_credentials(
+ google_auth.transport.grpc.AuthMetadataPlugin(
+ credentials=google_credentials,
+ request=google_auth.transport.requests.Request()))
+ channel_credentials = grpc.compute_engine_channel_credentials(
+ call_creds)
+ else:
+ raise ValueError("Unknown credentials type '{}'".format(
+ args.custom_credentials_type))
+ elif args.use_tls:
+ if args.use_test_ca:
+ root_certificates = resources.test_root_certificates()
+ else:
+ root_certificates = None # will load default roots.
+
+ channel_credentials = grpc.ssl_channel_credentials(root_certificates)
+ if call_credentials is not None:
+ channel_credentials = grpc.composite_channel_credentials(
+ channel_credentials, call_credentials)
+
+ if args.server_host_override:
+ channel_opts += ((
+ 'grpc.ssl_target_name_override',
+ args.server_host_override,
+ ),)
+ elif args.use_alts:
+ channel_credentials = grpc.alts_channel_credentials()
+
+ return channel_credentials, channel_opts
+
+
+def _create_channel(args):
+ target = '{}:{}'.format(args.server_host, args.server_port)
+
+ if args.use_tls or args.use_alts or args.custom_credentials_type is not None:
+ channel_credentials, options = get_secure_channel_parameters(args)
+ return grpc.secure_channel(target, channel_credentials, options)
+ else:
+ return grpc.insecure_channel(target)
+
+
+def create_stub(channel, args):
if args.test_case == "unimplemented_service":
return test_pb2_grpc.UnimplementedServiceStub(channel)
else:
@@ -169,9 +169,9 @@ def _test_case_from_arg(test_case_arg):
def test_interoperability():
- args = parse_interop_client_args()
- channel = _create_channel(args)
- stub = create_stub(channel, args)
+ args = parse_interop_client_args()
+ channel = _create_channel(args)
+ stub = create_stub(channel, args)
test_case = _test_case_from_arg(args.test_case)
test_case.test_interoperability(stub, args)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py
index 880d915fb7..44a1c38bb9 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py
@@ -42,14 +42,14 @@ _TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
def _expect_status_code(call, expected_code):
if call.code() != expected_code:
- raise ValueError('expected code %s, got %s' %
- (expected_code, call.code()))
+ raise ValueError('expected code %s, got %s' %
+ (expected_code, call.code()))
def _expect_status_details(call, expected_details):
if call.details() != expected_details:
- raise ValueError('expected message %s, got %s' %
- (expected_details, call.details()))
+ raise ValueError('expected message %s, got %s' %
+ (expected_details, call.details()))
def _validate_status_code_and_details(call, expected_code, expected_details):
@@ -75,8 +75,8 @@ def _large_unary_common_behavior(stub, fill_username, fill_oauth_scope,
payload=messages_pb2.Payload(body=b'\x00' * 271828),
fill_username=fill_username,
fill_oauth_scope=fill_oauth_scope)
- response_future = stub.UnaryCall.future(request,
- credentials=call_credentials)
+ response_future = stub.UnaryCall.future(request,
+ credentials=call_credentials)
response = response_future.result()
_validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size)
return response
@@ -85,8 +85,8 @@ def _large_unary_common_behavior(stub, fill_username, fill_oauth_scope,
def _empty_unary(stub):
response = stub.EmptyCall(empty_pb2.Empty())
if not isinstance(response, empty_pb2.Empty):
- raise TypeError('response is of type "%s", not empty_pb2.Empty!' %
- type(response))
+ raise TypeError('response is of type "%s", not empty_pb2.Empty!' %
+ type(response))
def _large_unary(stub):
@@ -106,8 +106,8 @@ def _client_streaming(stub):
for payload in payloads)
response = stub.StreamingInputCall(requests)
if response.aggregated_payload_size != 74922:
- raise ValueError('incorrect size %d!' %
- response.aggregated_payload_size)
+ raise ValueError('incorrect size %d!' %
+ response.aggregated_payload_size)
def _server_streaming(stub):
@@ -191,14 +191,14 @@ def _ping_pong(stub):
request_payload_sizes):
request = messages_pb2.StreamingOutputCallRequest(
response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(
- size=response_size),),
+ response_parameters=(messages_pb2.ResponseParameters(
+ size=response_size),),
payload=messages_pb2.Payload(body=b'\x00' * payload_size))
pipe.add(request)
response = next(response_iterator)
- _validate_payload_type_and_length(response,
- messages_pb2.COMPRESSABLE,
- response_size)
+ _validate_payload_type_and_length(response,
+ messages_pb2.COMPRESSABLE,
+ response_size)
def _cancel_after_begin(stub):
@@ -231,8 +231,8 @@ def _cancel_after_first_response(stub):
payload_size = request_payload_sizes[0]
request = messages_pb2.StreamingOutputCallRequest(
response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(
- size=response_size),),
+ response_parameters=(messages_pb2.ResponseParameters(
+ size=response_size),),
payload=messages_pb2.Payload(body=b'\x00' * payload_size))
pipe.add(request)
response = next(response_iterator)
@@ -301,10 +301,10 @@ def _status_code_and_message(stub):
payload=messages_pb2.Payload(body=b'\x00'),
response_status=messages_pb2.EchoStatus(code=code, message=details))
pipe.add(request) # sends the initial request.
- try:
- next(response_iterator)
- except grpc.RpcError as rpc_error:
- assert rpc_error.code() == status
+ try:
+ next(response_iterator)
+ except grpc.RpcError as rpc_error:
+ assert rpc_error.code() == status
# Dropping out of with block closes the pipe
_validate_status_code_and_details(response_iterator, status, details)
@@ -371,8 +371,8 @@ def _oauth2_auth_token(stub, args):
wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
response = _large_unary_common_behavior(stub, True, True, None)
if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
+ raise ValueError('expected username %s, got %s' %
+ (wanted_email, response.username))
if args.oauth_scope.find(response.oauth_scope) == -1:
raise ValueError(
'expected to find oauth scope "{}" in received "{}"'.format(
@@ -384,8 +384,8 @@ def _jwt_token_creds(stub, args):
wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
response = _large_unary_common_behavior(stub, True, False, None)
if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
+ raise ValueError('expected username %s, got %s' %
+ (wanted_email, response.username))
def _per_rpc_creds(stub, args):
@@ -399,8 +399,8 @@ def _per_rpc_creds(stub, args):
request=google_auth_transport_requests.Request()))
response = _large_unary_common_behavior(stub, True, False, call_credentials)
if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
+ raise ValueError('expected username %s, got %s' %
+ (wanted_email, response.username))
def _special_status_message(stub, args):
@@ -478,5 +478,5 @@ class TestCase(enum.Enum):
elif self is TestCase.SPECIAL_STATUS_MESSAGE:
_special_status_message(stub, args)
else:
- raise NotImplementedError('Test case "%s" not implemented!' %
- self.name)
+ raise NotImplementedError('Test case "%s" not implemented!' %
+ self.name)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py
index ba0c7a34aa..c85adb0b0b 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py
@@ -28,40 +28,40 @@ logging.basicConfig()
_LOGGER = logging.getLogger(__name__)
-def parse_interop_server_arguments():
+def parse_interop_server_arguments():
parser = argparse.ArgumentParser()
- parser.add_argument('--port',
- type=int,
- required=True,
- help='the port on which to serve')
- parser.add_argument('--use_tls',
- default=False,
- type=resources.parse_bool,
- help='require a secure connection')
- parser.add_argument('--use_alts',
- default=False,
- type=resources.parse_bool,
- help='require an ALTS connection')
- return parser.parse_args()
+ parser.add_argument('--port',
+ type=int,
+ required=True,
+ help='the port on which to serve')
+ parser.add_argument('--use_tls',
+ default=False,
+ type=resources.parse_bool,
+ help='require a secure connection')
+ parser.add_argument('--use_alts',
+ default=False,
+ type=resources.parse_bool,
+ help='require an ALTS connection')
+ return parser.parse_args()
+
+
+def get_server_credentials(use_tls):
+ if use_tls:
+ private_key = resources.private_key()
+ certificate_chain = resources.certificate_chain()
+ return grpc.ssl_server_credentials(((private_key, certificate_chain),))
+ else:
+ return grpc.alts_server_credentials()
+
+
+def serve():
+ args = parse_interop_server_arguments()
-
-def get_server_credentials(use_tls):
- if use_tls:
- private_key = resources.private_key()
- certificate_chain = resources.certificate_chain()
- return grpc.ssl_server_credentials(((private_key, certificate_chain),))
- else:
- return grpc.alts_server_credentials()
-
-
-def serve():
- args = parse_interop_server_arguments()
-
server = test_common.test_server()
test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(),
server)
- if args.use_tls or args.use_alts:
- credentials = get_server_credentials(args.use_tls)
+ if args.use_tls or args.use_alts:
+ credentials = get_server_credentials(args.use_tls)
server.add_secure_port('[::]:{}'.format(args.port), credentials)
else:
server.add_insecure_port('[::]:{}'.format(args.port))
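
A minimal sketch, not part of the patch above, of the secure path in serve(); it assumes hypothetical PEM files on disk in place of resources.private_key() and resources.certificate_chain().

    from concurrent import futures

    import grpc

    with open('server.key', 'rb') as key_file:        # hypothetical private key
        private_key = key_file.read()
    with open('server.pem', 'rb') as chain_file:      # hypothetical certificate chain
        certificate_chain = chain_file.read()
    credentials = grpc.ssl_server_credentials(((private_key, certificate_chain),))
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    port = server.add_secure_port('[::]:0', credentials)  # 0 lets the OS pick a free port
    server.start()
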
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py
index bd2b173209..08bb0c45a2 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py
@@ -56,8 +56,8 @@ class TestService(test_pb2_grpc.TestServiceServicer):
_maybe_echo_metadata(context)
_maybe_echo_status_and_message(request, context)
return messages_pb2.SimpleResponse(
- payload=messages_pb2.Payload(type=messages_pb2.COMPRESSABLE,
- body=b'\x00' * request.response_size))
+ payload=messages_pb2.Payload(type=messages_pb2.COMPRESSABLE,
+ body=b'\x00' * request.response_size))
def StreamingOutputCall(self, request, context):
_maybe_echo_status_and_message(request, context)
@@ -65,9 +65,9 @@ class TestService(test_pb2_grpc.TestServiceServicer):
if response_parameters.interval_us != 0:
time.sleep(response_parameters.interval_us / _US_IN_A_SECOND)
yield messages_pb2.StreamingOutputCallResponse(
- payload=messages_pb2.Payload(type=request.response_type,
- body=b'\x00' *
- response_parameters.size))
+ payload=messages_pb2.Payload(type=request.response_type,
+ body=b'\x00' *
+ response_parameters.size))
def StreamingInputCall(self, request_iterator, context):
aggregate_size = 0
@@ -83,12 +83,12 @@ class TestService(test_pb2_grpc.TestServiceServicer):
_maybe_echo_status_and_message(request, context)
for response_parameters in request.response_parameters:
if response_parameters.interval_us != 0:
- time.sleep(response_parameters.interval_us /
- _US_IN_A_SECOND)
+ time.sleep(response_parameters.interval_us /
+ _US_IN_A_SECOND)
yield messages_pb2.StreamingOutputCallResponse(
- payload=messages_pb2.Payload(type=request.payload.type,
- body=b'\x00' *
- response_parameters.size))
+ payload=messages_pb2.Payload(type=request.payload.type,
+ body=b'\x00' *
+ response_parameters.size))
# NOTE(nathaniel): Apparently this is the same as the full-duplex call?
# NOTE(atash): It isn't even called in the interop spec (Oct 22 2015)...
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py
index 383077b630..17835e7c0d 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py
@@ -61,16 +61,16 @@ class BenchmarkClient:
self._stub = benchmark_service_pb2_grpc.BenchmarkServiceStub(
channel)
payload = messages_pb2.Payload(
- body=bytes(b'\0' *
- config.payload_config.simple_params.req_size))
+ body=bytes(b'\0' *
+ config.payload_config.simple_params.req_size))
self._request = messages_pb2.SimpleRequest(
payload=payload,
response_size=config.payload_config.simple_params.resp_size)
else:
self._generic = True
self._stub = GenericStub(channel)
- self._request = bytes(b'\0' *
- config.payload_config.bytebuf_params.req_size)
+ self._request = bytes(b'\0' *
+ config.payload_config.bytebuf_params.req_size)
self._hist = hist
self._response_callbacks = []
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py
index 174e4f84ae..75280bd771 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py
@@ -20,21 +20,21 @@ class BenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer):
"""Synchronous Server implementation for the Benchmark service."""
def UnaryCall(self, request, context):
- payload = messages_pb2.Payload(body=b'\0' * request.response_size)
+ payload = messages_pb2.Payload(body=b'\0' * request.response_size)
return messages_pb2.SimpleResponse(payload=payload)
def StreamingCall(self, request_iterator, context):
for request in request_iterator:
- payload = messages_pb2.Payload(body=b'\0' * request.response_size)
+ payload = messages_pb2.Payload(body=b'\0' * request.response_size)
yield messages_pb2.SimpleResponse(payload=payload)
-class GenericBenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer
- ):
+class GenericBenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer
+ ):
"""Generic Server implementation for the Benchmark service."""
def __init__(self, resp_size):
- self._response = b'\0' * resp_size
+ self._response = b'\0' * resp_size
def UnaryCall(self, request, context):
return self._response
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py
index 7d6598c97e..c5d299f646 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py
@@ -45,8 +45,8 @@ class OpenLoopClientRunner(ClientRunner):
super(OpenLoopClientRunner, self).__init__(client)
self._is_running = False
self._interval_generator = interval_generator
- self._dispatch_thread = threading.Thread(target=self._dispatch_requests,
- args=())
+ self._dispatch_thread = threading.Thread(target=self._dispatch_requests,
+ args=())
def start(self):
self._is_running = True
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py
index d90dcf4b07..8139a6ee2f 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py
@@ -65,16 +65,16 @@ class Histogram(object):
data.count = self._count
return data
- def merge(self, another_data):
- with self._lock:
- for i in range(len(self._buckets)):
- self._buckets[i] += another_data.bucket[i]
- self._min = min(self._min, another_data.min_seen)
- self._max = max(self._max, another_data.max_seen)
- self._sum += another_data.sum
- self._sum_of_squares += another_data.sum_of_squares
- self._count += another_data.count
-
+ def merge(self, another_data):
+ with self._lock:
+ for i in range(len(self._buckets)):
+ self._buckets[i] += another_data.bucket[i]
+ self._min = min(self._min, another_data.min_seen)
+ self._max = max(self._max, another_data.max_seen)
+ self._sum += another_data.sum
+ self._sum_of_squares += another_data.sum_of_squares
+ self._count += another_data.count
+
def _bucket_for(self, val):
val = min(val, self._max_possible)
return int(math.log(val, self.multiplier))
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py
index b9013d84f3..a7e692821a 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py
@@ -37,10 +37,10 @@ def run_worker_server(port):
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='gRPC Python performance testing worker')
- parser.add_argument('--driver_port',
- type=int,
- dest='port',
- help='The port the worker should listen on')
+ parser.add_argument('--driver_port',
+ type=int,
+ dest='port',
+ help='The port the worker should listen on')
args = parser.parse_args()
run_worker_server(args.port)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py
index 16ac044d2a..65b081e5d1 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py
@@ -57,9 +57,9 @@ class WorkerServer(worker_service_pb2_grpc.WorkerServiceServicer):
def _get_server_status(self, start_time, end_time, port, cores):
end_time = time.time()
elapsed_time = end_time - start_time
- stats = stats_pb2.ServerStats(time_elapsed=elapsed_time,
- time_user=elapsed_time,
- time_system=elapsed_time)
+ stats = stats_pb2.ServerStats(time_elapsed=elapsed_time,
+ time_user=elapsed_time,
+ time_system=elapsed_time)
return control_pb2.ServerStatus(stats=stats, port=port, cores=cores)
def _create_server(self, config):
@@ -79,10 +79,10 @@ class WorkerServer(worker_service_pb2_grpc.WorkerServiceServicer):
servicer = benchmark_server.GenericBenchmarkServer(resp_size)
method_implementations = {
'StreamingCall':
- grpc.stream_stream_rpc_method_handler(servicer.StreamingCall
- ),
+ grpc.stream_stream_rpc_method_handler(servicer.StreamingCall
+ ),
'UnaryCall':
- grpc.unary_unary_rpc_method_handler(servicer.UnaryCall),
+ grpc.unary_unary_rpc_method_handler(servicer.UnaryCall),
}
handler = grpc.method_handlers_generic_handler(
'grpc.testing.BenchmarkService', method_implementations)
@@ -135,10 +135,10 @@ class WorkerServer(worker_service_pb2_grpc.WorkerServiceServicer):
latencies = qps_data.get_data()
end_time = time.time()
elapsed_time = end_time - start_time
- stats = stats_pb2.ClientStats(latencies=latencies,
- time_elapsed=elapsed_time,
- time_user=elapsed_time,
- time_system=elapsed_time)
+ stats = stats_pb2.ClientStats(latencies=latencies,
+ time_elapsed=elapsed_time,
+ time_user=elapsed_time,
+ time_system=elapsed_time)
return control_pb2.ClientStatus(stats=stats)
def _create_client_runner(self, server, config, qps_data):
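
A minimal sketch, not part of the patch above, of registering serializer-free (generic) handlers the way _create_server() does for the generic benchmark service; the echo behaviors stand in for the real servicer.

    from concurrent import futures

    import grpc

    def _unary_echo(request, context):
        # Generic handlers receive and return raw bytes.
        return request

    def _streaming_echo(request_iterator, context):
        for request in request_iterator:
            yield request

    method_implementations = {
        'UnaryCall': grpc.unary_unary_rpc_method_handler(_unary_echo),
        'StreamingCall': grpc.stream_stream_rpc_method_handler(_streaming_echo),
    }
    handler = grpc.method_handlers_generic_handler('grpc.testing.BenchmarkService',
                                                   method_implementations)
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
    server.add_generic_rpc_handlers((handler,))
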
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py
index 449fbdf95d..54a3b62420 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py
@@ -60,8 +60,8 @@ def _not_ok_unary_unary(request, servicer_context):
def _error_details_unary_unary(request, servicer_context):
details = any_pb2.Any()
details.Pack(
- error_details_pb2.DebugInfo(stack_entries=traceback.format_stack(),
- detail='Intentionally invoked'))
+ error_details_pb2.DebugInfo(stack_entries=traceback.format_stack(),
+ detail='Intentionally invoked'))
rich_status = status_pb2.Status(
code=code_pb2.INTERNAL,
message=_STATUS_DETAILS,
@@ -78,8 +78,8 @@ def _inconsistent_unary_unary(request, servicer_context):
servicer_context.set_code(grpc.StatusCode.NOT_FOUND)
servicer_context.set_details(_STATUS_DETAILS_ANOTHER)
# User put inconsistent status information in trailing metadata
- servicer_context.set_trailing_metadata(
- ((_GRPC_DETAILS_METADATA_KEY, rich_status.SerializeToString()),))
+ servicer_context.set_trailing_metadata(
+ ((_GRPC_DETAILS_METADATA_KEY, rich_status.SerializeToString()),))
def _invalid_code_unary_unary(request, servicer_context):
@@ -151,8 +151,8 @@ class StatusTest(unittest.TestCase):
self.assertEqual(status.code, code_pb2.Code.Value('INTERNAL'))
# Check if the underlying proto message is intact
- self.assertEqual(
- status.details[0].Is(error_details_pb2.DebugInfo.DESCRIPTOR), True)
+ self.assertEqual(
+ status.details[0].Is(error_details_pb2.DebugInfo.DESCRIPTOR), True)
info = error_details_pb2.DebugInfo()
status.details[0].Unpack(info)
self.assertIn('_error_details_unary_unary', info.stack_entries[-1])
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py
index 0fcaca441e..01c14ba3e2 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py
@@ -42,34 +42,34 @@ def _args():
help='comma separated list of testcase:weighting of tests to run',
default='large_unary:100',
type=str)
- parser.add_argument('--test_duration_secs',
- help='number of seconds to run the stress test',
- default=-1,
- type=int)
- parser.add_argument('--num_channels_per_server',
- help='number of channels per server',
- default=1,
- type=int)
- parser.add_argument('--num_stubs_per_channel',
- help='number of stubs to create per channel',
- default=1,
- type=int)
- parser.add_argument('--metrics_port',
- help='the port to listen for metrics requests on',
- default=8081,
- type=int)
+ parser.add_argument('--test_duration_secs',
+ help='number of seconds to run the stress test',
+ default=-1,
+ type=int)
+ parser.add_argument('--num_channels_per_server',
+ help='number of channels per server',
+ default=1,
+ type=int)
+ parser.add_argument('--num_stubs_per_channel',
+ help='number of stubs to create per channel',
+ default=1,
+ type=int)
+ parser.add_argument('--metrics_port',
+ help='the port to listen for metrics requests on',
+ default=8081,
+ type=int)
parser.add_argument(
'--use_test_ca',
help='Whether to use our fake CA. Requires --use_tls=true',
default=False,
type=bool)
- parser.add_argument('--use_tls',
- help='Whether to use TLS',
- default=False,
- type=bool)
- parser.add_argument('--server_host_override',
- help='the server host to which to claim to connect',
- type=str)
+ parser.add_argument('--use_tls',
+ help='Whether to use TLS',
+ default=False,
+ type=bool)
+ parser.add_argument('--server_host_override',
+ help='the server host to which to claim to connect',
+ type=str)
return parser.parse_args()
@@ -102,9 +102,9 @@ def _get_channel(target, args):
'grpc.ssl_target_name_override',
args.server_host_override,
),)
- channel = grpc.secure_channel(target,
- channel_credentials,
- options=options)
+ channel = grpc.secure_channel(target,
+ channel_credentials,
+ options=options)
else:
channel = grpc.insecure_channel(target)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py
index 001538ecbf..1b6003fc69 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py
@@ -53,6 +53,6 @@ class TestRunner(threading.Thread):
except Exception as e: # pylint: disable=broad-except
traceback.print_exc()
self._exception_queue.put(
- Exception(
- "An exception occurred during test {}".format(
- test_case), e))
+ Exception(
+ "An exception occurred during test {}".format(
+ test_case), e))
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py
index 2130df5e42..cd872ece29 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py
@@ -60,9 +60,9 @@ try:
@contextlib.contextmanager
def _running_server():
- server_process = subprocess.Popen([sys.executable, '-c', _SERVER_CODE],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ server_process = subprocess.Popen([sys.executable, '-c', _SERVER_CODE],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
try:
yield
finally:
@@ -76,8 +76,8 @@ try:
def profile(message_size, response_count):
request = unary_stream_benchmark_pb2.BenchmarkRequest(
message_size=message_size, response_count=response_count)
- with grpc.insecure_channel('[::]:{}'.format(_PORT),
- options=_GRPC_CHANNEL_OPTIONS) as channel:
+ with grpc.insecure_channel('[::]:{}'.format(_PORT),
+ options=_GRPC_CHANNEL_OPTIONS) as channel:
stub = unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceStub(
channel)
start = datetime.datetime.now()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py
index ef4c1e985d..57fa510913 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py
@@ -150,8 +150,8 @@ def _run_stream_stream(stub):
def _run_concurrent_stream_unary(stub):
future_calls = tuple(
- stub.StreUn.future(iter((_application_common.STREAM_UNARY_REQUEST,) *
- 3))
+ stub.StreUn.future(iter((_application_common.STREAM_UNARY_REQUEST,) *
+ 3))
for _ in range(test_constants.THREAD_CONCURRENCY))
for future_call in future_calls:
if future_call.code() is grpc.StatusCode.OK:
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py
index be62422b9f..cab665c045 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py
@@ -115,8 +115,8 @@ class TimeTest(object):
self._time.sleep_for(_QUANTUM * 3)
for test_event in test_events:
- (self.assertFalse if test_event in cancelled else self.assertTrue)(
- test_event.is_set())
+ (self.assertFalse if test_event in cancelled else self.assertTrue)(
+ test_event.is_set())
for background_noise_future in background_noise_futures:
background_noise_future.cancel()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py
index 5acd9d3eb1..d2eaf97d5f 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py
@@ -36,9 +36,9 @@ _ABORT_METADATA = (('a-trailing-metadata', '42'),)
class _Status(
- collections.namedtuple('_Status',
- ('code', 'details', 'trailing_metadata')),
- grpc.Status):
+ collections.namedtuple('_Status',
+ ('code', 'details', 'trailing_metadata')),
+ grpc.Status):
pass
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py
index 5aac7877ec..a459ee6e19 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py
@@ -63,8 +63,8 @@ class AllTest(unittest.TestCase):
'LocalConnectionType',
'local_channel_credentials',
'local_server_credentials',
- 'alts_channel_credentials',
- 'alts_server_credentials',
+ 'alts_channel_credentials',
+ 'alts_server_credentials',
'unary_unary_rpc_method_handler',
'unary_stream_rpc_method_handler',
'stream_unary_rpc_method_handler',
@@ -84,9 +84,9 @@ class AllTest(unittest.TestCase):
'secure_channel',
'intercept_channel',
'server',
- 'protos',
- 'services',
- 'protos_and_services',
+ 'protos',
+ 'services',
+ 'protos_and_services',
)
six.assertCountEqual(self, expected_grpc_code_elements,
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py
index 9d76fc757f..817c528237 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py
@@ -64,7 +64,7 @@ class AuthContextTest(unittest.TestCase):
def testInsecure(self):
handler = grpc.method_handlers_generic_handler('test', {
'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
+ grpc.unary_unary_rpc_method_handler(handle_unary_unary)
})
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
@@ -83,7 +83,7 @@ class AuthContextTest(unittest.TestCase):
def testSecureNoCert(self):
handler = grpc.method_handlers_generic_handler('test', {
'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
+ grpc.unary_unary_rpc_method_handler(handle_unary_unary)
})
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
@@ -93,9 +93,9 @@ class AuthContextTest(unittest.TestCase):
channel_creds = grpc.ssl_channel_credentials(
root_certificates=_TEST_ROOT_CERTIFICATES)
- channel = grpc.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=_PROPERTY_OPTIONS)
+ channel = grpc.secure_channel('localhost:{}'.format(port),
+ channel_creds,
+ options=_PROPERTY_OPTIONS)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
channel.close()
server.stop(None)
@@ -103,17 +103,17 @@ class AuthContextTest(unittest.TestCase):
auth_data = pickle.loads(response)
self.assertIsNone(auth_data[_ID])
self.assertIsNone(auth_data[_ID_KEY])
- self.assertDictEqual(
- {
- 'security_level': [b'TSI_PRIVACY_AND_INTEGRITY'],
- 'transport_security_type': [b'ssl'],
- 'ssl_session_reused': [b'false'],
- }, auth_data[_AUTH_CTX])
+ self.assertDictEqual(
+ {
+ 'security_level': [b'TSI_PRIVACY_AND_INTEGRITY'],
+ 'transport_security_type': [b'ssl'],
+ 'ssl_session_reused': [b'false'],
+ }, auth_data[_AUTH_CTX])
def testSecureClientCert(self):
handler = grpc.method_handlers_generic_handler('test', {
'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
+ grpc.unary_unary_rpc_method_handler(handle_unary_unary)
})
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
@@ -128,9 +128,9 @@ class AuthContextTest(unittest.TestCase):
root_certificates=_TEST_ROOT_CERTIFICATES,
private_key=_PRIVATE_KEY,
certificate_chain=_CERTIFICATE_CHAIN)
- channel = grpc.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=_PROPERTY_OPTIONS)
+ channel = grpc.secure_channel('localhost:{}'.format(port),
+ channel_creds,
+ options=_PROPERTY_OPTIONS)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
channel.close()
@@ -146,9 +146,9 @@ class AuthContextTest(unittest.TestCase):
def _do_one_shot_client_rpc(self, channel_creds, channel_options, port,
expect_ssl_session_reused):
- channel = grpc.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=channel_options)
+ channel = grpc.secure_channel('localhost:{}'.format(port),
+ channel_creds,
+ options=channel_options)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
auth_data = pickle.loads(response)
self.assertEqual(expect_ssl_session_reused,
@@ -159,7 +159,7 @@ class AuthContextTest(unittest.TestCase):
# Set up a secure server
handler = grpc.method_handlers_generic_handler('test', {
'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
+ grpc.unary_unary_rpc_method_handler(handle_unary_unary)
})
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
@@ -175,16 +175,16 @@ class AuthContextTest(unittest.TestCase):
('grpc.ssl_session_cache', cache),)
# Initial connection has no session to resume
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port,
- expect_ssl_session_reused=[b'false'])
+ self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port,
+ expect_ssl_session_reused=[b'false'])
# Subsequent connections resume sessions
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port,
- expect_ssl_session_reused=[b'true'])
+ self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port,
+ expect_ssl_session_reused=[b'true'])
server.stop(None)
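
A minimal sketch, not part of the patch above, of the client-certificate credentials exercised by testSecureClientCert, assuming hypothetical PEM files instead of the bundled test resources.

    import grpc

    with open('ca.pem', 'rb') as f:          # hypothetical CA bundle
        root_certificates = f.read()
    with open('client.key', 'rb') as f:      # hypothetical client key
        private_key = f.read()
    with open('client.pem', 'rb') as f:      # hypothetical client certificate chain
        certificate_chain = f.read()
    channel_creds = grpc.ssl_channel_credentials(
        root_certificates=root_certificates,
        private_key=private_key,
        certificate_chain=certificate_chain)
    channel = grpc.secure_channel(
        'localhost:50051', channel_creds,
        options=(('grpc.ssl_target_name_override', 'foo.test.google.fr'),))
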
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py
index bb2a7400df..2f2eea61db 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Tests of channel arguments on client/server side."""
+"""Tests of channel arguments on client/server side."""
from concurrent import futures
import unittest
@@ -49,15 +49,15 @@ class ChannelArgsTest(unittest.TestCase):
grpc.insecure_channel('localhost:8080', options=TEST_CHANNEL_ARGS)
def test_server(self):
- grpc.server(futures.ThreadPoolExecutor(max_workers=1),
- options=TEST_CHANNEL_ARGS)
+ grpc.server(futures.ThreadPoolExecutor(max_workers=1),
+ options=TEST_CHANNEL_ARGS)
def test_invalid_client_args(self):
for invalid_arg in INVALID_TEST_CHANNEL_ARGS:
- self.assertRaises(ValueError,
- grpc.insecure_channel,
- 'localhost:8080',
- options=invalid_arg)
+ self.assertRaises(ValueError,
+ grpc.insecure_channel,
+ 'localhost:8080',
+ options=invalid_arg)
if __name__ == '__main__':
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py
index 76cd275aa1..d1b4c3c932 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py
@@ -87,8 +87,8 @@ class ChannelConnectivityTest(unittest.TestCase):
def test_immediately_connectable_channel_connectivity(self):
recording_thread_pool = thread_pool.RecordingThreadPool(
max_workers=None)
- server = grpc.server(recording_thread_pool,
- options=(('grpc.so_reuseport', 0),))
+ server = grpc.server(recording_thread_pool,
+ options=(('grpc.so_reuseport', 0),))
port = server.add_insecure_port('[::]:0')
server.start()
first_callback = _Callback()
@@ -132,8 +132,8 @@ class ChannelConnectivityTest(unittest.TestCase):
def test_reachable_then_unreachable_channel_connectivity(self):
recording_thread_pool = thread_pool.RecordingThreadPool(
max_workers=None)
- server = grpc.server(recording_thread_pool,
- options=(('grpc.so_reuseport', 0),))
+ server = grpc.server(recording_thread_pool,
+ options=(('grpc.so_reuseport', 0),))
port = server.add_insecure_port('[::]:0')
server.start()
callback = _Callback()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py
index 2c367d9852..ca9ebc16fe 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py
@@ -65,8 +65,8 @@ class ChannelReadyFutureTest(unittest.TestCase):
def test_immediately_connectable_channel_connectivity(self):
recording_thread_pool = thread_pool.RecordingThreadPool(
max_workers=None)
- server = grpc.server(recording_thread_pool,
- options=(('grpc.so_reuseport', 0),))
+ server = grpc.server(recording_thread_pool,
+ options=(('grpc.so_reuseport', 0),))
port = server.add_insecure_port('[::]:0')
server.start()
channel = grpc.insecure_channel('localhost:{}'.format(port))
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py
index c04cdb497e..bc58e1032c 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py
@@ -214,8 +214,8 @@ def _get_compression_ratios(client_function, first_channel_kwargs,
second_bytes_sent, second_bytes_received = _get_byte_counts(
second_channel_kwargs, second_multicallable_kwargs, client_function,
second_server_kwargs, second_server_handler, message)
- return ((second_bytes_sent - first_bytes_sent) /
- float(first_bytes_sent),
+ return ((second_bytes_sent - first_bytes_sent) /
+ float(first_bytes_sent),
(second_bytes_received - first_bytes_received) /
float(first_bytes_received))
finally:
@@ -274,11 +274,11 @@ class CompressionTest(unittest.TestCase):
-1.0 * _COMPRESSION_RATIO_THRESHOLD,
msg='Actual compression ratio: {}'.format(compression_ratio))
- def assertConfigurationCompressed(self, client_streaming, server_streaming,
- channel_compression,
- multicallable_compression,
- server_compression,
- server_call_compression):
+ def assertConfigurationCompressed(self, client_streaming, server_streaming,
+ channel_compression,
+ multicallable_compression,
+ server_compression,
+ server_call_compression):
client_side_compressed = channel_compression or multicallable_compression
server_side_compressed = server_compression or server_call_compression
channel_kwargs = {
@@ -354,10 +354,10 @@ def _get_compression_test_name(client_streaming, server_streaming,
server_compression_str = _get_compression_str('Server', server_compression)
server_call_compression_str = _get_compression_str('ServerCall',
server_call_compression)
- return 'test{}{}{}{}{}'.format(arity, channel_compression_str,
- multicallable_compression_str,
- server_compression_str,
- server_call_compression_str)
+ return 'test{}{}{}{}{}'.format(arity, channel_compression_str,
+ multicallable_compression_str,
+ server_compression_str,
+ server_call_compression_str)
def _test_options():
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_contextvars_propagation_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_contextvars_propagation_test.py
index 50b512b525..fec0fbd7df 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_contextvars_propagation_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_contextvars_propagation_test.py
@@ -1,118 +1,118 @@
-# Copyright 2020 The gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test of propagation of contextvars to AuthMetadataPlugin threads.."""
-
-import contextlib
-import logging
-import os
-import sys
-import unittest
-
-import grpc
-
-from tests.unit import test_common
-
-_UNARY_UNARY = "/test/UnaryUnary"
-_REQUEST = b"0000"
-
-
-def _unary_unary_handler(request, context):
- return request
-
-
-def contextvars_supported():
- try:
- import contextvars
- return True
- except ImportError:
- return False
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _UNARY_UNARY:
- return grpc.unary_unary_rpc_method_handler(_unary_unary_handler)
- else:
- raise NotImplementedError()
-
-
-@contextlib.contextmanager
-def _server():
- try:
- server = test_common.test_server()
- target = 'localhost:0'
- port = server.add_insecure_port(target)
- server.add_generic_rpc_handlers((_GenericHandler(),))
- server.start()
- yield port
- finally:
- server.stop(None)
-
-
-if contextvars_supported():
- import contextvars
-
- _EXPECTED_VALUE = 24601
- test_var = contextvars.ContextVar("test_var", default=None)
-
- def set_up_expected_context():
- test_var.set(_EXPECTED_VALUE)
-
- class TestCallCredentials(grpc.AuthMetadataPlugin):
-
- def __call__(self, context, callback):
- if test_var.get() != _EXPECTED_VALUE:
- raise AssertionError("{} != {}".format(test_var.get(),
- _EXPECTED_VALUE))
- callback((), None)
-
- def assert_called(self, test):
- test.assertTrue(self._invoked)
- test.assertEqual(_EXPECTED_VALUE, self._recorded_value)
-
-else:
-
- def set_up_expected_context():
- pass
-
- class TestCallCredentials(grpc.AuthMetadataPlugin):
-
- def __call__(self, context, callback):
- callback((), None)
-
-
-# TODO(https://github.com/grpc/grpc/issues/22257)
-@unittest.skipIf(os.name == "nt", "LocalCredentials not supported on Windows.")
-class ContextVarsPropagationTest(unittest.TestCase):
-
- def test_propagation_to_auth_plugin(self):
- set_up_expected_context()
- with _server() as port:
- target = "localhost:{}".format(port)
- local_credentials = grpc.local_channel_credentials()
- test_call_credentials = TestCallCredentials()
- call_credentials = grpc.metadata_call_credentials(
- test_call_credentials, "test call credentials")
- composite_credentials = grpc.composite_channel_credentials(
- local_credentials, call_credentials)
- with grpc.secure_channel(target, composite_credentials) as channel:
- stub = channel.unary_unary(_UNARY_UNARY)
- response = stub(_REQUEST, wait_for_ready=True)
- self.assertEqual(_REQUEST, response)
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test of propagation of contextvars to AuthMetadataPlugin threads.."""
+
+import contextlib
+import logging
+import os
+import sys
+import unittest
+
+import grpc
+
+from tests.unit import test_common
+
+_UNARY_UNARY = "/test/UnaryUnary"
+_REQUEST = b"0000"
+
+
+def _unary_unary_handler(request, context):
+ return request
+
+
+def contextvars_supported():
+ try:
+ import contextvars
+ return True
+ except ImportError:
+ return False
+
+
+class _GenericHandler(grpc.GenericRpcHandler):
+
+ def service(self, handler_call_details):
+ if handler_call_details.method == _UNARY_UNARY:
+ return grpc.unary_unary_rpc_method_handler(_unary_unary_handler)
+ else:
+ raise NotImplementedError()
+
+
+@contextlib.contextmanager
+def _server():
+ try:
+ server = test_common.test_server()
+ target = 'localhost:0'
+ port = server.add_insecure_port(target)
+ server.add_generic_rpc_handlers((_GenericHandler(),))
+ server.start()
+ yield port
+ finally:
+ server.stop(None)
+
+
+if contextvars_supported():
+ import contextvars
+
+ _EXPECTED_VALUE = 24601
+ test_var = contextvars.ContextVar("test_var", default=None)
+
+ def set_up_expected_context():
+ test_var.set(_EXPECTED_VALUE)
+
+ class TestCallCredentials(grpc.AuthMetadataPlugin):
+
+ def __call__(self, context, callback):
+ if test_var.get() != _EXPECTED_VALUE:
+ raise AssertionError("{} != {}".format(test_var.get(),
+ _EXPECTED_VALUE))
+ callback((), None)
+
+ def assert_called(self, test):
+ test.assertTrue(self._invoked)
+ test.assertEqual(_EXPECTED_VALUE, self._recorded_value)
+
+else:
+
+ def set_up_expected_context():
+ pass
+
+ class TestCallCredentials(grpc.AuthMetadataPlugin):
+
+ def __call__(self, context, callback):
+ callback((), None)
+
+
+# TODO(https://github.com/grpc/grpc/issues/22257)
+@unittest.skipIf(os.name == "nt", "LocalCredentials not supported on Windows.")
+class ContextVarsPropagationTest(unittest.TestCase):
+
+ def test_propagation_to_auth_plugin(self):
+ set_up_expected_context()
+ with _server() as port:
+ target = "localhost:{}".format(port)
+ local_credentials = grpc.local_channel_credentials()
+ test_call_credentials = TestCallCredentials()
+ call_credentials = grpc.metadata_call_credentials(
+ test_call_credentials, "test call credentials")
+ composite_credentials = grpc.composite_channel_credentials(
+ local_credentials, call_credentials)
+ with grpc.secure_channel(target, composite_credentials) as channel:
+ stub = channel.unary_unary(_UNARY_UNARY)
+ response = stub(_REQUEST, wait_for_ready=True)
+ self.assertEqual(_REQUEST, response)
+
+
+if __name__ == '__main__':
+ logging.basicConfig()
+ unittest.main(verbosity=2)
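
A minimal sketch, not part of the patch above, of the credential composition used by test_propagation_to_auth_plugin, with a hypothetical plugin that attaches a static header instead of reading a contextvar.

    import grpc

    class _StaticTokenPlugin(grpc.AuthMetadataPlugin):

        def __call__(self, context, callback):
            # Attach a hypothetical bearer token to every RPC on the channel.
            callback((('authorization', 'Bearer fake-token'),), None)

    call_credentials = grpc.metadata_call_credentials(_StaticTokenPlugin(),
                                                      'static token plugin')
    composite_credentials = grpc.composite_channel_credentials(
        grpc.local_channel_credentials(), call_credentials)
    # grpc.secure_channel('localhost:50051', composite_credentials) would send both.
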
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py
index 2c67e679fa..b279f3d07c 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py
@@ -193,8 +193,8 @@ class CancelManyCallsTest(unittest.TestCase):
client_due.add(tag)
client_calls.append(client_call)
- client_events_future = test_utilities.SimpleFuture(lambda: tuple(
- channel.next_call_event() for _ in range(_SUCCESSFUL_CALLS)))
+ client_events_future = test_utilities.SimpleFuture(lambda: tuple(
+ channel.next_call_event() for _ in range(_SUCCESSFUL_CALLS)))
with state.condition:
while True:
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py
index 54405483b2..144a2fcae3 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py
@@ -59,10 +59,10 @@ class Test(_common.RpcTest, unittest.TestCase):
cygrpc.ReceiveStatusOnClientOperation(_common.EMPTY_FLAGS),
], client_complete_rpc_tag)
- client_events_future = test_utilities.SimpleFuture(lambda: [
- self.channel.next_call_event(),
- self.channel.next_call_event(),
- ])
+ client_events_future = test_utilities.SimpleFuture(lambda: [
+ self.channel.next_call_event(),
+ self.channel.next_call_event(),
+ ])
server_request_call_event = self.server_driver.event_with_tag(
server_request_call_tag)
@@ -122,8 +122,8 @@ class Test(_common.RpcTest, unittest.TestCase):
)
def test_rpcs(self):
- expecteds = [(_common.SUCCESSFUL_OPERATION_RESULT,) * 5
- ] * _common.RPC_COUNT
+ expecteds = [(_common.SUCCESSFUL_OPERATION_RESULT,) * 5
+ ] * _common.RPC_COUNT
actuallys = _common.execute_many_times(self._do_rpcs)
self.assertSequenceEqual(expecteds, actuallys)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py
index 12d99e489f..38964768db 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py
@@ -58,10 +58,10 @@ class Test(_common.RpcTest, unittest.TestCase):
cygrpc.ReceiveInitialMetadataOperation(_common.EMPTY_FLAGS),
], client_receive_initial_metadata_tag)
- client_events_future = test_utilities.SimpleFuture(lambda: [
- self.channel.next_call_event(),
- self.channel.next_call_event(),
- ])
+ client_events_future = test_utilities.SimpleFuture(lambda: [
+ self.channel.next_call_event(),
+ self.channel.next_call_event(),
+ ])
server_request_call_event = self.server_driver.event_with_tag(
server_request_call_tag)
@@ -116,8 +116,8 @@ class Test(_common.RpcTest, unittest.TestCase):
)
def test_rpcs(self):
- expecteds = [(_common.SUCCESSFUL_OPERATION_RESULT,) * 5
- ] * _common.RPC_COUNT
+ expecteds = [(_common.SUCCESSFUL_OPERATION_RESULT,) * 5
+ ] * _common.RPC_COUNT
actuallys = _common.execute_many_times(self._do_rpcs)
self.assertSequenceEqual(expecteds, actuallys)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py
index f1c2d636a3..1182f83a42 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py
@@ -116,12 +116,12 @@ class ServerClientMixin(object):
cygrpc.ChannelArgKey.ssl_target_name_override,
host_override,
),)
- self.client_channel = cygrpc.Channel(
- 'localhost:{}'.format(self.port).encode(),
- client_channel_arguments, client_credentials)
+ self.client_channel = cygrpc.Channel(
+ 'localhost:{}'.format(self.port).encode(),
+ client_channel_arguments, client_credentials)
else:
- self.client_channel = cygrpc.Channel(
- 'localhost:{}'.format(self.port).encode(), set(), None)
+ self.client_channel = cygrpc.Channel(
+ 'localhost:{}'.format(self.port).encode(), set(), None)
if host_override:
self.host_argument = None # default host
self.expected_host = host_override
@@ -227,8 +227,8 @@ class ServerClientMixin(object):
request_event.invocation_metadata))
self.assertEqual(METHOD, request_event.call_details.method)
self.assertEqual(self.expected_host, request_event.call_details.host)
- self.assertLess(abs(DEADLINE - request_event.call_details.deadline),
- DEADLINE_TOLERANCE)
+ self.assertLess(abs(DEADLINE - request_event.call_details.deadline),
+ DEADLINE_TOLERANCE)
server_call_tag = object()
server_call = request_event.call
@@ -322,21 +322,21 @@ class ServerClientMixin(object):
self.server_completion_queue,
server_request_tag)
client_call = self.client_channel.segregated_call(
- 0, METHOD, self.host_argument, DEADLINE, None, None,
- ([(
+ 0, METHOD, self.host_argument, DEADLINE, None, None,
+ ([(
[
cygrpc.SendInitialMetadataOperation(empty_metadata,
_EMPTY_FLAGS),
cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS),
],
object(),
- ),
- (
- [
- cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
- ],
- object(),
- )]))
+ ),
+ (
+ [
+ cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
+ ],
+ object(),
+ )]))
client_initial_metadata_event_future = test_utilities.SimpleFuture(
client_call.next_event)
@@ -377,10 +377,10 @@ class ServerClientMixin(object):
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),
], "Client epilogue")
# One for ReceiveStatusOnClient, one for SendCloseFromClient.
- client_events_future = test_utilities.SimpleFuture(lambda: {
- client_call.next_event(),
- client_call.next_event(),
- })
+ client_events_future = test_utilities.SimpleFuture(lambda: {
+ client_call.next_event(),
+ client_call.next_event(),
+ })
server_event_future = perform_server_operations([
cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py
index 99d9787250..43141255f1 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py
@@ -49,8 +49,8 @@ class DNSResolverTest(unittest.TestCase):
# NOTE(https://github.com/grpc/grpc/issues/18422)
# In short, Gevent + C-Ares = Segfault. The C-Ares driver is not
# supported by custom io manager like "gevent" or "libuv".
- with grpc.insecure_channel('loopback4.unittest.grpc.io:%d' %
- self._port) as channel:
+ with grpc.insecure_channel('loopback4.unittest.grpc.io:%d' %
+ self._port) as channel:
self.assertEqual(
channel.unary_unary(_METHOD)(
_REQUEST,
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dynamic_stubs_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dynamic_stubs_test.py
index 8b512654b2..d2d8ce9f60 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dynamic_stubs_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dynamic_stubs_test.py
@@ -1,119 +1,119 @@
-# Copyright 2019 The gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test of dynamic stub import API."""
-
-import contextlib
-import functools
-import logging
-import multiprocessing
-import os
-import sys
-import unittest
-
-
-@contextlib.contextmanager
-def _grpc_tools_unimportable():
- original_sys_path = sys.path
- sys.path = [path for path in sys.path if "grpcio_tools" not in path]
- try:
- import grpc_tools
- except ImportError:
- pass
- else:
- del grpc_tools
- sys.path = original_sys_path
- raise unittest.SkipTest("Failed to make grpc_tools unimportable.")
- try:
- yield
- finally:
- sys.path = original_sys_path
-
-
-def _collect_errors(fn):
-
- @functools.wraps(fn)
- def _wrapped(error_queue):
- try:
- fn()
- except Exception as e:
- error_queue.put(e)
- raise
-
- return _wrapped
-
-
-def _run_in_subprocess(test_case):
- sys.path.insert(
- 0, os.path.join(os.path.realpath(os.path.dirname(__file__)), ".."))
- error_queue = multiprocessing.Queue()
- proc = multiprocessing.Process(target=test_case, args=(error_queue,))
- proc.start()
- proc.join()
- sys.path.pop(0)
- if not error_queue.empty():
- raise error_queue.get()
- assert proc.exitcode == 0, "Process exited with code {}".format(
- proc.exitcode)
-
-
-def _assert_unimplemented(msg_substr):
- import grpc
- try:
- protos, services = grpc.protos_and_services(
- "tests/unit/data/foo/bar.proto")
- except NotImplementedError as e:
- assert msg_substr in str(e), "{} was not in '{}'".format(
- msg_substr, str(e))
- else:
- assert False, "Did not raise NotImplementedError"
-
-
-@_collect_errors
-def _test_sunny_day():
- if sys.version_info[0] == 3:
- import grpc
- protos, services = grpc.protos_and_services(
- os.path.join("tests", "unit", "data", "foo", "bar.proto"))
- assert protos.BarMessage is not None
- assert services.BarStub is not None
- else:
- _assert_unimplemented("Python 3")
-
-
-@_collect_errors
-def _test_grpc_tools_unimportable():
- with _grpc_tools_unimportable():
- if sys.version_info[0] == 3:
- _assert_unimplemented("grpcio-tools")
- else:
- _assert_unimplemented("Python 3")
-
-
-# NOTE(rbellevi): multiprocessing.Process fails to pickle function objects
-# when they do not come from the "__main__" module, so this test passes
-# if run directly on Windows, but not if started by the test runner.
-@unittest.skipIf(os.name == "nt", "Windows multiprocessing unsupported")
-class DynamicStubTest(unittest.TestCase):
-
- @unittest.skip('grpcio-tools package required')
- def test_sunny_day(self):
- _run_in_subprocess(_test_sunny_day)
-
- def test_grpc_tools_unimportable(self):
- _run_in_subprocess(_test_grpc_tools_unimportable)
-
-
-if __name__ == "__main__":
- logging.basicConfig()
- unittest.main(verbosity=2)
+# Copyright 2019 The gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test of dynamic stub import API."""
+
+import contextlib
+import functools
+import logging
+import multiprocessing
+import os
+import sys
+import unittest
+
+
+@contextlib.contextmanager
+def _grpc_tools_unimportable():
+ original_sys_path = sys.path
+ sys.path = [path for path in sys.path if "grpcio_tools" not in path]
+ try:
+ import grpc_tools
+ except ImportError:
+ pass
+ else:
+ del grpc_tools
+ sys.path = original_sys_path
+ raise unittest.SkipTest("Failed to make grpc_tools unimportable.")
+ try:
+ yield
+ finally:
+ sys.path = original_sys_path
+
+
+def _collect_errors(fn):
+
+ @functools.wraps(fn)
+ def _wrapped(error_queue):
+ try:
+ fn()
+ except Exception as e:
+ error_queue.put(e)
+ raise
+
+ return _wrapped
+
+
+def _run_in_subprocess(test_case):
+ sys.path.insert(
+ 0, os.path.join(os.path.realpath(os.path.dirname(__file__)), ".."))
+ error_queue = multiprocessing.Queue()
+ proc = multiprocessing.Process(target=test_case, args=(error_queue,))
+ proc.start()
+ proc.join()
+ sys.path.pop(0)
+ if not error_queue.empty():
+ raise error_queue.get()
+ assert proc.exitcode == 0, "Process exited with code {}".format(
+ proc.exitcode)
+
+
+def _assert_unimplemented(msg_substr):
+ import grpc
+ try:
+ protos, services = grpc.protos_and_services(
+ "tests/unit/data/foo/bar.proto")
+ except NotImplementedError as e:
+ assert msg_substr in str(e), "{} was not in '{}'".format(
+ msg_substr, str(e))
+ else:
+ assert False, "Did not raise NotImplementedError"
+
+
+@_collect_errors
+def _test_sunny_day():
+ if sys.version_info[0] == 3:
+ import grpc
+ protos, services = grpc.protos_and_services(
+ os.path.join("tests", "unit", "data", "foo", "bar.proto"))
+ assert protos.BarMessage is not None
+ assert services.BarStub is not None
+ else:
+ _assert_unimplemented("Python 3")
+
+
+@_collect_errors
+def _test_grpc_tools_unimportable():
+ with _grpc_tools_unimportable():
+ if sys.version_info[0] == 3:
+ _assert_unimplemented("grpcio-tools")
+ else:
+ _assert_unimplemented("Python 3")
+
+
+# NOTE(rbellevi): multiprocessing.Process fails to pickle function objects
+# when they do not come from the "__main__" module, so this test passes
+# if run directly on Windows, but not if started by the test runner.
+@unittest.skipIf(os.name == "nt", "Windows multiprocessing unsupported")
+class DynamicStubTest(unittest.TestCase):
+
+ @unittest.skip('grpcio-tools package required')
+ def test_sunny_day(self):
+ _run_in_subprocess(_test_sunny_day)
+
+ def test_grpc_tools_unimportable(self):
+ _run_in_subprocess(_test_grpc_tools_unimportable)
+
+
+if __name__ == "__main__":
+ logging.basicConfig()
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py
index 43ba5f4745..e58007ad3e 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py
@@ -63,8 +63,8 @@ class ErrorMessageEncodingTest(unittest.TestCase):
def setUp(self):
self._server = test_common.test_server()
- self._server.add_generic_rpc_handlers(
- (_GenericHandler(weakref.proxy(self)),))
+ self._server.add_generic_rpc_handlers(
+ (_GenericHandler(weakref.proxy(self)),))
port = self._server.add_insecure_port('[::]:0')
self._server.start()
self._channel = grpc.insecure_channel('localhost:%d' % port)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py
index fe870b5a97..48ea054d2d 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py
@@ -165,9 +165,9 @@ if __name__ == '__main__':
logging.basicConfig()
parser = argparse.ArgumentParser()
parser.add_argument('scenario', type=str)
- parser.add_argument('--wait_for_interrupt',
- dest='wait_for_interrupt',
- action='store_true')
+ parser.add_argument('--wait_for_interrupt',
+ dest='wait_for_interrupt',
+ action='store_true')
args = parser.parse_args()
if args.scenario == UNSTARTED_SERVER:
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py
index 66e5196ff1..4cf5ab63bd 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py
@@ -24,7 +24,7 @@ import six
import subprocess
import sys
import threading
-import datetime
+import datetime
import time
import unittest
import logging
@@ -32,15 +32,15 @@ import logging
from tests.unit import _exit_scenarios
# SCENARIO_FILE = os.path.abspath(
-# os.path.join(os.path.dirname(os.path.realpath(__file__)),
-# '_exit_scenarios.py'))
+# os.path.join(os.path.dirname(os.path.realpath(__file__)),
+# '_exit_scenarios.py'))
INTERPRETER = sys.executable
BASE_COMMAND = [INTERPRETER, '-m', 'tests.unit._exit_scenarios']
BASE_SIGTERM_COMMAND = BASE_COMMAND + ['--wait_for_interrupt']
-INIT_TIME = datetime.timedelta(seconds=1)
-WAIT_CHECK_INTERVAL = datetime.timedelta(milliseconds=100)
-WAIT_CHECK_DEFAULT_TIMEOUT = datetime.timedelta(seconds=5)
+INIT_TIME = datetime.timedelta(seconds=1)
+WAIT_CHECK_INTERVAL = datetime.timedelta(milliseconds=100)
+WAIT_CHECK_DEFAULT_TIMEOUT = datetime.timedelta(seconds=5)
processes = []
process_lock = threading.Lock()
@@ -60,71 +60,71 @@ def cleanup_processes():
atexit.register(cleanup_processes)
-def _process_wait_with_timeout(process, timeout=WAIT_CHECK_DEFAULT_TIMEOUT):
- """A funciton to mimic 3.3+ only timeout argument in process.wait."""
- deadline = datetime.datetime.now() + timeout
- while (process.poll() is None) and (datetime.datetime.now() < deadline):
- time.sleep(WAIT_CHECK_INTERVAL.total_seconds())
- if process.returncode is None:
- raise RuntimeError('Process failed to exit within %s' % timeout)
-
-
+def _process_wait_with_timeout(process, timeout=WAIT_CHECK_DEFAULT_TIMEOUT):
+ """A funciton to mimic 3.3+ only timeout argument in process.wait."""
+ deadline = datetime.datetime.now() + timeout
+ while (process.poll() is None) and (datetime.datetime.now() < deadline):
+ time.sleep(WAIT_CHECK_INTERVAL.total_seconds())
+ if process.returncode is None:
+ raise RuntimeError('Process failed to exit within %s' % timeout)
+
+
def interrupt_and_wait(process):
with process_lock:
processes.append(process)
- time.sleep(INIT_TIME.total_seconds())
+ time.sleep(INIT_TIME.total_seconds())
os.kill(process.pid, signal.SIGINT)
- _process_wait_with_timeout(process)
+ _process_wait_with_timeout(process)
def wait(process):
with process_lock:
processes.append(process)
- _process_wait_with_timeout(process)
+ _process_wait_with_timeout(process)
-# TODO(lidiz) enable exit tests once the root cause is found.
-@unittest.skip('https://github.com/grpc/grpc/issues/23982')
-@unittest.skip('https://github.com/grpc/grpc/issues/23028')
+# TODO(lidiz) enable exit tests once the root cause is found.
+@unittest.skip('https://github.com/grpc/grpc/issues/23982')
+@unittest.skip('https://github.com/grpc/grpc/issues/23028')
class ExitTest(unittest.TestCase):
def test_unstarted_server(self):
env = os.environ.copy()
env['Y_PYTHON_ENTRY_POINT'] = ':main'
- process = subprocess.Popen(BASE_COMMAND +
- [_exit_scenarios.UNSTARTED_SERVER],
- stdout=sys.stdout,
- stderr=sys.stderr,
- env=env)
+ process = subprocess.Popen(BASE_COMMAND +
+ [_exit_scenarios.UNSTARTED_SERVER],
+ stdout=sys.stdout,
+ stderr=sys.stderr,
+ env=env)
wait(process)
def test_unstarted_server_terminate(self):
env = os.environ.copy()
env['Y_PYTHON_ENTRY_POINT'] = ':main'
- process = subprocess.Popen(BASE_SIGTERM_COMMAND +
- [_exit_scenarios.UNSTARTED_SERVER],
- stdout=sys.stdout,
- env=env)
+ process = subprocess.Popen(BASE_SIGTERM_COMMAND +
+ [_exit_scenarios.UNSTARTED_SERVER],
+ stdout=sys.stdout,
+ env=env)
interrupt_and_wait(process)
def test_running_server(self):
env = os.environ.copy()
env['Y_PYTHON_ENTRY_POINT'] = ':main'
- process = subprocess.Popen(BASE_COMMAND +
- [_exit_scenarios.RUNNING_SERVER],
- stdout=sys.stdout,
- stderr=sys.stderr,
- env=env)
+ process = subprocess.Popen(BASE_COMMAND +
+ [_exit_scenarios.RUNNING_SERVER],
+ stdout=sys.stdout,
+ stderr=sys.stderr,
+ env=env)
wait(process)
def test_running_server_terminate(self):
env = os.environ.copy()
env['Y_PYTHON_ENTRY_POINT'] = ':main'
- process = subprocess.Popen(BASE_SIGTERM_COMMAND +
- [_exit_scenarios.RUNNING_SERVER],
- stdout=sys.stdout,
- stderr=sys.stderr,
- env=env)
+ process = subprocess.Popen(BASE_SIGTERM_COMMAND +
+ [_exit_scenarios.RUNNING_SERVER],
+ stdout=sys.stdout,
+ stderr=sys.stderr,
+ env=env)
interrupt_and_wait(process)
def test_poll_connectivity_no_server(self):
@@ -151,21 +151,21 @@ class ExitTest(unittest.TestCase):
def test_poll_connectivity(self):
env = os.environ.copy()
env['Y_PYTHON_ENTRY_POINT'] = ':main'
- process = subprocess.Popen(BASE_COMMAND +
- [_exit_scenarios.POLL_CONNECTIVITY],
- stdout=sys.stdout,
- stderr=sys.stderr,
- env=env)
+ process = subprocess.Popen(BASE_COMMAND +
+ [_exit_scenarios.POLL_CONNECTIVITY],
+ stdout=sys.stdout,
+ stderr=sys.stderr,
+ env=env)
wait(process)
def test_poll_connectivity_terminate(self):
env = os.environ.copy()
env['Y_PYTHON_ENTRY_POINT'] = ':main'
- process = subprocess.Popen(BASE_SIGTERM_COMMAND +
- [_exit_scenarios.POLL_CONNECTIVITY],
- stdout=sys.stdout,
- stderr=sys.stderr,
- env=env)
+ process = subprocess.Popen(BASE_SIGTERM_COMMAND +
+ [_exit_scenarios.POLL_CONNECTIVITY],
+ stdout=sys.stdout,
+ stderr=sys.stderr,
+ env=env)
interrupt_and_wait(process)
@unittest.skipIf(os.name == 'nt',
@@ -173,11 +173,11 @@ class ExitTest(unittest.TestCase):
def test_in_flight_unary_unary_call(self):
env = os.environ.copy()
env['Y_PYTHON_ENTRY_POINT'] = ':main'
- process = subprocess.Popen(BASE_COMMAND +
- [_exit_scenarios.IN_FLIGHT_UNARY_UNARY_CALL],
- stdout=sys.stdout,
- stderr=sys.stderr,
- env=env)
+ process = subprocess.Popen(BASE_COMMAND +
+ [_exit_scenarios.IN_FLIGHT_UNARY_UNARY_CALL],
+ stdout=sys.stdout,
+ stderr=sys.stderr,
+ env=env)
interrupt_and_wait(process)
@unittest.skipIf(os.name == 'nt',
@@ -257,5 +257,5 @@ class ExitTest(unittest.TestCase):
if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
+ logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
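The _process_wait_with_timeout helper kept above exists only because Popen.wait() gained its timeout parameter in Python 3.3; where Python 2 support is not a concern, the standard-library call covers the same need. A minimal sketch:

import subprocess
import sys

proc = subprocess.Popen([sys.executable, '-c', 'print("hi")'])
try:
    # Python 3.3+: wait() itself enforces the deadline.
    proc.wait(timeout=5)
except subprocess.TimeoutExpired:
    proc.kill()
    raise RuntimeError('Process failed to exit within 5 seconds')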
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py
index db61843047..619db7b3ff 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py
@@ -172,15 +172,15 @@ def _unary_unary_multi_callable(channel):
def _unary_stream_multi_callable(channel):
- return channel.unary_stream(_UNARY_STREAM,
- request_serializer=_SERIALIZE_REQUEST,
- response_deserializer=_DESERIALIZE_RESPONSE)
+ return channel.unary_stream(_UNARY_STREAM,
+ request_serializer=_SERIALIZE_REQUEST,
+ response_deserializer=_DESERIALIZE_RESPONSE)
def _stream_unary_multi_callable(channel):
- return channel.stream_unary(_STREAM_UNARY,
- request_serializer=_SERIALIZE_REQUEST,
- response_deserializer=_DESERIALIZE_RESPONSE)
+ return channel.stream_unary(_STREAM_UNARY,
+ request_serializer=_SERIALIZE_REQUEST,
+ response_deserializer=_DESERIALIZE_RESPONSE)
def _stream_stream_multi_callable(channel):
@@ -195,10 +195,10 @@ class _ClientCallDetails(
pass
-class _GenericClientInterceptor(grpc.UnaryUnaryClientInterceptor,
- grpc.UnaryStreamClientInterceptor,
- grpc.StreamUnaryClientInterceptor,
- grpc.StreamStreamClientInterceptor):
+class _GenericClientInterceptor(grpc.UnaryUnaryClientInterceptor,
+ grpc.UnaryStreamClientInterceptor,
+ grpc.StreamUnaryClientInterceptor,
+ grpc.StreamStreamClientInterceptor):
def __init__(self, interceptor_function):
self._fn = interceptor_function
@@ -231,11 +231,11 @@ class _GenericClientInterceptor(grpc.UnaryUnaryClientInterceptor,
return postprocess(response_it) if postprocess else response_it
-class _LoggingInterceptor(grpc.ServerInterceptor,
- grpc.UnaryUnaryClientInterceptor,
- grpc.UnaryStreamClientInterceptor,
- grpc.StreamUnaryClientInterceptor,
- grpc.StreamStreamClientInterceptor):
+class _LoggingInterceptor(grpc.ServerInterceptor,
+ grpc.UnaryUnaryClientInterceptor,
+ grpc.UnaryStreamClientInterceptor,
+ grpc.StreamUnaryClientInterceptor,
+ grpc.StreamStreamClientInterceptor):
def __init__(self, tag, record):
self.tag = tag
@@ -351,13 +351,13 @@ class InterceptorTest(unittest.TestCase):
lambda x: ('secret', '42') in x.invocation_metadata,
_LoggingInterceptor('s3', self._record))
- self._server = grpc.server(self._server_pool,
- options=(('grpc.so_reuseport', 0),),
- interceptors=(
- _LoggingInterceptor('s1', self._record),
- conditional_interceptor,
- _LoggingInterceptor('s2', self._record),
- ))
+ self._server = grpc.server(self._server_pool,
+ options=(('grpc.so_reuseport', 0),),
+ interceptors=(
+ _LoggingInterceptor('s1', self._record),
+ conditional_interceptor,
+ _LoggingInterceptor('s2', self._record),
+ ))
port = self._server.add_insecure_port('[::]:0')
self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),))
self._server.start()
@@ -424,11 +424,11 @@ class InterceptorTest(unittest.TestCase):
def testInterceptedHeaderManipulationWithServerSideVerification(self):
request = b'\x07\x08'
- channel = grpc.intercept_channel(
- self._channel, _append_request_header_interceptor('secret', '42'))
- channel = grpc.intercept_channel(
- channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _append_request_header_interceptor('secret', '42'))
+ channel = grpc.intercept_channel(
+ channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
self._record[:] = []
@@ -450,9 +450,9 @@ class InterceptorTest(unittest.TestCase):
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _unary_unary_multi_callable(channel)
multi_callable(
@@ -470,9 +470,9 @@ class InterceptorTest(unittest.TestCase):
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _unary_unary_multi_callable(channel)
with self.assertRaises(grpc.RpcError) as exception_context:
@@ -491,9 +491,9 @@ class InterceptorTest(unittest.TestCase):
def testInterceptedUnaryRequestBlockingUnaryResponseWithCall(self):
request = b'\x07\x08'
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
self._record[:] = []
@@ -513,9 +513,9 @@ class InterceptorTest(unittest.TestCase):
request = b'\x07\x08'
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _unary_unary_multi_callable(channel)
response_future = multi_callable.future(
@@ -532,9 +532,9 @@ class InterceptorTest(unittest.TestCase):
request = b'\x37\x58'
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _unary_stream_multi_callable(channel)
response_iterator = multi_callable(
@@ -551,9 +551,9 @@ class InterceptorTest(unittest.TestCase):
request = _EXCEPTION_REQUEST
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _unary_stream_multi_callable(channel)
response_iterator = multi_callable(
@@ -575,9 +575,9 @@ class InterceptorTest(unittest.TestCase):
request_iterator = iter(requests)
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _stream_unary_multi_callable(channel)
multi_callable(
@@ -596,9 +596,9 @@ class InterceptorTest(unittest.TestCase):
request_iterator = iter(requests)
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _stream_unary_multi_callable(channel)
multi_callable.with_call(
@@ -618,9 +618,9 @@ class InterceptorTest(unittest.TestCase):
request_iterator = iter(requests)
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _stream_unary_multi_callable(channel)
response_future = multi_callable.future(
@@ -639,9 +639,9 @@ class InterceptorTest(unittest.TestCase):
request_iterator = iter(requests)
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _stream_unary_multi_callable(channel)
response_future = multi_callable.future(
@@ -663,9 +663,9 @@ class InterceptorTest(unittest.TestCase):
request_iterator = iter(requests)
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _stream_stream_multi_callable(channel)
response_iterator = multi_callable(
@@ -684,9 +684,9 @@ class InterceptorTest(unittest.TestCase):
request_iterator = iter(requests)
self._record[:] = []
- channel = grpc.intercept_channel(
- self._channel, _LoggingInterceptor('c1', self._record),
- _LoggingInterceptor('c2', self._record))
+ channel = grpc.intercept_channel(
+ self._channel, _LoggingInterceptor('c1', self._record),
+ _LoggingInterceptor('c2', self._record))
multi_callable = _stream_stream_multi_callable(channel)
response_iterator = multi_callable(
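The hunks above only re-indent calls to grpc.intercept_channel, which layers client interceptors over an existing channel; each interceptor receives the call details plus a continuation that invokes the next layer. A minimal sketch of the pattern for the unary-unary case (the interceptor name and address are illustrative):

import grpc

class _PrintingInterceptor(grpc.UnaryUnaryClientInterceptor):
    """Prints each unary-unary method name, then delegates to the channel."""

    def intercept_unary_unary(self, continuation, client_call_details, request):
        print('calling', client_call_details.method)
        return continuation(client_call_details, request)

channel = grpc.insecure_channel('localhost:50051')
intercepted_channel = grpc.intercept_channel(channel, _PrintingInterceptor())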
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py
index 8e9b369fcd..d1f1499d8c 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py
@@ -36,15 +36,15 @@ def _unary_unary_multi_callable(channel):
def _unary_stream_multi_callable(channel):
- return channel.unary_stream(_UNARY_STREAM,
- request_serializer=_SERIALIZE_REQUEST,
- response_deserializer=_DESERIALIZE_RESPONSE)
+ return channel.unary_stream(_UNARY_STREAM,
+ request_serializer=_SERIALIZE_REQUEST,
+ response_deserializer=_DESERIALIZE_RESPONSE)
def _stream_unary_multi_callable(channel):
- return channel.stream_unary(_STREAM_UNARY,
- request_serializer=_SERIALIZE_REQUEST,
- response_deserializer=_DESERIALIZE_RESPONSE)
+ return channel.stream_unary(_STREAM_UNARY,
+ request_serializer=_SERIALIZE_REQUEST,
+ response_deserializer=_DESERIALIZE_RESPONSE)
def _stream_stream_multi_callable(channel):
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py
index 0a01834b2a..a0208b51df 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py
@@ -157,15 +157,15 @@ def _unary_unary_multi_callable(channel):
def _unary_stream_multi_callable(channel):
- return channel.unary_stream(_UNARY_STREAM,
- request_serializer=_SERIALIZE_REQUEST,
- response_deserializer=_DESERIALIZE_RESPONSE)
+ return channel.unary_stream(_UNARY_STREAM,
+ request_serializer=_SERIALIZE_REQUEST,
+ response_deserializer=_DESERIALIZE_RESPONSE)
def _stream_unary_multi_callable(channel):
- return channel.stream_unary(_STREAM_UNARY,
- request_serializer=_SERIALIZE_REQUEST,
- response_deserializer=_DESERIALIZE_RESPONSE)
+ return channel.stream_unary(_STREAM_UNARY,
+ request_serializer=_SERIALIZE_REQUEST,
+ response_deserializer=_DESERIALIZE_RESPONSE)
def _stream_stream_multi_callable(channel):
@@ -177,7 +177,7 @@ def _defective_handler_multi_callable(channel):
class InvocationDefectsTest(unittest.TestCase):
- """Tests the handling of exception-raising user code on the client-side."""
+ """Tests the handling of exception-raising user code on the client-side."""
def setUp(self):
self._control = test_control.PauseFailControl()
@@ -195,44 +195,44 @@ class InvocationDefectsTest(unittest.TestCase):
self._channel.close()
def testIterableStreamRequestBlockingUnaryResponse(self):
- requests = object()
+ requests = object()
multi_callable = _stream_unary_multi_callable(self._channel)
- with self.assertRaises(grpc.RpcError) as exception_context:
- multi_callable(
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ multi_callable(
requests,
metadata=(('test',
'IterableStreamRequestBlockingUnaryResponse'),))
- self.assertIs(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+
def testIterableStreamRequestFutureUnaryResponse(self):
- requests = object()
+ requests = object()
multi_callable = _stream_unary_multi_callable(self._channel)
response_future = multi_callable.future(
requests,
metadata=(('test', 'IterableStreamRequestFutureUnaryResponse'),))
- with self.assertRaises(grpc.RpcError) as exception_context:
- response_future.result()
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ response_future.result()
+
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
- self.assertIs(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
def testIterableStreamRequestStreamResponse(self):
- requests = object()
+ requests = object()
multi_callable = _stream_stream_multi_callable(self._channel)
response_iterator = multi_callable(
requests,
metadata=(('test', 'IterableStreamRequestStreamResponse'),))
- with self.assertRaises(grpc.RpcError) as exception_context:
+ with self.assertRaises(grpc.RpcError) as exception_context:
next(response_iterator)
- self.assertIs(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+
def testIteratorStreamRequestStreamResponse(self):
requests_iterator = FailAfterFewIterationsCounter(
test_constants.STREAM_LENGTH // 2, b'\x07\x08')
@@ -241,21 +241,21 @@ class InvocationDefectsTest(unittest.TestCase):
requests_iterator,
metadata=(('test', 'IteratorStreamRequestStreamResponse'),))
- with self.assertRaises(grpc.RpcError) as exception_context:
+ with self.assertRaises(grpc.RpcError) as exception_context:
for _ in range(test_constants.STREAM_LENGTH // 2 + 1):
next(response_iterator)
- self.assertIs(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+
def testDefectiveGenericRpcHandlerUnaryResponse(self):
request = b'\x07\x08'
multi_callable = _defective_handler_multi_callable(self._channel)
with self.assertRaises(grpc.RpcError) as exception_context:
- multi_callable(request,
- metadata=(('test',
- 'DefectiveGenericRpcHandlerUnary'),))
+ multi_callable(request,
+ metadata=(('test',
+ 'DefectiveGenericRpcHandlerUnary'),))
self.assertIs(grpc.StatusCode.UNKNOWN,
exception_context.exception.code())
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py
index ecca610ccd..cd1f71dbee 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py
@@ -47,10 +47,10 @@ class LocalCredentialsTest(unittest.TestCase):
server.start()
with grpc.secure_channel(server_addr.format(port),
channel_creds) as channel:
- self.assertEqual(
- b'abc',
- channel.unary_unary('/test/method')(b'abc',
- wait_for_ready=True))
+ self.assertEqual(
+ b'abc',
+ channel.unary_unary('/test/method')(b'abc',
+ wait_for_ready=True))
server.stop(None)
@unittest.skipIf(os.name == 'nt',
@@ -66,10 +66,10 @@ class LocalCredentialsTest(unittest.TestCase):
server.add_secure_port(server_addr, server_creds)
server.start()
with grpc.secure_channel(server_addr, channel_creds) as channel:
- self.assertEqual(
- b'abc',
- channel.unary_unary('/test/method')(b'abc',
- wait_for_ready=True))
+ self.assertEqual(
+ b'abc',
+ channel.unary_unary('/test/method')(b'abc',
+ wait_for_ready=True))
server.stop(None)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py
index 699299d32e..1304bb5587 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py
@@ -87,10 +87,10 @@ class LoggingTest(unittest.TestCase):
def _verifyScriptSucceeds(self, script):
env = os.environ.copy()
env['Y_PYTHON_ENTRY_POINT'] = ':main'
- process = subprocess.Popen([INTERPRETER, '-c', script],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- env=env)
+ process = subprocess.Popen([INTERPRETER, '-c', script],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=env)
out, err = process.communicate()
self.assertEqual(
0, process.returncode,
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py
index 24112695a6..5b06eb2bfe 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py
@@ -171,19 +171,19 @@ class _Servicer(object):
def _generic_handler(servicer):
method_handlers = {
_UNARY_UNARY:
- grpc.unary_unary_rpc_method_handler(
- servicer.unary_unary,
- request_deserializer=_REQUEST_DESERIALIZER,
- response_serializer=_RESPONSE_SERIALIZER),
+ grpc.unary_unary_rpc_method_handler(
+ servicer.unary_unary,
+ request_deserializer=_REQUEST_DESERIALIZER,
+ response_serializer=_RESPONSE_SERIALIZER),
_UNARY_STREAM:
- grpc.unary_stream_rpc_method_handler(servicer.unary_stream),
+ grpc.unary_stream_rpc_method_handler(servicer.unary_stream),
_STREAM_UNARY:
- grpc.stream_unary_rpc_method_handler(servicer.stream_unary),
+ grpc.stream_unary_rpc_method_handler(servicer.stream_unary),
_STREAM_STREAM:
- grpc.stream_stream_rpc_method_handler(
- servicer.stream_stream,
- request_deserializer=_REQUEST_DESERIALIZER,
- response_serializer=_RESPONSE_SERIALIZER),
+ grpc.stream_stream_rpc_method_handler(
+ servicer.stream_stream,
+ request_deserializer=_REQUEST_DESERIALIZER,
+ response_serializer=_RESPONSE_SERIALIZER),
}
return grpc.method_handlers_generic_handler(_SERVICE, method_handlers)
@@ -208,18 +208,18 @@ class MetadataCodeDetailsTest(unittest.TestCase):
request_serializer=_REQUEST_SERIALIZER,
response_deserializer=_RESPONSE_DESERIALIZER,
)
- self._unary_stream = self._channel.unary_stream(
- '/'.join((
- '',
- _SERVICE,
- _UNARY_STREAM,
- )),)
- self._stream_unary = self._channel.stream_unary(
- '/'.join((
- '',
- _SERVICE,
- _STREAM_UNARY,
- )),)
+ self._unary_stream = self._channel.unary_stream(
+ '/'.join((
+ '',
+ _SERVICE,
+ _UNARY_STREAM,
+ )),)
+ self._stream_unary = self._channel.stream_unary(
+ '/'.join((
+ '',
+ _SERVICE,
+ _STREAM_UNARY,
+ )),)
self._stream_stream = self._channel.stream_stream(
'/'.join((
'',
@@ -254,8 +254,8 @@ class MetadataCodeDetailsTest(unittest.TestCase):
def testSuccessfulUnaryStream(self):
self._servicer.set_details(_DETAILS)
- response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST,
- metadata=_CLIENT_METADATA)
+ response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST,
+ metadata=_CLIENT_METADATA)
received_initial_metadata = response_iterator_call.initial_metadata()
list(response_iterator_call)
@@ -292,9 +292,9 @@ class MetadataCodeDetailsTest(unittest.TestCase):
def testSuccessfulStreamStream(self):
self._servicer.set_details(_DETAILS)
- response_iterator_call = self._stream_stream(iter(
- [object()] * test_constants.STREAM_LENGTH),
- metadata=_CLIENT_METADATA)
+ response_iterator_call = self._stream_stream(iter(
+ [object()] * test_constants.STREAM_LENGTH),
+ metadata=_CLIENT_METADATA)
received_initial_metadata = response_iterator_call.initial_metadata()
list(response_iterator_call)
@@ -375,9 +375,9 @@ class MetadataCodeDetailsTest(unittest.TestCase):
self._servicer.set_abort_call()
with self.assertRaises(grpc.RpcError) as exception_context:
- self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] *
- test_constants.STREAM_LENGTH),
- metadata=_CLIENT_METADATA)
+ self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] *
+ test_constants.STREAM_LENGTH),
+ metadata=_CLIENT_METADATA)
self.assertTrue(
test_common.metadata_transmitted(
@@ -450,8 +450,8 @@ class MetadataCodeDetailsTest(unittest.TestCase):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
- response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST,
- metadata=_CLIENT_METADATA)
+ response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST,
+ metadata=_CLIENT_METADATA)
received_initial_metadata = response_iterator_call.initial_metadata()
with self.assertRaises(grpc.RpcError):
list(response_iterator_call)
@@ -474,9 +474,9 @@ class MetadataCodeDetailsTest(unittest.TestCase):
self._servicer.set_details(_DETAILS)
with self.assertRaises(grpc.RpcError) as exception_context:
- self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] *
- test_constants.STREAM_LENGTH),
- metadata=_CLIENT_METADATA)
+ self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] *
+ test_constants.STREAM_LENGTH),
+ metadata=_CLIENT_METADATA)
self.assertTrue(
test_common.metadata_transmitted(
@@ -496,9 +496,9 @@ class MetadataCodeDetailsTest(unittest.TestCase):
self._servicer.set_code(_NON_OK_CODE)
self._servicer.set_details(_DETAILS)
- response_iterator_call = self._stream_stream(iter(
- [object()] * test_constants.STREAM_LENGTH),
- metadata=_CLIENT_METADATA)
+ response_iterator_call = self._stream_stream(iter(
+ [object()] * test_constants.STREAM_LENGTH),
+ metadata=_CLIENT_METADATA)
received_initial_metadata = response_iterator_call.initial_metadata()
with self.assertRaises(grpc.RpcError) as exception_context:
list(response_iterator_call)
@@ -543,8 +543,8 @@ class MetadataCodeDetailsTest(unittest.TestCase):
self._servicer.set_details(_DETAILS)
self._servicer.set_exception()
- response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST,
- metadata=_CLIENT_METADATA)
+ response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST,
+ metadata=_CLIENT_METADATA)
received_initial_metadata = response_iterator_call.initial_metadata()
with self.assertRaises(grpc.RpcError):
list(response_iterator_call)
@@ -568,9 +568,9 @@ class MetadataCodeDetailsTest(unittest.TestCase):
self._servicer.set_exception()
with self.assertRaises(grpc.RpcError) as exception_context:
- self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] *
- test_constants.STREAM_LENGTH),
- metadata=_CLIENT_METADATA)
+ self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] *
+ test_constants.STREAM_LENGTH),
+ metadata=_CLIENT_METADATA)
self.assertTrue(
test_common.metadata_transmitted(
@@ -591,9 +591,9 @@ class MetadataCodeDetailsTest(unittest.TestCase):
self._servicer.set_details(_DETAILS)
self._servicer.set_exception()
- response_iterator_call = self._stream_stream(iter(
- [object()] * test_constants.STREAM_LENGTH),
- metadata=_CLIENT_METADATA)
+ response_iterator_call = self._stream_stream(iter(
+ [object()] * test_constants.STREAM_LENGTH),
+ metadata=_CLIENT_METADATA)
received_initial_metadata = response_iterator_call.initial_metadata()
with self.assertRaises(grpc.RpcError):
list(response_iterator_call)
@@ -639,9 +639,9 @@ class MetadataCodeDetailsTest(unittest.TestCase):
self._servicer.set_return_none()
with self.assertRaises(grpc.RpcError) as exception_context:
- self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] *
- test_constants.STREAM_LENGTH),
- metadata=_CLIENT_METADATA)
+ self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] *
+ test_constants.STREAM_LENGTH),
+ metadata=_CLIENT_METADATA)
self.assertTrue(
test_common.metadata_transmitted(
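The _generic_handler hunk above builds a dictionary of grpc.*_rpc_method_handler objects and registers them in one call through grpc.method_handlers_generic_handler. A minimal sketch of that registration pattern, with an illustrative echo method and no custom serializers:

import grpc
from concurrent import futures

def _echo(request, context):
    # With no serializers configured, request and response are raw bytes.
    return request

handler = grpc.method_handlers_generic_handler(
    'test', {'Echo': grpc.unary_unary_rpc_method_handler(_echo)})

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
server.add_generic_rpc_handlers((handler,))
port = server.add_insecure_port('[::]:0')
server.start()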
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py
index fc86f0a656..e2b36b1c70 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py
@@ -17,7 +17,7 @@ import time
import weakref
import unittest
import threading
-import logging
+import logging
import socket
from six.moves import queue
@@ -26,7 +26,7 @@ import grpc
from tests.unit import test_common
from tests.unit.framework.common import test_constants
import tests.unit.framework.common
-from tests.unit.framework.common import get_socket
+from tests.unit.framework.common import get_socket
_UNARY_UNARY = '/test/UnaryUnary'
_UNARY_STREAM = '/test/UnaryStream'
@@ -69,17 +69,17 @@ class _MethodHandler(grpc.RpcMethodHandler):
self.stream_unary = None
self.stream_stream = None
if self.request_streaming and self.response_streaming:
- self.stream_stream = lambda req, ctx: handle_stream_stream(
- test, req, ctx)
+ self.stream_stream = lambda req, ctx: handle_stream_stream(
+ test, req, ctx)
elif self.request_streaming:
- self.stream_unary = lambda req, ctx: handle_stream_unary(
- test, req, ctx)
+ self.stream_unary = lambda req, ctx: handle_stream_unary(
+ test, req, ctx)
elif self.response_streaming:
- self.unary_stream = lambda req, ctx: handle_unary_stream(
- test, req, ctx)
+ self.unary_stream = lambda req, ctx: handle_unary_stream(
+ test, req, ctx)
else:
- self.unary_unary = lambda req, ctx: handle_unary_unary(
- test, req, ctx)
+ self.unary_unary = lambda req, ctx: handle_unary_unary(
+ test, req, ctx)
class _GenericHandler(grpc.GenericRpcHandler):
@@ -102,9 +102,9 @@ class _GenericHandler(grpc.GenericRpcHandler):
def create_dummy_channel():
"""Creating dummy channels is a workaround for retries"""
- host, port, sock = get_socket(sock_options=(socket.SO_REUSEADDR,))
- sock.close()
- return grpc.insecure_channel('{}:{}'.format(host, port))
+ host, port, sock = get_socket(sock_options=(socket.SO_REUSEADDR,))
+ sock.close()
+ return grpc.insecure_channel('{}:{}'.format(host, port))
def perform_unary_unary_call(channel, wait_for_ready=None):
@@ -194,9 +194,9 @@ class MetadataFlagsTest(unittest.TestCase):
def test_call_wait_for_ready_disabled(self):
for perform_call in _ALL_CALL_CASES:
with create_dummy_channel() as channel:
- self.check_connection_does_failfast(perform_call,
- channel,
- wait_for_ready=False)
+ self.check_connection_does_failfast(perform_call,
+ channel,
+ wait_for_ready=False)
def test_call_wait_for_ready_enabled(self):
# To test the wait mechanism, Python thread is required to make
@@ -206,55 +206,55 @@ class MetadataFlagsTest(unittest.TestCase):
# exceptions and raise them again in main thread.
unhandled_exceptions = queue.Queue()
- # We just need an unused TCP port
- host, port, sock = get_socket(sock_options=(socket.SO_REUSEADDR,))
- sock.close()
-
- addr = '{}:{}'.format(host, port)
- wg = test_common.WaitGroup(len(_ALL_CALL_CASES))
-
- def wait_for_transient_failure(channel_connectivity):
- if channel_connectivity == grpc.ChannelConnectivity.TRANSIENT_FAILURE:
- wg.done()
-
- def test_call(perform_call):
- with grpc.insecure_channel(addr) as channel:
- try:
- channel.subscribe(wait_for_transient_failure)
- perform_call(channel, wait_for_ready=True)
- except BaseException as e: # pylint: disable=broad-except
- # If the call failed, the thread would be destroyed. The
- # channel object can be collected before calling the
- # callback, which will result in a deadlock.
+ # We just need an unused TCP port
+ host, port, sock = get_socket(sock_options=(socket.SO_REUSEADDR,))
+ sock.close()
+
+ addr = '{}:{}'.format(host, port)
+ wg = test_common.WaitGroup(len(_ALL_CALL_CASES))
+
+ def wait_for_transient_failure(channel_connectivity):
+ if channel_connectivity == grpc.ChannelConnectivity.TRANSIENT_FAILURE:
+ wg.done()
+
+ def test_call(perform_call):
+ with grpc.insecure_channel(addr) as channel:
+ try:
+ channel.subscribe(wait_for_transient_failure)
+ perform_call(channel, wait_for_ready=True)
+ except BaseException as e: # pylint: disable=broad-except
+ # If the call failed, the thread would be destroyed. The
+ # channel object can be collected before calling the
+ # callback, which will result in a deadlock.
wg.done()
- unhandled_exceptions.put(e, True)
+ unhandled_exceptions.put(e, True)
- test_threads = []
- for perform_call in _ALL_CALL_CASES:
- test_thread = threading.Thread(target=test_call,
- args=(perform_call,))
- test_thread.daemon = True
- test_thread.exception = None
- test_thread.start()
- test_threads.append(test_thread)
+ test_threads = []
+ for perform_call in _ALL_CALL_CASES:
+ test_thread = threading.Thread(target=test_call,
+ args=(perform_call,))
+ test_thread.daemon = True
+ test_thread.exception = None
+ test_thread.start()
+ test_threads.append(test_thread)
- # Start the server after the connections are waiting
- wg.wait()
- server = test_common.test_server(reuse_port=True)
- server.add_generic_rpc_handlers((_GenericHandler(weakref.proxy(self)),))
- server.add_insecure_port(addr)
- server.start()
+ # Start the server after the connections are waiting
+ wg.wait()
+ server = test_common.test_server(reuse_port=True)
+ server.add_generic_rpc_handlers((_GenericHandler(weakref.proxy(self)),))
+ server.add_insecure_port(addr)
+ server.start()
- for test_thread in test_threads:
- test_thread.join()
+ for test_thread in test_threads:
+ test_thread.join()
-        # Stop the server to make the test end properly
- server.stop(0)
+        # Stop the server to make the test end properly
+ server.stop(0)
- if not unhandled_exceptions.empty():
- raise unhandled_exceptions.get(True)
+ if not unhandled_exceptions.empty():
+ raise unhandled_exceptions.get(True)
if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
+ logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
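test_call_wait_for_ready_enabled above starts its client threads before any server is listening and relies on the per-call wait_for_ready flag to queue the RPCs rather than fail fast with UNAVAILABLE. A minimal sketch of the flag in isolation, assuming a server that eventually comes up on the illustrative address below:

import grpc

with grpc.insecure_channel('localhost:50051') as channel:
    ping = channel.unary_unary('/test/UnaryUnary')
    # wait_for_ready=True keeps the RPC pending (up to the timeout) until the
    # channel reaches READY instead of failing immediately.
    response = ping(b'\x07\x08', wait_for_ready=True, timeout=30)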
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py
index e1498ca9ba..3e7717b04c 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py
@@ -177,12 +177,12 @@ class MetadataTest(unittest.TestCase):
def setUp(self):
self._server = test_common.test_server()
- self._server.add_generic_rpc_handlers(
- (_GenericHandler(weakref.proxy(self)),))
+ self._server.add_generic_rpc_handlers(
+ (_GenericHandler(weakref.proxy(self)),))
port = self._server.add_insecure_port('[::]:0')
self._server.start()
- self._channel = grpc.insecure_channel('localhost:%d' % port,
- options=_CHANNEL_ARGS)
+ self._channel = grpc.insecure_channel('localhost:%d' % port,
+ options=_CHANNEL_ARGS)
def tearDown(self):
self._server.stop(0)
@@ -225,8 +225,8 @@ class MetadataTest(unittest.TestCase):
def testStreamStream(self):
multi_callable = self._channel.stream_stream(_STREAM_STREAM)
- call = multi_callable(iter([_REQUEST] * test_constants.STREAM_LENGTH),
- metadata=_INVOCATION_METADATA)
+ call = multi_callable(iter([_REQUEST] * test_constants.STREAM_LENGTH),
+ metadata=_INVOCATION_METADATA)
self.assertTrue(
test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA,
call.initial_metadata()))
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py
index 758895bf87..16feb4b1ff 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py
@@ -40,7 +40,7 @@ class ReconnectTest(unittest.TestCase):
server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
handler = grpc.method_handlers_generic_handler('test', {
'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(_handle_unary_unary)
+ grpc.unary_unary_rpc_method_handler(_handle_unary_unary)
})
options = (('grpc.so_reuseport', 1),)
with bound_socket() as (host, port):
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_1_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_1_test.py
index d432c27ef0..9b0cb29a0d 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_1_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_1_test.py
@@ -1,232 +1,232 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test of RPCs made against gRPC Python's application-layer API."""
-
-import itertools
-import threading
-import unittest
-import logging
-from concurrent import futures
-
-import grpc
-from grpc.framework.foundation import logging_pool
-
-from tests.unit._rpc_test_helpers import (
- TIMEOUT_SHORT, Callback, unary_unary_multi_callable,
- unary_stream_multi_callable, unary_stream_non_blocking_multi_callable,
- stream_unary_multi_callable, stream_stream_multi_callable,
- stream_stream_non_blocking_multi_callable, BaseRPCTest)
-from tests.unit.framework.common import test_constants
-
-
-class RPCPart1Test(BaseRPCTest, unittest.TestCase):
-
- def testExpiredStreamRequestBlockingUnaryResponse(self):
- requests = tuple(
- b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
-
- multi_callable = stream_unary_multi_callable(self._channel)
- with self._control.pause():
- with self.assertRaises(grpc.RpcError) as exception_context:
- multi_callable(
- request_iterator,
- timeout=TIMEOUT_SHORT,
- metadata=(('test',
- 'ExpiredStreamRequestBlockingUnaryResponse'),))
-
- self.assertIsInstance(exception_context.exception, grpc.RpcError)
- self.assertIsInstance(exception_context.exception, grpc.Call)
- self.assertIsNotNone(exception_context.exception.initial_metadata())
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
- exception_context.exception.code())
- self.assertIsNotNone(exception_context.exception.details())
- self.assertIsNotNone(exception_context.exception.trailing_metadata())
-
- def testExpiredStreamRequestFutureUnaryResponse(self):
- requests = tuple(
- b'\x07\x18' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
- callback = Callback()
-
- multi_callable = stream_unary_multi_callable(self._channel)
- with self._control.pause():
- response_future = multi_callable.future(
- request_iterator,
- timeout=TIMEOUT_SHORT,
- metadata=(('test', 'ExpiredStreamRequestFutureUnaryResponse'),))
- with self.assertRaises(grpc.FutureTimeoutError):
- response_future.result(timeout=TIMEOUT_SHORT / 2.0)
- response_future.add_done_callback(callback)
- value_passed_to_callback = callback.value()
-
- with self.assertRaises(grpc.RpcError) as exception_context:
- response_future.result()
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code())
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
- exception_context.exception.code())
- self.assertIsInstance(response_future.exception(), grpc.RpcError)
- self.assertIsNotNone(response_future.traceback())
- self.assertIs(response_future, value_passed_to_callback)
- self.assertIsNotNone(response_future.initial_metadata())
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code())
- self.assertIsNotNone(response_future.details())
- self.assertIsNotNone(response_future.trailing_metadata())
-
- def testExpiredStreamRequestStreamResponse(self):
- self._expired_stream_request_stream_response(
- stream_stream_multi_callable(self._channel))
-
- def testExpiredStreamRequestStreamResponseNonBlocking(self):
- self._expired_stream_request_stream_response(
- stream_stream_non_blocking_multi_callable(self._channel))
-
- def testFailedUnaryRequestBlockingUnaryResponse(self):
- request = b'\x37\x17'
-
- multi_callable = unary_unary_multi_callable(self._channel)
- with self._control.fail():
- with self.assertRaises(grpc.RpcError) as exception_context:
- multi_callable.with_call(
- request,
- metadata=(('test',
- 'FailedUnaryRequestBlockingUnaryResponse'),))
-
- self.assertIs(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-        # sanity checks to make sure the returned string contains the default
-        # members of the error
- debug_error_string = exception_context.exception.debug_error_string()
- self.assertIn('created', debug_error_string)
- self.assertIn('description', debug_error_string)
- self.assertIn('file', debug_error_string)
- self.assertIn('file_line', debug_error_string)
-
- def testFailedUnaryRequestFutureUnaryResponse(self):
- request = b'\x37\x17'
- callback = Callback()
-
- multi_callable = unary_unary_multi_callable(self._channel)
- with self._control.fail():
- response_future = multi_callable.future(
- request,
- metadata=(('test', 'FailedUnaryRequestFutureUnaryResponse'),))
- response_future.add_done_callback(callback)
- value_passed_to_callback = callback.value()
-
- self.assertIsInstance(response_future, grpc.Future)
- self.assertIsInstance(response_future, grpc.Call)
- with self.assertRaises(grpc.RpcError) as exception_context:
- response_future.result()
- self.assertIs(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
- self.assertIsInstance(response_future.exception(), grpc.RpcError)
- self.assertIsNotNone(response_future.traceback())
- self.assertIs(grpc.StatusCode.UNKNOWN,
- response_future.exception().code())
- self.assertIs(response_future, value_passed_to_callback)
-
- def testFailedUnaryRequestStreamResponse(self):
- self._failed_unary_request_stream_response(
- unary_stream_multi_callable(self._channel))
-
- def testFailedUnaryRequestStreamResponseNonBlocking(self):
- self._failed_unary_request_stream_response(
- unary_stream_non_blocking_multi_callable(self._channel))
-
- def testFailedStreamRequestBlockingUnaryResponse(self):
- requests = tuple(
- b'\x47\x58' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
-
- multi_callable = stream_unary_multi_callable(self._channel)
- with self._control.fail():
- with self.assertRaises(grpc.RpcError) as exception_context:
- multi_callable(
- request_iterator,
- metadata=(('test',
- 'FailedStreamRequestBlockingUnaryResponse'),))
-
- self.assertIs(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
- def testFailedStreamRequestFutureUnaryResponse(self):
- requests = tuple(
- b'\x07\x18' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
- callback = Callback()
-
- multi_callable = stream_unary_multi_callable(self._channel)
- with self._control.fail():
- response_future = multi_callable.future(
- request_iterator,
- metadata=(('test', 'FailedStreamRequestFutureUnaryResponse'),))
- response_future.add_done_callback(callback)
- value_passed_to_callback = callback.value()
-
- with self.assertRaises(grpc.RpcError) as exception_context:
- response_future.result()
- self.assertIs(grpc.StatusCode.UNKNOWN, response_future.code())
- self.assertIs(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
- self.assertIsInstance(response_future.exception(), grpc.RpcError)
- self.assertIsNotNone(response_future.traceback())
- self.assertIs(response_future, value_passed_to_callback)
-
- def testFailedStreamRequestStreamResponse(self):
- self._failed_stream_request_stream_response(
- stream_stream_multi_callable(self._channel))
-
- def testFailedStreamRequestStreamResponseNonBlocking(self):
- self._failed_stream_request_stream_response(
- stream_stream_non_blocking_multi_callable(self._channel))
-
- def testIgnoredUnaryRequestFutureUnaryResponse(self):
- request = b'\x37\x17'
-
- multi_callable = unary_unary_multi_callable(self._channel)
- multi_callable.future(
- request,
- metadata=(('test', 'IgnoredUnaryRequestFutureUnaryResponse'),))
-
- def testIgnoredUnaryRequestStreamResponse(self):
- self._ignored_unary_stream_request_future_unary_response(
- unary_stream_multi_callable(self._channel))
-
- def testIgnoredUnaryRequestStreamResponseNonBlocking(self):
- self._ignored_unary_stream_request_future_unary_response(
- unary_stream_non_blocking_multi_callable(self._channel))
-
- def testIgnoredStreamRequestFutureUnaryResponse(self):
- requests = tuple(
- b'\x07\x18' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
-
- multi_callable = stream_unary_multi_callable(self._channel)
- multi_callable.future(
- request_iterator,
- metadata=(('test', 'IgnoredStreamRequestFutureUnaryResponse'),))
-
- def testIgnoredStreamRequestStreamResponse(self):
- self._ignored_stream_request_stream_response(
- stream_stream_multi_callable(self._channel))
-
- def testIgnoredStreamRequestStreamResponseNonBlocking(self):
- self._ignored_stream_request_stream_response(
- stream_stream_non_blocking_multi_callable(self._channel))
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test of RPCs made against gRPC Python's application-layer API."""
+
+import itertools
+import threading
+import unittest
+import logging
+from concurrent import futures
+
+import grpc
+from grpc.framework.foundation import logging_pool
+
+from tests.unit._rpc_test_helpers import (
+ TIMEOUT_SHORT, Callback, unary_unary_multi_callable,
+ unary_stream_multi_callable, unary_stream_non_blocking_multi_callable,
+ stream_unary_multi_callable, stream_stream_multi_callable,
+ stream_stream_non_blocking_multi_callable, BaseRPCTest)
+from tests.unit.framework.common import test_constants
+
+
+class RPCPart1Test(BaseRPCTest, unittest.TestCase):
+
+ def testExpiredStreamRequestBlockingUnaryResponse(self):
+ requests = tuple(
+ b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ with self._control.pause():
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ multi_callable(
+ request_iterator,
+ timeout=TIMEOUT_SHORT,
+ metadata=(('test',
+ 'ExpiredStreamRequestBlockingUnaryResponse'),))
+
+ self.assertIsInstance(exception_context.exception, grpc.RpcError)
+ self.assertIsInstance(exception_context.exception, grpc.Call)
+ self.assertIsNotNone(exception_context.exception.initial_metadata())
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
+ exception_context.exception.code())
+ self.assertIsNotNone(exception_context.exception.details())
+ self.assertIsNotNone(exception_context.exception.trailing_metadata())
+
+ def testExpiredStreamRequestFutureUnaryResponse(self):
+ requests = tuple(
+ b'\x07\x18' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+ callback = Callback()
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ with self._control.pause():
+ response_future = multi_callable.future(
+ request_iterator,
+ timeout=TIMEOUT_SHORT,
+ metadata=(('test', 'ExpiredStreamRequestFutureUnaryResponse'),))
+ with self.assertRaises(grpc.FutureTimeoutError):
+ response_future.result(timeout=TIMEOUT_SHORT / 2.0)
+ response_future.add_done_callback(callback)
+ value_passed_to_callback = callback.value()
+
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ response_future.result()
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code())
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
+ exception_context.exception.code())
+ self.assertIsInstance(response_future.exception(), grpc.RpcError)
+ self.assertIsNotNone(response_future.traceback())
+ self.assertIs(response_future, value_passed_to_callback)
+ self.assertIsNotNone(response_future.initial_metadata())
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code())
+ self.assertIsNotNone(response_future.details())
+ self.assertIsNotNone(response_future.trailing_metadata())
+
+ def testExpiredStreamRequestStreamResponse(self):
+ self._expired_stream_request_stream_response(
+ stream_stream_multi_callable(self._channel))
+
+ def testExpiredStreamRequestStreamResponseNonBlocking(self):
+ self._expired_stream_request_stream_response(
+ stream_stream_non_blocking_multi_callable(self._channel))
+
+ def testFailedUnaryRequestBlockingUnaryResponse(self):
+ request = b'\x37\x17'
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ with self._control.fail():
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ multi_callable.with_call(
+ request,
+ metadata=(('test',
+ 'FailedUnaryRequestBlockingUnaryResponse'),))
+
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+        # sanity checks to make sure the returned string contains the default
+        # members of the error
+ debug_error_string = exception_context.exception.debug_error_string()
+ self.assertIn('created', debug_error_string)
+ self.assertIn('description', debug_error_string)
+ self.assertIn('file', debug_error_string)
+ self.assertIn('file_line', debug_error_string)
+
+ def testFailedUnaryRequestFutureUnaryResponse(self):
+ request = b'\x37\x17'
+ callback = Callback()
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ with self._control.fail():
+ response_future = multi_callable.future(
+ request,
+ metadata=(('test', 'FailedUnaryRequestFutureUnaryResponse'),))
+ response_future.add_done_callback(callback)
+ value_passed_to_callback = callback.value()
+
+ self.assertIsInstance(response_future, grpc.Future)
+ self.assertIsInstance(response_future, grpc.Call)
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ response_future.result()
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+ self.assertIsInstance(response_future.exception(), grpc.RpcError)
+ self.assertIsNotNone(response_future.traceback())
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ response_future.exception().code())
+ self.assertIs(response_future, value_passed_to_callback)
+
+ def testFailedUnaryRequestStreamResponse(self):
+ self._failed_unary_request_stream_response(
+ unary_stream_multi_callable(self._channel))
+
+ def testFailedUnaryRequestStreamResponseNonBlocking(self):
+ self._failed_unary_request_stream_response(
+ unary_stream_non_blocking_multi_callable(self._channel))
+
+ def testFailedStreamRequestBlockingUnaryResponse(self):
+ requests = tuple(
+ b'\x47\x58' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ with self._control.fail():
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ multi_callable(
+ request_iterator,
+ metadata=(('test',
+ 'FailedStreamRequestBlockingUnaryResponse'),))
+
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+
+ def testFailedStreamRequestFutureUnaryResponse(self):
+ requests = tuple(
+ b'\x07\x18' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+ callback = Callback()
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ with self._control.fail():
+ response_future = multi_callable.future(
+ request_iterator,
+ metadata=(('test', 'FailedStreamRequestFutureUnaryResponse'),))
+ response_future.add_done_callback(callback)
+ value_passed_to_callback = callback.value()
+
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ response_future.result()
+ self.assertIs(grpc.StatusCode.UNKNOWN, response_future.code())
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+ self.assertIsInstance(response_future.exception(), grpc.RpcError)
+ self.assertIsNotNone(response_future.traceback())
+ self.assertIs(response_future, value_passed_to_callback)
+
+ def testFailedStreamRequestStreamResponse(self):
+ self._failed_stream_request_stream_response(
+ stream_stream_multi_callable(self._channel))
+
+ def testFailedStreamRequestStreamResponseNonBlocking(self):
+ self._failed_stream_request_stream_response(
+ stream_stream_non_blocking_multi_callable(self._channel))
+
+ def testIgnoredUnaryRequestFutureUnaryResponse(self):
+ request = b'\x37\x17'
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ multi_callable.future(
+ request,
+ metadata=(('test', 'IgnoredUnaryRequestFutureUnaryResponse'),))
+
+ def testIgnoredUnaryRequestStreamResponse(self):
+ self._ignored_unary_stream_request_future_unary_response(
+ unary_stream_multi_callable(self._channel))
+
+ def testIgnoredUnaryRequestStreamResponseNonBlocking(self):
+ self._ignored_unary_stream_request_future_unary_response(
+ unary_stream_non_blocking_multi_callable(self._channel))
+
+ def testIgnoredStreamRequestFutureUnaryResponse(self):
+ requests = tuple(
+ b'\x07\x18' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ multi_callable.future(
+ request_iterator,
+ metadata=(('test', 'IgnoredStreamRequestFutureUnaryResponse'),))
+
+ def testIgnoredStreamRequestStreamResponse(self):
+ self._ignored_stream_request_stream_response(
+ stream_stream_multi_callable(self._channel))
+
+ def testIgnoredStreamRequestStreamResponseNonBlocking(self):
+ self._ignored_stream_request_stream_response(
+ stream_stream_non_blocking_multi_callable(self._channel))
+
+
+if __name__ == '__main__':
+ logging.basicConfig()
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_2_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_2_test.py
index 45a41109d8..0e559efec2 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_2_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_2_test.py
@@ -1,426 +1,426 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test of RPCs made against gRPC Python's application-layer API."""
-
-import itertools
-import threading
-import unittest
-import logging
-from concurrent import futures
-
-import grpc
-from grpc.framework.foundation import logging_pool
-
-from tests.unit._rpc_test_helpers import (
- TIMEOUT_SHORT, Callback, unary_unary_multi_callable,
- unary_stream_multi_callable, unary_stream_non_blocking_multi_callable,
- stream_unary_multi_callable, stream_stream_multi_callable,
- stream_stream_non_blocking_multi_callable, BaseRPCTest)
-from tests.unit.framework.common import test_constants
-
-
-class RPCPart2Test(BaseRPCTest, unittest.TestCase):
-
- def testDefaultThreadPoolIsUsed(self):
- self._consume_one_stream_response_unary_request(
- unary_stream_multi_callable(self._channel))
- self.assertFalse(self._thread_pool.was_used())
-
- def testExperimentalThreadPoolIsUsed(self):
- self._consume_one_stream_response_unary_request(
- unary_stream_non_blocking_multi_callable(self._channel))
- self.assertTrue(self._thread_pool.was_used())
-
- def testUnrecognizedMethod(self):
- request = b'abc'
-
- with self.assertRaises(grpc.RpcError) as exception_context:
- self._channel.unary_unary('NoSuchMethod')(request)
-
- self.assertEqual(grpc.StatusCode.UNIMPLEMENTED,
- exception_context.exception.code())
-
- def testSuccessfulUnaryRequestBlockingUnaryResponse(self):
- request = b'\x07\x08'
- expected_response = self._handler.handle_unary_unary(request, None)
-
- multi_callable = unary_unary_multi_callable(self._channel)
- response = multi_callable(
- request,
- metadata=(('test', 'SuccessfulUnaryRequestBlockingUnaryResponse'),))
-
- self.assertEqual(expected_response, response)
-
- def testSuccessfulUnaryRequestBlockingUnaryResponseWithCall(self):
- request = b'\x07\x08'
- expected_response = self._handler.handle_unary_unary(request, None)
-
- multi_callable = unary_unary_multi_callable(self._channel)
- response, call = multi_callable.with_call(
- request,
- metadata=(('test',
- 'SuccessfulUnaryRequestBlockingUnaryResponseWithCall'),))
-
- self.assertEqual(expected_response, response)
- self.assertIs(grpc.StatusCode.OK, call.code())
- self.assertEqual('', call.debug_error_string())
-
- def testSuccessfulUnaryRequestFutureUnaryResponse(self):
- request = b'\x07\x08'
- expected_response = self._handler.handle_unary_unary(request, None)
-
- multi_callable = unary_unary_multi_callable(self._channel)
- response_future = multi_callable.future(
- request,
- metadata=(('test', 'SuccessfulUnaryRequestFutureUnaryResponse'),))
- response = response_future.result()
-
- self.assertIsInstance(response_future, grpc.Future)
- self.assertIsInstance(response_future, grpc.Call)
- self.assertEqual(expected_response, response)
- self.assertIsNone(response_future.exception())
- self.assertIsNone(response_future.traceback())
-
- def testSuccessfulUnaryRequestStreamResponse(self):
- request = b'\x37\x58'
- expected_responses = tuple(
- self._handler.handle_unary_stream(request, None))
-
- multi_callable = unary_stream_multi_callable(self._channel)
- response_iterator = multi_callable(
- request,
- metadata=(('test', 'SuccessfulUnaryRequestStreamResponse'),))
- responses = tuple(response_iterator)
-
- self.assertSequenceEqual(expected_responses, responses)
-
- def testSuccessfulStreamRequestBlockingUnaryResponse(self):
- requests = tuple(
- b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
- expected_response = self._handler.handle_stream_unary(
- iter(requests), None)
- request_iterator = iter(requests)
-
- multi_callable = stream_unary_multi_callable(self._channel)
- response = multi_callable(
- request_iterator,
- metadata=(('test',
- 'SuccessfulStreamRequestBlockingUnaryResponse'),))
-
- self.assertEqual(expected_response, response)
-
- def testSuccessfulStreamRequestBlockingUnaryResponseWithCall(self):
- requests = tuple(
- b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
- expected_response = self._handler.handle_stream_unary(
- iter(requests), None)
- request_iterator = iter(requests)
-
- multi_callable = stream_unary_multi_callable(self._channel)
- response, call = multi_callable.with_call(
- request_iterator,
- metadata=(
- ('test',
- 'SuccessfulStreamRequestBlockingUnaryResponseWithCall'),))
-
- self.assertEqual(expected_response, response)
- self.assertIs(grpc.StatusCode.OK, call.code())
-
- def testSuccessfulStreamRequestFutureUnaryResponse(self):
- requests = tuple(
- b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
- expected_response = self._handler.handle_stream_unary(
- iter(requests), None)
- request_iterator = iter(requests)
-
- multi_callable = stream_unary_multi_callable(self._channel)
- response_future = multi_callable.future(
- request_iterator,
- metadata=(('test', 'SuccessfulStreamRequestFutureUnaryResponse'),))
- response = response_future.result()
-
- self.assertEqual(expected_response, response)
- self.assertIsNone(response_future.exception())
- self.assertIsNone(response_future.traceback())
-
- def testSuccessfulStreamRequestStreamResponse(self):
- requests = tuple(
- b'\x77\x58' for _ in range(test_constants.STREAM_LENGTH))
-
- expected_responses = tuple(
- self._handler.handle_stream_stream(iter(requests), None))
- request_iterator = iter(requests)
-
- multi_callable = stream_stream_multi_callable(self._channel)
- response_iterator = multi_callable(
- request_iterator,
- metadata=(('test', 'SuccessfulStreamRequestStreamResponse'),))
- responses = tuple(response_iterator)
-
- self.assertSequenceEqual(expected_responses, responses)
-
- def testSequentialInvocations(self):
- first_request = b'\x07\x08'
- second_request = b'\x0809'
- expected_first_response = self._handler.handle_unary_unary(
- first_request, None)
- expected_second_response = self._handler.handle_unary_unary(
- second_request, None)
-
- multi_callable = unary_unary_multi_callable(self._channel)
- first_response = multi_callable(first_request,
- metadata=(('test',
- 'SequentialInvocations'),))
- second_response = multi_callable(second_request,
- metadata=(('test',
- 'SequentialInvocations'),))
-
- self.assertEqual(expected_first_response, first_response)
- self.assertEqual(expected_second_response, second_response)
-
- def testConcurrentBlockingInvocations(self):
- pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
- requests = tuple(
- b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
- expected_response = self._handler.handle_stream_unary(
- iter(requests), None)
- expected_responses = [expected_response
- ] * test_constants.THREAD_CONCURRENCY
- response_futures = [None] * test_constants.THREAD_CONCURRENCY
-
- multi_callable = stream_unary_multi_callable(self._channel)
- for index in range(test_constants.THREAD_CONCURRENCY):
- request_iterator = iter(requests)
- response_future = pool.submit(
- multi_callable,
- request_iterator,
- metadata=(('test', 'ConcurrentBlockingInvocations'),))
- response_futures[index] = response_future
- responses = tuple(
- response_future.result() for response_future in response_futures)
-
- pool.shutdown(wait=True)
- self.assertSequenceEqual(expected_responses, responses)
-
- def testConcurrentFutureInvocations(self):
- requests = tuple(
- b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
- expected_response = self._handler.handle_stream_unary(
- iter(requests), None)
- expected_responses = [expected_response
- ] * test_constants.THREAD_CONCURRENCY
- response_futures = [None] * test_constants.THREAD_CONCURRENCY
-
- multi_callable = stream_unary_multi_callable(self._channel)
- for index in range(test_constants.THREAD_CONCURRENCY):
- request_iterator = iter(requests)
- response_future = multi_callable.future(
- request_iterator,
- metadata=(('test', 'ConcurrentFutureInvocations'),))
- response_futures[index] = response_future
- responses = tuple(
- response_future.result() for response_future in response_futures)
-
- self.assertSequenceEqual(expected_responses, responses)
-
- def testWaitingForSomeButNotAllConcurrentFutureInvocations(self):
- pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
- request = b'\x67\x68'
- expected_response = self._handler.handle_unary_unary(request, None)
- response_futures = [None] * test_constants.THREAD_CONCURRENCY
- lock = threading.Lock()
- test_is_running_cell = [True]
-
- def wrap_future(future):
-
- def wrap():
- try:
- return future.result()
- except grpc.RpcError:
- with lock:
- if test_is_running_cell[0]:
- raise
- return None
-
- return wrap
-
- multi_callable = unary_unary_multi_callable(self._channel)
- for index in range(test_constants.THREAD_CONCURRENCY):
- inner_response_future = multi_callable.future(
- request,
- metadata=(
- ('test',
- 'WaitingForSomeButNotAllConcurrentFutureInvocations'),))
- outer_response_future = pool.submit(
- wrap_future(inner_response_future))
- response_futures[index] = outer_response_future
-
- some_completed_response_futures_iterator = itertools.islice(
- futures.as_completed(response_futures),
- test_constants.THREAD_CONCURRENCY // 2)
- for response_future in some_completed_response_futures_iterator:
- self.assertEqual(expected_response, response_future.result())
- with lock:
- test_is_running_cell[0] = False
-
- def testConsumingOneStreamResponseUnaryRequest(self):
- self._consume_one_stream_response_unary_request(
- unary_stream_multi_callable(self._channel))
-
- def testConsumingOneStreamResponseUnaryRequestNonBlocking(self):
- self._consume_one_stream_response_unary_request(
- unary_stream_non_blocking_multi_callable(self._channel))
-
- def testConsumingSomeButNotAllStreamResponsesUnaryRequest(self):
- self._consume_some_but_not_all_stream_responses_unary_request(
- unary_stream_multi_callable(self._channel))
-
- def testConsumingSomeButNotAllStreamResponsesUnaryRequestNonBlocking(self):
- self._consume_some_but_not_all_stream_responses_unary_request(
- unary_stream_non_blocking_multi_callable(self._channel))
-
- def testConsumingSomeButNotAllStreamResponsesStreamRequest(self):
- self._consume_some_but_not_all_stream_responses_stream_request(
- stream_stream_multi_callable(self._channel))
-
- def testConsumingSomeButNotAllStreamResponsesStreamRequestNonBlocking(self):
- self._consume_some_but_not_all_stream_responses_stream_request(
- stream_stream_non_blocking_multi_callable(self._channel))
-
- def testConsumingTooManyStreamResponsesStreamRequest(self):
- self._consume_too_many_stream_responses_stream_request(
- stream_stream_multi_callable(self._channel))
-
- def testConsumingTooManyStreamResponsesStreamRequestNonBlocking(self):
- self._consume_too_many_stream_responses_stream_request(
- stream_stream_non_blocking_multi_callable(self._channel))
-
- def testCancelledUnaryRequestUnaryResponse(self):
- request = b'\x07\x17'
-
- multi_callable = unary_unary_multi_callable(self._channel)
- with self._control.pause():
- response_future = multi_callable.future(
- request,
- metadata=(('test', 'CancelledUnaryRequestUnaryResponse'),))
- response_future.cancel()
-
- self.assertIs(grpc.StatusCode.CANCELLED, response_future.code())
- self.assertTrue(response_future.cancelled())
- with self.assertRaises(grpc.FutureCancelledError):
- response_future.result()
- with self.assertRaises(grpc.FutureCancelledError):
- response_future.exception()
- with self.assertRaises(grpc.FutureCancelledError):
- response_future.traceback()
-
- def testCancelledUnaryRequestStreamResponse(self):
- self._cancelled_unary_request_stream_response(
- unary_stream_multi_callable(self._channel))
-
- def testCancelledUnaryRequestStreamResponseNonBlocking(self):
- self._cancelled_unary_request_stream_response(
- unary_stream_non_blocking_multi_callable(self._channel))
-
- def testCancelledStreamRequestUnaryResponse(self):
- requests = tuple(
- b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
-
- multi_callable = stream_unary_multi_callable(self._channel)
- with self._control.pause():
- response_future = multi_callable.future(
- request_iterator,
- metadata=(('test', 'CancelledStreamRequestUnaryResponse'),))
- self._control.block_until_paused()
- response_future.cancel()
-
- self.assertIs(grpc.StatusCode.CANCELLED, response_future.code())
- self.assertTrue(response_future.cancelled())
- with self.assertRaises(grpc.FutureCancelledError):
- response_future.result()
- with self.assertRaises(grpc.FutureCancelledError):
- response_future.exception()
- with self.assertRaises(grpc.FutureCancelledError):
- response_future.traceback()
- self.assertIsNotNone(response_future.initial_metadata())
- self.assertIsNotNone(response_future.details())
- self.assertIsNotNone(response_future.trailing_metadata())
-
- def testCancelledStreamRequestStreamResponse(self):
- self._cancelled_stream_request_stream_response(
- stream_stream_multi_callable(self._channel))
-
- def testCancelledStreamRequestStreamResponseNonBlocking(self):
- self._cancelled_stream_request_stream_response(
- stream_stream_non_blocking_multi_callable(self._channel))
-
- def testExpiredUnaryRequestBlockingUnaryResponse(self):
- request = b'\x07\x17'
-
- multi_callable = unary_unary_multi_callable(self._channel)
- with self._control.pause():
- with self.assertRaises(grpc.RpcError) as exception_context:
- multi_callable.with_call(
- request,
- timeout=TIMEOUT_SHORT,
- metadata=(('test',
- 'ExpiredUnaryRequestBlockingUnaryResponse'),))
-
- self.assertIsInstance(exception_context.exception, grpc.Call)
- self.assertIsNotNone(exception_context.exception.initial_metadata())
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
- exception_context.exception.code())
- self.assertIsNotNone(exception_context.exception.details())
- self.assertIsNotNone(exception_context.exception.trailing_metadata())
-
- def testExpiredUnaryRequestFutureUnaryResponse(self):
- request = b'\x07\x17'
- callback = Callback()
-
- multi_callable = unary_unary_multi_callable(self._channel)
- with self._control.pause():
- response_future = multi_callable.future(
- request,
- timeout=TIMEOUT_SHORT,
- metadata=(('test', 'ExpiredUnaryRequestFutureUnaryResponse'),))
- response_future.add_done_callback(callback)
- value_passed_to_callback = callback.value()
-
- self.assertIs(response_future, value_passed_to_callback)
- self.assertIsNotNone(response_future.initial_metadata())
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code())
- self.assertIsNotNone(response_future.details())
- self.assertIsNotNone(response_future.trailing_metadata())
- with self.assertRaises(grpc.RpcError) as exception_context:
- response_future.result()
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
- exception_context.exception.code())
- self.assertIsInstance(response_future.exception(), grpc.RpcError)
- self.assertIsNotNone(response_future.traceback())
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
- response_future.exception().code())
-
- def testExpiredUnaryRequestStreamResponse(self):
- self._expired_unary_request_stream_response(
- unary_stream_multi_callable(self._channel))
-
- def testExpiredUnaryRequestStreamResponseNonBlocking(self):
- self._expired_unary_request_stream_response(
- unary_stream_non_blocking_multi_callable(self._channel))
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test of RPCs made against gRPC Python's application-layer API."""
+
+import itertools
+import threading
+import unittest
+import logging
+from concurrent import futures
+
+import grpc
+from grpc.framework.foundation import logging_pool
+
+from tests.unit._rpc_test_helpers import (
+ TIMEOUT_SHORT, Callback, unary_unary_multi_callable,
+ unary_stream_multi_callable, unary_stream_non_blocking_multi_callable,
+ stream_unary_multi_callable, stream_stream_multi_callable,
+ stream_stream_non_blocking_multi_callable, BaseRPCTest)
+from tests.unit.framework.common import test_constants
+
+
+class RPCPart2Test(BaseRPCTest, unittest.TestCase):
+
+ def testDefaultThreadPoolIsUsed(self):
+ self._consume_one_stream_response_unary_request(
+ unary_stream_multi_callable(self._channel))
+ self.assertFalse(self._thread_pool.was_used())
+
+ def testExperimentalThreadPoolIsUsed(self):
+ self._consume_one_stream_response_unary_request(
+ unary_stream_non_blocking_multi_callable(self._channel))
+ self.assertTrue(self._thread_pool.was_used())
+
+ def testUnrecognizedMethod(self):
+ request = b'abc'
+
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ self._channel.unary_unary('NoSuchMethod')(request)
+
+ self.assertEqual(grpc.StatusCode.UNIMPLEMENTED,
+ exception_context.exception.code())
+
+ def testSuccessfulUnaryRequestBlockingUnaryResponse(self):
+ request = b'\x07\x08'
+ expected_response = self._handler.handle_unary_unary(request, None)
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ response = multi_callable(
+ request,
+ metadata=(('test', 'SuccessfulUnaryRequestBlockingUnaryResponse'),))
+
+ self.assertEqual(expected_response, response)
+
+ def testSuccessfulUnaryRequestBlockingUnaryResponseWithCall(self):
+ request = b'\x07\x08'
+ expected_response = self._handler.handle_unary_unary(request, None)
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ response, call = multi_callable.with_call(
+ request,
+ metadata=(('test',
+ 'SuccessfulUnaryRequestBlockingUnaryResponseWithCall'),))
+
+ self.assertEqual(expected_response, response)
+ self.assertIs(grpc.StatusCode.OK, call.code())
+ self.assertEqual('', call.debug_error_string())
+
+ def testSuccessfulUnaryRequestFutureUnaryResponse(self):
+ request = b'\x07\x08'
+ expected_response = self._handler.handle_unary_unary(request, None)
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ response_future = multi_callable.future(
+ request,
+ metadata=(('test', 'SuccessfulUnaryRequestFutureUnaryResponse'),))
+ response = response_future.result()
+
+ self.assertIsInstance(response_future, grpc.Future)
+ self.assertIsInstance(response_future, grpc.Call)
+ self.assertEqual(expected_response, response)
+ self.assertIsNone(response_future.exception())
+ self.assertIsNone(response_future.traceback())
+
+ def testSuccessfulUnaryRequestStreamResponse(self):
+ request = b'\x37\x58'
+ expected_responses = tuple(
+ self._handler.handle_unary_stream(request, None))
+
+ multi_callable = unary_stream_multi_callable(self._channel)
+ response_iterator = multi_callable(
+ request,
+ metadata=(('test', 'SuccessfulUnaryRequestStreamResponse'),))
+ responses = tuple(response_iterator)
+
+ self.assertSequenceEqual(expected_responses, responses)
+
+ def testSuccessfulStreamRequestBlockingUnaryResponse(self):
+ requests = tuple(
+ b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
+ expected_response = self._handler.handle_stream_unary(
+ iter(requests), None)
+ request_iterator = iter(requests)
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ response = multi_callable(
+ request_iterator,
+ metadata=(('test',
+ 'SuccessfulStreamRequestBlockingUnaryResponse'),))
+
+ self.assertEqual(expected_response, response)
+
+ def testSuccessfulStreamRequestBlockingUnaryResponseWithCall(self):
+ requests = tuple(
+ b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
+ expected_response = self._handler.handle_stream_unary(
+ iter(requests), None)
+ request_iterator = iter(requests)
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ response, call = multi_callable.with_call(
+ request_iterator,
+ metadata=(
+ ('test',
+ 'SuccessfulStreamRequestBlockingUnaryResponseWithCall'),))
+
+ self.assertEqual(expected_response, response)
+ self.assertIs(grpc.StatusCode.OK, call.code())
+
+ def testSuccessfulStreamRequestFutureUnaryResponse(self):
+ requests = tuple(
+ b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
+ expected_response = self._handler.handle_stream_unary(
+ iter(requests), None)
+ request_iterator = iter(requests)
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ response_future = multi_callable.future(
+ request_iterator,
+ metadata=(('test', 'SuccessfulStreamRequestFutureUnaryResponse'),))
+ response = response_future.result()
+
+ self.assertEqual(expected_response, response)
+ self.assertIsNone(response_future.exception())
+ self.assertIsNone(response_future.traceback())
+
+ def testSuccessfulStreamRequestStreamResponse(self):
+ requests = tuple(
+ b'\x77\x58' for _ in range(test_constants.STREAM_LENGTH))
+
+ expected_responses = tuple(
+ self._handler.handle_stream_stream(iter(requests), None))
+ request_iterator = iter(requests)
+
+ multi_callable = stream_stream_multi_callable(self._channel)
+ response_iterator = multi_callable(
+ request_iterator,
+ metadata=(('test', 'SuccessfulStreamRequestStreamResponse'),))
+ responses = tuple(response_iterator)
+
+ self.assertSequenceEqual(expected_responses, responses)
+
+ def testSequentialInvocations(self):
+ first_request = b'\x07\x08'
+ second_request = b'\x0809'
+ expected_first_response = self._handler.handle_unary_unary(
+ first_request, None)
+ expected_second_response = self._handler.handle_unary_unary(
+ second_request, None)
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ first_response = multi_callable(first_request,
+ metadata=(('test',
+ 'SequentialInvocations'),))
+ second_response = multi_callable(second_request,
+ metadata=(('test',
+ 'SequentialInvocations'),))
+
+ self.assertEqual(expected_first_response, first_response)
+ self.assertEqual(expected_second_response, second_response)
+
+ def testConcurrentBlockingInvocations(self):
+ pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
+ requests = tuple(
+ b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
+ expected_response = self._handler.handle_stream_unary(
+ iter(requests), None)
+ expected_responses = [expected_response
+ ] * test_constants.THREAD_CONCURRENCY
+ response_futures = [None] * test_constants.THREAD_CONCURRENCY
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ for index in range(test_constants.THREAD_CONCURRENCY):
+ request_iterator = iter(requests)
+ response_future = pool.submit(
+ multi_callable,
+ request_iterator,
+ metadata=(('test', 'ConcurrentBlockingInvocations'),))
+ response_futures[index] = response_future
+ responses = tuple(
+ response_future.result() for response_future in response_futures)
+
+ pool.shutdown(wait=True)
+ self.assertSequenceEqual(expected_responses, responses)
+
+ def testConcurrentFutureInvocations(self):
+ requests = tuple(
+ b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
+ expected_response = self._handler.handle_stream_unary(
+ iter(requests), None)
+ expected_responses = [expected_response
+ ] * test_constants.THREAD_CONCURRENCY
+ response_futures = [None] * test_constants.THREAD_CONCURRENCY
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ for index in range(test_constants.THREAD_CONCURRENCY):
+ request_iterator = iter(requests)
+ response_future = multi_callable.future(
+ request_iterator,
+ metadata=(('test', 'ConcurrentFutureInvocations'),))
+ response_futures[index] = response_future
+ responses = tuple(
+ response_future.result() for response_future in response_futures)
+
+ self.assertSequenceEqual(expected_responses, responses)
+
+ def testWaitingForSomeButNotAllConcurrentFutureInvocations(self):
+ pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
+ request = b'\x67\x68'
+ expected_response = self._handler.handle_unary_unary(request, None)
+ response_futures = [None] * test_constants.THREAD_CONCURRENCY
+ lock = threading.Lock()
+ test_is_running_cell = [True]
+
+ def wrap_future(future):
+
+ def wrap():
+ try:
+ return future.result()
+ except grpc.RpcError:
+ with lock:
+ if test_is_running_cell[0]:
+ raise
+ return None
+
+ return wrap
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ for index in range(test_constants.THREAD_CONCURRENCY):
+ inner_response_future = multi_callable.future(
+ request,
+ metadata=(
+ ('test',
+ 'WaitingForSomeButNotAllConcurrentFutureInvocations'),))
+ outer_response_future = pool.submit(
+ wrap_future(inner_response_future))
+ response_futures[index] = outer_response_future
+
+ some_completed_response_futures_iterator = itertools.islice(
+ futures.as_completed(response_futures),
+ test_constants.THREAD_CONCURRENCY // 2)
+ for response_future in some_completed_response_futures_iterator:
+ self.assertEqual(expected_response, response_future.result())
+ with lock:
+ test_is_running_cell[0] = False
+
+ def testConsumingOneStreamResponseUnaryRequest(self):
+ self._consume_one_stream_response_unary_request(
+ unary_stream_multi_callable(self._channel))
+
+ def testConsumingOneStreamResponseUnaryRequestNonBlocking(self):
+ self._consume_one_stream_response_unary_request(
+ unary_stream_non_blocking_multi_callable(self._channel))
+
+ def testConsumingSomeButNotAllStreamResponsesUnaryRequest(self):
+ self._consume_some_but_not_all_stream_responses_unary_request(
+ unary_stream_multi_callable(self._channel))
+
+ def testConsumingSomeButNotAllStreamResponsesUnaryRequestNonBlocking(self):
+ self._consume_some_but_not_all_stream_responses_unary_request(
+ unary_stream_non_blocking_multi_callable(self._channel))
+
+ def testConsumingSomeButNotAllStreamResponsesStreamRequest(self):
+ self._consume_some_but_not_all_stream_responses_stream_request(
+ stream_stream_multi_callable(self._channel))
+
+ def testConsumingSomeButNotAllStreamResponsesStreamRequestNonBlocking(self):
+ self._consume_some_but_not_all_stream_responses_stream_request(
+ stream_stream_non_blocking_multi_callable(self._channel))
+
+ def testConsumingTooManyStreamResponsesStreamRequest(self):
+ self._consume_too_many_stream_responses_stream_request(
+ stream_stream_multi_callable(self._channel))
+
+ def testConsumingTooManyStreamResponsesStreamRequestNonBlocking(self):
+ self._consume_too_many_stream_responses_stream_request(
+ stream_stream_non_blocking_multi_callable(self._channel))
+
+ def testCancelledUnaryRequestUnaryResponse(self):
+ request = b'\x07\x17'
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ with self._control.pause():
+ response_future = multi_callable.future(
+ request,
+ metadata=(('test', 'CancelledUnaryRequestUnaryResponse'),))
+ response_future.cancel()
+
+ self.assertIs(grpc.StatusCode.CANCELLED, response_future.code())
+ self.assertTrue(response_future.cancelled())
+ with self.assertRaises(grpc.FutureCancelledError):
+ response_future.result()
+ with self.assertRaises(grpc.FutureCancelledError):
+ response_future.exception()
+ with self.assertRaises(grpc.FutureCancelledError):
+ response_future.traceback()
+
+ def testCancelledUnaryRequestStreamResponse(self):
+ self._cancelled_unary_request_stream_response(
+ unary_stream_multi_callable(self._channel))
+
+ def testCancelledUnaryRequestStreamResponseNonBlocking(self):
+ self._cancelled_unary_request_stream_response(
+ unary_stream_non_blocking_multi_callable(self._channel))
+
+ def testCancelledStreamRequestUnaryResponse(self):
+ requests = tuple(
+ b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+
+ multi_callable = stream_unary_multi_callable(self._channel)
+ with self._control.pause():
+ response_future = multi_callable.future(
+ request_iterator,
+ metadata=(('test', 'CancelledStreamRequestUnaryResponse'),))
+ self._control.block_until_paused()
+ response_future.cancel()
+
+ self.assertIs(grpc.StatusCode.CANCELLED, response_future.code())
+ self.assertTrue(response_future.cancelled())
+ with self.assertRaises(grpc.FutureCancelledError):
+ response_future.result()
+ with self.assertRaises(grpc.FutureCancelledError):
+ response_future.exception()
+ with self.assertRaises(grpc.FutureCancelledError):
+ response_future.traceback()
+ self.assertIsNotNone(response_future.initial_metadata())
+ self.assertIsNotNone(response_future.details())
+ self.assertIsNotNone(response_future.trailing_metadata())
+
+ def testCancelledStreamRequestStreamResponse(self):
+ self._cancelled_stream_request_stream_response(
+ stream_stream_multi_callable(self._channel))
+
+ def testCancelledStreamRequestStreamResponseNonBlocking(self):
+ self._cancelled_stream_request_stream_response(
+ stream_stream_non_blocking_multi_callable(self._channel))
+
+ def testExpiredUnaryRequestBlockingUnaryResponse(self):
+ request = b'\x07\x17'
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ with self._control.pause():
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ multi_callable.with_call(
+ request,
+ timeout=TIMEOUT_SHORT,
+ metadata=(('test',
+ 'ExpiredUnaryRequestBlockingUnaryResponse'),))
+
+ self.assertIsInstance(exception_context.exception, grpc.Call)
+ self.assertIsNotNone(exception_context.exception.initial_metadata())
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
+ exception_context.exception.code())
+ self.assertIsNotNone(exception_context.exception.details())
+ self.assertIsNotNone(exception_context.exception.trailing_metadata())
+
+ def testExpiredUnaryRequestFutureUnaryResponse(self):
+ request = b'\x07\x17'
+ callback = Callback()
+
+ multi_callable = unary_unary_multi_callable(self._channel)
+ with self._control.pause():
+ response_future = multi_callable.future(
+ request,
+ timeout=TIMEOUT_SHORT,
+ metadata=(('test', 'ExpiredUnaryRequestFutureUnaryResponse'),))
+ response_future.add_done_callback(callback)
+ value_passed_to_callback = callback.value()
+
+ self.assertIs(response_future, value_passed_to_callback)
+ self.assertIsNotNone(response_future.initial_metadata())
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code())
+ self.assertIsNotNone(response_future.details())
+ self.assertIsNotNone(response_future.trailing_metadata())
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ response_future.result()
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
+ exception_context.exception.code())
+ self.assertIsInstance(response_future.exception(), grpc.RpcError)
+ self.assertIsNotNone(response_future.traceback())
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
+ response_future.exception().code())
+
+ def testExpiredUnaryRequestStreamResponse(self):
+ self._expired_unary_request_stream_response(
+ unary_stream_multi_callable(self._channel))
+
+ def testExpiredUnaryRequestStreamResponseNonBlocking(self):
+ self._expired_unary_request_stream_response(
+ unary_stream_non_blocking_multi_callable(self._channel))
+
+
+if __name__ == '__main__':
+ logging.basicConfig()
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_test_helpers.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_test_helpers.py
index e66e99f0a7..a3f18a9a49 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_test_helpers.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_test_helpers.py
@@ -1,417 +1,417 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test helpers for RPC invocation tests."""
-
-import datetime
-import threading
-
-import grpc
-from grpc.framework.foundation import logging_pool
-
-from tests.unit import test_common
-from tests.unit import thread_pool
-from tests.unit.framework.common import test_constants
-from tests.unit.framework.common import test_control
-
-_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2
-_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:]
-_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3
-_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3]
-
-_UNARY_UNARY = '/test/UnaryUnary'
-_UNARY_STREAM = '/test/UnaryStream'
-_UNARY_STREAM_NON_BLOCKING = '/test/UnaryStreamNonBlocking'
-_STREAM_UNARY = '/test/StreamUnary'
-_STREAM_STREAM = '/test/StreamStream'
-_STREAM_STREAM_NON_BLOCKING = '/test/StreamStreamNonBlocking'
-
-TIMEOUT_SHORT = datetime.timedelta(seconds=1).total_seconds()
-
-
-class Callback(object):
-
- def __init__(self):
- self._condition = threading.Condition()
- self._value = None
- self._called = False
-
- def __call__(self, value):
- with self._condition:
- self._value = value
- self._called = True
- self._condition.notify_all()
-
- def value(self):
- with self._condition:
- while not self._called:
- self._condition.wait()
- return self._value
-
-
-class _Handler(object):
-
- def __init__(self, control, thread_pool):
- self._control = control
- self._thread_pool = thread_pool
- non_blocking_functions = (self.handle_unary_stream_non_blocking,
- self.handle_stream_stream_non_blocking)
- for non_blocking_function in non_blocking_functions:
- non_blocking_function.__func__.experimental_non_blocking = True
- non_blocking_function.__func__.experimental_thread_pool = self._thread_pool
-
- def handle_unary_unary(self, request, servicer_context):
- self._control.control()
- if servicer_context is not None:
- servicer_context.set_trailing_metadata(((
- 'testkey',
- 'testvalue',
- ),))
- # TODO(https://github.com/grpc/grpc/issues/8483): test the values
- # returned by these methods rather than only "smoke" testing that
- # the return after having been called.
- servicer_context.is_active()
- servicer_context.time_remaining()
- return request
-
- def handle_unary_stream(self, request, servicer_context):
- for _ in range(test_constants.STREAM_LENGTH):
- self._control.control()
- yield request
- self._control.control()
- if servicer_context is not None:
- servicer_context.set_trailing_metadata(((
- 'testkey',
- 'testvalue',
- ),))
-
- def handle_unary_stream_non_blocking(self, request, servicer_context,
- on_next):
- for _ in range(test_constants.STREAM_LENGTH):
- self._control.control()
- on_next(request)
- self._control.control()
- if servicer_context is not None:
- servicer_context.set_trailing_metadata(((
- 'testkey',
- 'testvalue',
- ),))
- on_next(None)
-
- def handle_stream_unary(self, request_iterator, servicer_context):
- if servicer_context is not None:
- servicer_context.invocation_metadata()
- self._control.control()
- response_elements = []
- for request in request_iterator:
- self._control.control()
- response_elements.append(request)
- self._control.control()
- if servicer_context is not None:
- servicer_context.set_trailing_metadata(((
- 'testkey',
- 'testvalue',
- ),))
- return b''.join(response_elements)
-
- def handle_stream_stream(self, request_iterator, servicer_context):
- self._control.control()
- if servicer_context is not None:
- servicer_context.set_trailing_metadata(((
- 'testkey',
- 'testvalue',
- ),))
- for request in request_iterator:
- self._control.control()
- yield request
- self._control.control()
-
- def handle_stream_stream_non_blocking(self, request_iterator,
- servicer_context, on_next):
- self._control.control()
- if servicer_context is not None:
- servicer_context.set_trailing_metadata(((
- 'testkey',
- 'testvalue',
- ),))
- for request in request_iterator:
- self._control.control()
- on_next(request)
- self._control.control()
- on_next(None)
-
-
-class _MethodHandler(grpc.RpcMethodHandler):
-
- def __init__(self, request_streaming, response_streaming,
- request_deserializer, response_serializer, unary_unary,
- unary_stream, stream_unary, stream_stream):
- self.request_streaming = request_streaming
- self.response_streaming = response_streaming
- self.request_deserializer = request_deserializer
- self.response_serializer = response_serializer
- self.unary_unary = unary_unary
- self.unary_stream = unary_stream
- self.stream_unary = stream_unary
- self.stream_stream = stream_stream
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def __init__(self, handler):
- self._handler = handler
-
- def service(self, handler_call_details):
- if handler_call_details.method == _UNARY_UNARY:
- return _MethodHandler(False, False, None, None,
- self._handler.handle_unary_unary, None, None,
- None)
- elif handler_call_details.method == _UNARY_STREAM:
- return _MethodHandler(False, True, _DESERIALIZE_REQUEST,
- _SERIALIZE_RESPONSE, None,
- self._handler.handle_unary_stream, None, None)
- elif handler_call_details.method == _UNARY_STREAM_NON_BLOCKING:
- return _MethodHandler(
- False, True, _DESERIALIZE_REQUEST, _SERIALIZE_RESPONSE, None,
- self._handler.handle_unary_stream_non_blocking, None, None)
- elif handler_call_details.method == _STREAM_UNARY:
- return _MethodHandler(True, False, _DESERIALIZE_REQUEST,
- _SERIALIZE_RESPONSE, None, None,
- self._handler.handle_stream_unary, None)
- elif handler_call_details.method == _STREAM_STREAM:
- return _MethodHandler(True, True, None, None, None, None, None,
- self._handler.handle_stream_stream)
- elif handler_call_details.method == _STREAM_STREAM_NON_BLOCKING:
- return _MethodHandler(
- True, True, None, None, None, None, None,
- self._handler.handle_stream_stream_non_blocking)
- else:
- return None
-
-
-def unary_unary_multi_callable(channel):
- return channel.unary_unary(_UNARY_UNARY)
-
-
-def unary_stream_multi_callable(channel):
- return channel.unary_stream(_UNARY_STREAM,
- request_serializer=_SERIALIZE_REQUEST,
- response_deserializer=_DESERIALIZE_RESPONSE)
-
-
-def unary_stream_non_blocking_multi_callable(channel):
- return channel.unary_stream(_UNARY_STREAM_NON_BLOCKING,
- request_serializer=_SERIALIZE_REQUEST,
- response_deserializer=_DESERIALIZE_RESPONSE)
-
-
-def stream_unary_multi_callable(channel):
- return channel.stream_unary(_STREAM_UNARY,
- request_serializer=_SERIALIZE_REQUEST,
- response_deserializer=_DESERIALIZE_RESPONSE)
-
-
-def stream_stream_multi_callable(channel):
- return channel.stream_stream(_STREAM_STREAM)
-
-
-def stream_stream_non_blocking_multi_callable(channel):
- return channel.stream_stream(_STREAM_STREAM_NON_BLOCKING)
-
-
-class BaseRPCTest(object):
-
- def setUp(self):
- self._control = test_control.PauseFailControl()
- self._thread_pool = thread_pool.RecordingThreadPool(max_workers=None)
- self._handler = _Handler(self._control, self._thread_pool)
-
- self._server = test_common.test_server()
- port = self._server.add_insecure_port('[::]:0')
- self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),))
- self._server.start()
-
- self._channel = grpc.insecure_channel('localhost:%d' % port)
-
- def tearDown(self):
- self._server.stop(None)
- self._channel.close()
-
- def _consume_one_stream_response_unary_request(self, multi_callable):
- request = b'\x57\x38'
-
- response_iterator = multi_callable(
- request,
- metadata=(('test', 'ConsumingOneStreamResponseUnaryRequest'),))
- next(response_iterator)
-
- def _consume_some_but_not_all_stream_responses_unary_request(
- self, multi_callable):
- request = b'\x57\x38'
-
- response_iterator = multi_callable(
- request,
- metadata=(('test',
- 'ConsumingSomeButNotAllStreamResponsesUnaryRequest'),))
- for _ in range(test_constants.STREAM_LENGTH // 2):
- next(response_iterator)
-
- def _consume_some_but_not_all_stream_responses_stream_request(
- self, multi_callable):
- requests = tuple(
- b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
-
- response_iterator = multi_callable(
- request_iterator,
- metadata=(('test',
- 'ConsumingSomeButNotAllStreamResponsesStreamRequest'),))
- for _ in range(test_constants.STREAM_LENGTH // 2):
- next(response_iterator)
-
- def _consume_too_many_stream_responses_stream_request(self, multi_callable):
- requests = tuple(
- b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
-
- response_iterator = multi_callable(
- request_iterator,
- metadata=(('test',
- 'ConsumingTooManyStreamResponsesStreamRequest'),))
- for _ in range(test_constants.STREAM_LENGTH):
- next(response_iterator)
- for _ in range(test_constants.STREAM_LENGTH):
- with self.assertRaises(StopIteration):
- next(response_iterator)
-
- self.assertIsNotNone(response_iterator.initial_metadata())
- self.assertIs(grpc.StatusCode.OK, response_iterator.code())
- self.assertIsNotNone(response_iterator.details())
- self.assertIsNotNone(response_iterator.trailing_metadata())
-
- def _cancelled_unary_request_stream_response(self, multi_callable):
- request = b'\x07\x19'
-
- with self._control.pause():
- response_iterator = multi_callable(
- request,
- metadata=(('test', 'CancelledUnaryRequestStreamResponse'),))
- self._control.block_until_paused()
- response_iterator.cancel()
-
- with self.assertRaises(grpc.RpcError) as exception_context:
- next(response_iterator)
- self.assertIs(grpc.StatusCode.CANCELLED,
- exception_context.exception.code())
- self.assertIsNotNone(response_iterator.initial_metadata())
- self.assertIs(grpc.StatusCode.CANCELLED, response_iterator.code())
- self.assertIsNotNone(response_iterator.details())
- self.assertIsNotNone(response_iterator.trailing_metadata())
-
- def _cancelled_stream_request_stream_response(self, multi_callable):
- requests = tuple(
- b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
-
- with self._control.pause():
- response_iterator = multi_callable(
- request_iterator,
- metadata=(('test', 'CancelledStreamRequestStreamResponse'),))
- response_iterator.cancel()
-
- with self.assertRaises(grpc.RpcError):
- next(response_iterator)
- self.assertIsNotNone(response_iterator.initial_metadata())
- self.assertIs(grpc.StatusCode.CANCELLED, response_iterator.code())
- self.assertIsNotNone(response_iterator.details())
- self.assertIsNotNone(response_iterator.trailing_metadata())
-
- def _expired_unary_request_stream_response(self, multi_callable):
- request = b'\x07\x19'
-
- with self._control.pause():
- with self.assertRaises(grpc.RpcError) as exception_context:
- response_iterator = multi_callable(
- request,
- timeout=test_constants.SHORT_TIMEOUT,
- metadata=(('test', 'ExpiredUnaryRequestStreamResponse'),))
- next(response_iterator)
-
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
- exception_context.exception.code())
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
- response_iterator.code())
-
- def _expired_stream_request_stream_response(self, multi_callable):
- requests = tuple(
- b'\x67\x18' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
-
- with self._control.pause():
- with self.assertRaises(grpc.RpcError) as exception_context:
- response_iterator = multi_callable(
- request_iterator,
- timeout=test_constants.SHORT_TIMEOUT,
- metadata=(('test', 'ExpiredStreamRequestStreamResponse'),))
- next(response_iterator)
-
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
- exception_context.exception.code())
- self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
- response_iterator.code())
-
- def _failed_unary_request_stream_response(self, multi_callable):
- request = b'\x37\x17'
-
- with self.assertRaises(grpc.RpcError) as exception_context:
- with self._control.fail():
- response_iterator = multi_callable(
- request,
- metadata=(('test', 'FailedUnaryRequestStreamResponse'),))
- next(response_iterator)
-
- self.assertIs(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
- def _failed_stream_request_stream_response(self, multi_callable):
- requests = tuple(
- b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
-
- with self._control.fail():
- with self.assertRaises(grpc.RpcError) as exception_context:
- response_iterator = multi_callable(
- request_iterator,
- metadata=(('test', 'FailedStreamRequestStreamResponse'),))
- tuple(response_iterator)
-
- self.assertIs(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
- self.assertIs(grpc.StatusCode.UNKNOWN, response_iterator.code())
-
- def _ignored_unary_stream_request_future_unary_response(
- self, multi_callable):
- request = b'\x37\x17'
-
- multi_callable(request,
- metadata=(('test',
- 'IgnoredUnaryRequestStreamResponse'),))
-
- def _ignored_stream_request_stream_response(self, multi_callable):
- requests = tuple(
- b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH))
- request_iterator = iter(requests)
-
- multi_callable(request_iterator,
- metadata=(('test',
- 'IgnoredStreamRequestStreamResponse'),))
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test helpers for RPC invocation tests."""
+
+import datetime
+import threading
+
+import grpc
+from grpc.framework.foundation import logging_pool
+
+from tests.unit import test_common
+from tests.unit import thread_pool
+from tests.unit.framework.common import test_constants
+from tests.unit.framework.common import test_control
+
+_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2
+_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:]
+_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3
+_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3]
+
+_UNARY_UNARY = '/test/UnaryUnary'
+_UNARY_STREAM = '/test/UnaryStream'
+_UNARY_STREAM_NON_BLOCKING = '/test/UnaryStreamNonBlocking'
+_STREAM_UNARY = '/test/StreamUnary'
+_STREAM_STREAM = '/test/StreamStream'
+_STREAM_STREAM_NON_BLOCKING = '/test/StreamStreamNonBlocking'
+
+TIMEOUT_SHORT = datetime.timedelta(seconds=1).total_seconds()
+
+
+class Callback(object):
+
+ def __init__(self):
+ self._condition = threading.Condition()
+ self._value = None
+ self._called = False
+
+ def __call__(self, value):
+ with self._condition:
+ self._value = value
+ self._called = True
+ self._condition.notify_all()
+
+ def value(self):
+ with self._condition:
+ while not self._called:
+ self._condition.wait()
+ return self._value
+
+
+class _Handler(object):
+
+ def __init__(self, control, thread_pool):
+ self._control = control
+ self._thread_pool = thread_pool
+ non_blocking_functions = (self.handle_unary_stream_non_blocking,
+ self.handle_stream_stream_non_blocking)
+ for non_blocking_function in non_blocking_functions:
+ non_blocking_function.__func__.experimental_non_blocking = True
+ non_blocking_function.__func__.experimental_thread_pool = self._thread_pool
+
+ def handle_unary_unary(self, request, servicer_context):
+ self._control.control()
+ if servicer_context is not None:
+ servicer_context.set_trailing_metadata(((
+ 'testkey',
+ 'testvalue',
+ ),))
+ # TODO(https://github.com/grpc/grpc/issues/8483): test the values
+ # returned by these methods rather than only "smoke" testing that
+ # the return after having been called.
+ servicer_context.is_active()
+ servicer_context.time_remaining()
+ return request
+
+ def handle_unary_stream(self, request, servicer_context):
+ for _ in range(test_constants.STREAM_LENGTH):
+ self._control.control()
+ yield request
+ self._control.control()
+ if servicer_context is not None:
+ servicer_context.set_trailing_metadata(((
+ 'testkey',
+ 'testvalue',
+ ),))
+
+ def handle_unary_stream_non_blocking(self, request, servicer_context,
+ on_next):
+ for _ in range(test_constants.STREAM_LENGTH):
+ self._control.control()
+ on_next(request)
+ self._control.control()
+ if servicer_context is not None:
+ servicer_context.set_trailing_metadata(((
+ 'testkey',
+ 'testvalue',
+ ),))
+ on_next(None)
+
+ def handle_stream_unary(self, request_iterator, servicer_context):
+ if servicer_context is not None:
+ servicer_context.invocation_metadata()
+ self._control.control()
+ response_elements = []
+ for request in request_iterator:
+ self._control.control()
+ response_elements.append(request)
+ self._control.control()
+ if servicer_context is not None:
+ servicer_context.set_trailing_metadata(((
+ 'testkey',
+ 'testvalue',
+ ),))
+ return b''.join(response_elements)
+
+ def handle_stream_stream(self, request_iterator, servicer_context):
+ self._control.control()
+ if servicer_context is not None:
+ servicer_context.set_trailing_metadata(((
+ 'testkey',
+ 'testvalue',
+ ),))
+ for request in request_iterator:
+ self._control.control()
+ yield request
+ self._control.control()
+
+ def handle_stream_stream_non_blocking(self, request_iterator,
+ servicer_context, on_next):
+ self._control.control()
+ if servicer_context is not None:
+ servicer_context.set_trailing_metadata(((
+ 'testkey',
+ 'testvalue',
+ ),))
+ for request in request_iterator:
+ self._control.control()
+ on_next(request)
+ self._control.control()
+ on_next(None)
+
+
+class _MethodHandler(grpc.RpcMethodHandler):
+
+ def __init__(self, request_streaming, response_streaming,
+ request_deserializer, response_serializer, unary_unary,
+ unary_stream, stream_unary, stream_stream):
+ self.request_streaming = request_streaming
+ self.response_streaming = response_streaming
+ self.request_deserializer = request_deserializer
+ self.response_serializer = response_serializer
+ self.unary_unary = unary_unary
+ self.unary_stream = unary_stream
+ self.stream_unary = stream_unary
+ self.stream_stream = stream_stream
+
+
+class _GenericHandler(grpc.GenericRpcHandler):
+
+ def __init__(self, handler):
+ self._handler = handler
+
+ def service(self, handler_call_details):
+ if handler_call_details.method == _UNARY_UNARY:
+ return _MethodHandler(False, False, None, None,
+ self._handler.handle_unary_unary, None, None,
+ None)
+ elif handler_call_details.method == _UNARY_STREAM:
+ return _MethodHandler(False, True, _DESERIALIZE_REQUEST,
+ _SERIALIZE_RESPONSE, None,
+ self._handler.handle_unary_stream, None, None)
+ elif handler_call_details.method == _UNARY_STREAM_NON_BLOCKING:
+ return _MethodHandler(
+ False, True, _DESERIALIZE_REQUEST, _SERIALIZE_RESPONSE, None,
+ self._handler.handle_unary_stream_non_blocking, None, None)
+ elif handler_call_details.method == _STREAM_UNARY:
+ return _MethodHandler(True, False, _DESERIALIZE_REQUEST,
+ _SERIALIZE_RESPONSE, None, None,
+ self._handler.handle_stream_unary, None)
+ elif handler_call_details.method == _STREAM_STREAM:
+ return _MethodHandler(True, True, None, None, None, None, None,
+ self._handler.handle_stream_stream)
+ elif handler_call_details.method == _STREAM_STREAM_NON_BLOCKING:
+ return _MethodHandler(
+ True, True, None, None, None, None, None,
+ self._handler.handle_stream_stream_non_blocking)
+ else:
+ return None
+
+
+def unary_unary_multi_callable(channel):
+ return channel.unary_unary(_UNARY_UNARY)
+
+
+def unary_stream_multi_callable(channel):
+ return channel.unary_stream(_UNARY_STREAM,
+ request_serializer=_SERIALIZE_REQUEST,
+ response_deserializer=_DESERIALIZE_RESPONSE)
+
+
+def unary_stream_non_blocking_multi_callable(channel):
+ return channel.unary_stream(_UNARY_STREAM_NON_BLOCKING,
+ request_serializer=_SERIALIZE_REQUEST,
+ response_deserializer=_DESERIALIZE_RESPONSE)
+
+
+def stream_unary_multi_callable(channel):
+ return channel.stream_unary(_STREAM_UNARY,
+ request_serializer=_SERIALIZE_REQUEST,
+ response_deserializer=_DESERIALIZE_RESPONSE)
+
+
+def stream_stream_multi_callable(channel):
+ return channel.stream_stream(_STREAM_STREAM)
+
+
+def stream_stream_non_blocking_multi_callable(channel):
+ return channel.stream_stream(_STREAM_STREAM_NON_BLOCKING)
+
+
+class BaseRPCTest(object):
+
+ def setUp(self):
+ self._control = test_control.PauseFailControl()
+ self._thread_pool = thread_pool.RecordingThreadPool(max_workers=None)
+ self._handler = _Handler(self._control, self._thread_pool)
+
+ self._server = test_common.test_server()
+ port = self._server.add_insecure_port('[::]:0')
+ self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),))
+ self._server.start()
+
+ self._channel = grpc.insecure_channel('localhost:%d' % port)
+
+ def tearDown(self):
+ self._server.stop(None)
+ self._channel.close()
+
+ def _consume_one_stream_response_unary_request(self, multi_callable):
+ request = b'\x57\x38'
+
+ response_iterator = multi_callable(
+ request,
+ metadata=(('test', 'ConsumingOneStreamResponseUnaryRequest'),))
+ next(response_iterator)
+
+ def _consume_some_but_not_all_stream_responses_unary_request(
+ self, multi_callable):
+ request = b'\x57\x38'
+
+ response_iterator = multi_callable(
+ request,
+ metadata=(('test',
+ 'ConsumingSomeButNotAllStreamResponsesUnaryRequest'),))
+ for _ in range(test_constants.STREAM_LENGTH // 2):
+ next(response_iterator)
+
+ def _consume_some_but_not_all_stream_responses_stream_request(
+ self, multi_callable):
+ requests = tuple(
+ b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+
+ response_iterator = multi_callable(
+ request_iterator,
+ metadata=(('test',
+ 'ConsumingSomeButNotAllStreamResponsesStreamRequest'),))
+ for _ in range(test_constants.STREAM_LENGTH // 2):
+ next(response_iterator)
+
+ def _consume_too_many_stream_responses_stream_request(self, multi_callable):
+ requests = tuple(
+ b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+
+ response_iterator = multi_callable(
+ request_iterator,
+ metadata=(('test',
+ 'ConsumingTooManyStreamResponsesStreamRequest'),))
+ for _ in range(test_constants.STREAM_LENGTH):
+ next(response_iterator)
+ for _ in range(test_constants.STREAM_LENGTH):
+ with self.assertRaises(StopIteration):
+ next(response_iterator)
+
+ self.assertIsNotNone(response_iterator.initial_metadata())
+ self.assertIs(grpc.StatusCode.OK, response_iterator.code())
+ self.assertIsNotNone(response_iterator.details())
+ self.assertIsNotNone(response_iterator.trailing_metadata())
+
+ def _cancelled_unary_request_stream_response(self, multi_callable):
+ request = b'\x07\x19'
+
+ with self._control.pause():
+ response_iterator = multi_callable(
+ request,
+ metadata=(('test', 'CancelledUnaryRequestStreamResponse'),))
+ self._control.block_until_paused()
+ response_iterator.cancel()
+
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ next(response_iterator)
+ self.assertIs(grpc.StatusCode.CANCELLED,
+ exception_context.exception.code())
+ self.assertIsNotNone(response_iterator.initial_metadata())
+ self.assertIs(grpc.StatusCode.CANCELLED, response_iterator.code())
+ self.assertIsNotNone(response_iterator.details())
+ self.assertIsNotNone(response_iterator.trailing_metadata())
+
+ def _cancelled_stream_request_stream_response(self, multi_callable):
+ requests = tuple(
+ b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+
+ with self._control.pause():
+ response_iterator = multi_callable(
+ request_iterator,
+ metadata=(('test', 'CancelledStreamRequestStreamResponse'),))
+ response_iterator.cancel()
+
+ with self.assertRaises(grpc.RpcError):
+ next(response_iterator)
+ self.assertIsNotNone(response_iterator.initial_metadata())
+ self.assertIs(grpc.StatusCode.CANCELLED, response_iterator.code())
+ self.assertIsNotNone(response_iterator.details())
+ self.assertIsNotNone(response_iterator.trailing_metadata())
+
+ def _expired_unary_request_stream_response(self, multi_callable):
+ request = b'\x07\x19'
+
+ with self._control.pause():
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ response_iterator = multi_callable(
+ request,
+ timeout=test_constants.SHORT_TIMEOUT,
+ metadata=(('test', 'ExpiredUnaryRequestStreamResponse'),))
+ next(response_iterator)
+
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
+ exception_context.exception.code())
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
+ response_iterator.code())
+
+ def _expired_stream_request_stream_response(self, multi_callable):
+ requests = tuple(
+ b'\x67\x18' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+
+ with self._control.pause():
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ response_iterator = multi_callable(
+ request_iterator,
+ timeout=test_constants.SHORT_TIMEOUT,
+ metadata=(('test', 'ExpiredStreamRequestStreamResponse'),))
+ next(response_iterator)
+
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
+ exception_context.exception.code())
+ self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED,
+ response_iterator.code())
+
+ def _failed_unary_request_stream_response(self, multi_callable):
+ request = b'\x37\x17'
+
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ with self._control.fail():
+ response_iterator = multi_callable(
+ request,
+ metadata=(('test', 'FailedUnaryRequestStreamResponse'),))
+ next(response_iterator)
+
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+
+ def _failed_stream_request_stream_response(self, multi_callable):
+ requests = tuple(
+ b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+
+ with self._control.fail():
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ response_iterator = multi_callable(
+ request_iterator,
+ metadata=(('test', 'FailedStreamRequestStreamResponse'),))
+ tuple(response_iterator)
+
+ self.assertIs(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+ self.assertIs(grpc.StatusCode.UNKNOWN, response_iterator.code())
+
+ def _ignored_unary_stream_request_future_unary_response(
+ self, multi_callable):
+ request = b'\x37\x17'
+
+ multi_callable(request,
+ metadata=(('test',
+ 'IgnoredUnaryRequestStreamResponse'),))
+
+ def _ignored_stream_request_stream_response(self, multi_callable):
+ requests = tuple(
+ b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH))
+ request_iterator = iter(requests)
+
+ multi_callable(request_iterator,
+ metadata=(('test',
+ 'IgnoredStreamRequestStreamResponse'),))
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py
index fba6072b94..35d992a33d 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py
@@ -161,14 +161,14 @@ class _ServerSSLCertReloadTest(
else:
with self.assertRaises(grpc.RpcError) as exception_context:
client_stub.UnUn(request)
- # If TLS 1.2 is used, then the client receives an alert message
- # before the handshake is complete, so the status is UNAVAILABLE. If
- # TLS 1.3 is used, then the client receives the alert message after
- # the handshake is complete, so the TSI handshaker returns the
- # TSI_PROTOCOL_FAILURE result. This result does not have a
- # corresponding status code, so this yields an UNKNOWN status.
- self.assertTrue(exception_context.exception.code(
- ) in [grpc.StatusCode.UNAVAILABLE, grpc.StatusCode.UNKNOWN])
+ # If TLS 1.2 is used, then the client receives an alert message
+ # before the handshake is complete, so the status is UNAVAILABLE. If
+ # TLS 1.3 is used, then the client receives the alert message after
+ # the handshake is complete, so the TSI handshaker returns the
+ # TSI_PROTOCOL_FAILURE result. This result does not have a
+ # corresponding status code, so this yields an UNKNOWN status.
+ self.assertTrue(exception_context.exception.code(
+ ) in [grpc.StatusCode.UNAVAILABLE, grpc.StatusCode.UNKNOWN])
def _do_one_shot_client_rpc(self,
expect_success,
@@ -186,10 +186,10 @@ class _ServerSSLCertReloadTest(
def _test(self):
# things should work...
self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
+ self._do_one_shot_client_rpc(True,
+ root_certificates=CA_1_PEM,
+ private_key=CLIENT_KEY_2_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
@@ -199,10 +199,10 @@ class _ServerSSLCertReloadTest(
# fails because client trusts ca2 and so will reject server
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(False,
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
+ self._do_one_shot_client_rpc(False,
+ root_certificates=CA_2_PEM,
+ private_key=CLIENT_KEY_2_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
@@ -213,10 +213,10 @@ class _ServerSSLCertReloadTest(
# should work again...
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(True, None)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
+ self._do_one_shot_client_rpc(True,
+ root_certificates=CA_1_PEM,
+ private_key=CLIENT_KEY_2_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertTrue(actual_calls[0].did_raise)
@@ -227,10 +227,10 @@ class _ServerSSLCertReloadTest(
# so server will reject
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(not self.require_client_auth(),
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_1_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
+ self._do_one_shot_client_rpc(not self.require_client_auth(),
+ root_certificates=CA_1_PEM,
+ private_key=CLIENT_KEY_1_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
for i, call in enumerate(actual_calls):
@@ -240,10 +240,10 @@ class _ServerSSLCertReloadTest(
# should work again...
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
+ self._do_one_shot_client_rpc(True,
+ root_certificates=CA_1_PEM,
+ private_key=CLIENT_KEY_2_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
@@ -287,10 +287,10 @@ class _ServerSSLCertReloadTest(
root_certificates=CA_1_PEM)
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, cert_config)
- self._do_one_shot_client_rpc(False,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
+ self._do_one_shot_client_rpc(False,
+ root_certificates=CA_1_PEM,
+ private_key=CLIENT_KEY_2_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
@@ -302,10 +302,10 @@ class _ServerSSLCertReloadTest(
# now should work again...
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_1_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
+ self._do_one_shot_client_rpc(True,
+ root_certificates=CA_2_PEM,
+ private_key=CLIENT_KEY_1_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
@@ -314,10 +314,10 @@ class _ServerSSLCertReloadTest(
# client should be rejected by server if with_client_auth
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(not self.require_client_auth(),
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
+ self._do_one_shot_client_rpc(not self.require_client_auth(),
+ root_certificates=CA_2_PEM,
+ private_key=CLIENT_KEY_2_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
for i, call in enumerate(actual_calls):
@@ -327,10 +327,10 @@ class _ServerSSLCertReloadTest(
# here client should reject server...
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(False,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
+ self._do_one_shot_client_rpc(False,
+ root_certificates=CA_1_PEM,
+ private_key=CLIENT_KEY_2_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
for i, call in enumerate(actual_calls):
@@ -426,10 +426,10 @@ class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest):
# succeed with A
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_A)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
+ self._do_one_shot_client_rpc(True,
+ root_certificates=CA_1_PEM,
+ private_key=CLIENT_KEY_2_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
@@ -439,10 +439,10 @@ class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest):
# fail with A
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_A)
- self._do_one_shot_client_rpc(False,
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_1_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
+ self._do_one_shot_client_rpc(False,
+ root_certificates=CA_2_PEM,
+ private_key=CLIENT_KEY_1_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
@@ -454,10 +454,10 @@ class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest):
# succeed again with A
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_A)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
+ self._do_one_shot_client_rpc(True,
+ root_certificates=CA_1_PEM,
+ private_key=CLIENT_KEY_2_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
@@ -467,10 +467,10 @@ class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest):
# succeed with B
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_B)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_1_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
+ self._do_one_shot_client_rpc(True,
+ root_certificates=CA_2_PEM,
+ private_key=CLIENT_KEY_1_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
@@ -480,10 +480,10 @@ class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest):
# fail with B
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_B)
- self._do_one_shot_client_rpc(False,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
+ self._do_one_shot_client_rpc(False,
+ root_certificates=CA_1_PEM,
+ private_key=CLIENT_KEY_2_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertGreaterEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
@@ -495,10 +495,10 @@ class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest):
# succeed again with B
self.cert_config_fetcher.reset()
self.cert_config_fetcher.configure(False, self.cert_config_B)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_1_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
+ self._do_one_shot_client_rpc(True,
+ root_certificates=CA_2_PEM,
+ private_key=CLIENT_KEY_1_PEM,
+ certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
actual_calls = self.cert_config_fetcher.getCalls()
self.assertEqual(len(actual_calls), 1)
self.assertFalse(actual_calls[0].did_raise)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py
index aee1053c99..3c519219d5 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py
@@ -18,9 +18,9 @@ import logging
import grpc
-from tests.unit import resources
+from tests.unit import resources
+
-
class _ActualGenericRpcHandler(grpc.GenericRpcHandler):
def service(self, handler_call_details):
@@ -31,11 +31,11 @@ class ServerTest(unittest.TestCase):
def test_not_a_generic_rpc_handler_at_construction(self):
with self.assertRaises(AttributeError) as exception_context:
- grpc.server(futures.ThreadPoolExecutor(max_workers=5),
- handlers=[
- _ActualGenericRpcHandler(),
- object(),
- ])
+ grpc.server(futures.ThreadPoolExecutor(max_workers=5),
+ handlers=[
+ _ActualGenericRpcHandler(),
+ object(),
+ ])
self.assertIn('grpc.GenericRpcHandler',
str(exception_context.exception))
@@ -49,21 +49,21 @@ class ServerTest(unittest.TestCase):
self.assertIn('grpc.GenericRpcHandler',
str(exception_context.exception))
- def test_failed_port_binding_exception(self):
- server = grpc.server(None, options=(('grpc.so_reuseport', 0),))
- port = server.add_insecure_port('localhost:0')
- bind_address = "localhost:%d" % port
+ def test_failed_port_binding_exception(self):
+ server = grpc.server(None, options=(('grpc.so_reuseport', 0),))
+ port = server.add_insecure_port('localhost:0')
+ bind_address = "localhost:%d" % port
+
+ with self.assertRaises(RuntimeError):
+ server.add_insecure_port(bind_address)
+
+ server_credentials = grpc.ssl_server_credentials([
+ (resources.private_key(), resources.certificate_chain())
+ ])
+ with self.assertRaises(RuntimeError):
+ server.add_secure_port(bind_address, server_credentials)
+
- with self.assertRaises(RuntimeError):
- server.add_insecure_port(bind_address)
-
- server_credentials = grpc.ssl_server_credentials([
- (resources.private_key(), resources.certificate_chain())
- ])
- with self.assertRaises(RuntimeError):
- server.add_secure_port(bind_address, server_credentials)
-
-
if __name__ == '__main__':
logging.basicConfig()
unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py
index be691133a2..3dd95ea8bf 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py
@@ -39,11 +39,11 @@ class ServerWaitForTerminationTest(unittest.TestCase):
termination_event = threading.Event()
server = grpc.server(futures.ThreadPoolExecutor())
- wait_thread = threading.Thread(target=_block_on_waiting,
- args=(
- server,
- termination_event,
- ))
+ wait_thread = threading.Thread(target=_block_on_waiting,
+ args=(
+ server,
+ termination_event,
+ ))
wait_thread.daemon = True
wait_thread.start()
time.sleep(_WAIT_FOR_BLOCKING.total_seconds())
@@ -56,11 +56,11 @@ class ServerWaitForTerminationTest(unittest.TestCase):
termination_event = threading.Event()
server = grpc.server(futures.ThreadPoolExecutor())
- wait_thread = threading.Thread(target=_block_on_waiting,
- args=(
- server,
- termination_event,
- ))
+ wait_thread = threading.Thread(target=_block_on_waiting,
+ args=(
+ server,
+ termination_event,
+ ))
wait_thread.daemon = True
wait_thread.start()
time.sleep(_WAIT_FOR_BLOCKING.total_seconds())
@@ -74,12 +74,12 @@ class ServerWaitForTerminationTest(unittest.TestCase):
termination_event = threading.Event()
server = grpc.server(futures.ThreadPoolExecutor())
- wait_thread = threading.Thread(target=_block_on_waiting,
- args=(
- server,
- termination_event,
- test_constants.SHORT_TIMEOUT / 2,
- ))
+ wait_thread = threading.Thread(target=_block_on_waiting,
+ args=(
+ server,
+ termination_event,
+ test_constants.SHORT_TIMEOUT / 2,
+ ))
wait_thread.daemon = True
wait_thread.start()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py
index 8a5df7d512..9bff4d2af0 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py
@@ -53,9 +53,9 @@ def handle_unary_unary(request, servicer_context):
def start_secure_server():
- handler = grpc.method_handlers_generic_handler(
- 'test',
- {'UnaryUnary': grpc.unary_unary_rpc_method_handler(handle_unary_unary)})
+ handler = grpc.method_handlers_generic_handler(
+ 'test',
+ {'UnaryUnary': grpc.unary_unary_rpc_method_handler(handle_unary_unary)})
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
@@ -69,9 +69,9 @@ class SSLSessionCacheTest(unittest.TestCase):
def _do_one_shot_client_rpc(self, channel_creds, channel_options, port,
expect_ssl_session_reused):
- channel = grpc.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=channel_options)
+ channel = grpc.secure_channel('localhost:{}'.format(port),
+ channel_creds,
+ options=channel_options)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
auth_data = pickle.loads(response)
self.assertEqual(expect_ssl_session_reused,
@@ -88,50 +88,50 @@ class SSLSessionCacheTest(unittest.TestCase):
('grpc.ssl_session_cache', cache),)
# Initial connection has no session to resume
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_1,
- expect_ssl_session_reused=[b'false'])
+ self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port_1,
+ expect_ssl_session_reused=[b'false'])
# Connection to server_1 resumes from initial session
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_1,
- expect_ssl_session_reused=[b'true'])
+ self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port_1,
+ expect_ssl_session_reused=[b'true'])
# Connection to a different server with the same name overwrites the cache entry
server_2, port_2 = start_secure_server()
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_2,
- expect_ssl_session_reused=[b'false'])
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_2,
- expect_ssl_session_reused=[b'true'])
+ self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port_2,
+ expect_ssl_session_reused=[b'false'])
+ self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port_2,
+ expect_ssl_session_reused=[b'true'])
server_2.stop(None)
# Connection to server_1 now falls back to full TLS handshake
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_1,
- expect_ssl_session_reused=[b'false'])
+ self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port_1,
+ expect_ssl_session_reused=[b'false'])
# Re-creating server_1 causes old sessions to become invalid
server_1.stop(None)
server_1, port_1 = start_secure_server()
# Old sessions should no longer be valid
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_1,
- expect_ssl_session_reused=[b'false'])
+ self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port_1,
+ expect_ssl_session_reused=[b'false'])
# Resumption should work for subsequent connections
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_1,
- expect_ssl_session_reused=[b'true'])
+ self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port_1,
+ expect_ssl_session_reused=[b'true'])
server_1.stop(None)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py
index 123f6fe3de..0be1270749 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py
@@ -55,8 +55,8 @@ def main_unary(server_target):
with grpc.insecure_channel(server_target) as channel:
multicallable = channel.unary_unary(UNARY_UNARY)
signal.signal(signal.SIGINT, handle_sigint)
- per_process_rpc_future = multicallable.future(_MESSAGE,
- wait_for_ready=True)
+ per_process_rpc_future = multicallable.future(_MESSAGE,
+ wait_for_ready=True)
result = per_process_rpc_future.result()
assert False, _ASSERTION_MESSAGE
@@ -90,8 +90,8 @@ def main_streaming_with_exception(server_target):
"""Initiate a streaming RPC with a signal handler that will raise."""
channel = grpc.insecure_channel(server_target)
try:
- for _ in channel.unary_stream(UNARY_STREAM)(_MESSAGE,
- wait_for_ready=True):
+ for _ in channel.unary_stream(UNARY_STREAM)(_MESSAGE,
+ wait_for_ready=True):
pass
except KeyboardInterrupt:
sys.stderr.write("Running signal handler.\n")
@@ -105,9 +105,9 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Signal test client.')
parser.add_argument('server', help='Server target')
parser.add_argument('arity', help='Arity', choices=('unary', 'streaming'))
- parser.add_argument('--exception',
- help='Whether the signal throws an exception',
- action='store_true')
+ parser.add_argument('--exception',
+ help='Whether the signal throws an exception',
+ action='store_true')
args = parser.parse_args()
if args.arity == 'unary' and not args.exception:
main_unary(args.server)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py
index ae7bbc108c..a111d68764 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py
@@ -132,9 +132,9 @@ class _BlockingIterator(object):
def _metadata_plugin(context, callback):
- callback([
- (_PER_RPC_CREDENTIALS_METADATA_KEY, _PER_RPC_CREDENTIALS_METADATA_VALUE)
- ], None)
+ callback([
+ (_PER_RPC_CREDENTIALS_METADATA_KEY, _PER_RPC_CREDENTIALS_METADATA_VALUE)
+ ], None)
class BetaFeaturesTest(unittest.TestCase):
@@ -143,13 +143,13 @@ class BetaFeaturesTest(unittest.TestCase):
self._servicer = _Servicer()
method_implementations = {
(_GROUP, _UNARY_UNARY):
- utilities.unary_unary_inline(self._servicer.unary_unary),
+ utilities.unary_unary_inline(self._servicer.unary_unary),
(_GROUP, _UNARY_STREAM):
- utilities.unary_stream_inline(self._servicer.unary_stream),
+ utilities.unary_stream_inline(self._servicer.unary_stream),
(_GROUP, _STREAM_UNARY):
- utilities.stream_unary_inline(self._servicer.stream_unary),
+ utilities.stream_unary_inline(self._servicer.stream_unary),
(_GROUP, _STREAM_STREAM):
- utilities.stream_stream_inline(self._servicer.stream_stream),
+ utilities.stream_stream_inline(self._servicer.stream_stream),
}
cardinalities = {
@@ -161,8 +161,8 @@ class BetaFeaturesTest(unittest.TestCase):
server_options = implementations.server_options(
thread_pool_size=test_constants.POOL_SIZE)
- self._server = implementations.server(method_implementations,
- options=server_options)
+ self._server = implementations.server(method_implementations,
+ options=server_options)
server_credentials = implementations.ssl_server_credentials([
(
resources.private_key(),
@@ -179,10 +179,10 @@ class BetaFeaturesTest(unittest.TestCase):
'localhost', port, self._channel_credentials, _SERVER_HOST_OVERRIDE)
stub_options = implementations.stub_options(
thread_pool_size=test_constants.POOL_SIZE)
- self._dynamic_stub = implementations.dynamic_stub(channel,
- _GROUP,
- cardinalities,
- options=stub_options)
+ self._dynamic_stub = implementations.dynamic_stub(channel,
+ _GROUP,
+ cardinalities,
+ options=stub_options)
def tearDown(self):
self._dynamic_stub = None
@@ -191,10 +191,10 @@ class BetaFeaturesTest(unittest.TestCase):
def test_unary_unary(self):
call_options = interfaces.grpc_call_options(
disable_compression=True, credentials=self._call_credentials)
- response = getattr(self._dynamic_stub,
- _UNARY_UNARY)(_REQUEST,
- test_constants.LONG_TIMEOUT,
- protocol_options=call_options)
+ response = getattr(self._dynamic_stub,
+ _UNARY_UNARY)(_REQUEST,
+ test_constants.LONG_TIMEOUT,
+ protocol_options=call_options)
self.assertEqual(_RESPONSE, response)
self.assertIsNotNone(self._servicer.peer())
invocation_metadata = [
@@ -276,13 +276,13 @@ class ContextManagementAndLifecycleTest(unittest.TestCase):
self._servicer = _Servicer()
self._method_implementations = {
(_GROUP, _UNARY_UNARY):
- utilities.unary_unary_inline(self._servicer.unary_unary),
+ utilities.unary_unary_inline(self._servicer.unary_unary),
(_GROUP, _UNARY_STREAM):
- utilities.unary_stream_inline(self._servicer.unary_stream),
+ utilities.unary_stream_inline(self._servicer.unary_stream),
(_GROUP, _STREAM_UNARY):
- utilities.stream_unary_inline(self._servicer.stream_unary),
+ utilities.stream_unary_inline(self._servicer.stream_unary),
(_GROUP, _STREAM_STREAM):
- utilities.stream_stream_inline(self._servicer.stream_stream),
+ utilities.stream_stream_inline(self._servicer.stream_stream),
}
self._cardinalities = {
@@ -306,17 +306,17 @@ class ContextManagementAndLifecycleTest(unittest.TestCase):
thread_pool_size=test_constants.POOL_SIZE)
def test_stub_context(self):
- server = implementations.server(self._method_implementations,
- options=self._server_options)
+ server = implementations.server(self._method_implementations,
+ options=self._server_options)
port = server.add_secure_port('[::]:0', self._server_credentials)
server.start()
channel = test_utilities.not_really_secure_channel(
'localhost', port, self._channel_credentials, _SERVER_HOST_OVERRIDE)
- dynamic_stub = implementations.dynamic_stub(channel,
- _GROUP,
- self._cardinalities,
- options=self._stub_options)
+ dynamic_stub = implementations.dynamic_stub(channel,
+ _GROUP,
+ self._cardinalities,
+ options=self._stub_options)
for _ in range(100):
with dynamic_stub:
pass
@@ -324,10 +324,10 @@ class ContextManagementAndLifecycleTest(unittest.TestCase):
with dynamic_stub:
call_options = interfaces.grpc_call_options(
disable_compression=True)
- response = getattr(dynamic_stub,
- _UNARY_UNARY)(_REQUEST,
- test_constants.LONG_TIMEOUT,
- protocol_options=call_options)
+ response = getattr(dynamic_stub,
+ _UNARY_UNARY)(_REQUEST,
+ test_constants.LONG_TIMEOUT,
+ protocol_options=call_options)
self.assertEqual(_RESPONSE, response)
self.assertIsNotNone(self._servicer.peer())
@@ -335,14 +335,14 @@ class ContextManagementAndLifecycleTest(unittest.TestCase):
def test_server_lifecycle(self):
for _ in range(100):
- server = implementations.server(self._method_implementations,
- options=self._server_options)
+ server = implementations.server(self._method_implementations,
+ options=self._server_options)
port = server.add_secure_port('[::]:0', self._server_credentials)
server.start()
server.stop(test_constants.SHORT_TIMEOUT).wait()
for _ in range(100):
- server = implementations.server(self._method_implementations,
- options=self._server_options)
+ server = implementations.server(self._method_implementations,
+ options=self._server_options)
server.add_secure_port('[::]:0', self._server_credentials)
server.add_insecure_port('[::]:0')
with server:
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py
index 07127ea28c..75a615eeff 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py
@@ -41,8 +41,8 @@ class CallCredentialsTest(unittest.TestCase):
def test_google_call_credentials(self):
creds = oauth2client_client.GoogleCredentials(
'token', 'client_id', 'secret', 'refresh_token',
- datetime.datetime(2008, 6, 24), 'https://refresh.uri.com/',
- 'user_agent')
+ datetime.datetime(2008, 6, 24), 'https://refresh.uri.com/',
+ 'user_agent')
call_creds = implementations.google_call_credentials(creds)
self.assertIsInstance(call_creds, implementations.CallCredentials)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py
index 2b9df9f393..837d2bbebf 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py
@@ -36,11 +36,11 @@ class NotFoundTest(unittest.TestCase):
def test_blocking_unary_unary_not_found(self):
with self.assertRaises(face.LocalError) as exception_assertion_context:
- self._generic_stub.blocking_unary_unary('groop',
- 'meffod',
- b'abc',
- test_constants.LONG_TIMEOUT,
- with_call=True)
+ self._generic_stub.blocking_unary_unary('groop',
+ 'meffod',
+ b'abc',
+ test_constants.LONG_TIMEOUT,
+ with_call=True)
self.assertIs(exception_assertion_context.exception.code,
interfaces.StatusCode.UNIMPLEMENTED)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem
index ac1808a56d..49d39cd8ed 100755
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem
@@ -1,20 +1,20 @@
-----BEGIN CERTIFICATE-----
-MIIDWjCCAkKgAwIBAgIUWrP0VvHcy+LP6UuYNtiL9gBhD5owDQYJKoZIhvcNAQEL
-BQAwVjELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
-GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEPMA0GA1UEAwwGdGVzdGNhMB4XDTIw
-MDMxNzE4NTk1MVoXDTMwMDMxNTE4NTk1MVowVjELMAkGA1UEBhMCQVUxEzARBgNV
-BAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5IEx0
-ZDEPMA0GA1UEAwwGdGVzdGNhMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAsGL0oXflF0LzoM+Bh+qUU9yhqzw2w8OOX5mu/iNCyUOBrqaHi7mGHx73GD01
-diNzCzvlcQqdNIH6NQSL7DTpBjca66jYT9u73vZe2MDrr1nVbuLvfu9850cdxiUO
-Inv5xf8+sTHG0C+a+VAvMhsLiRjsq+lXKRJyk5zkbbsETybqpxoJ+K7CoSy3yc/k
-QIY3TipwEtwkKP4hzyo6KiGd/DPexie4nBUInN3bS1BUeNZ5zeaIC2eg3bkeeW7c
-qT55b+Yen6CxY0TEkzBK6AKt/WUialKMgT0wbTxRZO7kUCH3Sq6e/wXeFdJ+HvdV
-LPlAg5TnMaNpRdQih/8nRFpsdwIDAQABoyAwHjAMBgNVHRMEBTADAQH/MA4GA1Ud
-DwEB/wQEAwICBDANBgkqhkiG9w0BAQsFAAOCAQEAkTrKZjBrJXHps/HrjNCFPb5a
-THuGPCSsepe1wkKdSp1h4HGRpLoCgcLysCJ5hZhRpHkRihhef+rFHEe60UePQO3S
-CVTtdJB4CYWpcNyXOdqefrbJW5QNljxgi6Fhvs7JJkBqdXIkWXtFk2eRgOIP2Eo9
-/OHQHlYnwZFrk6sp4wPyR+A95S0toZBcyDVz7u+hOW0pGK3wviOe9lvRgj/H3Pwt
-bewb0l+MhRig0/DVHamyVxrDRbqInU1/GTNCwcZkXKYFWSf92U+kIcTth24Q1gcw
-eZiLl5FfrWokUNytFElXob0V0a5/kbhiLc3yWmvWqHTpqCALbVyF+rKJo2f5Kw==
+MIIDWjCCAkKgAwIBAgIUWrP0VvHcy+LP6UuYNtiL9gBhD5owDQYJKoZIhvcNAQEL
+BQAwVjELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
+GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEPMA0GA1UEAwwGdGVzdGNhMB4XDTIw
+MDMxNzE4NTk1MVoXDTMwMDMxNTE4NTk1MVowVjELMAkGA1UEBhMCQVUxEzARBgNV
+BAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5IEx0
+ZDEPMA0GA1UEAwwGdGVzdGNhMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAsGL0oXflF0LzoM+Bh+qUU9yhqzw2w8OOX5mu/iNCyUOBrqaHi7mGHx73GD01
+diNzCzvlcQqdNIH6NQSL7DTpBjca66jYT9u73vZe2MDrr1nVbuLvfu9850cdxiUO
+Inv5xf8+sTHG0C+a+VAvMhsLiRjsq+lXKRJyk5zkbbsETybqpxoJ+K7CoSy3yc/k
+QIY3TipwEtwkKP4hzyo6KiGd/DPexie4nBUInN3bS1BUeNZ5zeaIC2eg3bkeeW7c
+qT55b+Yen6CxY0TEkzBK6AKt/WUialKMgT0wbTxRZO7kUCH3Sq6e/wXeFdJ+HvdV
+LPlAg5TnMaNpRdQih/8nRFpsdwIDAQABoyAwHjAMBgNVHRMEBTADAQH/MA4GA1Ud
+DwEB/wQEAwICBDANBgkqhkiG9w0BAQsFAAOCAQEAkTrKZjBrJXHps/HrjNCFPb5a
+THuGPCSsepe1wkKdSp1h4HGRpLoCgcLysCJ5hZhRpHkRihhef+rFHEe60UePQO3S
+CVTtdJB4CYWpcNyXOdqefrbJW5QNljxgi6Fhvs7JJkBqdXIkWXtFk2eRgOIP2Eo9
+/OHQHlYnwZFrk6sp4wPyR+A95S0toZBcyDVz7u+hOW0pGK3wviOe9lvRgj/H3Pwt
+bewb0l+MhRig0/DVHamyVxrDRbqInU1/GTNCwcZkXKYFWSf92U+kIcTth24Q1gcw
+eZiLl5FfrWokUNytFElXob0V0a5/kbhiLc3yWmvWqHTpqCALbVyF+rKJo2f5Kw==
-----END CERTIFICATE-----
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key
index 2388411844..086462992c 100755
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key
@@ -1,28 +1,28 @@
-----BEGIN PRIVATE KEY-----
-MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDnE443EknxvxBq
-6+hvn/t09hl8hx366EBYvZmVM/NC+7igXRAjiJiA/mIaCvL3MS0Iz5hBLxSGICU+
-WproA3GCIFITIwcf/ETyWj/5xpgZ4AKrLrjQmmX8mhwUajfF3UvwMJrCOVqPp67t
-PtP+2kBXaqrXdvnvXR41FsIB8V7zIAuIZB6bHQhiGVlc1sgZYsE2EGG9WMmHtS86
-qkAOTjG2XyjmPTGAwhGDpYkYrpzp99IiDh4/Veai81hn0ssQkbry0XRD/Ig3jcHh
-23WiriPNJ0JsbgXUSLKRPZObA9VgOLy2aXoN84IMaeK3yy+cwSYG/99w93fUZJte
-MXwz4oYZAgMBAAECggEBAIVn2Ncai+4xbH0OLWckabwgyJ4IM9rDc0LIU368O1kU
-koais8qP9dujAWgfoh3sGh/YGgKn96VnsZjKHlyMgF+r4TaDJn3k2rlAOWcurGlj
-1qaVlsV4HiEzp7pxiDmHhWvp4672Bb6iBG+bsjCUOEk/n9o9KhZzIBluRhtxCmw5
-nw4Do7z00PTvN81260uPWSc04IrytvZUiAIx/5qxD72bij2xJ8t/I9GI8g4FtoVB
-8pB6S/hJX1PZhh9VlU6Yk+TOfOVnbebG4W5138LkB835eqk3Zz0qsbc2euoi8Hxi
-y1VGwQEmMQ63jXz4c6g+X55ifvUK9Jpn5E8pq+pMd7ECgYEA93lYq+Cr54K4ey5t
-sWMa+ye5RqxjzgXj2Kqr55jb54VWG7wp2iGbg8FMlkQwzTJwebzDyCSatguEZLuB
-gRGroRnsUOy9vBvhKPOch9bfKIl6qOgzMJB267fBVWx5ybnRbWN/I7RvMQf3k+9y
-biCIVnxDLEEYyx7z85/5qxsXg/MCgYEA7wmWKtCTn032Hy9P8OL49T0X6Z8FlkDC
-Rk42ygrc/MUbugq9RGUxcCxoImOG9JXUpEtUe31YDm2j+/nbvrjl6/bP2qWs0V7l
-dTJl6dABP51pCw8+l4cWgBBX08Lkeen812AAFNrjmDCjX6rHjWHLJcpS18fnRRkP
-V1d/AHWX7MMCgYEA6Gsw2guhp0Zf2GCcaNK5DlQab8OL4Hwrpttzo4kuTlwtqNKp
-Q9H4al9qfF4Cr1TFya98+EVYf8yFRM3NLNjZpe3gwYf2EerlJj7VLcahw0KKzoN1
-QBENfwgPLRk5sDkx9VhSmcfl/diLroZdpAwtv3vo4nEoxeuGFbKTGx3Qkf0CgYEA
-xyR+dcb05Ygm3w4klHQTowQ10s1H80iaUcZBgQuR1ghEtDbUPZHsoR5t1xCB02ys
-DgAwLv1bChIvxvH/L6KM8ovZ2LekBX4AviWxoBxJnfz/EVau98B0b1auRN6eSC83
-FRuGldlSOW1z/nSh8ViizSYE5H5HX1qkXEippvFRE88CgYB3Bfu3YQY60ITWIShv
-nNkdcbTT9eoP9suaRJjw92Ln+7ZpALYlQMKUZmJ/5uBmLs4RFwUTQruLOPL4yLTH
-awADWUzs3IRr1fwn9E+zM8JVyKCnUEM3w4N5UZskGO2klashAd30hWO+knRv/y0r
-uGIYs9Ek7YXlXIRVrzMwcsrt1w==
+MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDnE443EknxvxBq
+6+hvn/t09hl8hx366EBYvZmVM/NC+7igXRAjiJiA/mIaCvL3MS0Iz5hBLxSGICU+
+WproA3GCIFITIwcf/ETyWj/5xpgZ4AKrLrjQmmX8mhwUajfF3UvwMJrCOVqPp67t
+PtP+2kBXaqrXdvnvXR41FsIB8V7zIAuIZB6bHQhiGVlc1sgZYsE2EGG9WMmHtS86
+qkAOTjG2XyjmPTGAwhGDpYkYrpzp99IiDh4/Veai81hn0ssQkbry0XRD/Ig3jcHh
+23WiriPNJ0JsbgXUSLKRPZObA9VgOLy2aXoN84IMaeK3yy+cwSYG/99w93fUZJte
+MXwz4oYZAgMBAAECggEBAIVn2Ncai+4xbH0OLWckabwgyJ4IM9rDc0LIU368O1kU
+koais8qP9dujAWgfoh3sGh/YGgKn96VnsZjKHlyMgF+r4TaDJn3k2rlAOWcurGlj
+1qaVlsV4HiEzp7pxiDmHhWvp4672Bb6iBG+bsjCUOEk/n9o9KhZzIBluRhtxCmw5
+nw4Do7z00PTvN81260uPWSc04IrytvZUiAIx/5qxD72bij2xJ8t/I9GI8g4FtoVB
+8pB6S/hJX1PZhh9VlU6Yk+TOfOVnbebG4W5138LkB835eqk3Zz0qsbc2euoi8Hxi
+y1VGwQEmMQ63jXz4c6g+X55ifvUK9Jpn5E8pq+pMd7ECgYEA93lYq+Cr54K4ey5t
+sWMa+ye5RqxjzgXj2Kqr55jb54VWG7wp2iGbg8FMlkQwzTJwebzDyCSatguEZLuB
+gRGroRnsUOy9vBvhKPOch9bfKIl6qOgzMJB267fBVWx5ybnRbWN/I7RvMQf3k+9y
+biCIVnxDLEEYyx7z85/5qxsXg/MCgYEA7wmWKtCTn032Hy9P8OL49T0X6Z8FlkDC
+Rk42ygrc/MUbugq9RGUxcCxoImOG9JXUpEtUe31YDm2j+/nbvrjl6/bP2qWs0V7l
+dTJl6dABP51pCw8+l4cWgBBX08Lkeen812AAFNrjmDCjX6rHjWHLJcpS18fnRRkP
+V1d/AHWX7MMCgYEA6Gsw2guhp0Zf2GCcaNK5DlQab8OL4Hwrpttzo4kuTlwtqNKp
+Q9H4al9qfF4Cr1TFya98+EVYf8yFRM3NLNjZpe3gwYf2EerlJj7VLcahw0KKzoN1
+QBENfwgPLRk5sDkx9VhSmcfl/diLroZdpAwtv3vo4nEoxeuGFbKTGx3Qkf0CgYEA
+xyR+dcb05Ygm3w4klHQTowQ10s1H80iaUcZBgQuR1ghEtDbUPZHsoR5t1xCB02ys
+DgAwLv1bChIvxvH/L6KM8ovZ2LekBX4AviWxoBxJnfz/EVau98B0b1auRN6eSC83
+FRuGldlSOW1z/nSh8ViizSYE5H5HX1qkXEippvFRE88CgYB3Bfu3YQY60ITWIShv
+nNkdcbTT9eoP9suaRJjw92Ln+7ZpALYlQMKUZmJ/5uBmLs4RFwUTQruLOPL4yLTH
+awADWUzs3IRr1fwn9E+zM8JVyKCnUEM3w4N5UZskGO2klashAd30hWO+knRv/y0r
+uGIYs9Ek7YXlXIRVrzMwcsrt1w==
-----END PRIVATE KEY-----
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem
index 3025c1320e..88244f856c 100755
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem
@@ -1,22 +1,22 @@
-----BEGIN CERTIFICATE-----
-MIIDtDCCApygAwIBAgIUbJfTREJ6k6/+oInWhV1O1j3ZT0IwDQYJKoZIhvcNAQEL
-BQAwVjELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
-GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEPMA0GA1UEAwwGdGVzdGNhMB4XDTIw
-MDMxODAzMTA0MloXDTMwMDMxNjAzMTA0MlowZTELMAkGA1UEBhMCVVMxETAPBgNV
-BAgMCElsbGlub2lzMRAwDgYDVQQHDAdDaGljYWdvMRUwEwYDVQQKDAxFeGFtcGxl
-LCBDby4xGjAYBgNVBAMMESoudGVzdC5nb29nbGUuY29tMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA5xOONxJJ8b8Qauvob5/7dPYZfIcd+uhAWL2ZlTPz
-Qvu4oF0QI4iYgP5iGgry9zEtCM+YQS8UhiAlPlqa6ANxgiBSEyMHH/xE8lo/+caY
-GeACqy640Jpl/JocFGo3xd1L8DCawjlaj6eu7T7T/tpAV2qq13b5710eNRbCAfFe
-8yALiGQemx0IYhlZXNbIGWLBNhBhvVjJh7UvOqpADk4xtl8o5j0xgMIRg6WJGK6c
-6ffSIg4eP1XmovNYZ9LLEJG68tF0Q/yIN43B4dt1oq4jzSdCbG4F1EiykT2TmwPV
-YDi8tml6DfOCDGnit8svnMEmBv/fcPd31GSbXjF8M+KGGQIDAQABo2swaTAJBgNV
-HRMEAjAAMAsGA1UdDwQEAwIF4DBPBgNVHREESDBGghAqLnRlc3QuZ29vZ2xlLmZy
-ghh3YXRlcnpvb2kudGVzdC5nb29nbGUuYmWCEioudGVzdC55b3V0dWJlLmNvbYcE
-wKgBAzANBgkqhkiG9w0BAQsFAAOCAQEAS8hDQA8PSgipgAml7Q3/djwQ644ghWQv
-C2Kb+r30RCY1EyKNhnQnIIh/OUbBZvh0M0iYsy6xqXgfDhCB93AA6j0i5cS8fkhH
-Jl4RK0tSkGQ3YNY4NzXwQP/vmUgfkw8VBAZ4Y4GKxppdATjffIW+srbAmdDruIRM
-wPeikgOoRrXf0LA1fi4TqxARzeRwenQpayNfGHTvVF9aJkl8HoaMunTAdG5pIVcr
-9GKi/gEMpXUJbbVv3U5frX1Wo4CFo+rZWJ/LyCMeb0jciNLxSdMwj/E/ZuExlyeZ
-gc9ctPjSMvgSyXEKv6Vwobleeg88V2ZgzenziORoWj4KszG/lbQZvg==
+MIIDtDCCApygAwIBAgIUbJfTREJ6k6/+oInWhV1O1j3ZT0IwDQYJKoZIhvcNAQEL
+BQAwVjELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
+GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEPMA0GA1UEAwwGdGVzdGNhMB4XDTIw
+MDMxODAzMTA0MloXDTMwMDMxNjAzMTA0MlowZTELMAkGA1UEBhMCVVMxETAPBgNV
+BAgMCElsbGlub2lzMRAwDgYDVQQHDAdDaGljYWdvMRUwEwYDVQQKDAxFeGFtcGxl
+LCBDby4xGjAYBgNVBAMMESoudGVzdC5nb29nbGUuY29tMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA5xOONxJJ8b8Qauvob5/7dPYZfIcd+uhAWL2ZlTPz
+Qvu4oF0QI4iYgP5iGgry9zEtCM+YQS8UhiAlPlqa6ANxgiBSEyMHH/xE8lo/+caY
+GeACqy640Jpl/JocFGo3xd1L8DCawjlaj6eu7T7T/tpAV2qq13b5710eNRbCAfFe
+8yALiGQemx0IYhlZXNbIGWLBNhBhvVjJh7UvOqpADk4xtl8o5j0xgMIRg6WJGK6c
+6ffSIg4eP1XmovNYZ9LLEJG68tF0Q/yIN43B4dt1oq4jzSdCbG4F1EiykT2TmwPV
+YDi8tml6DfOCDGnit8svnMEmBv/fcPd31GSbXjF8M+KGGQIDAQABo2swaTAJBgNV
+HRMEAjAAMAsGA1UdDwQEAwIF4DBPBgNVHREESDBGghAqLnRlc3QuZ29vZ2xlLmZy
+ghh3YXRlcnpvb2kudGVzdC5nb29nbGUuYmWCEioudGVzdC55b3V0dWJlLmNvbYcE
+wKgBAzANBgkqhkiG9w0BAQsFAAOCAQEAS8hDQA8PSgipgAml7Q3/djwQ644ghWQv
+C2Kb+r30RCY1EyKNhnQnIIh/OUbBZvh0M0iYsy6xqXgfDhCB93AA6j0i5cS8fkhH
+Jl4RK0tSkGQ3YNY4NzXwQP/vmUgfkw8VBAZ4Y4GKxppdATjffIW+srbAmdDruIRM
+wPeikgOoRrXf0LA1fi4TqxARzeRwenQpayNfGHTvVF9aJkl8HoaMunTAdG5pIVcr
+9GKi/gEMpXUJbbVv3U5frX1Wo4CFo+rZWJ/LyCMeb0jciNLxSdMwj/E/ZuExlyeZ
+gc9ctPjSMvgSyXEKv6Vwobleeg88V2ZgzenziORoWj4KszG/lbQZvg==
-----END CERTIFICATE-----
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py
index ac4ca36b4a..8b58a0c46a 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py
@@ -15,25 +15,25 @@
import contextlib
import os
import socket
-import errno
+import errno
-_DEFAULT_SOCK_OPTIONS = (socket.SO_REUSEADDR,
- socket.SO_REUSEPORT) if os.name != 'nt' else (
- socket.SO_REUSEADDR,)
-_UNRECOVERABLE_ERRNOS = (errno.EADDRINUSE, errno.ENOSR)
+_DEFAULT_SOCK_OPTIONS = (socket.SO_REUSEADDR,
+ socket.SO_REUSEPORT) if os.name != 'nt' else (
+ socket.SO_REUSEADDR,)
+_UNRECOVERABLE_ERRNOS = (errno.EADDRINUSE, errno.ENOSR)
def get_socket(bind_address='localhost',
- port=0,
+ port=0,
listen=True,
- sock_options=_DEFAULT_SOCK_OPTIONS):
- """Opens a socket.
+ sock_options=_DEFAULT_SOCK_OPTIONS):
+ """Opens a socket.
Useful for reserving a port for a system-under-test.
Args:
bind_address: The host to which to bind.
- port: The port to which to bind.
+ port: The port to which to bind.
listen: A boolean value indicating whether or not to listen on the socket.
sock_options: A sequence of socket options to apply to the socket.
@@ -53,20 +53,20 @@ def get_socket(bind_address='localhost',
sock = socket.socket(address_family, socket.SOCK_STREAM)
for sock_option in _sock_options:
sock.setsockopt(socket.SOL_SOCKET, sock_option, 1)
- sock.bind((bind_address, port))
+ sock.bind((bind_address, port))
if listen:
sock.listen(1)
return bind_address, sock.getsockname()[1], sock
- except OSError as os_error:
+ except OSError as os_error:
+ sock.close()
+ if os_error.errno in _UNRECOVERABLE_ERRNOS:
+ raise
+ else:
+ continue
+ # For PY2, socket.error is a child class of IOError; for PY3, it is
+ # pointing to OSError. We need this catch to make it 2/3 agnostic.
+ except socket.error: # pylint: disable=duplicate-except
sock.close()
- if os_error.errno in _UNRECOVERABLE_ERRNOS:
- raise
- else:
- continue
- # For PY2, socket.error is a child class of IOError; for PY3, it is
- # pointing to OSError. We need this catch to make it 2/3 agnostic.
- except socket.error: # pylint: disable=duplicate-except
- sock.close()
continue
raise RuntimeError("Failed to bind to {} with sock_options {}".format(
bind_address, sock_options))
@@ -74,16 +74,16 @@ def get_socket(bind_address='localhost',
@contextlib.contextmanager
def bound_socket(bind_address='localhost',
- port=0,
+ port=0,
listen=True,
- sock_options=_DEFAULT_SOCK_OPTIONS):
+ sock_options=_DEFAULT_SOCK_OPTIONS):
"""Opens a socket bound to an arbitrary port.
Useful for reserving a port for a system-under-test.
Args:
bind_address: The host to which to bind.
- port: The port to which to bind.
+ port: The port to which to bind.
listen: A boolean value indicating whether or not to listen on the socket.
sock_options: A sequence of socket options to apply to the socket.
@@ -92,10 +92,10 @@ def bound_socket(bind_address='localhost',
- the address to which the socket is bound
- the port to which the socket is bound
"""
- host, port, sock = get_socket(bind_address=bind_address,
- port=port,
- listen=listen,
- sock_options=sock_options)
+ host, port, sock = get_socket(bind_address=bind_address,
+ port=port,
+ listen=listen,
+ sock_options=sock_options)
try:
yield host, port
finally:
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py
index 8205a818d7..59ded0752f 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py
@@ -105,8 +105,8 @@ def test_server(max_workers=10, reuse_port=False):
These servers have SO_REUSEPORT disabled to prevent cross-talk.
"""
- return grpc.server(futures.ThreadPoolExecutor(max_workers=max_workers),
- options=(('grpc.so_reuseport', int(reuse_port)),))
+ return grpc.server(futures.ThreadPoolExecutor(max_workers=max_workers),
+ options=(('grpc.so_reuseport', int(reuse_port)),))
class WaitGroup(object):
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_client.py
index 1349f024f5..51a046c20c 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_client.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_client.py
@@ -1,155 +1,155 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Python AsyncIO Benchmark Clients."""
-
-import abc
-import asyncio
-import time
-import logging
-import random
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import (benchmark_service_pb2_grpc, control_pb2,
- messages_pb2)
-from tests.qps import histogram
-from tests.unit import resources
-
-
-class GenericStub(object):
-
- def __init__(self, channel: aio.Channel):
- self.UnaryCall = channel.unary_unary(
- '/grpc.testing.BenchmarkService/UnaryCall')
- self.StreamingCall = channel.stream_stream(
- '/grpc.testing.BenchmarkService/StreamingCall')
-
-
-class BenchmarkClient(abc.ABC):
- """Benchmark client interface that exposes a non-blocking send_request()."""
-
- def __init__(self, address: str, config: control_pb2.ClientConfig,
- hist: histogram.Histogram):
- # Disables underlying reuse of subchannels
- unique_option = (('iv', random.random()),)
-
- # Parses the channel argument from config
- channel_args = tuple(
- (arg.name, arg.str_value) if arg.HasField('str_value') else (
- arg.name, int(arg.int_value)) for arg in config.channel_args)
-
- # Creates the channel
- if config.HasField('security_params'):
- channel_credentials = grpc.ssl_channel_credentials(
- resources.test_root_certificates(),)
- server_host_override_option = ((
- 'grpc.ssl_target_name_override',
- config.security_params.server_host_override,
- ),)
- self._channel = aio.secure_channel(
- address, channel_credentials,
- unique_option + channel_args + server_host_override_option)
- else:
- self._channel = aio.insecure_channel(address,
- options=unique_option +
- channel_args)
-
- # Creates the stub
- if config.payload_config.WhichOneof('payload') == 'simple_params':
- self._generic = False
- self._stub = benchmark_service_pb2_grpc.BenchmarkServiceStub(
- self._channel)
- payload = messages_pb2.Payload(
- body=b'\0' * config.payload_config.simple_params.req_size)
- self._request = messages_pb2.SimpleRequest(
- payload=payload,
- response_size=config.payload_config.simple_params.resp_size)
- else:
- self._generic = True
- self._stub = GenericStub(self._channel)
- self._request = b'\0' * config.payload_config.bytebuf_params.req_size
-
- self._hist = hist
- self._response_callbacks = []
- self._concurrency = config.outstanding_rpcs_per_channel
-
- async def run(self) -> None:
- await self._channel.channel_ready()
-
- async def stop(self) -> None:
- await self._channel.close()
-
- def _record_query_time(self, query_time: float) -> None:
- self._hist.add(query_time * 1e9)
-
-
-class UnaryAsyncBenchmarkClient(BenchmarkClient):
-
- def __init__(self, address: str, config: control_pb2.ClientConfig,
- hist: histogram.Histogram):
- super().__init__(address, config, hist)
- self._running = None
- self._stopped = asyncio.Event()
-
- async def _send_request(self):
- start_time = time.monotonic()
- await self._stub.UnaryCall(self._request)
- self._record_query_time(time.monotonic() - start_time)
-
- async def _send_indefinitely(self) -> None:
- while self._running:
- await self._send_request()
-
- async def run(self) -> None:
- await super().run()
- self._running = True
- senders = (self._send_indefinitely() for _ in range(self._concurrency))
- await asyncio.gather(*senders)
- self._stopped.set()
-
- async def stop(self) -> None:
- self._running = False
- await self._stopped.wait()
- await super().stop()
-
-
-class StreamingAsyncBenchmarkClient(BenchmarkClient):
-
- def __init__(self, address: str, config: control_pb2.ClientConfig,
- hist: histogram.Histogram):
- super().__init__(address, config, hist)
- self._running = None
- self._stopped = asyncio.Event()
-
- async def _one_streaming_call(self):
- call = self._stub.StreamingCall()
- while self._running:
- start_time = time.time()
- await call.write(self._request)
- await call.read()
- self._record_query_time(time.time() - start_time)
- await call.done_writing()
-
- async def run(self):
- await super().run()
- self._running = True
- senders = (self._one_streaming_call() for _ in range(self._concurrency))
- await asyncio.gather(*senders)
- self._stopped.set()
-
- async def stop(self):
- self._running = False
- await self._stopped.wait()
- await super().stop()
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""The Python AsyncIO Benchmark Clients."""
+
+import abc
+import asyncio
+import time
+import logging
+import random
+
+import grpc
+from grpc.experimental import aio
+
+from src.proto.grpc.testing import (benchmark_service_pb2_grpc, control_pb2,
+ messages_pb2)
+from tests.qps import histogram
+from tests.unit import resources
+
+
+class GenericStub(object):
+
+ def __init__(self, channel: aio.Channel):
+ self.UnaryCall = channel.unary_unary(
+ '/grpc.testing.BenchmarkService/UnaryCall')
+ self.StreamingCall = channel.stream_stream(
+ '/grpc.testing.BenchmarkService/StreamingCall')
+
+
+class BenchmarkClient(abc.ABC):
+ """Benchmark client interface that exposes a non-blocking send_request()."""
+
+ def __init__(self, address: str, config: control_pb2.ClientConfig,
+ hist: histogram.Histogram):
+ # Disables underlying reuse of subchannels
+ unique_option = (('iv', random.random()),)
+
+ # Parses the channel argument from config
+ channel_args = tuple(
+ (arg.name, arg.str_value) if arg.HasField('str_value') else (
+ arg.name, int(arg.int_value)) for arg in config.channel_args)
+
+ # Creates the channel
+ if config.HasField('security_params'):
+ channel_credentials = grpc.ssl_channel_credentials(
+ resources.test_root_certificates(),)
+ server_host_override_option = ((
+ 'grpc.ssl_target_name_override',
+ config.security_params.server_host_override,
+ ),)
+ self._channel = aio.secure_channel(
+ address, channel_credentials,
+ unique_option + channel_args + server_host_override_option)
+ else:
+ self._channel = aio.insecure_channel(address,
+ options=unique_option +
+ channel_args)
+
+ # Creates the stub
+ if config.payload_config.WhichOneof('payload') == 'simple_params':
+ self._generic = False
+ self._stub = benchmark_service_pb2_grpc.BenchmarkServiceStub(
+ self._channel)
+ payload = messages_pb2.Payload(
+ body=b'\0' * config.payload_config.simple_params.req_size)
+ self._request = messages_pb2.SimpleRequest(
+ payload=payload,
+ response_size=config.payload_config.simple_params.resp_size)
+ else:
+ self._generic = True
+ self._stub = GenericStub(self._channel)
+ self._request = b'\0' * config.payload_config.bytebuf_params.req_size
+
+ self._hist = hist
+ self._response_callbacks = []
+ self._concurrency = config.outstanding_rpcs_per_channel
+
+ async def run(self) -> None:
+ await self._channel.channel_ready()
+
+ async def stop(self) -> None:
+ await self._channel.close()
+
+ def _record_query_time(self, query_time: float) -> None:
+ self._hist.add(query_time * 1e9)
+
+
+class UnaryAsyncBenchmarkClient(BenchmarkClient):
+
+ def __init__(self, address: str, config: control_pb2.ClientConfig,
+ hist: histogram.Histogram):
+ super().__init__(address, config, hist)
+ self._running = None
+ self._stopped = asyncio.Event()
+
+ async def _send_request(self):
+ start_time = time.monotonic()
+ await self._stub.UnaryCall(self._request)
+ self._record_query_time(time.monotonic() - start_time)
+
+ async def _send_indefinitely(self) -> None:
+ while self._running:
+ await self._send_request()
+
+ async def run(self) -> None:
+ await super().run()
+ self._running = True
+ senders = (self._send_indefinitely() for _ in range(self._concurrency))
+ await asyncio.gather(*senders)
+ self._stopped.set()
+
+ async def stop(self) -> None:
+ self._running = False
+ await self._stopped.wait()
+ await super().stop()
+
+
+class StreamingAsyncBenchmarkClient(BenchmarkClient):
+
+ def __init__(self, address: str, config: control_pb2.ClientConfig,
+ hist: histogram.Histogram):
+ super().__init__(address, config, hist)
+ self._running = None
+ self._stopped = asyncio.Event()
+
+ async def _one_streaming_call(self):
+ call = self._stub.StreamingCall()
+ while self._running:
+ start_time = time.time()
+ await call.write(self._request)
+ await call.read()
+ self._record_query_time(time.time() - start_time)
+ await call.done_writing()
+
+ async def run(self):
+ await super().run()
+ self._running = True
+ senders = (self._one_streaming_call() for _ in range(self._concurrency))
+ await asyncio.gather(*senders)
+ self._stopped.set()
+
+ async def stop(self):
+ self._running = False
+ await self._stopped.wait()
+ await super().stop()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_servicer.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_servicer.py
index 2286ebda10..50d3065cd1 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_servicer.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_servicer.py
@@ -1,55 +1,55 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Python AsyncIO Benchmark Servicers."""
-
-import asyncio
-import logging
-import unittest
-
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import benchmark_service_pb2_grpc, messages_pb2
-
-
-class BenchmarkServicer(benchmark_service_pb2_grpc.BenchmarkServiceServicer):
-
- async def UnaryCall(self, request, unused_context):
- payload = messages_pb2.Payload(body=b'\0' * request.response_size)
- return messages_pb2.SimpleResponse(payload=payload)
-
- async def StreamingFromServer(self, request, unused_context):
- payload = messages_pb2.Payload(body=b'\0' * request.response_size)
- # Sends response at full capacity!
- while True:
- yield messages_pb2.SimpleResponse(payload=payload)
-
- async def StreamingCall(self, request_iterator, unused_context):
- async for request in request_iterator:
- payload = messages_pb2.Payload(body=b'\0' * request.response_size)
- yield messages_pb2.SimpleResponse(payload=payload)
-
-
-class GenericBenchmarkServicer(
- benchmark_service_pb2_grpc.BenchmarkServiceServicer):
- """Generic (no-codec) Server implementation for the Benchmark service."""
-
- def __init__(self, resp_size):
- self._response = '\0' * resp_size
-
- async def UnaryCall(self, unused_request, unused_context):
- return self._response
-
- async def StreamingCall(self, request_iterator, unused_context):
- async for _ in request_iterator:
- yield self._response
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""The Python AsyncIO Benchmark Servicers."""
+
+import asyncio
+import logging
+import unittest
+
+from grpc.experimental import aio
+
+from src.proto.grpc.testing import benchmark_service_pb2_grpc, messages_pb2
+
+
+class BenchmarkServicer(benchmark_service_pb2_grpc.BenchmarkServiceServicer):
+
+ async def UnaryCall(self, request, unused_context):
+ payload = messages_pb2.Payload(body=b'\0' * request.response_size)
+ return messages_pb2.SimpleResponse(payload=payload)
+
+ async def StreamingFromServer(self, request, unused_context):
+ payload = messages_pb2.Payload(body=b'\0' * request.response_size)
+ # Sends response at full capacity!
+ while True:
+ yield messages_pb2.SimpleResponse(payload=payload)
+
+ async def StreamingCall(self, request_iterator, unused_context):
+ async for request in request_iterator:
+ payload = messages_pb2.Payload(body=b'\0' * request.response_size)
+ yield messages_pb2.SimpleResponse(payload=payload)
+
+
+class GenericBenchmarkServicer(
+ benchmark_service_pb2_grpc.BenchmarkServiceServicer):
+ """Generic (no-codec) Server implementation for the Benchmark service."""
+
+ def __init__(self, resp_size):
+ self._response = '\0' * resp_size
+
+ async def UnaryCall(self, unused_request, unused_context):
+ return self._response
+
+ async def StreamingCall(self, request_iterator, unused_context):
+ async for _ in request_iterator:
+ yield self._response
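Note: GenericBenchmarkServicer has no generated registration helper because it bypasses protobuf (de)serialization; it is attached through generic RPC handlers instead. A minimal sketch of that wiring follows, mirroring the ASYNC_GENERIC_SERVER branch in worker_servicer.py further below; the _serve_generic helper name and the response size and port values are illustrative assumptions.

import asyncio

import grpc
from grpc.experimental import aio

from tests_aio.benchmark import benchmark_servicer


async def _serve_generic(resp_size, port):
    # No-codec server: raw bytes pass straight through the handlers.
    server = aio.server()
    servicer = benchmark_servicer.GenericBenchmarkServicer(resp_size)
    method_implementations = {
        'UnaryCall':
            grpc.unary_unary_rpc_method_handler(servicer.UnaryCall),
        'StreamingCall':
            grpc.stream_stream_rpc_method_handler(servicer.StreamingCall),
    }
    handler = grpc.method_handlers_generic_handler(
        'grpc.testing.BenchmarkService', method_implementations)
    server.add_generic_rpc_handlers((handler,))
    server.add_insecure_port('[::]:{}'.format(port))
    await server.start()
    await server.wait_for_termination()


if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(_serve_generic(1024, 50052))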
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py
index 727e4b7bf9..561298a626 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py
@@ -17,21 +17,21 @@ import logging
import unittest
from grpc.experimental import aio
-
+
from src.proto.grpc.testing import benchmark_service_pb2_grpc
-from tests_aio.benchmark import benchmark_servicer
+from tests_aio.benchmark import benchmark_servicer
async def _start_async_server():
server = aio.server()
- port = server.add_insecure_port('localhost:%s' % 50051)
- servicer = benchmark_servicer.BenchmarkServicer()
+ port = server.add_insecure_port('localhost:%s' % 50051)
+ servicer = benchmark_servicer.BenchmarkServicer()
benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server(
servicer, server)
await server.start()
- logging.info('Benchmark server started at :%d' % port)
+ logging.info('Benchmark server started at :%d' % port)
await server.wait_for_termination()
@@ -42,5 +42,5 @@ def main():
if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
+ logging.basicConfig(level=logging.DEBUG)
main()
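Note: a minimal aio client against the server started above is sketched below. It assumes the server is reachable on localhost:50051 (as in _start_async_server) and that response_size on messages_pb2.SimpleRequest is the field consumed by UnaryCall; the requested size of 1024 bytes is arbitrary.

import asyncio

from grpc.experimental import aio

from src.proto.grpc.testing import benchmark_service_pb2_grpc, messages_pb2


async def _one_unary_call():
    # Open a channel to the locally running benchmark server and issue one RPC.
    async with aio.insecure_channel('localhost:50051') as channel:
        stub = benchmark_service_pb2_grpc.BenchmarkServiceStub(channel)
        response = await stub.UnaryCall(
            messages_pb2.SimpleRequest(response_size=1024))
        print('Received payload of %d bytes' % len(response.payload.body))


if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(_one_unary_call())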
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker.py
index d15bdcbf08..dc16f05087 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker.py
@@ -1,59 +1,59 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import asyncio
-import logging
-
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import worker_service_pb2_grpc
-from tests_aio.benchmark import worker_servicer
-
-
-async def run_worker_server(port: int) -> None:
- server = aio.server()
-
- servicer = worker_servicer.WorkerServicer()
- worker_service_pb2_grpc.add_WorkerServiceServicer_to_server(
- servicer, server)
-
- server.add_insecure_port('[::]:{}'.format(port))
-
- await server.start()
-
- await servicer.wait_for_quit()
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- parser = argparse.ArgumentParser(
- description='gRPC Python performance testing worker')
- parser.add_argument('--driver_port',
- type=int,
- dest='port',
- help='The port the worker should listen on')
- parser.add_argument('--uvloop',
- action='store_true',
- help='Use uvloop or not')
- args = parser.parse_args()
-
- if args.uvloop:
- import uvloop
- asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
- loop = uvloop.new_event_loop()
- asyncio.set_event_loop(loop)
-
- asyncio.get_event_loop().run_until_complete(run_worker_server(args.port))
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import asyncio
+import logging
+
+from grpc.experimental import aio
+
+from src.proto.grpc.testing import worker_service_pb2_grpc
+from tests_aio.benchmark import worker_servicer
+
+
+async def run_worker_server(port: int) -> None:
+ server = aio.server()
+
+ servicer = worker_servicer.WorkerServicer()
+ worker_service_pb2_grpc.add_WorkerServiceServicer_to_server(
+ servicer, server)
+
+ server.add_insecure_port('[::]:{}'.format(port))
+
+ await server.start()
+
+ await servicer.wait_for_quit()
+ await server.stop(None)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ parser = argparse.ArgumentParser(
+ description='gRPC Python performance testing worker')
+ parser.add_argument('--driver_port',
+ type=int,
+ dest='port',
+ help='The port the worker should listen on')
+ parser.add_argument('--uvloop',
+ action='store_true',
+ help='Use uvloop or not')
+ args = parser.parse_args()
+
+ if args.uvloop:
+ import uvloop
+ asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+ loop = uvloop.new_event_loop()
+ asyncio.set_event_loop(loop)
+
+ asyncio.get_event_loop().run_until_complete(run_worker_server(args.port))
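Note: the worker above is normally launched by the benchmark driver, e.g. python worker.py --driver_port 10010 (optionally with --uvloop). A hedged sketch of driving such a locally started worker through WorkerServiceStub follows; the port 10010 and the _poke_worker helper are illustrative assumptions.

import asyncio

from grpc.experimental import aio

from src.proto.grpc.testing import control_pb2, worker_service_pb2_grpc


async def _poke_worker(port):
    async with aio.insecure_channel('localhost:%d' % port) as channel:
        stub = worker_service_pb2_grpc.WorkerServiceStub(channel)
        core_response = await stub.CoreCount(control_pb2.CoreRequest())
        print('Worker reports %d cores' % core_response.cores)
        # Ask the worker to exit; run_worker_server then stops its server.
        await stub.QuitWorker(control_pb2.Void())


if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(_poke_worker(10010))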
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker_servicer.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker_servicer.py
index 3b335556cf..4f80095cd2 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker_servicer.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker_servicer.py
@@ -1,367 +1,367 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import collections
-import logging
-import multiprocessing
-import os
-import sys
-import time
-from typing import Tuple
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import (benchmark_service_pb2_grpc, control_pb2,
- stats_pb2, worker_service_pb2_grpc)
-from tests.qps import histogram
-from tests.unit import resources
-from tests.unit.framework.common import get_socket
-from tests_aio.benchmark import benchmark_client, benchmark_servicer
-
-_NUM_CORES = multiprocessing.cpu_count()
-_WORKER_ENTRY_FILE = os.path.join(
- os.path.split(os.path.abspath(__file__))[0], 'worker.py')
-
-_LOGGER = logging.getLogger(__name__)
-
-
-class _SubWorker(
- collections.namedtuple('_SubWorker',
- ['process', 'port', 'channel', 'stub'])):
- """A data class that holds information about a child qps worker."""
-
- def _repr(self):
- return f'<_SubWorker pid={self.process.pid} port={self.port}>'
-
- def __repr__(self):
- return self._repr()
-
- def __str__(self):
- return self._repr()
-
-
-def _get_server_status(start_time: float, end_time: float,
- port: int) -> control_pb2.ServerStatus:
- """Creates ServerStatus proto message."""
- end_time = time.monotonic()
- elapsed_time = end_time - start_time
- # TODO(lidiz) Collect accurate time system to compute QPS/core-second.
- stats = stats_pb2.ServerStats(time_elapsed=elapsed_time,
- time_user=elapsed_time,
- time_system=elapsed_time)
- return control_pb2.ServerStatus(stats=stats, port=port, cores=_NUM_CORES)
-
-
-def _create_server(config: control_pb2.ServerConfig) -> Tuple[aio.Server, int]:
- """Creates a server object according to the ServerConfig."""
- channel_args = tuple(
- (arg.name,
- arg.str_value) if arg.HasField('str_value') else (arg.name,
- int(arg.int_value))
- for arg in config.channel_args)
-
- server = aio.server(options=channel_args + (('grpc.so_reuseport', 1),))
- if config.server_type == control_pb2.ASYNC_SERVER:
- servicer = benchmark_servicer.BenchmarkServicer()
- benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server(
- servicer, server)
- elif config.server_type == control_pb2.ASYNC_GENERIC_SERVER:
- resp_size = config.payload_config.bytebuf_params.resp_size
- servicer = benchmark_servicer.GenericBenchmarkServicer(resp_size)
- method_implementations = {
- 'StreamingCall':
- grpc.stream_stream_rpc_method_handler(servicer.StreamingCall),
- 'UnaryCall':
- grpc.unary_unary_rpc_method_handler(servicer.UnaryCall),
- }
- handler = grpc.method_handlers_generic_handler(
- 'grpc.testing.BenchmarkService', method_implementations)
- server.add_generic_rpc_handlers((handler,))
- else:
- raise NotImplementedError('Unsupported server type {}'.format(
- config.server_type))
-
- if config.HasField('security_params'): # Use SSL
- server_creds = grpc.ssl_server_credentials(
- ((resources.private_key(), resources.certificate_chain()),))
- port = server.add_secure_port('[::]:{}'.format(config.port),
- server_creds)
- else:
- port = server.add_insecure_port('[::]:{}'.format(config.port))
-
- return server, port
-
-
-def _get_client_status(start_time: float, end_time: float,
- qps_data: histogram.Histogram
- ) -> control_pb2.ClientStatus:
- """Creates ClientStatus proto message."""
- latencies = qps_data.get_data()
- end_time = time.monotonic()
- elapsed_time = end_time - start_time
- # TODO(lidiz) Collect accurate time system to compute QPS/core-second.
- stats = stats_pb2.ClientStats(latencies=latencies,
- time_elapsed=elapsed_time,
- time_user=elapsed_time,
- time_system=elapsed_time)
- return control_pb2.ClientStatus(stats=stats)
-
-
-def _create_client(server: str, config: control_pb2.ClientConfig,
- qps_data: histogram.Histogram
- ) -> benchmark_client.BenchmarkClient:
- """Creates a client object according to the ClientConfig."""
- if config.load_params.WhichOneof('load') != 'closed_loop':
- raise NotImplementedError(
- f'Unsupported load parameter {config.load_params}')
-
- if config.client_type == control_pb2.ASYNC_CLIENT:
- if config.rpc_type == control_pb2.UNARY:
- client_type = benchmark_client.UnaryAsyncBenchmarkClient
- elif config.rpc_type == control_pb2.STREAMING:
- client_type = benchmark_client.StreamingAsyncBenchmarkClient
- else:
- raise NotImplementedError(
- f'Unsupported rpc_type [{config.rpc_type}]')
- else:
- raise NotImplementedError(
- f'Unsupported client type {config.client_type}')
-
- return client_type(server, config, qps_data)
-
-
-def _pick_an_unused_port() -> int:
- """Picks an unused TCP port."""
- _, port, sock = get_socket()
- sock.close()
- return port
-
-
-async def _create_sub_worker() -> _SubWorker:
- """Creates a child qps worker as a subprocess."""
- port = _pick_an_unused_port()
-
- _LOGGER.info('Creating sub worker at port [%d]...', port)
- process = await asyncio.create_subprocess_exec(sys.executable,
- _WORKER_ENTRY_FILE,
- '--driver_port', str(port))
- _LOGGER.info('Created sub worker process for port [%d] at pid [%d]', port,
- process.pid)
- channel = aio.insecure_channel(f'localhost:{port}')
- _LOGGER.info('Waiting for sub worker at port [%d]', port)
- await channel.channel_ready()
- stub = worker_service_pb2_grpc.WorkerServiceStub(channel)
- return _SubWorker(
- process=process,
- port=port,
- channel=channel,
- stub=stub,
- )
-
-
-class WorkerServicer(worker_service_pb2_grpc.WorkerServiceServicer):
- """Python Worker Server implementation."""
-
- def __init__(self):
- self._loop = asyncio.get_event_loop()
- self._quit_event = asyncio.Event()
-
- async def _run_single_server(self, config, request_iterator, context):
- server, port = _create_server(config)
- await server.start()
- _LOGGER.info('Server started at port [%d]', port)
-
- start_time = time.monotonic()
- await context.write(_get_server_status(start_time, start_time, port))
-
- async for request in request_iterator:
- end_time = time.monotonic()
- status = _get_server_status(start_time, end_time, port)
- if request.mark.reset:
- start_time = end_time
- await context.write(status)
- await server.stop(None)
-
- async def RunServer(self, request_iterator, context):
- config_request = await context.read()
- config = config_request.setup
- _LOGGER.info('Received ServerConfig: %s', config)
-
- if config.server_processes <= 0:
- _LOGGER.info('Using server_processes == [%d]', _NUM_CORES)
- config.server_processes = _NUM_CORES
-
- if config.port == 0:
- config.port = _pick_an_unused_port()
- _LOGGER.info('Port picked [%d]', config.port)
-
- if config.server_processes == 1:
- # If server_processes == 1, start the server in this process.
- await self._run_single_server(config, request_iterator, context)
- else:
- # If server_processes > 1, offload to other processes.
- sub_workers = await asyncio.gather(*(
- _create_sub_worker() for _ in range(config.server_processes)))
-
- calls = [worker.stub.RunServer() for worker in sub_workers]
-
- config_request.setup.server_processes = 1
-
- for call in calls:
- await call.write(config_request)
- # An empty status indicates the peer is ready
- await call.read()
-
- start_time = time.monotonic()
- await context.write(
- _get_server_status(
- start_time,
- start_time,
- config.port,
- ))
-
- _LOGGER.info('Servers are ready to serve.')
-
- async for request in request_iterator:
- end_time = time.monotonic()
-
- for call in calls:
- await call.write(request)
-                    # Reports from sub workers don't matter
- await call.read()
-
- status = _get_server_status(
- start_time,
- end_time,
- config.port,
- )
- if request.mark.reset:
- start_time = end_time
- await context.write(status)
-
- for call in calls:
- await call.done_writing()
-
- for worker in sub_workers:
- await worker.stub.QuitWorker(control_pb2.Void())
- await worker.channel.close()
- _LOGGER.info('Waiting for [%s] to quit...', worker)
- await worker.process.wait()
-
- async def _run_single_client(self, config, request_iterator, context):
- running_tasks = []
- qps_data = histogram.Histogram(config.histogram_params.resolution,
- config.histogram_params.max_possible)
- start_time = time.monotonic()
-
- # Create a client for each channel as asyncio.Task
- for i in range(config.client_channels):
- server = config.server_targets[i % len(config.server_targets)]
- client = _create_client(server, config, qps_data)
- _LOGGER.info('Client created against server [%s]', server)
- running_tasks.append(self._loop.create_task(client.run()))
-
- end_time = time.monotonic()
- await context.write(_get_client_status(start_time, end_time, qps_data))
-
- # Respond to stat requests
- async for request in request_iterator:
- end_time = time.monotonic()
- status = _get_client_status(start_time, end_time, qps_data)
- if request.mark.reset:
- qps_data.reset()
- start_time = time.monotonic()
- await context.write(status)
-
-        # Clean up the clients
- for task in running_tasks:
- task.cancel()
-
- async def RunClient(self, request_iterator, context):
- config_request = await context.read()
- config = config_request.setup
- _LOGGER.info('Received ClientConfig: %s', config)
-
- if config.client_processes <= 0:
- _LOGGER.info('client_processes can\'t be [%d]',
- config.client_processes)
- _LOGGER.info('Using client_processes == [%d]', _NUM_CORES)
- config.client_processes = _NUM_CORES
-
- if config.client_processes == 1:
- # If client_processes == 1, run the benchmark in this process.
- await self._run_single_client(config, request_iterator, context)
- else:
- # If client_processes > 1, offload the work to other processes.
- sub_workers = await asyncio.gather(*(
- _create_sub_worker() for _ in range(config.client_processes)))
-
- calls = [worker.stub.RunClient() for worker in sub_workers]
-
- config_request.setup.client_processes = 1
-
- for call in calls:
- await call.write(config_request)
- # An empty status indicates the peer is ready
- await call.read()
-
- start_time = time.monotonic()
- result = histogram.Histogram(config.histogram_params.resolution,
- config.histogram_params.max_possible)
- end_time = time.monotonic()
- await context.write(_get_client_status(start_time, end_time,
- result))
-
- async for request in request_iterator:
- end_time = time.monotonic()
-
- for call in calls:
- _LOGGER.debug('Fetching status...')
- await call.write(request)
- sub_status = await call.read()
- result.merge(sub_status.stats.latencies)
- _LOGGER.debug('Update from sub worker count=[%d]',
- sub_status.stats.latencies.count)
-
- status = _get_client_status(start_time, end_time, result)
- if request.mark.reset:
- result.reset()
- start_time = time.monotonic()
- _LOGGER.debug('Reporting count=[%d]',
- status.stats.latencies.count)
- await context.write(status)
-
- for call in calls:
- await call.done_writing()
-
- for worker in sub_workers:
- await worker.stub.QuitWorker(control_pb2.Void())
- await worker.channel.close()
- _LOGGER.info('Waiting for sub worker [%s] to quit...', worker)
- await worker.process.wait()
- _LOGGER.info('Sub worker [%s] quit', worker)
-
- @staticmethod
- async def CoreCount(unused_request, unused_context):
- return control_pb2.CoreResponse(cores=_NUM_CORES)
-
- async def QuitWorker(self, unused_request, unused_context):
- _LOGGER.info('QuitWorker command received.')
- self._quit_event.set()
- return control_pb2.Void()
-
- async def wait_for_quit(self):
- await self._quit_event.wait()
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+import collections
+import logging
+import multiprocessing
+import os
+import sys
+import time
+from typing import Tuple
+
+import grpc
+from grpc.experimental import aio
+
+from src.proto.grpc.testing import (benchmark_service_pb2_grpc, control_pb2,
+ stats_pb2, worker_service_pb2_grpc)
+from tests.qps import histogram
+from tests.unit import resources
+from tests.unit.framework.common import get_socket
+from tests_aio.benchmark import benchmark_client, benchmark_servicer
+
+_NUM_CORES = multiprocessing.cpu_count()
+_WORKER_ENTRY_FILE = os.path.join(
+ os.path.split(os.path.abspath(__file__))[0], 'worker.py')
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class _SubWorker(
+ collections.namedtuple('_SubWorker',
+ ['process', 'port', 'channel', 'stub'])):
+ """A data class that holds information about a child qps worker."""
+
+ def _repr(self):
+ return f'<_SubWorker pid={self.process.pid} port={self.port}>'
+
+ def __repr__(self):
+ return self._repr()
+
+ def __str__(self):
+ return self._repr()
+
+
+def _get_server_status(start_time: float, end_time: float,
+ port: int) -> control_pb2.ServerStatus:
+ """Creates ServerStatus proto message."""
+ end_time = time.monotonic()
+ elapsed_time = end_time - start_time
+ # TODO(lidiz) Collect accurate time system to compute QPS/core-second.
+ stats = stats_pb2.ServerStats(time_elapsed=elapsed_time,
+ time_user=elapsed_time,
+ time_system=elapsed_time)
+ return control_pb2.ServerStatus(stats=stats, port=port, cores=_NUM_CORES)
+
+
+def _create_server(config: control_pb2.ServerConfig) -> Tuple[aio.Server, int]:
+ """Creates a server object according to the ServerConfig."""
+ channel_args = tuple(
+ (arg.name,
+ arg.str_value) if arg.HasField('str_value') else (arg.name,
+ int(arg.int_value))
+ for arg in config.channel_args)
+
+ server = aio.server(options=channel_args + (('grpc.so_reuseport', 1),))
+ if config.server_type == control_pb2.ASYNC_SERVER:
+ servicer = benchmark_servicer.BenchmarkServicer()
+ benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server(
+ servicer, server)
+ elif config.server_type == control_pb2.ASYNC_GENERIC_SERVER:
+ resp_size = config.payload_config.bytebuf_params.resp_size
+ servicer = benchmark_servicer.GenericBenchmarkServicer(resp_size)
+ method_implementations = {
+ 'StreamingCall':
+ grpc.stream_stream_rpc_method_handler(servicer.StreamingCall),
+ 'UnaryCall':
+ grpc.unary_unary_rpc_method_handler(servicer.UnaryCall),
+ }
+ handler = grpc.method_handlers_generic_handler(
+ 'grpc.testing.BenchmarkService', method_implementations)
+ server.add_generic_rpc_handlers((handler,))
+ else:
+ raise NotImplementedError('Unsupported server type {}'.format(
+ config.server_type))
+
+ if config.HasField('security_params'): # Use SSL
+ server_creds = grpc.ssl_server_credentials(
+ ((resources.private_key(), resources.certificate_chain()),))
+ port = server.add_secure_port('[::]:{}'.format(config.port),
+ server_creds)
+ else:
+ port = server.add_insecure_port('[::]:{}'.format(config.port))
+
+ return server, port
+
+
+def _get_client_status(start_time: float, end_time: float,
+ qps_data: histogram.Histogram
+ ) -> control_pb2.ClientStatus:
+ """Creates ClientStatus proto message."""
+ latencies = qps_data.get_data()
+ end_time = time.monotonic()
+ elapsed_time = end_time - start_time
+ # TODO(lidiz) Collect accurate time system to compute QPS/core-second.
+ stats = stats_pb2.ClientStats(latencies=latencies,
+ time_elapsed=elapsed_time,
+ time_user=elapsed_time,
+ time_system=elapsed_time)
+ return control_pb2.ClientStatus(stats=stats)
+
+
+def _create_client(server: str, config: control_pb2.ClientConfig,
+ qps_data: histogram.Histogram
+ ) -> benchmark_client.BenchmarkClient:
+ """Creates a client object according to the ClientConfig."""
+ if config.load_params.WhichOneof('load') != 'closed_loop':
+ raise NotImplementedError(
+ f'Unsupported load parameter {config.load_params}')
+
+ if config.client_type == control_pb2.ASYNC_CLIENT:
+ if config.rpc_type == control_pb2.UNARY:
+ client_type = benchmark_client.UnaryAsyncBenchmarkClient
+ elif config.rpc_type == control_pb2.STREAMING:
+ client_type = benchmark_client.StreamingAsyncBenchmarkClient
+ else:
+ raise NotImplementedError(
+ f'Unsupported rpc_type [{config.rpc_type}]')
+ else:
+ raise NotImplementedError(
+ f'Unsupported client type {config.client_type}')
+
+ return client_type(server, config, qps_data)
+
+
+def _pick_an_unused_port() -> int:
+ """Picks an unused TCP port."""
+ _, port, sock = get_socket()
+ sock.close()
+ return port
+
+
+async def _create_sub_worker() -> _SubWorker:
+ """Creates a child qps worker as a subprocess."""
+ port = _pick_an_unused_port()
+
+ _LOGGER.info('Creating sub worker at port [%d]...', port)
+ process = await asyncio.create_subprocess_exec(sys.executable,
+ _WORKER_ENTRY_FILE,
+ '--driver_port', str(port))
+ _LOGGER.info('Created sub worker process for port [%d] at pid [%d]', port,
+ process.pid)
+ channel = aio.insecure_channel(f'localhost:{port}')
+ _LOGGER.info('Waiting for sub worker at port [%d]', port)
+ await channel.channel_ready()
+ stub = worker_service_pb2_grpc.WorkerServiceStub(channel)
+ return _SubWorker(
+ process=process,
+ port=port,
+ channel=channel,
+ stub=stub,
+ )
+
+
+class WorkerServicer(worker_service_pb2_grpc.WorkerServiceServicer):
+ """Python Worker Server implementation."""
+
+ def __init__(self):
+ self._loop = asyncio.get_event_loop()
+ self._quit_event = asyncio.Event()
+
+ async def _run_single_server(self, config, request_iterator, context):
+ server, port = _create_server(config)
+ await server.start()
+ _LOGGER.info('Server started at port [%d]', port)
+
+ start_time = time.monotonic()
+ await context.write(_get_server_status(start_time, start_time, port))
+
+ async for request in request_iterator:
+ end_time = time.monotonic()
+ status = _get_server_status(start_time, end_time, port)
+ if request.mark.reset:
+ start_time = end_time
+ await context.write(status)
+ await server.stop(None)
+
+ async def RunServer(self, request_iterator, context):
+ config_request = await context.read()
+ config = config_request.setup
+ _LOGGER.info('Received ServerConfig: %s', config)
+
+ if config.server_processes <= 0:
+ _LOGGER.info('Using server_processes == [%d]', _NUM_CORES)
+ config.server_processes = _NUM_CORES
+
+ if config.port == 0:
+ config.port = _pick_an_unused_port()
+ _LOGGER.info('Port picked [%d]', config.port)
+
+ if config.server_processes == 1:
+ # If server_processes == 1, start the server in this process.
+ await self._run_single_server(config, request_iterator, context)
+ else:
+ # If server_processes > 1, offload to other processes.
+ sub_workers = await asyncio.gather(*(
+ _create_sub_worker() for _ in range(config.server_processes)))
+
+ calls = [worker.stub.RunServer() for worker in sub_workers]
+
+ config_request.setup.server_processes = 1
+
+ for call in calls:
+ await call.write(config_request)
+ # An empty status indicates the peer is ready
+ await call.read()
+
+ start_time = time.monotonic()
+ await context.write(
+ _get_server_status(
+ start_time,
+ start_time,
+ config.port,
+ ))
+
+ _LOGGER.info('Servers are ready to serve.')
+
+ async for request in request_iterator:
+ end_time = time.monotonic()
+
+ for call in calls:
+ await call.write(request)
+                    # Reports from sub workers don't matter
+ await call.read()
+
+ status = _get_server_status(
+ start_time,
+ end_time,
+ config.port,
+ )
+ if request.mark.reset:
+ start_time = end_time
+ await context.write(status)
+
+ for call in calls:
+ await call.done_writing()
+
+ for worker in sub_workers:
+ await worker.stub.QuitWorker(control_pb2.Void())
+ await worker.channel.close()
+ _LOGGER.info('Waiting for [%s] to quit...', worker)
+ await worker.process.wait()
+
+ async def _run_single_client(self, config, request_iterator, context):
+ running_tasks = []
+ qps_data = histogram.Histogram(config.histogram_params.resolution,
+ config.histogram_params.max_possible)
+ start_time = time.monotonic()
+
+ # Create a client for each channel as asyncio.Task
+ for i in range(config.client_channels):
+ server = config.server_targets[i % len(config.server_targets)]
+ client = _create_client(server, config, qps_data)
+ _LOGGER.info('Client created against server [%s]', server)
+ running_tasks.append(self._loop.create_task(client.run()))
+
+ end_time = time.monotonic()
+ await context.write(_get_client_status(start_time, end_time, qps_data))
+
+ # Respond to stat requests
+ async for request in request_iterator:
+ end_time = time.monotonic()
+ status = _get_client_status(start_time, end_time, qps_data)
+ if request.mark.reset:
+ qps_data.reset()
+ start_time = time.monotonic()
+ await context.write(status)
+
+        # Clean up the clients
+ for task in running_tasks:
+ task.cancel()
+
+ async def RunClient(self, request_iterator, context):
+ config_request = await context.read()
+ config = config_request.setup
+ _LOGGER.info('Received ClientConfig: %s', config)
+
+ if config.client_processes <= 0:
+ _LOGGER.info('client_processes can\'t be [%d]',
+ config.client_processes)
+ _LOGGER.info('Using client_processes == [%d]', _NUM_CORES)
+ config.client_processes = _NUM_CORES
+
+ if config.client_processes == 1:
+ # If client_processes == 1, run the benchmark in this process.
+ await self._run_single_client(config, request_iterator, context)
+ else:
+ # If client_processes > 1, offload the work to other processes.
+ sub_workers = await asyncio.gather(*(
+ _create_sub_worker() for _ in range(config.client_processes)))
+
+ calls = [worker.stub.RunClient() for worker in sub_workers]
+
+ config_request.setup.client_processes = 1
+
+ for call in calls:
+ await call.write(config_request)
+ # An empty status indicates the peer is ready
+ await call.read()
+
+ start_time = time.monotonic()
+ result = histogram.Histogram(config.histogram_params.resolution,
+ config.histogram_params.max_possible)
+ end_time = time.monotonic()
+ await context.write(_get_client_status(start_time, end_time,
+ result))
+
+ async for request in request_iterator:
+ end_time = time.monotonic()
+
+ for call in calls:
+ _LOGGER.debug('Fetching status...')
+ await call.write(request)
+ sub_status = await call.read()
+ result.merge(sub_status.stats.latencies)
+ _LOGGER.debug('Update from sub worker count=[%d]',
+ sub_status.stats.latencies.count)
+
+ status = _get_client_status(start_time, end_time, result)
+ if request.mark.reset:
+ result.reset()
+ start_time = time.monotonic()
+ _LOGGER.debug('Reporting count=[%d]',
+ status.stats.latencies.count)
+ await context.write(status)
+
+ for call in calls:
+ await call.done_writing()
+
+ for worker in sub_workers:
+ await worker.stub.QuitWorker(control_pb2.Void())
+ await worker.channel.close()
+ _LOGGER.info('Waiting for sub worker [%s] to quit...', worker)
+ await worker.process.wait()
+ _LOGGER.info('Sub worker [%s] quit', worker)
+
+ @staticmethod
+ async def CoreCount(unused_request, unused_context):
+ return control_pb2.CoreResponse(cores=_NUM_CORES)
+
+ async def QuitWorker(self, unused_request, unused_context):
+ _LOGGER.info('QuitWorker command received.')
+ self._quit_event.set()
+ return control_pb2.Void()
+
+ async def wait_for_quit(self):
+ await self._quit_event.wait()
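Note: to illustrate which ServerConfig fields _create_server consumes, here is a hedged sketch that builds a minimal ASYNC_SERVER config and stands a server up through that helper. The grpc.optimization_target channel arg and port 0 are illustrative choices, and _create_server is a module-private function, so this is a demonstration rather than a supported entry point.

import asyncio

from src.proto.grpc.testing import control_pb2
from tests_aio.benchmark import worker_servicer


async def _demo_server():
    # Minimal config: async protobuf server on an OS-assigned insecure port.
    config = control_pb2.ServerConfig(server_type=control_pb2.ASYNC_SERVER,
                                      port=0)
    config.channel_args.add(name='grpc.optimization_target',
                            str_value='throughput')
    server, port = worker_servicer._create_server(config)
    await server.start()
    print('Benchmark server listening on port %d' % port)
    await server.stop(None)


if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(_demo_server())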
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/__init__.py
index 2cb28cb464..1517f71d09 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/__init__.py
@@ -1,13 +1,13 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/channelz_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/channelz_servicer_test.py
index eeabf5ed7d..d6e9fd4279 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/channelz_servicer_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/channelz_servicer_test.py
@@ -1,474 +1,474 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests of grpc_channelz.v1.channelz."""
-
-import unittest
-import logging
-import asyncio
-
-import grpc
-from grpc.experimental import aio
-
-from grpc_channelz.v1 import channelz
-from grpc_channelz.v1 import channelz_pb2
-from grpc_channelz.v1 import channelz_pb2_grpc
-
-from tests.unit.framework.common import test_constants
-from tests_aio.unit._test_base import AioTestBase
-
-_SUCCESSFUL_UNARY_UNARY = '/test/SuccessfulUnaryUnary'
-_FAILED_UNARY_UNARY = '/test/FailedUnaryUnary'
-_SUCCESSFUL_STREAM_STREAM = '/test/SuccessfulStreamStream'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-
-_DISABLE_REUSE_PORT = (('grpc.so_reuseport', 0),)
-_ENABLE_CHANNELZ = (('grpc.enable_channelz', 1),)
-_DISABLE_CHANNELZ = (('grpc.enable_channelz', 0),)
-
-_LARGE_UNASSIGNED_ID = 10000
-
-
-async def _successful_unary_unary(request, servicer_context):
- return _RESPONSE
-
-
-async def _failed_unary_unary(request, servicer_context):
- servicer_context.set_code(grpc.StatusCode.INTERNAL)
- servicer_context.set_details("Channelz Test Intended Failure")
-
-
-async def _successful_stream_stream(request_iterator, servicer_context):
- async for _ in request_iterator:
- yield _RESPONSE
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _SUCCESSFUL_UNARY_UNARY:
- return grpc.unary_unary_rpc_method_handler(_successful_unary_unary)
- elif handler_call_details.method == _FAILED_UNARY_UNARY:
- return grpc.unary_unary_rpc_method_handler(_failed_unary_unary)
- elif handler_call_details.method == _SUCCESSFUL_STREAM_STREAM:
- return grpc.stream_stream_rpc_method_handler(
- _successful_stream_stream)
- else:
- return None
-
-
-class _ChannelServerPair:
-
- def __init__(self):
- self.address = ''
- self.server = None
- self.channel = None
- self.server_ref_id = None
- self.channel_ref_id = None
-
- async def start(self):
- # Server will enable channelz service
- self.server = aio.server(options=_DISABLE_REUSE_PORT + _ENABLE_CHANNELZ)
- port = self.server.add_insecure_port('[::]:0')
- self.address = 'localhost:%d' % port
- self.server.add_generic_rpc_handlers((_GenericHandler(),))
- await self.server.start()
-
- # Channel will enable channelz service...
- self.channel = aio.insecure_channel(self.address,
- options=_ENABLE_CHANNELZ)
-
- async def bind_channelz(self, channelz_stub):
- resp = await channelz_stub.GetTopChannels(
- channelz_pb2.GetTopChannelsRequest(start_channel_id=0))
- for channel in resp.channel:
- if channel.data.target == self.address:
- self.channel_ref_id = channel.ref.channel_id
-
- resp = await channelz_stub.GetServers(
- channelz_pb2.GetServersRequest(start_server_id=0))
- self.server_ref_id = resp.server[-1].ref.server_id
-
- async def stop(self):
- await self.channel.close()
- await self.server.stop(None)
-
-
-async def _create_channel_server_pairs(n, channelz_stub=None):
- """Create channel-server pairs."""
- pairs = [_ChannelServerPair() for i in range(n)]
- for pair in pairs:
- await pair.start()
- if channelz_stub:
- await pair.bind_channelz(channelz_stub)
- return pairs
-
-
-async def _destroy_channel_server_pairs(pairs):
- for pair in pairs:
- await pair.stop()
-
-
-class ChannelzServicerTest(AioTestBase):
-
- async def setUp(self):
- # This server is for Channelz info fetching only
-        # It itself should not enable Channelz
- self._server = aio.server(options=_DISABLE_REUSE_PORT +
- _DISABLE_CHANNELZ)
- port = self._server.add_insecure_port('[::]:0')
- channelz.add_channelz_servicer(self._server)
- await self._server.start()
-
- # This channel is used to fetch Channelz info only
- # Channelz should not be enabled
- self._channel = aio.insecure_channel('localhost:%d' % port,
- options=_DISABLE_CHANNELZ)
- self._channelz_stub = channelz_pb2_grpc.ChannelzStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def _get_server_by_ref_id(self, ref_id):
- """Server id may not be consecutive"""
- resp = await self._channelz_stub.GetServers(
- channelz_pb2.GetServersRequest(start_server_id=ref_id))
- self.assertEqual(ref_id, resp.server[0].ref.server_id)
- return resp.server[0]
-
- async def _send_successful_unary_unary(self, pair):
- call = pair.channel.unary_unary(_SUCCESSFUL_UNARY_UNARY)(_REQUEST)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def _send_failed_unary_unary(self, pair):
- try:
- await pair.channel.unary_unary(_FAILED_UNARY_UNARY)(_REQUEST)
- except grpc.RpcError:
- return
- else:
-            self.fail("This call is supposed to fail")
-
- async def _send_successful_stream_stream(self, pair):
- call = pair.channel.stream_stream(_SUCCESSFUL_STREAM_STREAM)(iter(
- [_REQUEST] * test_constants.STREAM_LENGTH))
- cnt = 0
- async for _ in call:
- cnt += 1
- self.assertEqual(cnt, test_constants.STREAM_LENGTH)
-
- async def test_get_top_channels_high_start_id(self):
- pairs = await _create_channel_server_pairs(1)
-
- resp = await self._channelz_stub.GetTopChannels(
- channelz_pb2.GetTopChannelsRequest(
- start_channel_id=_LARGE_UNASSIGNED_ID))
- self.assertEqual(len(resp.channel), 0)
- self.assertEqual(resp.end, True)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_successful_request(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- await self._send_successful_unary_unary(pairs[0])
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
-
- self.assertEqual(resp.channel.data.calls_started, 1)
- self.assertEqual(resp.channel.data.calls_succeeded, 1)
- self.assertEqual(resp.channel.data.calls_failed, 0)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_failed_request(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- await self._send_failed_unary_unary(pairs[0])
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, 1)
- self.assertEqual(resp.channel.data.calls_succeeded, 0)
- self.assertEqual(resp.channel.data.calls_failed, 1)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_many_requests(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- k_success = 7
- k_failed = 9
- for i in range(k_success):
- await self._send_successful_unary_unary(pairs[0])
- for i in range(k_failed):
- await self._send_failed_unary_unary(pairs[0])
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
- self.assertEqual(resp.channel.data.calls_succeeded, k_success)
- self.assertEqual(resp.channel.data.calls_failed, k_failed)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_many_requests_many_channel(self):
- k_channels = 4
- pairs = await _create_channel_server_pairs(k_channels,
- self._channelz_stub)
- k_success = 11
- k_failed = 13
- for i in range(k_success):
- await self._send_successful_unary_unary(pairs[0])
- await self._send_successful_unary_unary(pairs[2])
- for i in range(k_failed):
- await self._send_failed_unary_unary(pairs[1])
- await self._send_failed_unary_unary(pairs[2])
-
- # The first channel saw only successes
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, k_success)
- self.assertEqual(resp.channel.data.calls_succeeded, k_success)
- self.assertEqual(resp.channel.data.calls_failed, 0)
-
- # The second channel saw only failures
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[1].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, k_failed)
- self.assertEqual(resp.channel.data.calls_succeeded, 0)
- self.assertEqual(resp.channel.data.calls_failed, k_failed)
-
- # The third channel saw both successes and failures
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[2].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
- self.assertEqual(resp.channel.data.calls_succeeded, k_success)
- self.assertEqual(resp.channel.data.calls_failed, k_failed)
-
- # The fourth channel saw nothing
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[3].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, 0)
- self.assertEqual(resp.channel.data.calls_succeeded, 0)
- self.assertEqual(resp.channel.data.calls_failed, 0)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_many_subchannels(self):
- k_channels = 4
- pairs = await _create_channel_server_pairs(k_channels,
- self._channelz_stub)
- k_success = 17
- k_failed = 19
- for i in range(k_success):
- await self._send_successful_unary_unary(pairs[0])
- await self._send_successful_unary_unary(pairs[2])
- for i in range(k_failed):
- await self._send_failed_unary_unary(pairs[1])
- await self._send_failed_unary_unary(pairs[2])
-
- for i in range(k_channels):
- gc_resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(
- channel_id=pairs[i].channel_ref_id))
- # If no call performed in the channel, there shouldn't be any subchannel
- if gc_resp.channel.data.calls_started == 0:
- self.assertEqual(len(gc_resp.channel.subchannel_ref), 0)
- continue
-
- # Otherwise, the subchannel should exist
- self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
- gsc_resp = await self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=gc_resp.channel.subchannel_ref[0].
- subchannel_id))
- self.assertEqual(gc_resp.channel.data.calls_started,
- gsc_resp.subchannel.data.calls_started)
- self.assertEqual(gc_resp.channel.data.calls_succeeded,
- gsc_resp.subchannel.data.calls_succeeded)
- self.assertEqual(gc_resp.channel.data.calls_failed,
- gsc_resp.subchannel.data.calls_failed)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_server_call(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- k_success = 23
- k_failed = 29
- for i in range(k_success):
- await self._send_successful_unary_unary(pairs[0])
- for i in range(k_failed):
- await self._send_failed_unary_unary(pairs[0])
-
- resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
- self.assertEqual(resp.data.calls_started, k_success + k_failed)
- self.assertEqual(resp.data.calls_succeeded, k_success)
- self.assertEqual(resp.data.calls_failed, k_failed)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_many_subchannels_and_sockets(self):
- k_channels = 4
- pairs = await _create_channel_server_pairs(k_channels,
- self._channelz_stub)
- k_success = 3
- k_failed = 5
- for i in range(k_success):
- await self._send_successful_unary_unary(pairs[0])
- await self._send_successful_unary_unary(pairs[2])
- for i in range(k_failed):
- await self._send_failed_unary_unary(pairs[1])
- await self._send_failed_unary_unary(pairs[2])
-
- for i in range(k_channels):
- gc_resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(
- channel_id=pairs[i].channel_ref_id))
-
- # If no call performed in the channel, there shouldn't be any subchannel
- if gc_resp.channel.data.calls_started == 0:
- self.assertEqual(len(gc_resp.channel.subchannel_ref), 0)
- continue
-
- # Otherwise, the subchannel should exist
- self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
- gsc_resp = await self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=gc_resp.channel.subchannel_ref[0].
- subchannel_id))
- self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
-
- gs_resp = await self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(
- socket_id=gsc_resp.subchannel.socket_ref[0].socket_id))
- self.assertEqual(gsc_resp.subchannel.data.calls_started,
- gs_resp.socket.data.streams_started)
- self.assertEqual(0, gs_resp.socket.data.streams_failed)
- # Calls started == messages sent, only valid for unary calls
- self.assertEqual(gsc_resp.subchannel.data.calls_started,
- gs_resp.socket.data.messages_sent)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_streaming_rpc(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
- # In C++, the argument for _send_successful_stream_stream is message length.
- # Here the argument is still channel idx, to be consistent with the other two.
- await self._send_successful_stream_stream(pairs[0])
-
- gc_resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
- self.assertEqual(gc_resp.channel.data.calls_started, 1)
- self.assertEqual(gc_resp.channel.data.calls_succeeded, 1)
- self.assertEqual(gc_resp.channel.data.calls_failed, 0)
- # Subchannel exists
- self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
-
- gsc_resp = await self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=gc_resp.channel.subchannel_ref[0].subchannel_id))
- self.assertEqual(gsc_resp.subchannel.data.calls_started, 1)
- self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, 1)
- self.assertEqual(gsc_resp.subchannel.data.calls_failed, 0)
- # Socket exists
- self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
-
- gs_resp = await self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(
- socket_id=gsc_resp.subchannel.socket_ref[0].socket_id))
- self.assertEqual(gs_resp.socket.data.streams_started, 1)
- self.assertEqual(gs_resp.socket.data.streams_succeeded, 1)
- self.assertEqual(gs_resp.socket.data.streams_failed, 0)
- self.assertEqual(gs_resp.socket.data.messages_sent,
- test_constants.STREAM_LENGTH)
- self.assertEqual(gs_resp.socket.data.messages_received,
- test_constants.STREAM_LENGTH)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_server_sockets(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- await self._send_successful_unary_unary(pairs[0])
- await self._send_failed_unary_unary(pairs[0])
-
- resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
- self.assertEqual(resp.data.calls_started, 2)
- self.assertEqual(resp.data.calls_succeeded, 1)
- self.assertEqual(resp.data.calls_failed, 1)
-
- gss_resp = await self._channelz_stub.GetServerSockets(
- channelz_pb2.GetServerSocketsRequest(server_id=resp.ref.server_id,
- start_socket_id=0))
- # If the RPC call failed, it will raise a grpc.RpcError
- # So, if there is no exception raised, considered pass
- await _destroy_channel_server_pairs(pairs)
-
- async def test_server_listen_sockets(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
- self.assertEqual(len(resp.listen_socket), 1)
-
- gs_resp = await self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(
- socket_id=resp.listen_socket[0].socket_id))
- # If the RPC call failed, it will raise a grpc.RpcError
- # So, if there is no exception raised, considered pass
- await _destroy_channel_server_pairs(pairs)
-
- async def test_invalid_query_get_server(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channelz_stub.GetServer(
- channelz_pb2.GetServerRequest(server_id=_LARGE_UNASSIGNED_ID))
- self.assertEqual(grpc.StatusCode.NOT_FOUND,
- exception_context.exception.code())
-
- async def test_invalid_query_get_channel(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=_LARGE_UNASSIGNED_ID))
- self.assertEqual(grpc.StatusCode.NOT_FOUND,
- exception_context.exception.code())
-
- async def test_invalid_query_get_subchannel(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=_LARGE_UNASSIGNED_ID))
- self.assertEqual(grpc.StatusCode.NOT_FOUND,
- exception_context.exception.code())
-
- async def test_invalid_query_get_socket(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(socket_id=_LARGE_UNASSIGNED_ID))
- self.assertEqual(grpc.StatusCode.NOT_FOUND,
- exception_context.exception.code())
-
- async def test_invalid_query_get_server_sockets(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channelz_stub.GetServerSockets(
- channelz_pb2.GetServerSocketsRequest(
- server_id=_LARGE_UNASSIGNED_ID,
- start_socket_id=0,
- ))
- self.assertEqual(grpc.StatusCode.NOT_FOUND,
- exception_context.exception.code())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests of grpc_channelz.v1.channelz."""
+
+import unittest
+import logging
+import asyncio
+
+import grpc
+from grpc.experimental import aio
+
+from grpc_channelz.v1 import channelz
+from grpc_channelz.v1 import channelz_pb2
+from grpc_channelz.v1 import channelz_pb2_grpc
+
+from tests.unit.framework.common import test_constants
+from tests_aio.unit._test_base import AioTestBase
+
+_SUCCESSFUL_UNARY_UNARY = '/test/SuccessfulUnaryUnary'
+_FAILED_UNARY_UNARY = '/test/FailedUnaryUnary'
+_SUCCESSFUL_STREAM_STREAM = '/test/SuccessfulStreamStream'
+
+_REQUEST = b'\x00\x00\x00'
+_RESPONSE = b'\x01\x01\x01'
+
+_DISABLE_REUSE_PORT = (('grpc.so_reuseport', 0),)
+_ENABLE_CHANNELZ = (('grpc.enable_channelz', 1),)
+_DISABLE_CHANNELZ = (('grpc.enable_channelz', 0),)
+
+_LARGE_UNASSIGNED_ID = 10000
+
+
+async def _successful_unary_unary(request, servicer_context):
+ return _RESPONSE
+
+
+async def _failed_unary_unary(request, servicer_context):
+ servicer_context.set_code(grpc.StatusCode.INTERNAL)
+ servicer_context.set_details("Channelz Test Intended Failure")
+
+
+async def _successful_stream_stream(request_iterator, servicer_context):
+ async for _ in request_iterator:
+ yield _RESPONSE
+
+
+class _GenericHandler(grpc.GenericRpcHandler):
+
+ def service(self, handler_call_details):
+ if handler_call_details.method == _SUCCESSFUL_UNARY_UNARY:
+ return grpc.unary_unary_rpc_method_handler(_successful_unary_unary)
+ elif handler_call_details.method == _FAILED_UNARY_UNARY:
+ return grpc.unary_unary_rpc_method_handler(_failed_unary_unary)
+ elif handler_call_details.method == _SUCCESSFUL_STREAM_STREAM:
+ return grpc.stream_stream_rpc_method_handler(
+ _successful_stream_stream)
+ else:
+ return None
+
+
+class _ChannelServerPair:
+
+ def __init__(self):
+ self.address = ''
+ self.server = None
+ self.channel = None
+ self.server_ref_id = None
+ self.channel_ref_id = None
+
+ async def start(self):
+ # Server will enable channelz service
+ self.server = aio.server(options=_DISABLE_REUSE_PORT + _ENABLE_CHANNELZ)
+ port = self.server.add_insecure_port('[::]:0')
+ self.address = 'localhost:%d' % port
+ self.server.add_generic_rpc_handlers((_GenericHandler(),))
+ await self.server.start()
+
+ # Channel will enable channelz service...
+ self.channel = aio.insecure_channel(self.address,
+ options=_ENABLE_CHANNELZ)
+
+ async def bind_channelz(self, channelz_stub):
+ resp = await channelz_stub.GetTopChannels(
+ channelz_pb2.GetTopChannelsRequest(start_channel_id=0))
+ for channel in resp.channel:
+ if channel.data.target == self.address:
+ self.channel_ref_id = channel.ref.channel_id
+
+ resp = await channelz_stub.GetServers(
+ channelz_pb2.GetServersRequest(start_server_id=0))
+ self.server_ref_id = resp.server[-1].ref.server_id
+
+ async def stop(self):
+ await self.channel.close()
+ await self.server.stop(None)
+
+
+async def _create_channel_server_pairs(n, channelz_stub=None):
+ """Create channel-server pairs."""
+ pairs = [_ChannelServerPair() for i in range(n)]
+ for pair in pairs:
+ await pair.start()
+ if channelz_stub:
+ await pair.bind_channelz(channelz_stub)
+ return pairs
+
+
+async def _destroy_channel_server_pairs(pairs):
+ for pair in pairs:
+ await pair.stop()
+
+
+class ChannelzServicerTest(AioTestBase):
+
+ async def setUp(self):
+ # This server is for Channelz info fetching only
+        # It itself should not enable Channelz
+ self._server = aio.server(options=_DISABLE_REUSE_PORT +
+ _DISABLE_CHANNELZ)
+ port = self._server.add_insecure_port('[::]:0')
+ channelz.add_channelz_servicer(self._server)
+ await self._server.start()
+
+ # This channel is used to fetch Channelz info only
+ # Channelz should not be enabled
+ self._channel = aio.insecure_channel('localhost:%d' % port,
+ options=_DISABLE_CHANNELZ)
+ self._channelz_stub = channelz_pb2_grpc.ChannelzStub(self._channel)
+
+ async def tearDown(self):
+ await self._channel.close()
+ await self._server.stop(None)
+
+ async def _get_server_by_ref_id(self, ref_id):
+ """Server id may not be consecutive"""
+ resp = await self._channelz_stub.GetServers(
+ channelz_pb2.GetServersRequest(start_server_id=ref_id))
+ self.assertEqual(ref_id, resp.server[0].ref.server_id)
+ return resp.server[0]
+
+ async def _send_successful_unary_unary(self, pair):
+ call = pair.channel.unary_unary(_SUCCESSFUL_UNARY_UNARY)(_REQUEST)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def _send_failed_unary_unary(self, pair):
+ try:
+ await pair.channel.unary_unary(_FAILED_UNARY_UNARY)(_REQUEST)
+ except grpc.RpcError:
+ return
+ else:
+            self.fail("This call is supposed to fail")
+
+ async def _send_successful_stream_stream(self, pair):
+ call = pair.channel.stream_stream(_SUCCESSFUL_STREAM_STREAM)(iter(
+ [_REQUEST] * test_constants.STREAM_LENGTH))
+ cnt = 0
+ async for _ in call:
+ cnt += 1
+ self.assertEqual(cnt, test_constants.STREAM_LENGTH)
+
+ async def test_get_top_channels_high_start_id(self):
+ pairs = await _create_channel_server_pairs(1)
+
+ resp = await self._channelz_stub.GetTopChannels(
+ channelz_pb2.GetTopChannelsRequest(
+ start_channel_id=_LARGE_UNASSIGNED_ID))
+ self.assertEqual(len(resp.channel), 0)
+ self.assertEqual(resp.end, True)
+
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_successful_request(self):
+ pairs = await _create_channel_server_pairs(1, self._channelz_stub)
+
+ await self._send_successful_unary_unary(pairs[0])
+ resp = await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
+
+ self.assertEqual(resp.channel.data.calls_started, 1)
+ self.assertEqual(resp.channel.data.calls_succeeded, 1)
+ self.assertEqual(resp.channel.data.calls_failed, 0)
+
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_failed_request(self):
+ pairs = await _create_channel_server_pairs(1, self._channelz_stub)
+
+ await self._send_failed_unary_unary(pairs[0])
+ resp = await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
+ self.assertEqual(resp.channel.data.calls_started, 1)
+ self.assertEqual(resp.channel.data.calls_succeeded, 0)
+ self.assertEqual(resp.channel.data.calls_failed, 1)
+
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_many_requests(self):
+ pairs = await _create_channel_server_pairs(1, self._channelz_stub)
+
+ k_success = 7
+ k_failed = 9
+ for i in range(k_success):
+ await self._send_successful_unary_unary(pairs[0])
+ for i in range(k_failed):
+ await self._send_failed_unary_unary(pairs[0])
+ resp = await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
+ self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
+ self.assertEqual(resp.channel.data.calls_succeeded, k_success)
+ self.assertEqual(resp.channel.data.calls_failed, k_failed)
+
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_many_requests_many_channel(self):
+ k_channels = 4
+ pairs = await _create_channel_server_pairs(k_channels,
+ self._channelz_stub)
+ k_success = 11
+ k_failed = 13
+ for i in range(k_success):
+ await self._send_successful_unary_unary(pairs[0])
+ await self._send_successful_unary_unary(pairs[2])
+ for i in range(k_failed):
+ await self._send_failed_unary_unary(pairs[1])
+ await self._send_failed_unary_unary(pairs[2])
+
+ # The first channel saw only successes
+ resp = await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
+ self.assertEqual(resp.channel.data.calls_started, k_success)
+ self.assertEqual(resp.channel.data.calls_succeeded, k_success)
+ self.assertEqual(resp.channel.data.calls_failed, 0)
+
+ # The second channel saw only failures
+ resp = await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(channel_id=pairs[1].channel_ref_id))
+ self.assertEqual(resp.channel.data.calls_started, k_failed)
+ self.assertEqual(resp.channel.data.calls_succeeded, 0)
+ self.assertEqual(resp.channel.data.calls_failed, k_failed)
+
+ # The third channel saw both successes and failures
+ resp = await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(channel_id=pairs[2].channel_ref_id))
+ self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
+ self.assertEqual(resp.channel.data.calls_succeeded, k_success)
+ self.assertEqual(resp.channel.data.calls_failed, k_failed)
+
+ # The fourth channel saw nothing
+ resp = await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(channel_id=pairs[3].channel_ref_id))
+ self.assertEqual(resp.channel.data.calls_started, 0)
+ self.assertEqual(resp.channel.data.calls_succeeded, 0)
+ self.assertEqual(resp.channel.data.calls_failed, 0)
+
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_many_subchannels(self):
+ k_channels = 4
+ pairs = await _create_channel_server_pairs(k_channels,
+ self._channelz_stub)
+ k_success = 17
+ k_failed = 19
+ for i in range(k_success):
+ await self._send_successful_unary_unary(pairs[0])
+ await self._send_successful_unary_unary(pairs[2])
+ for i in range(k_failed):
+ await self._send_failed_unary_unary(pairs[1])
+ await self._send_failed_unary_unary(pairs[2])
+
+ for i in range(k_channels):
+ gc_resp = await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(
+ channel_id=pairs[i].channel_ref_id))
+            # If no call was performed on the channel, there shouldn't be any subchannel
+ if gc_resp.channel.data.calls_started == 0:
+ self.assertEqual(len(gc_resp.channel.subchannel_ref), 0)
+ continue
+
+ # Otherwise, the subchannel should exist
+ self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
+ gsc_resp = await self._channelz_stub.GetSubchannel(
+ channelz_pb2.GetSubchannelRequest(
+ subchannel_id=gc_resp.channel.subchannel_ref[0].
+ subchannel_id))
+ self.assertEqual(gc_resp.channel.data.calls_started,
+ gsc_resp.subchannel.data.calls_started)
+ self.assertEqual(gc_resp.channel.data.calls_succeeded,
+ gsc_resp.subchannel.data.calls_succeeded)
+ self.assertEqual(gc_resp.channel.data.calls_failed,
+ gsc_resp.subchannel.data.calls_failed)
+
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_server_call(self):
+ pairs = await _create_channel_server_pairs(1, self._channelz_stub)
+
+ k_success = 23
+ k_failed = 29
+ for i in range(k_success):
+ await self._send_successful_unary_unary(pairs[0])
+ for i in range(k_failed):
+ await self._send_failed_unary_unary(pairs[0])
+
+ resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
+ self.assertEqual(resp.data.calls_started, k_success + k_failed)
+ self.assertEqual(resp.data.calls_succeeded, k_success)
+ self.assertEqual(resp.data.calls_failed, k_failed)
+
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_many_subchannels_and_sockets(self):
+ k_channels = 4
+ pairs = await _create_channel_server_pairs(k_channels,
+ self._channelz_stub)
+ k_success = 3
+ k_failed = 5
+ for i in range(k_success):
+ await self._send_successful_unary_unary(pairs[0])
+ await self._send_successful_unary_unary(pairs[2])
+ for i in range(k_failed):
+ await self._send_failed_unary_unary(pairs[1])
+ await self._send_failed_unary_unary(pairs[2])
+
+ for i in range(k_channels):
+ gc_resp = await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(
+ channel_id=pairs[i].channel_ref_id))
+
+            # If no call was performed on the channel, there shouldn't be any subchannel
+ if gc_resp.channel.data.calls_started == 0:
+ self.assertEqual(len(gc_resp.channel.subchannel_ref), 0)
+ continue
+
+ # Otherwise, the subchannel should exist
+ self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
+ gsc_resp = await self._channelz_stub.GetSubchannel(
+ channelz_pb2.GetSubchannelRequest(
+ subchannel_id=gc_resp.channel.subchannel_ref[0].
+ subchannel_id))
+ self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
+
+ gs_resp = await self._channelz_stub.GetSocket(
+ channelz_pb2.GetSocketRequest(
+ socket_id=gsc_resp.subchannel.socket_ref[0].socket_id))
+ self.assertEqual(gsc_resp.subchannel.data.calls_started,
+ gs_resp.socket.data.streams_started)
+ self.assertEqual(0, gs_resp.socket.data.streams_failed)
+ # Calls started == messages sent, only valid for unary calls
+ self.assertEqual(gsc_resp.subchannel.data.calls_started,
+ gs_resp.socket.data.messages_sent)
+
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_streaming_rpc(self):
+ pairs = await _create_channel_server_pairs(1, self._channelz_stub)
+ # In C++, the argument for _send_successful_stream_stream is message length.
+ # Here the argument is still channel idx, to be consistent with the other two.
+ await self._send_successful_stream_stream(pairs[0])
+
+ gc_resp = await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
+ self.assertEqual(gc_resp.channel.data.calls_started, 1)
+ self.assertEqual(gc_resp.channel.data.calls_succeeded, 1)
+ self.assertEqual(gc_resp.channel.data.calls_failed, 0)
+ # Subchannel exists
+ self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
+
+ gsc_resp = await self._channelz_stub.GetSubchannel(
+ channelz_pb2.GetSubchannelRequest(
+ subchannel_id=gc_resp.channel.subchannel_ref[0].subchannel_id))
+ self.assertEqual(gsc_resp.subchannel.data.calls_started, 1)
+ self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, 1)
+ self.assertEqual(gsc_resp.subchannel.data.calls_failed, 0)
+ # Socket exists
+ self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
+
+ gs_resp = await self._channelz_stub.GetSocket(
+ channelz_pb2.GetSocketRequest(
+ socket_id=gsc_resp.subchannel.socket_ref[0].socket_id))
+ self.assertEqual(gs_resp.socket.data.streams_started, 1)
+ self.assertEqual(gs_resp.socket.data.streams_succeeded, 1)
+ self.assertEqual(gs_resp.socket.data.streams_failed, 0)
+ self.assertEqual(gs_resp.socket.data.messages_sent,
+ test_constants.STREAM_LENGTH)
+ self.assertEqual(gs_resp.socket.data.messages_received,
+ test_constants.STREAM_LENGTH)
+
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_server_sockets(self):
+ pairs = await _create_channel_server_pairs(1, self._channelz_stub)
+
+ await self._send_successful_unary_unary(pairs[0])
+ await self._send_failed_unary_unary(pairs[0])
+
+ resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
+ self.assertEqual(resp.data.calls_started, 2)
+ self.assertEqual(resp.data.calls_succeeded, 1)
+ self.assertEqual(resp.data.calls_failed, 1)
+
+ gss_resp = await self._channelz_stub.GetServerSockets(
+ channelz_pb2.GetServerSocketsRequest(server_id=resp.ref.server_id,
+ start_socket_id=0))
+        # If the RPC call failed, it would raise a grpc.RpcError,
+        # so if no exception is raised, the test is considered passed.
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_server_listen_sockets(self):
+ pairs = await _create_channel_server_pairs(1, self._channelz_stub)
+
+ resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
+ self.assertEqual(len(resp.listen_socket), 1)
+
+ gs_resp = await self._channelz_stub.GetSocket(
+ channelz_pb2.GetSocketRequest(
+ socket_id=resp.listen_socket[0].socket_id))
+        # If the RPC call failed, it would raise a grpc.RpcError,
+        # so if no exception is raised, the test is considered passed.
+ await _destroy_channel_server_pairs(pairs)
+
+ async def test_invalid_query_get_server(self):
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await self._channelz_stub.GetServer(
+ channelz_pb2.GetServerRequest(server_id=_LARGE_UNASSIGNED_ID))
+ self.assertEqual(grpc.StatusCode.NOT_FOUND,
+ exception_context.exception.code())
+
+ async def test_invalid_query_get_channel(self):
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await self._channelz_stub.GetChannel(
+ channelz_pb2.GetChannelRequest(channel_id=_LARGE_UNASSIGNED_ID))
+ self.assertEqual(grpc.StatusCode.NOT_FOUND,
+ exception_context.exception.code())
+
+ async def test_invalid_query_get_subchannel(self):
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await self._channelz_stub.GetSubchannel(
+ channelz_pb2.GetSubchannelRequest(
+ subchannel_id=_LARGE_UNASSIGNED_ID))
+ self.assertEqual(grpc.StatusCode.NOT_FOUND,
+ exception_context.exception.code())
+
+ async def test_invalid_query_get_socket(self):
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await self._channelz_stub.GetSocket(
+ channelz_pb2.GetSocketRequest(socket_id=_LARGE_UNASSIGNED_ID))
+ self.assertEqual(grpc.StatusCode.NOT_FOUND,
+ exception_context.exception.code())
+
+ async def test_invalid_query_get_server_sockets(self):
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await self._channelz_stub.GetServerSockets(
+ channelz_pb2.GetServerSocketsRequest(
+ server_id=_LARGE_UNASSIGNED_ID,
+ start_socket_id=0,
+ ))
+ self.assertEqual(grpc.StatusCode.NOT_FOUND,
+ exception_context.exception.code())
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/__init__.py
index 2cb28cb464..1517f71d09 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/__init__.py
@@ -1,13 +1,13 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py
index 5823badf7b..a539dbf140 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py
@@ -1,282 +1,282 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests AsyncIO version of grpcio-health-checking."""
-
-import asyncio
-import logging
-import time
-import random
-import unittest
-
-import grpc
-
-from grpc_health.v1 import health
-from grpc_health.v1 import health_pb2
-from grpc_health.v1 import health_pb2_grpc
-from grpc.experimental import aio
-
-from tests.unit.framework.common import test_constants
-
-from tests_aio.unit._test_base import AioTestBase
-
-_SERVING_SERVICE = 'grpc.test.TestServiceServing'
-_UNKNOWN_SERVICE = 'grpc.test.TestServiceUnknown'
-_NOT_SERVING_SERVICE = 'grpc.test.TestServiceNotServing'
-_WATCH_SERVICE = 'grpc.test.WatchService'
-
-_LARGE_NUMBER_OF_STATUS_CHANGES = 1000
-
-
-async def _pipe_to_queue(call, queue):
- async for response in call:
- await queue.put(response)
-
-
-class HealthServicerTest(AioTestBase):
-
- async def setUp(self):
- self._servicer = health.aio.HealthServicer()
- await self._servicer.set(_SERVING_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
- await self._servicer.set(_UNKNOWN_SERVICE,
- health_pb2.HealthCheckResponse.UNKNOWN)
- await self._servicer.set(_NOT_SERVING_SERVICE,
- health_pb2.HealthCheckResponse.NOT_SERVING)
- self._server = aio.server()
- port = self._server.add_insecure_port('[::]:0')
- health_pb2_grpc.add_HealthServicer_to_server(self._servicer,
- self._server)
- await self._server.start()
-
- self._channel = aio.insecure_channel('localhost:%d' % port)
- self._stub = health_pb2_grpc.HealthStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_check_empty_service(self):
- request = health_pb2.HealthCheckRequest()
- resp = await self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
-
- async def test_check_serving_service(self):
- request = health_pb2.HealthCheckRequest(service=_SERVING_SERVICE)
- resp = await self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
-
- async def test_check_unknown_service(self):
- request = health_pb2.HealthCheckRequest(service=_UNKNOWN_SERVICE)
- resp = await self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status)
-
- async def test_check_not_serving_service(self):
- request = health_pb2.HealthCheckRequest(service=_NOT_SERVING_SERVICE)
- resp = await self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- resp.status)
-
- async def test_check_not_found_service(self):
- request = health_pb2.HealthCheckRequest(service='not-found')
- with self.assertRaises(aio.AioRpcError) as context:
- await self._stub.Check(request)
-
- self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code())
-
- async def test_health_service_name(self):
- self.assertEqual(health.SERVICE_NAME, 'grpc.health.v1.Health')
-
- async def test_watch_empty_service(self):
- request = health_pb2.HealthCheckRequest(service=health.OVERALL_HEALTH)
-
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- (await queue.get()).status)
-
- call.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- self.assertTrue(queue.empty())
-
- async def test_watch_new_service(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue.get()).status)
-
- await self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- (await queue.get()).status)
-
- await self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.NOT_SERVING)
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- (await queue.get()).status)
-
- call.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- self.assertTrue(queue.empty())
-
- async def test_watch_service_isolation(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue.get()).status)
-
- await self._servicer.set('some-other-service',
- health_pb2.HealthCheckResponse.SERVING)
- # The change of health status in other service should be isolated.
- # Hence, no additional notification should be observed.
- with self.assertRaises(asyncio.TimeoutError):
- await asyncio.wait_for(queue.get(), test_constants.SHORT_TIMEOUT)
-
- call.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- self.assertTrue(queue.empty())
-
- async def test_two_watchers(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- queue1 = asyncio.Queue()
- queue2 = asyncio.Queue()
- call1 = self._stub.Watch(request)
- call2 = self._stub.Watch(request)
- task1 = self.loop.create_task(_pipe_to_queue(call1, queue1))
- task2 = self.loop.create_task(_pipe_to_queue(call2, queue2))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue1.get()).status)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue2.get()).status)
-
- await self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- (await queue1.get()).status)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- (await queue2.get()).status)
-
- call1.cancel()
- call2.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task1
-
- with self.assertRaises(asyncio.CancelledError):
- await task2
-
- self.assertTrue(queue1.empty())
- self.assertTrue(queue2.empty())
-
- async def test_cancelled_watch_removed_from_watch_list(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue.get()).status)
-
- call.cancel()
- await self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- # Wait for the serving coroutine to process client cancellation.
- timeout = time.monotonic() + test_constants.TIME_ALLOWANCE
- while (time.monotonic() < timeout and self._servicer._server_watchers):
- await asyncio.sleep(1)
- self.assertFalse(self._servicer._server_watchers,
- 'There should not be any watcher left')
- self.assertTrue(queue.empty())
-
- async def test_graceful_shutdown(self):
- request = health_pb2.HealthCheckRequest(service=health.OVERALL_HEALTH)
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- (await queue.get()).status)
-
- await self._servicer.enter_graceful_shutdown()
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- (await queue.get()).status)
-
- # This should be a no-op.
- await self._servicer.set(health.OVERALL_HEALTH,
- health_pb2.HealthCheckResponse.SERVING)
-
- resp = await self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- resp.status)
-
- call.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- self.assertTrue(queue.empty())
-
- async def test_no_duplicate_status(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue.get()).status)
- last_status = health_pb2.HealthCheckResponse.SERVICE_UNKNOWN
-
- for _ in range(_LARGE_NUMBER_OF_STATUS_CHANGES):
- if random.randint(0, 1) == 0:
- status = health_pb2.HealthCheckResponse.SERVING
- else:
- status = health_pb2.HealthCheckResponse.NOT_SERVING
-
- await self._servicer.set(_WATCH_SERVICE, status)
- if status != last_status:
- self.assertEqual(status, (await queue.get()).status)
- last_status = status
-
- call.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- self.assertTrue(queue.empty())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests AsyncIO version of grpcio-health-checking."""
+
+import asyncio
+import logging
+import time
+import random
+import unittest
+
+import grpc
+
+from grpc_health.v1 import health
+from grpc_health.v1 import health_pb2
+from grpc_health.v1 import health_pb2_grpc
+from grpc.experimental import aio
+
+from tests.unit.framework.common import test_constants
+
+from tests_aio.unit._test_base import AioTestBase
+
+_SERVING_SERVICE = 'grpc.test.TestServiceServing'
+_UNKNOWN_SERVICE = 'grpc.test.TestServiceUnknown'
+_NOT_SERVING_SERVICE = 'grpc.test.TestServiceNotServing'
+_WATCH_SERVICE = 'grpc.test.WatchService'
+
+_LARGE_NUMBER_OF_STATUS_CHANGES = 1000
+
+
+async def _pipe_to_queue(call, queue):
+ async for response in call:
+ await queue.put(response)
+
+
+class HealthServicerTest(AioTestBase):
+
+ async def setUp(self):
+ self._servicer = health.aio.HealthServicer()
+ await self._servicer.set(_SERVING_SERVICE,
+ health_pb2.HealthCheckResponse.SERVING)
+ await self._servicer.set(_UNKNOWN_SERVICE,
+ health_pb2.HealthCheckResponse.UNKNOWN)
+ await self._servicer.set(_NOT_SERVING_SERVICE,
+ health_pb2.HealthCheckResponse.NOT_SERVING)
+ self._server = aio.server()
+ port = self._server.add_insecure_port('[::]:0')
+ health_pb2_grpc.add_HealthServicer_to_server(self._servicer,
+ self._server)
+ await self._server.start()
+
+ self._channel = aio.insecure_channel('localhost:%d' % port)
+ self._stub = health_pb2_grpc.HealthStub(self._channel)
+
+ async def tearDown(self):
+ await self._channel.close()
+ await self._server.stop(None)
+
+ async def test_check_empty_service(self):
+ request = health_pb2.HealthCheckRequest()
+ resp = await self._stub.Check(request)
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
+
+ async def test_check_serving_service(self):
+ request = health_pb2.HealthCheckRequest(service=_SERVING_SERVICE)
+ resp = await self._stub.Check(request)
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
+
+ async def test_check_unknown_service(self):
+ request = health_pb2.HealthCheckRequest(service=_UNKNOWN_SERVICE)
+ resp = await self._stub.Check(request)
+ self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status)
+
+ async def test_check_not_serving_service(self):
+ request = health_pb2.HealthCheckRequest(service=_NOT_SERVING_SERVICE)
+ resp = await self._stub.Check(request)
+ self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
+ resp.status)
+
+ async def test_check_not_found_service(self):
+ request = health_pb2.HealthCheckRequest(service='not-found')
+ with self.assertRaises(aio.AioRpcError) as context:
+ await self._stub.Check(request)
+
+ self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code())
+
+ async def test_health_service_name(self):
+ self.assertEqual(health.SERVICE_NAME, 'grpc.health.v1.Health')
+
+ async def test_watch_empty_service(self):
+ request = health_pb2.HealthCheckRequest(service=health.OVERALL_HEALTH)
+
+ call = self._stub.Watch(request)
+ queue = asyncio.Queue()
+ task = self.loop.create_task(_pipe_to_queue(call, queue))
+
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
+ (await queue.get()).status)
+
+ call.cancel()
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task
+
+ self.assertTrue(queue.empty())
+
+ async def test_watch_new_service(self):
+ request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
+ call = self._stub.Watch(request)
+ queue = asyncio.Queue()
+ task = self.loop.create_task(_pipe_to_queue(call, queue))
+
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
+ (await queue.get()).status)
+
+ await self._servicer.set(_WATCH_SERVICE,
+ health_pb2.HealthCheckResponse.SERVING)
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
+ (await queue.get()).status)
+
+ await self._servicer.set(_WATCH_SERVICE,
+ health_pb2.HealthCheckResponse.NOT_SERVING)
+ self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
+ (await queue.get()).status)
+
+ call.cancel()
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task
+
+ self.assertTrue(queue.empty())
+
+ async def test_watch_service_isolation(self):
+ request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
+ call = self._stub.Watch(request)
+ queue = asyncio.Queue()
+ task = self.loop.create_task(_pipe_to_queue(call, queue))
+
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
+ (await queue.get()).status)
+
+ await self._servicer.set('some-other-service',
+ health_pb2.HealthCheckResponse.SERVING)
+        # The change of health status in another service should be isolated.
+        # Hence, no additional notification should be observed.
+ with self.assertRaises(asyncio.TimeoutError):
+ await asyncio.wait_for(queue.get(), test_constants.SHORT_TIMEOUT)
+
+ call.cancel()
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task
+
+ self.assertTrue(queue.empty())
+
+ async def test_two_watchers(self):
+ request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
+ queue1 = asyncio.Queue()
+ queue2 = asyncio.Queue()
+ call1 = self._stub.Watch(request)
+ call2 = self._stub.Watch(request)
+ task1 = self.loop.create_task(_pipe_to_queue(call1, queue1))
+ task2 = self.loop.create_task(_pipe_to_queue(call2, queue2))
+
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
+ (await queue1.get()).status)
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
+ (await queue2.get()).status)
+
+ await self._servicer.set(_WATCH_SERVICE,
+ health_pb2.HealthCheckResponse.SERVING)
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
+ (await queue1.get()).status)
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
+ (await queue2.get()).status)
+
+ call1.cancel()
+ call2.cancel()
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task1
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task2
+
+ self.assertTrue(queue1.empty())
+ self.assertTrue(queue2.empty())
+
+ async def test_cancelled_watch_removed_from_watch_list(self):
+ request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
+ call = self._stub.Watch(request)
+ queue = asyncio.Queue()
+ task = self.loop.create_task(_pipe_to_queue(call, queue))
+
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
+ (await queue.get()).status)
+
+ call.cancel()
+ await self._servicer.set(_WATCH_SERVICE,
+ health_pb2.HealthCheckResponse.SERVING)
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task
+
+ # Wait for the serving coroutine to process client cancellation.
+ timeout = time.monotonic() + test_constants.TIME_ALLOWANCE
+ while (time.monotonic() < timeout and self._servicer._server_watchers):
+ await asyncio.sleep(1)
+ self.assertFalse(self._servicer._server_watchers,
+ 'There should not be any watcher left')
+ self.assertTrue(queue.empty())
+
+ async def test_graceful_shutdown(self):
+ request = health_pb2.HealthCheckRequest(service=health.OVERALL_HEALTH)
+ call = self._stub.Watch(request)
+ queue = asyncio.Queue()
+ task = self.loop.create_task(_pipe_to_queue(call, queue))
+
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
+ (await queue.get()).status)
+
+ await self._servicer.enter_graceful_shutdown()
+ self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
+ (await queue.get()).status)
+
+ # This should be a no-op.
+ await self._servicer.set(health.OVERALL_HEALTH,
+ health_pb2.HealthCheckResponse.SERVING)
+
+ resp = await self._stub.Check(request)
+ self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
+ resp.status)
+
+ call.cancel()
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task
+
+ self.assertTrue(queue.empty())
+
+ async def test_no_duplicate_status(self):
+ request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
+ call = self._stub.Watch(request)
+ queue = asyncio.Queue()
+ task = self.loop.create_task(_pipe_to_queue(call, queue))
+
+ self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
+ (await queue.get()).status)
+ last_status = health_pb2.HealthCheckResponse.SERVICE_UNKNOWN
+
+ for _ in range(_LARGE_NUMBER_OF_STATUS_CHANGES):
+ if random.randint(0, 1) == 0:
+ status = health_pb2.HealthCheckResponse.SERVING
+ else:
+ status = health_pb2.HealthCheckResponse.NOT_SERVING
+
+ await self._servicer.set(_WATCH_SERVICE, status)
+ if status != last_status:
+ self.assertEqual(status, (await queue.get()).status)
+ last_status = status
+
+ call.cancel()
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task
+
+ self.assertTrue(queue.empty())
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/__init__.py
index 59b52328e8..b71ddbd314 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/__init__.py
@@ -1,13 +1,13 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# Copyright 2019 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/client.py
index 791d396371..a4c5e12ced 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/client.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/client.py
@@ -1,61 +1,61 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import asyncio
-import logging
-import os
-
-import grpc
-from grpc.experimental import aio
-
-from tests.interop import client as interop_client_lib
-from tests_aio.interop import methods
-
-_LOGGER = logging.getLogger(__name__)
-_LOGGER.setLevel(logging.DEBUG)
-
-
-def _create_channel(args):
- target = f'{args.server_host}:{args.server_port}'
-
- if args.use_tls or args.use_alts or args.custom_credentials_type is not None:
- channel_credentials, options = interop_client_lib.get_secure_channel_parameters(
- args)
- return aio.secure_channel(target, channel_credentials, options)
- else:
- return aio.insecure_channel(target)
-
-
-def _test_case_from_arg(test_case_arg):
- for test_case in methods.TestCase:
- if test_case_arg == test_case.value:
- return test_case
- else:
- raise ValueError('No test case "%s"!' % test_case_arg)
-
-
-async def test_interoperability():
-
- args = interop_client_lib.parse_interop_client_args()
- channel = _create_channel(args)
- stub = interop_client_lib.create_stub(channel, args)
- test_case = _test_case_from_arg(args.test_case)
- await methods.test_interoperability(test_case, stub, args)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- asyncio.get_event_loop().set_debug(True)
- asyncio.get_event_loop().run_until_complete(test_interoperability())
+# Copyright 2019 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import asyncio
+import logging
+import os
+
+import grpc
+from grpc.experimental import aio
+
+from tests.interop import client as interop_client_lib
+from tests_aio.interop import methods
+
+_LOGGER = logging.getLogger(__name__)
+_LOGGER.setLevel(logging.DEBUG)
+
+
+def _create_channel(args):
+ target = f'{args.server_host}:{args.server_port}'
+
+ if args.use_tls or args.use_alts or args.custom_credentials_type is not None:
+ channel_credentials, options = interop_client_lib.get_secure_channel_parameters(
+ args)
+ return aio.secure_channel(target, channel_credentials, options)
+ else:
+ return aio.insecure_channel(target)
+
+
+def _test_case_from_arg(test_case_arg):
+ for test_case in methods.TestCase:
+ if test_case_arg == test_case.value:
+ return test_case
+ else:
+ raise ValueError('No test case "%s"!' % test_case_arg)
+
+
+async def test_interoperability():
+
+ args = interop_client_lib.parse_interop_client_args()
+ channel = _create_channel(args)
+ stub = interop_client_lib.create_stub(channel, args)
+ test_case = _test_case_from_arg(args.test_case)
+ await methods.test_interoperability(test_case, stub, args)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ asyncio.get_event_loop().set_debug(True)
+ asyncio.get_event_loop().run_until_complete(test_interoperability())
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/local_interop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/local_interop_test.py
index 6b58af8967..0db15be3a9 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/local_interop_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/local_interop_test.py
@@ -1,134 +1,134 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Conducts interop tests locally."""
-
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.interop import resources
-from tests_aio.interop import methods
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
-
-
-class InteropTestCaseMixin:
- """Unit test methods.
-
- This class must be mixed in with unittest.TestCase and a class that defines
- setUp and tearDown methods that manage a stub attribute.
- """
- _stub: test_pb2_grpc.TestServiceStub
-
- async def test_empty_unary(self):
- await methods.test_interoperability(methods.TestCase.EMPTY_UNARY,
- self._stub, None)
-
- async def test_large_unary(self):
- await methods.test_interoperability(methods.TestCase.LARGE_UNARY,
- self._stub, None)
-
- async def test_server_streaming(self):
- await methods.test_interoperability(methods.TestCase.SERVER_STREAMING,
- self._stub, None)
-
- async def test_client_streaming(self):
- await methods.test_interoperability(methods.TestCase.CLIENT_STREAMING,
- self._stub, None)
-
- async def test_ping_pong(self):
- await methods.test_interoperability(methods.TestCase.PING_PONG,
- self._stub, None)
-
- async def test_cancel_after_begin(self):
- await methods.test_interoperability(methods.TestCase.CANCEL_AFTER_BEGIN,
- self._stub, None)
-
- async def test_cancel_after_first_response(self):
- await methods.test_interoperability(
- methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE, self._stub, None)
-
- async def test_timeout_on_sleeping_server(self):
- await methods.test_interoperability(
- methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER, self._stub, None)
-
- async def test_empty_stream(self):
- await methods.test_interoperability(methods.TestCase.EMPTY_STREAM,
- self._stub, None)
-
- async def test_status_code_and_message(self):
- await methods.test_interoperability(
- methods.TestCase.STATUS_CODE_AND_MESSAGE, self._stub, None)
-
- async def test_unimplemented_method(self):
- await methods.test_interoperability(
- methods.TestCase.UNIMPLEMENTED_METHOD, self._stub, None)
-
- async def test_unimplemented_service(self):
- await methods.test_interoperability(
- methods.TestCase.UNIMPLEMENTED_SERVICE, self._stub, None)
-
- async def test_custom_metadata(self):
- await methods.test_interoperability(methods.TestCase.CUSTOM_METADATA,
- self._stub, None)
-
- async def test_special_status_message(self):
- await methods.test_interoperability(
- methods.TestCase.SPECIAL_STATUS_MESSAGE, self._stub, None)
-
-
-class InsecureLocalInteropTest(InteropTestCaseMixin, AioTestBase):
-
- async def setUp(self):
- address, self._server = await start_test_server()
- self._channel = aio.insecure_channel(address)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
-
-class SecureLocalInteropTest(InteropTestCaseMixin, AioTestBase):
-
- async def setUp(self):
- server_credentials = grpc.ssl_server_credentials([
- (resources.private_key(), resources.certificate_chain())
- ])
- channel_credentials = grpc.ssl_channel_credentials(
- resources.test_root_certificates())
- channel_options = ((
- 'grpc.ssl_target_name_override',
- _SERVER_HOST_OVERRIDE,
- ),)
-
- address, self._server = await start_test_server(
- secure=True, server_credentials=server_credentials)
- self._channel = aio.secure_channel(address, channel_credentials,
- channel_options)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.INFO)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Conducts interop tests locally."""
+
+import logging
+import unittest
+
+import grpc
+from grpc.experimental import aio
+
+from src.proto.grpc.testing import test_pb2_grpc
+from tests.interop import resources
+from tests_aio.interop import methods
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._test_server import start_test_server
+
+_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
+
+
+class InteropTestCaseMixin:
+ """Unit test methods.
+
+ This class must be mixed in with unittest.TestCase and a class that defines
+ setUp and tearDown methods that manage a stub attribute.
+ """
+ _stub: test_pb2_grpc.TestServiceStub
+
+ async def test_empty_unary(self):
+ await methods.test_interoperability(methods.TestCase.EMPTY_UNARY,
+ self._stub, None)
+
+ async def test_large_unary(self):
+ await methods.test_interoperability(methods.TestCase.LARGE_UNARY,
+ self._stub, None)
+
+ async def test_server_streaming(self):
+ await methods.test_interoperability(methods.TestCase.SERVER_STREAMING,
+ self._stub, None)
+
+ async def test_client_streaming(self):
+ await methods.test_interoperability(methods.TestCase.CLIENT_STREAMING,
+ self._stub, None)
+
+ async def test_ping_pong(self):
+ await methods.test_interoperability(methods.TestCase.PING_PONG,
+ self._stub, None)
+
+ async def test_cancel_after_begin(self):
+ await methods.test_interoperability(methods.TestCase.CANCEL_AFTER_BEGIN,
+ self._stub, None)
+
+ async def test_cancel_after_first_response(self):
+ await methods.test_interoperability(
+ methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE, self._stub, None)
+
+ async def test_timeout_on_sleeping_server(self):
+ await methods.test_interoperability(
+ methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER, self._stub, None)
+
+ async def test_empty_stream(self):
+ await methods.test_interoperability(methods.TestCase.EMPTY_STREAM,
+ self._stub, None)
+
+ async def test_status_code_and_message(self):
+ await methods.test_interoperability(
+ methods.TestCase.STATUS_CODE_AND_MESSAGE, self._stub, None)
+
+ async def test_unimplemented_method(self):
+ await methods.test_interoperability(
+ methods.TestCase.UNIMPLEMENTED_METHOD, self._stub, None)
+
+ async def test_unimplemented_service(self):
+ await methods.test_interoperability(
+ methods.TestCase.UNIMPLEMENTED_SERVICE, self._stub, None)
+
+ async def test_custom_metadata(self):
+ await methods.test_interoperability(methods.TestCase.CUSTOM_METADATA,
+ self._stub, None)
+
+ async def test_special_status_message(self):
+ await methods.test_interoperability(
+ methods.TestCase.SPECIAL_STATUS_MESSAGE, self._stub, None)
+
+
+class InsecureLocalInteropTest(InteropTestCaseMixin, AioTestBase):
+
+ async def setUp(self):
+ address, self._server = await start_test_server()
+ self._channel = aio.insecure_channel(address)
+ self._stub = test_pb2_grpc.TestServiceStub(self._channel)
+
+ async def tearDown(self):
+ await self._channel.close()
+ await self._server.stop(None)
+
+
+class SecureLocalInteropTest(InteropTestCaseMixin, AioTestBase):
+
+ async def setUp(self):
+ server_credentials = grpc.ssl_server_credentials([
+ (resources.private_key(), resources.certificate_chain())
+ ])
+ channel_credentials = grpc.ssl_channel_credentials(
+ resources.test_root_certificates())
+ channel_options = ((
+ 'grpc.ssl_target_name_override',
+ _SERVER_HOST_OVERRIDE,
+ ),)
+
+ address, self._server = await start_test_server(
+ secure=True, server_credentials=server_credentials)
+ self._channel = aio.secure_channel(address, channel_credentials,
+ channel_options)
+ self._stub = test_pb2_grpc.TestServiceStub(self._channel)
+
+ async def tearDown(self):
+ await self._channel.close()
+ await self._server.stop(None)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.INFO)
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/methods.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/methods.py
index 1bb70326b9..aa39976981 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/methods.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/methods.py
@@ -1,456 +1,456 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Implementations of interoperability test methods."""
-
-import argparse
-import asyncio
-import collections
-import datetime
-import enum
-import inspect
-import json
-import os
-import threading
-import time
-from typing import Any, Optional, Union
-
-import grpc
-from google import auth as google_auth
-from google.auth import environment_vars as google_auth_environment_vars
-from google.auth.transport import grpc as google_auth_transport_grpc
-from google.auth.transport import requests as google_auth_transport_requests
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import empty_pb2, messages_pb2, test_pb2_grpc
-
-_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
-_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
-
-
-async def _expect_status_code(call: aio.Call,
- expected_code: grpc.StatusCode) -> None:
- code = await call.code()
- if code != expected_code:
- raise ValueError('expected code %s, got %s' %
- (expected_code, await call.code()))
-
-
-async def _expect_status_details(call: aio.Call, expected_details: str) -> None:
- details = await call.details()
- if details != expected_details:
- raise ValueError('expected message %s, got %s' %
- (expected_details, await call.details()))
-
-
-async def _validate_status_code_and_details(call: aio.Call,
- expected_code: grpc.StatusCode,
- expected_details: str) -> None:
- await _expect_status_code(call, expected_code)
- await _expect_status_details(call, expected_details)
-
-
-def _validate_payload_type_and_length(
- response: Union[messages_pb2.SimpleResponse, messages_pb2.
- StreamingOutputCallResponse], expected_type: Any,
- expected_length: int) -> None:
- if response.payload.type is not expected_type:
- raise ValueError('expected payload type %s, got %s' %
- (expected_type, type(response.payload.type)))
- elif len(response.payload.body) != expected_length:
- raise ValueError('expected payload body size %d, got %d' %
- (expected_length, len(response.payload.body)))
-
-
-async def _large_unary_common_behavior(
- stub: test_pb2_grpc.TestServiceStub, fill_username: bool,
- fill_oauth_scope: bool, call_credentials: Optional[grpc.CallCredentials]
-) -> messages_pb2.SimpleResponse:
- size = 314159
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=size,
- payload=messages_pb2.Payload(body=b'\x00' * 271828),
- fill_username=fill_username,
- fill_oauth_scope=fill_oauth_scope)
- response = await stub.UnaryCall(request, credentials=call_credentials)
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size)
- return response
-
-
-async def _empty_unary(stub: test_pb2_grpc.TestServiceStub) -> None:
- response = await stub.EmptyCall(empty_pb2.Empty())
- if not isinstance(response, empty_pb2.Empty):
- raise TypeError('response is of type "%s", not empty_pb2.Empty!' %
- type(response))
-
-
-async def _large_unary(stub: test_pb2_grpc.TestServiceStub) -> None:
- await _large_unary_common_behavior(stub, False, False, None)
-
-
-async def _client_streaming(stub: test_pb2_grpc.TestServiceStub) -> None:
- payload_body_sizes = (
- 27182,
- 8,
- 1828,
- 45904,
- )
-
- async def request_gen():
- for size in payload_body_sizes:
- yield messages_pb2.StreamingInputCallRequest(
- payload=messages_pb2.Payload(body=b'\x00' * size))
-
- response = await stub.StreamingInputCall(request_gen())
- if response.aggregated_payload_size != sum(payload_body_sizes):
- raise ValueError('incorrect size %d!' %
- response.aggregated_payload_size)
-
-
-async def _server_streaming(stub: test_pb2_grpc.TestServiceStub) -> None:
- sizes = (
- 31415,
- 9,
- 2653,
- 58979,
- )
-
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(
- messages_pb2.ResponseParameters(size=sizes[0]),
- messages_pb2.ResponseParameters(size=sizes[1]),
- messages_pb2.ResponseParameters(size=sizes[2]),
- messages_pb2.ResponseParameters(size=sizes[3]),
- ))
- call = stub.StreamingOutputCall(request)
- for size in sizes:
- response = await call.read()
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
- size)
-
-
-async def _ping_pong(stub: test_pb2_grpc.TestServiceStub) -> None:
- request_response_sizes = (
- 31415,
- 9,
- 2653,
- 58979,
- )
- request_payload_sizes = (
- 27182,
- 8,
- 1828,
- 45904,
- )
-
- call = stub.FullDuplexCall()
- for response_size, payload_size in zip(request_response_sizes,
- request_payload_sizes):
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(
- size=response_size),),
- payload=messages_pb2.Payload(body=b'\x00' * payload_size))
-
- await call.write(request)
- response = await call.read()
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
- response_size)
- await call.done_writing()
- await _validate_status_code_and_details(call, grpc.StatusCode.OK, '')
-
-
-async def _cancel_after_begin(stub: test_pb2_grpc.TestServiceStub):
- call = stub.StreamingInputCall()
- call.cancel()
- if not call.cancelled():
- raise ValueError('expected cancelled method to return True')
- code = await call.code()
- if code is not grpc.StatusCode.CANCELLED:
- raise ValueError('expected status code CANCELLED')
-
-
-async def _cancel_after_first_response(stub: test_pb2_grpc.TestServiceStub):
- request_response_sizes = (
- 31415,
- 9,
- 2653,
- 58979,
- )
- request_payload_sizes = (
- 27182,
- 8,
- 1828,
- 45904,
- )
-
- call = stub.FullDuplexCall()
-
- response_size = request_response_sizes[0]
- payload_size = request_payload_sizes[0]
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(
- size=response_size),),
- payload=messages_pb2.Payload(body=b'\x00' * payload_size))
-
- await call.write(request)
- await call.read()
-
- call.cancel()
-
- try:
- await call.read()
- except asyncio.CancelledError:
- assert await call.code() is grpc.StatusCode.CANCELLED
- else:
- raise ValueError('expected call to be cancelled')
-
-
-async def _timeout_on_sleeping_server(stub: test_pb2_grpc.TestServiceStub):
- request_payload_size = 27182
- time_limit = datetime.timedelta(seconds=1)
-
- call = stub.FullDuplexCall(timeout=time_limit.total_seconds())
-
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- payload=messages_pb2.Payload(body=b'\x00' * request_payload_size),
- response_parameters=(messages_pb2.ResponseParameters(
- interval_us=int(time_limit.total_seconds() * 2 * 10**6)),))
- await call.write(request)
- await call.done_writing()
- try:
- await call.read()
- except aio.AioRpcError as rpc_error:
- if rpc_error.code() is not grpc.StatusCode.DEADLINE_EXCEEDED:
- raise
- else:
- raise ValueError('expected call to exceed deadline')
-
-
-async def _empty_stream(stub: test_pb2_grpc.TestServiceStub):
- call = stub.FullDuplexCall()
- await call.done_writing()
- assert await call.read() == aio.EOF
-
-
-async def _status_code_and_message(stub: test_pb2_grpc.TestServiceStub):
- details = 'test status message'
- status = grpc.StatusCode.UNKNOWN # code = 2
-
- # Test with a UnaryCall
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=1,
- payload=messages_pb2.Payload(body=b'\x00'),
- response_status=messages_pb2.EchoStatus(code=status.value[0],
- message=details))
- call = stub.UnaryCall(request)
- await _validate_status_code_and_details(call, status, details)
-
- # Test with a FullDuplexCall
- call = stub.FullDuplexCall()
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(size=1),),
- payload=messages_pb2.Payload(body=b'\x00'),
- response_status=messages_pb2.EchoStatus(code=status.value[0],
- message=details))
- await call.write(request) # sends the initial request.
- await call.done_writing()
- try:
- await call.read()
- except aio.AioRpcError as rpc_error:
- assert rpc_error.code() == status
- await _validate_status_code_and_details(call, status, details)
-
-
-async def _unimplemented_method(stub: test_pb2_grpc.TestServiceStub):
- call = stub.UnimplementedCall(empty_pb2.Empty())
- await _expect_status_code(call, grpc.StatusCode.UNIMPLEMENTED)
-
-
-async def _unimplemented_service(stub: test_pb2_grpc.UnimplementedServiceStub):
- call = stub.UnimplementedCall(empty_pb2.Empty())
- await _expect_status_code(call, grpc.StatusCode.UNIMPLEMENTED)
-
-
-async def _custom_metadata(stub: test_pb2_grpc.TestServiceStub):
- initial_metadata_value = "test_initial_metadata_value"
- trailing_metadata_value = b"\x0a\x0b\x0a\x0b\x0a\x0b"
- metadata = aio.Metadata(
- (_INITIAL_METADATA_KEY, initial_metadata_value),
- (_TRAILING_METADATA_KEY, trailing_metadata_value),
- )
-
- async def _validate_metadata(call):
- initial_metadata = await call.initial_metadata()
- if initial_metadata[_INITIAL_METADATA_KEY] != initial_metadata_value:
- raise ValueError('expected initial metadata %s, got %s' %
- (initial_metadata_value,
- initial_metadata[_INITIAL_METADATA_KEY]))
-
- trailing_metadata = await call.trailing_metadata()
- if trailing_metadata[_TRAILING_METADATA_KEY] != trailing_metadata_value:
- raise ValueError('expected trailing metadata %s, got %s' %
- (trailing_metadata_value,
- trailing_metadata[_TRAILING_METADATA_KEY]))
-
- # Testing with UnaryCall
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=1,
- payload=messages_pb2.Payload(body=b'\x00'))
- call = stub.UnaryCall(request, metadata=metadata)
- await _validate_metadata(call)
-
- # Testing with FullDuplexCall
- call = stub.FullDuplexCall(metadata=metadata)
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(size=1),))
- await call.write(request)
- await call.read()
- await call.done_writing()
- await _validate_metadata(call)
-
-
-async def _compute_engine_creds(stub: test_pb2_grpc.TestServiceStub,
- args: argparse.Namespace):
- response = await _large_unary_common_behavior(stub, True, True, None)
- if args.default_service_account != response.username:
- raise ValueError('expected username %s, got %s' %
- (args.default_service_account, response.username))
-
-
-async def _oauth2_auth_token(stub: test_pb2_grpc.TestServiceStub,
- args: argparse.Namespace):
- json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
- wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
- response = await _large_unary_common_behavior(stub, True, True, None)
- if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
- if args.oauth_scope.find(response.oauth_scope) == -1:
- raise ValueError(
- 'expected to find oauth scope "{}" in received "{}"'.format(
- response.oauth_scope, args.oauth_scope))
-
-
-async def _jwt_token_creds(stub: test_pb2_grpc.TestServiceStub):
- json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
- wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
- response = await _large_unary_common_behavior(stub, True, False, None)
- if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
-
-
-async def _per_rpc_creds(stub: test_pb2_grpc.TestServiceStub,
- args: argparse.Namespace):
- json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
- wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
- google_credentials, unused_project_id = google_auth.default(
- scopes=[args.oauth_scope])
- call_credentials = grpc.metadata_call_credentials(
- google_auth_transport_grpc.AuthMetadataPlugin(
- credentials=google_credentials,
- request=google_auth_transport_requests.Request()))
- response = await _large_unary_common_behavior(stub, True, False,
- call_credentials)
- if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
-
-
-async def _special_status_message(stub: test_pb2_grpc.TestServiceStub):
- details = b'\t\ntest with whitespace\r\nand Unicode BMP \xe2\x98\xba and non-BMP \xf0\x9f\x98\x88\t\n'.decode(
- 'utf-8')
- status = grpc.StatusCode.UNKNOWN # code = 2
-
- # Test with a UnaryCall
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=1,
- payload=messages_pb2.Payload(body=b'\x00'),
- response_status=messages_pb2.EchoStatus(code=status.value[0],
- message=details))
- call = stub.UnaryCall(request)
- await _validate_status_code_and_details(call, status, details)
-
-
-@enum.unique
-class TestCase(enum.Enum):
- EMPTY_UNARY = 'empty_unary'
- LARGE_UNARY = 'large_unary'
- SERVER_STREAMING = 'server_streaming'
- CLIENT_STREAMING = 'client_streaming'
- PING_PONG = 'ping_pong'
- CANCEL_AFTER_BEGIN = 'cancel_after_begin'
- CANCEL_AFTER_FIRST_RESPONSE = 'cancel_after_first_response'
- TIMEOUT_ON_SLEEPING_SERVER = 'timeout_on_sleeping_server'
- EMPTY_STREAM = 'empty_stream'
- STATUS_CODE_AND_MESSAGE = 'status_code_and_message'
- UNIMPLEMENTED_METHOD = 'unimplemented_method'
- UNIMPLEMENTED_SERVICE = 'unimplemented_service'
- CUSTOM_METADATA = "custom_metadata"
- COMPUTE_ENGINE_CREDS = 'compute_engine_creds'
- OAUTH2_AUTH_TOKEN = 'oauth2_auth_token'
- JWT_TOKEN_CREDS = 'jwt_token_creds'
- PER_RPC_CREDS = 'per_rpc_creds'
- SPECIAL_STATUS_MESSAGE = 'special_status_message'
-
-
-_TEST_CASE_IMPLEMENTATION_MAPPING = {
- TestCase.EMPTY_UNARY: _empty_unary,
- TestCase.LARGE_UNARY: _large_unary,
- TestCase.SERVER_STREAMING: _server_streaming,
- TestCase.CLIENT_STREAMING: _client_streaming,
- TestCase.PING_PONG: _ping_pong,
- TestCase.CANCEL_AFTER_BEGIN: _cancel_after_begin,
- TestCase.CANCEL_AFTER_FIRST_RESPONSE: _cancel_after_first_response,
- TestCase.TIMEOUT_ON_SLEEPING_SERVER: _timeout_on_sleeping_server,
- TestCase.EMPTY_STREAM: _empty_stream,
- TestCase.STATUS_CODE_AND_MESSAGE: _status_code_and_message,
- TestCase.UNIMPLEMENTED_METHOD: _unimplemented_method,
- TestCase.UNIMPLEMENTED_SERVICE: _unimplemented_service,
- TestCase.CUSTOM_METADATA: _custom_metadata,
- TestCase.COMPUTE_ENGINE_CREDS: _compute_engine_creds,
- TestCase.OAUTH2_AUTH_TOKEN: _oauth2_auth_token,
- TestCase.JWT_TOKEN_CREDS: _jwt_token_creds,
- TestCase.PER_RPC_CREDS: _per_rpc_creds,
- TestCase.SPECIAL_STATUS_MESSAGE: _special_status_message,
-}
-
-
-async def test_interoperability(case: TestCase,
- stub: test_pb2_grpc.TestServiceStub,
- args: Optional[argparse.Namespace] = None
- ) -> None:
- method = _TEST_CASE_IMPLEMENTATION_MAPPING.get(case)
- if method is None:
- raise NotImplementedError(f'Test case "{case}" not implemented!')
- else:
- num_params = len(inspect.signature(method).parameters)
- if num_params == 1:
- await method(stub)
- elif num_params == 2:
- if args is not None:
- await method(stub, args)
- else:
- raise ValueError(f'Failed to run case [{case}]: args is None')
- else:
- raise ValueError(f'Invalid number of parameters [{num_params}]')
+# Copyright 2019 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Implementations of interoperability test methods."""
+
+import argparse
+import asyncio
+import collections
+import datetime
+import enum
+import inspect
+import json
+import os
+import threading
+import time
+from typing import Any, Optional, Union
+
+import grpc
+from google import auth as google_auth
+from google.auth import environment_vars as google_auth_environment_vars
+from google.auth.transport import grpc as google_auth_transport_grpc
+from google.auth.transport import requests as google_auth_transport_requests
+from grpc.experimental import aio
+
+from src.proto.grpc.testing import empty_pb2, messages_pb2, test_pb2_grpc
+
+_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
+_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
+
+
+async def _expect_status_code(call: aio.Call,
+ expected_code: grpc.StatusCode) -> None:
+ code = await call.code()
+ if code != expected_code:
+ raise ValueError('expected code %s, got %s' %
+ (expected_code, await call.code()))
+
+
+async def _expect_status_details(call: aio.Call, expected_details: str) -> None:
+ details = await call.details()
+ if details != expected_details:
+ raise ValueError('expected message %s, got %s' %
+ (expected_details, await call.details()))
+
+
+async def _validate_status_code_and_details(call: aio.Call,
+ expected_code: grpc.StatusCode,
+ expected_details: str) -> None:
+ await _expect_status_code(call, expected_code)
+ await _expect_status_details(call, expected_details)
+
+
+def _validate_payload_type_and_length(
+ response: Union[messages_pb2.SimpleResponse, messages_pb2.
+ StreamingOutputCallResponse], expected_type: Any,
+ expected_length: int) -> None:
+ if response.payload.type is not expected_type:
+ raise ValueError('expected payload type %s, got %s' %
+ (expected_type, type(response.payload.type)))
+ elif len(response.payload.body) != expected_length:
+ raise ValueError('expected payload body size %d, got %d' %
+ (expected_length, len(response.payload.body)))
+
+
+async def _large_unary_common_behavior(
+ stub: test_pb2_grpc.TestServiceStub, fill_username: bool,
+ fill_oauth_scope: bool, call_credentials: Optional[grpc.CallCredentials]
+) -> messages_pb2.SimpleResponse:
+ size = 314159
+ request = messages_pb2.SimpleRequest(
+ response_type=messages_pb2.COMPRESSABLE,
+ response_size=size,
+ payload=messages_pb2.Payload(body=b'\x00' * 271828),
+ fill_username=fill_username,
+ fill_oauth_scope=fill_oauth_scope)
+ response = await stub.UnaryCall(request, credentials=call_credentials)
+ _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size)
+ return response
+
+
+async def _empty_unary(stub: test_pb2_grpc.TestServiceStub) -> None:
+ response = await stub.EmptyCall(empty_pb2.Empty())
+ if not isinstance(response, empty_pb2.Empty):
+ raise TypeError('response is of type "%s", not empty_pb2.Empty!' %
+ type(response))
+
+
+async def _large_unary(stub: test_pb2_grpc.TestServiceStub) -> None:
+ await _large_unary_common_behavior(stub, False, False, None)
+
+
+async def _client_streaming(stub: test_pb2_grpc.TestServiceStub) -> None:
+ payload_body_sizes = (
+ 27182,
+ 8,
+ 1828,
+ 45904,
+ )
+
+ async def request_gen():
+ for size in payload_body_sizes:
+ yield messages_pb2.StreamingInputCallRequest(
+ payload=messages_pb2.Payload(body=b'\x00' * size))
+
+ response = await stub.StreamingInputCall(request_gen())
+ if response.aggregated_payload_size != sum(payload_body_sizes):
+ raise ValueError('incorrect size %d!' %
+ response.aggregated_payload_size)
+
+
+async def _server_streaming(stub: test_pb2_grpc.TestServiceStub) -> None:
+ sizes = (
+ 31415,
+ 9,
+ 2653,
+ 58979,
+ )
+
+ request = messages_pb2.StreamingOutputCallRequest(
+ response_type=messages_pb2.COMPRESSABLE,
+ response_parameters=(
+ messages_pb2.ResponseParameters(size=sizes[0]),
+ messages_pb2.ResponseParameters(size=sizes[1]),
+ messages_pb2.ResponseParameters(size=sizes[2]),
+ messages_pb2.ResponseParameters(size=sizes[3]),
+ ))
+ call = stub.StreamingOutputCall(request)
+ for size in sizes:
+ response = await call.read()
+ _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
+ size)
+
+
+async def _ping_pong(stub: test_pb2_grpc.TestServiceStub) -> None:
+ request_response_sizes = (
+ 31415,
+ 9,
+ 2653,
+ 58979,
+ )
+ request_payload_sizes = (
+ 27182,
+ 8,
+ 1828,
+ 45904,
+ )
+
+ call = stub.FullDuplexCall()
+ for response_size, payload_size in zip(request_response_sizes,
+ request_payload_sizes):
+ request = messages_pb2.StreamingOutputCallRequest(
+ response_type=messages_pb2.COMPRESSABLE,
+ response_parameters=(messages_pb2.ResponseParameters(
+ size=response_size),),
+ payload=messages_pb2.Payload(body=b'\x00' * payload_size))
+
+ await call.write(request)
+ response = await call.read()
+ _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
+ response_size)
+ await call.done_writing()
+ await _validate_status_code_and_details(call, grpc.StatusCode.OK, '')
+
+
+async def _cancel_after_begin(stub: test_pb2_grpc.TestServiceStub):
+ call = stub.StreamingInputCall()
+ call.cancel()
+ if not call.cancelled():
+ raise ValueError('expected cancelled method to return True')
+ code = await call.code()
+ if code is not grpc.StatusCode.CANCELLED:
+ raise ValueError('expected status code CANCELLED')
+
+
+async def _cancel_after_first_response(stub: test_pb2_grpc.TestServiceStub):
+ request_response_sizes = (
+ 31415,
+ 9,
+ 2653,
+ 58979,
+ )
+ request_payload_sizes = (
+ 27182,
+ 8,
+ 1828,
+ 45904,
+ )
+
+ call = stub.FullDuplexCall()
+
+ response_size = request_response_sizes[0]
+ payload_size = request_payload_sizes[0]
+ request = messages_pb2.StreamingOutputCallRequest(
+ response_type=messages_pb2.COMPRESSABLE,
+ response_parameters=(messages_pb2.ResponseParameters(
+ size=response_size),),
+ payload=messages_pb2.Payload(body=b'\x00' * payload_size))
+
+ await call.write(request)
+ await call.read()
+
+ call.cancel()
+
+ try:
+ await call.read()
+ except asyncio.CancelledError:
+ assert await call.code() is grpc.StatusCode.CANCELLED
+ else:
+ raise ValueError('expected call to be cancelled')
+
+
+async def _timeout_on_sleeping_server(stub: test_pb2_grpc.TestServiceStub):
+ request_payload_size = 27182
+ time_limit = datetime.timedelta(seconds=1)
+
+ call = stub.FullDuplexCall(timeout=time_limit.total_seconds())
+
+ request = messages_pb2.StreamingOutputCallRequest(
+ response_type=messages_pb2.COMPRESSABLE,
+ payload=messages_pb2.Payload(body=b'\x00' * request_payload_size),
+ response_parameters=(messages_pb2.ResponseParameters(
+ interval_us=int(time_limit.total_seconds() * 2 * 10**6)),))
+ await call.write(request)
+ await call.done_writing()
+ try:
+ await call.read()
+ except aio.AioRpcError as rpc_error:
+ if rpc_error.code() is not grpc.StatusCode.DEADLINE_EXCEEDED:
+ raise
+ else:
+ raise ValueError('expected call to exceed deadline')
+
+
+async def _empty_stream(stub: test_pb2_grpc.TestServiceStub):
+ call = stub.FullDuplexCall()
+ await call.done_writing()
+ assert await call.read() == aio.EOF
+
+
+async def _status_code_and_message(stub: test_pb2_grpc.TestServiceStub):
+ details = 'test status message'
+ status = grpc.StatusCode.UNKNOWN # code = 2
+
+ # Test with a UnaryCall
+ request = messages_pb2.SimpleRequest(
+ response_type=messages_pb2.COMPRESSABLE,
+ response_size=1,
+ payload=messages_pb2.Payload(body=b'\x00'),
+ response_status=messages_pb2.EchoStatus(code=status.value[0],
+ message=details))
+ call = stub.UnaryCall(request)
+ await _validate_status_code_and_details(call, status, details)
+
+ # Test with a FullDuplexCall
+ call = stub.FullDuplexCall()
+ request = messages_pb2.StreamingOutputCallRequest(
+ response_type=messages_pb2.COMPRESSABLE,
+ response_parameters=(messages_pb2.ResponseParameters(size=1),),
+ payload=messages_pb2.Payload(body=b'\x00'),
+ response_status=messages_pb2.EchoStatus(code=status.value[0],
+ message=details))
+ await call.write(request) # sends the initial request.
+ await call.done_writing()
+ try:
+ await call.read()
+ except aio.AioRpcError as rpc_error:
+ assert rpc_error.code() == status
+ await _validate_status_code_and_details(call, status, details)
+
+
+async def _unimplemented_method(stub: test_pb2_grpc.TestServiceStub):
+ call = stub.UnimplementedCall(empty_pb2.Empty())
+ await _expect_status_code(call, grpc.StatusCode.UNIMPLEMENTED)
+
+
+async def _unimplemented_service(stub: test_pb2_grpc.UnimplementedServiceStub):
+ call = stub.UnimplementedCall(empty_pb2.Empty())
+ await _expect_status_code(call, grpc.StatusCode.UNIMPLEMENTED)
+
+
+async def _custom_metadata(stub: test_pb2_grpc.TestServiceStub):
+ initial_metadata_value = "test_initial_metadata_value"
+ trailing_metadata_value = b"\x0a\x0b\x0a\x0b\x0a\x0b"
+ metadata = aio.Metadata(
+ (_INITIAL_METADATA_KEY, initial_metadata_value),
+ (_TRAILING_METADATA_KEY, trailing_metadata_value),
+ )
+
+ async def _validate_metadata(call):
+ initial_metadata = await call.initial_metadata()
+ if initial_metadata[_INITIAL_METADATA_KEY] != initial_metadata_value:
+ raise ValueError('expected initial metadata %s, got %s' %
+ (initial_metadata_value,
+ initial_metadata[_INITIAL_METADATA_KEY]))
+
+ trailing_metadata = await call.trailing_metadata()
+ if trailing_metadata[_TRAILING_METADATA_KEY] != trailing_metadata_value:
+ raise ValueError('expected trailing metadata %s, got %s' %
+ (trailing_metadata_value,
+ trailing_metadata[_TRAILING_METADATA_KEY]))
+
+ # Testing with UnaryCall
+ request = messages_pb2.SimpleRequest(
+ response_type=messages_pb2.COMPRESSABLE,
+ response_size=1,
+ payload=messages_pb2.Payload(body=b'\x00'))
+ call = stub.UnaryCall(request, metadata=metadata)
+ await _validate_metadata(call)
+
+ # Testing with FullDuplexCall
+ call = stub.FullDuplexCall(metadata=metadata)
+ request = messages_pb2.StreamingOutputCallRequest(
+ response_type=messages_pb2.COMPRESSABLE,
+ response_parameters=(messages_pb2.ResponseParameters(size=1),))
+ await call.write(request)
+ await call.read()
+ await call.done_writing()
+ await _validate_metadata(call)
+
+
+async def _compute_engine_creds(stub: test_pb2_grpc.TestServiceStub,
+ args: argparse.Namespace):
+ response = await _large_unary_common_behavior(stub, True, True, None)
+ if args.default_service_account != response.username:
+ raise ValueError('expected username %s, got %s' %
+ (args.default_service_account, response.username))
+
+
+async def _oauth2_auth_token(stub: test_pb2_grpc.TestServiceStub,
+ args: argparse.Namespace):
+ json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
+ wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
+ response = await _large_unary_common_behavior(stub, True, True, None)
+ if wanted_email != response.username:
+ raise ValueError('expected username %s, got %s' %
+ (wanted_email, response.username))
+ if args.oauth_scope.find(response.oauth_scope) == -1:
+ raise ValueError(
+ 'expected to find oauth scope "{}" in received "{}"'.format(
+ response.oauth_scope, args.oauth_scope))
+
+
+async def _jwt_token_creds(stub: test_pb2_grpc.TestServiceStub):
+ json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
+ wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
+ response = await _large_unary_common_behavior(stub, True, False, None)
+ if wanted_email != response.username:
+ raise ValueError('expected username %s, got %s' %
+ (wanted_email, response.username))
+
+
+async def _per_rpc_creds(stub: test_pb2_grpc.TestServiceStub,
+ args: argparse.Namespace):
+ json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
+ wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
+ google_credentials, unused_project_id = google_auth.default(
+ scopes=[args.oauth_scope])
+ call_credentials = grpc.metadata_call_credentials(
+ google_auth_transport_grpc.AuthMetadataPlugin(
+ credentials=google_credentials,
+ request=google_auth_transport_requests.Request()))
+ response = await _large_unary_common_behavior(stub, True, False,
+ call_credentials)
+ if wanted_email != response.username:
+ raise ValueError('expected username %s, got %s' %
+ (wanted_email, response.username))
+
+
+async def _special_status_message(stub: test_pb2_grpc.TestServiceStub):
+ details = b'\t\ntest with whitespace\r\nand Unicode BMP \xe2\x98\xba and non-BMP \xf0\x9f\x98\x88\t\n'.decode(
+ 'utf-8')
+ status = grpc.StatusCode.UNKNOWN # code = 2
+
+ # Test with a UnaryCall
+ request = messages_pb2.SimpleRequest(
+ response_type=messages_pb2.COMPRESSABLE,
+ response_size=1,
+ payload=messages_pb2.Payload(body=b'\x00'),
+ response_status=messages_pb2.EchoStatus(code=status.value[0],
+ message=details))
+ call = stub.UnaryCall(request)
+ await _validate_status_code_and_details(call, status, details)
+
+
+@enum.unique
+class TestCase(enum.Enum):
+ EMPTY_UNARY = 'empty_unary'
+ LARGE_UNARY = 'large_unary'
+ SERVER_STREAMING = 'server_streaming'
+ CLIENT_STREAMING = 'client_streaming'
+ PING_PONG = 'ping_pong'
+ CANCEL_AFTER_BEGIN = 'cancel_after_begin'
+ CANCEL_AFTER_FIRST_RESPONSE = 'cancel_after_first_response'
+ TIMEOUT_ON_SLEEPING_SERVER = 'timeout_on_sleeping_server'
+ EMPTY_STREAM = 'empty_stream'
+ STATUS_CODE_AND_MESSAGE = 'status_code_and_message'
+ UNIMPLEMENTED_METHOD = 'unimplemented_method'
+ UNIMPLEMENTED_SERVICE = 'unimplemented_service'
+ CUSTOM_METADATA = "custom_metadata"
+ COMPUTE_ENGINE_CREDS = 'compute_engine_creds'
+ OAUTH2_AUTH_TOKEN = 'oauth2_auth_token'
+ JWT_TOKEN_CREDS = 'jwt_token_creds'
+ PER_RPC_CREDS = 'per_rpc_creds'
+ SPECIAL_STATUS_MESSAGE = 'special_status_message'
+
+
+_TEST_CASE_IMPLEMENTATION_MAPPING = {
+ TestCase.EMPTY_UNARY: _empty_unary,
+ TestCase.LARGE_UNARY: _large_unary,
+ TestCase.SERVER_STREAMING: _server_streaming,
+ TestCase.CLIENT_STREAMING: _client_streaming,
+ TestCase.PING_PONG: _ping_pong,
+ TestCase.CANCEL_AFTER_BEGIN: _cancel_after_begin,
+ TestCase.CANCEL_AFTER_FIRST_RESPONSE: _cancel_after_first_response,
+ TestCase.TIMEOUT_ON_SLEEPING_SERVER: _timeout_on_sleeping_server,
+ TestCase.EMPTY_STREAM: _empty_stream,
+ TestCase.STATUS_CODE_AND_MESSAGE: _status_code_and_message,
+ TestCase.UNIMPLEMENTED_METHOD: _unimplemented_method,
+ TestCase.UNIMPLEMENTED_SERVICE: _unimplemented_service,
+ TestCase.CUSTOM_METADATA: _custom_metadata,
+ TestCase.COMPUTE_ENGINE_CREDS: _compute_engine_creds,
+ TestCase.OAUTH2_AUTH_TOKEN: _oauth2_auth_token,
+ TestCase.JWT_TOKEN_CREDS: _jwt_token_creds,
+ TestCase.PER_RPC_CREDS: _per_rpc_creds,
+ TestCase.SPECIAL_STATUS_MESSAGE: _special_status_message,
+}
+
+
+async def test_interoperability(case: TestCase,
+ stub: test_pb2_grpc.TestServiceStub,
+ args: Optional[argparse.Namespace] = None
+ ) -> None:
+ method = _TEST_CASE_IMPLEMENTATION_MAPPING.get(case)
+ if method is None:
+ raise NotImplementedError(f'Test case "{case}" not implemented!')
+ else:
+ num_params = len(inspect.signature(method).parameters)
+ if num_params == 1:
+ await method(stub)
+ elif num_params == 2:
+ if args is not None:
+ await method(stub, args)
+ else:
+ raise ValueError(f'Failed to run case [{case}]: args is None')
+ else:
+ raise ValueError(f'Invalid number of parameters [{num_params}]')
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/server.py
index 7e5782b43d..509abdf0b2 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/server.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/server.py
@@ -1,49 +1,49 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The gRPC interoperability test server using AsyncIO stack."""
-
-import asyncio
-import argparse
-import logging
-
-import grpc
-
-from tests.interop import server as interop_server_lib
-from tests_aio.unit import _test_server
-
-logging.basicConfig(level=logging.DEBUG)
-_LOGGER = logging.getLogger(__name__)
-_LOGGER.setLevel(logging.DEBUG)
-
-
-async def serve():
- args = interop_server_lib.parse_interop_server_arguments()
-
- if args.use_tls or args.use_alts:
- credentials = interop_server_lib.get_server_credentials(args.use_tls)
- address, server = await _test_server.start_test_server(
- port=args.port, secure=True, server_credentials=credentials)
- else:
- address, server = await _test_server.start_test_server(
- port=args.port,
- secure=False,
- )
-
- _LOGGER.info('Server serving at %s', address)
- await server.wait_for_termination()
- _LOGGER.info('Server stopped; exiting.')
-
-
-if __name__ == '__main__':
- asyncio.get_event_loop().run_until_complete(serve())
+# Copyright 2019 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""The gRPC interoperability test server using AsyncIO stack."""
+
+import asyncio
+import argparse
+import logging
+
+import grpc
+
+from tests.interop import server as interop_server_lib
+from tests_aio.unit import _test_server
+
+logging.basicConfig(level=logging.DEBUG)
+_LOGGER = logging.getLogger(__name__)
+_LOGGER.setLevel(logging.DEBUG)
+
+
+async def serve():
+ args = interop_server_lib.parse_interop_server_arguments()
+
+ if args.use_tls or args.use_alts:
+ credentials = interop_server_lib.get_server_credentials(args.use_tls)
+ address, server = await _test_server.start_test_server(
+ port=args.port, secure=True, server_credentials=credentials)
+ else:
+ address, server = await _test_server.start_test_server(
+ port=args.port,
+ secure=False,
+ )
+
+ _LOGGER.info('Server serving at %s', address)
+ await server.wait_for_termination()
+ _LOGGER.info('Server stopped; exiting.')
+
+
+if __name__ == '__main__':
+ asyncio.get_event_loop().run_until_complete(serve())
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/__init__.py
index 8d89990e82..5772620b60 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/__init__.py
@@ -1,13 +1,13 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/reflection_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/reflection_servicer_test.py
index 73ecc46718..edd2d79eab 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/reflection_servicer_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/reflection_servicer_test.py
@@ -1,193 +1,193 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests of grpc_reflection.v1alpha.reflection."""
-
-import logging
-import unittest
-
-import grpc
-from google.protobuf import descriptor_pb2
-from grpc.experimental import aio
-
-from grpc_reflection.v1alpha import (reflection, reflection_pb2,
- reflection_pb2_grpc)
-from src.proto.grpc.testing import empty_pb2
-from src.proto.grpc.testing.proto2 import empty2_extensions_pb2
-from tests_aio.unit._test_base import AioTestBase
-
-_EMPTY_PROTO_FILE_NAME = 'src/proto/grpc/testing/empty.proto'
-_EMPTY_PROTO_SYMBOL_NAME = 'grpc.testing.Empty'
-_SERVICE_NAMES = ('Angstrom', 'Bohr', 'Curie', 'Dyson', 'Einstein', 'Feynman',
- 'Galilei')
-_EMPTY_EXTENSIONS_SYMBOL_NAME = 'grpc.testing.proto2.EmptyWithExtensions'
-_EMPTY_EXTENSIONS_NUMBERS = (
- 124,
- 125,
- 126,
- 127,
- 128,
-)
-
-
-def _file_descriptor_to_proto(descriptor):
- proto = descriptor_pb2.FileDescriptorProto()
- descriptor.CopyToProto(proto)
- return proto.SerializeToString()
-
-
-class ReflectionServicerTest(AioTestBase):
-
- async def setUp(self):
- self._server = aio.server()
- reflection.enable_server_reflection(_SERVICE_NAMES, self._server)
- port = self._server.add_insecure_port('[::]:0')
- await self._server.start()
-
- self._channel = aio.insecure_channel('localhost:%d' % port)
- self._stub = reflection_pb2_grpc.ServerReflectionStub(self._channel)
-
- async def tearDown(self):
- await self._server.stop(None)
- await self._channel.close()
-
- async def test_file_by_name(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- file_by_filename=_EMPTY_PROTO_FILE_NAME),
- reflection_pb2.ServerReflectionRequest(
- file_by_filename='i-donut-exist'),
- )
- responses = []
- async for response in self._stub.ServerReflectionInfo(iter(requests)):
- responses.append(response)
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- file_descriptor_response=reflection_pb2.FileDescriptorResponse(
- file_descriptor_proto=(
- _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self.assertSequenceEqual(expected_responses, responses)
-
- async def test_file_by_symbol(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- file_containing_symbol=_EMPTY_PROTO_SYMBOL_NAME),
- reflection_pb2.ServerReflectionRequest(
- file_containing_symbol='i.donut.exist.co.uk.org.net.me.name.foo'
- ),
- )
- responses = []
- async for response in self._stub.ServerReflectionInfo(iter(requests)):
- responses.append(response)
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- file_descriptor_response=reflection_pb2.FileDescriptorResponse(
- file_descriptor_proto=(
- _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self.assertSequenceEqual(expected_responses, responses)
-
- async def test_file_containing_extension(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- file_containing_extension=reflection_pb2.ExtensionRequest(
- containing_type=_EMPTY_EXTENSIONS_SYMBOL_NAME,
- extension_number=125,
- ),),
- reflection_pb2.ServerReflectionRequest(
- file_containing_extension=reflection_pb2.ExtensionRequest(
- containing_type='i.donut.exist.co.uk.org.net.me.name.foo',
- extension_number=55,
- ),),
- )
- responses = []
- async for response in self._stub.ServerReflectionInfo(iter(requests)):
- responses.append(response)
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- file_descriptor_response=reflection_pb2.FileDescriptorResponse(
- file_descriptor_proto=(_file_descriptor_to_proto(
- empty2_extensions_pb2.DESCRIPTOR),))),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self.assertSequenceEqual(expected_responses, responses)
-
- async def test_extension_numbers_of_type(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- all_extension_numbers_of_type=_EMPTY_EXTENSIONS_SYMBOL_NAME),
- reflection_pb2.ServerReflectionRequest(
- all_extension_numbers_of_type='i.donut.exist.co.uk.net.name.foo'
- ),
- )
- responses = []
- async for response in self._stub.ServerReflectionInfo(iter(requests)):
- responses.append(response)
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- all_extension_numbers_response=reflection_pb2.
- ExtensionNumberResponse(
- base_type_name=_EMPTY_EXTENSIONS_SYMBOL_NAME,
- extension_number=_EMPTY_EXTENSIONS_NUMBERS)),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self.assertSequenceEqual(expected_responses, responses)
-
- async def test_list_services(self):
- requests = (reflection_pb2.ServerReflectionRequest(list_services='',),)
- responses = []
- async for response in self._stub.ServerReflectionInfo(iter(requests)):
- responses.append(response)
- expected_responses = (reflection_pb2.ServerReflectionResponse(
- valid_host='',
- list_services_response=reflection_pb2.ListServiceResponse(
- service=tuple(
- reflection_pb2.ServiceResponse(name=name)
- for name in _SERVICE_NAMES))),)
- self.assertSequenceEqual(expected_responses, responses)
-
- def test_reflection_service_name(self):
- self.assertEqual(reflection.SERVICE_NAME,
- 'grpc.reflection.v1alpha.ServerReflection')
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2016 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests of grpc_reflection.v1alpha.reflection."""
+
+import logging
+import unittest
+
+import grpc
+from google.protobuf import descriptor_pb2
+from grpc.experimental import aio
+
+from grpc_reflection.v1alpha import (reflection, reflection_pb2,
+ reflection_pb2_grpc)
+from src.proto.grpc.testing import empty_pb2
+from src.proto.grpc.testing.proto2 import empty2_extensions_pb2
+from tests_aio.unit._test_base import AioTestBase
+
+_EMPTY_PROTO_FILE_NAME = 'src/proto/grpc/testing/empty.proto'
+_EMPTY_PROTO_SYMBOL_NAME = 'grpc.testing.Empty'
+_SERVICE_NAMES = ('Angstrom', 'Bohr', 'Curie', 'Dyson', 'Einstein', 'Feynman',
+ 'Galilei')
+_EMPTY_EXTENSIONS_SYMBOL_NAME = 'grpc.testing.proto2.EmptyWithExtensions'
+_EMPTY_EXTENSIONS_NUMBERS = (
+ 124,
+ 125,
+ 126,
+ 127,
+ 128,
+)
+
+
+def _file_descriptor_to_proto(descriptor):
+ proto = descriptor_pb2.FileDescriptorProto()
+ descriptor.CopyToProto(proto)
+ return proto.SerializeToString()
+
+
+class ReflectionServicerTest(AioTestBase):
+
+ async def setUp(self):
+ self._server = aio.server()
+ reflection.enable_server_reflection(_SERVICE_NAMES, self._server)
+ port = self._server.add_insecure_port('[::]:0')
+ await self._server.start()
+
+ self._channel = aio.insecure_channel('localhost:%d' % port)
+ self._stub = reflection_pb2_grpc.ServerReflectionStub(self._channel)
+
+ async def tearDown(self):
+ await self._server.stop(None)
+ await self._channel.close()
+
+ async def test_file_by_name(self):
+ requests = (
+ reflection_pb2.ServerReflectionRequest(
+ file_by_filename=_EMPTY_PROTO_FILE_NAME),
+ reflection_pb2.ServerReflectionRequest(
+ file_by_filename='i-donut-exist'),
+ )
+ responses = []
+ async for response in self._stub.ServerReflectionInfo(iter(requests)):
+ responses.append(response)
+ expected_responses = (
+ reflection_pb2.ServerReflectionResponse(
+ valid_host='',
+ file_descriptor_response=reflection_pb2.FileDescriptorResponse(
+ file_descriptor_proto=(
+ _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))),
+ reflection_pb2.ServerReflectionResponse(
+ valid_host='',
+ error_response=reflection_pb2.ErrorResponse(
+ error_code=grpc.StatusCode.NOT_FOUND.value[0],
+ error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
+ )),
+ )
+ self.assertSequenceEqual(expected_responses, responses)
+
+ async def test_file_by_symbol(self):
+ requests = (
+ reflection_pb2.ServerReflectionRequest(
+ file_containing_symbol=_EMPTY_PROTO_SYMBOL_NAME),
+ reflection_pb2.ServerReflectionRequest(
+ file_containing_symbol='i.donut.exist.co.uk.org.net.me.name.foo'
+ ),
+ )
+ responses = []
+ async for response in self._stub.ServerReflectionInfo(iter(requests)):
+ responses.append(response)
+ expected_responses = (
+ reflection_pb2.ServerReflectionResponse(
+ valid_host='',
+ file_descriptor_response=reflection_pb2.FileDescriptorResponse(
+ file_descriptor_proto=(
+ _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))),
+ reflection_pb2.ServerReflectionResponse(
+ valid_host='',
+ error_response=reflection_pb2.ErrorResponse(
+ error_code=grpc.StatusCode.NOT_FOUND.value[0],
+ error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
+ )),
+ )
+ self.assertSequenceEqual(expected_responses, responses)
+
+ async def test_file_containing_extension(self):
+ requests = (
+ reflection_pb2.ServerReflectionRequest(
+ file_containing_extension=reflection_pb2.ExtensionRequest(
+ containing_type=_EMPTY_EXTENSIONS_SYMBOL_NAME,
+ extension_number=125,
+ ),),
+ reflection_pb2.ServerReflectionRequest(
+ file_containing_extension=reflection_pb2.ExtensionRequest(
+ containing_type='i.donut.exist.co.uk.org.net.me.name.foo',
+ extension_number=55,
+ ),),
+ )
+ responses = []
+ async for response in self._stub.ServerReflectionInfo(iter(requests)):
+ responses.append(response)
+ expected_responses = (
+ reflection_pb2.ServerReflectionResponse(
+ valid_host='',
+ file_descriptor_response=reflection_pb2.FileDescriptorResponse(
+ file_descriptor_proto=(_file_descriptor_to_proto(
+ empty2_extensions_pb2.DESCRIPTOR),))),
+ reflection_pb2.ServerReflectionResponse(
+ valid_host='',
+ error_response=reflection_pb2.ErrorResponse(
+ error_code=grpc.StatusCode.NOT_FOUND.value[0],
+ error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
+ )),
+ )
+ self.assertSequenceEqual(expected_responses, responses)
+
+ async def test_extension_numbers_of_type(self):
+ requests = (
+ reflection_pb2.ServerReflectionRequest(
+ all_extension_numbers_of_type=_EMPTY_EXTENSIONS_SYMBOL_NAME),
+ reflection_pb2.ServerReflectionRequest(
+ all_extension_numbers_of_type='i.donut.exist.co.uk.net.name.foo'
+ ),
+ )
+ responses = []
+ async for response in self._stub.ServerReflectionInfo(iter(requests)):
+ responses.append(response)
+ expected_responses = (
+ reflection_pb2.ServerReflectionResponse(
+ valid_host='',
+ all_extension_numbers_response=reflection_pb2.
+ ExtensionNumberResponse(
+ base_type_name=_EMPTY_EXTENSIONS_SYMBOL_NAME,
+ extension_number=_EMPTY_EXTENSIONS_NUMBERS)),
+ reflection_pb2.ServerReflectionResponse(
+ valid_host='',
+ error_response=reflection_pb2.ErrorResponse(
+ error_code=grpc.StatusCode.NOT_FOUND.value[0],
+ error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
+ )),
+ )
+ self.assertSequenceEqual(expected_responses, responses)
+
+ async def test_list_services(self):
+ requests = (reflection_pb2.ServerReflectionRequest(list_services='',),)
+ responses = []
+ async for response in self._stub.ServerReflectionInfo(iter(requests)):
+ responses.append(response)
+ expected_responses = (reflection_pb2.ServerReflectionResponse(
+ valid_host='',
+ list_services_response=reflection_pb2.ListServiceResponse(
+ service=tuple(
+ reflection_pb2.ServiceResponse(name=name)
+ for name in _SERVICE_NAMES))),)
+ self.assertSequenceEqual(expected_responses, responses)
+
+ def test_reflection_service_name(self):
+ self.assertEqual(reflection.SERVICE_NAME,
+ 'grpc.reflection.v1alpha.ServerReflection')
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/__init__.py
index 2cb28cb464..1517f71d09 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/__init__.py
@@ -1,13 +1,13 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/grpc_status_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/grpc_status_test.py
index 61c446bf0e..980cf5a67e 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/grpc_status_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/grpc_status_test.py
@@ -1,175 +1,175 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests of grpc_status with gRPC AsyncIO stack."""
-
-import logging
-import traceback
-import unittest
-
-import grpc
-from google.protobuf import any_pb2
-from google.rpc import code_pb2, error_details_pb2, status_pb2
-from grpc.experimental import aio
-
-from grpc_status import rpc_status
-from tests_aio.unit._test_base import AioTestBase
-
-_STATUS_OK = '/test/StatusOK'
-_STATUS_NOT_OK = '/test/StatusNotOk'
-_ERROR_DETAILS = '/test/ErrorDetails'
-_INCONSISTENT = '/test/Inconsistent'
-_INVALID_CODE = '/test/InvalidCode'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-
-_GRPC_DETAILS_METADATA_KEY = 'grpc-status-details-bin'
-
-_STATUS_DETAILS = 'This is an error detail'
-_STATUS_DETAILS_ANOTHER = 'This is another error detail'
-
-
-async def _ok_unary_unary(request, servicer_context):
- return _RESPONSE
-
-
-async def _not_ok_unary_unary(request, servicer_context):
- await servicer_context.abort(grpc.StatusCode.INTERNAL, _STATUS_DETAILS)
-
-
-async def _error_details_unary_unary(request, servicer_context):
- details = any_pb2.Any()
- details.Pack(
- error_details_pb2.DebugInfo(stack_entries=traceback.format_stack(),
- detail='Intentionally invoked'))
- rich_status = status_pb2.Status(
- code=code_pb2.INTERNAL,
- message=_STATUS_DETAILS,
- details=[details],
- )
- await servicer_context.abort_with_status(rpc_status.to_status(rich_status))
-
-
-async def _inconsistent_unary_unary(request, servicer_context):
- rich_status = status_pb2.Status(
- code=code_pb2.INTERNAL,
- message=_STATUS_DETAILS,
- )
- servicer_context.set_code(grpc.StatusCode.NOT_FOUND)
- servicer_context.set_details(_STATUS_DETAILS_ANOTHER)
- # The user put inconsistent status information in the trailing metadata
- servicer_context.set_trailing_metadata(
- ((_GRPC_DETAILS_METADATA_KEY, rich_status.SerializeToString()),))
-
-
-async def _invalid_code_unary_unary(request, servicer_context):
- rich_status = status_pb2.Status(
- code=42,
- message='Invalid code',
- )
- await servicer_context.abort_with_status(rpc_status.to_status(rich_status))
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _STATUS_OK:
- return grpc.unary_unary_rpc_method_handler(_ok_unary_unary)
- elif handler_call_details.method == _STATUS_NOT_OK:
- return grpc.unary_unary_rpc_method_handler(_not_ok_unary_unary)
- elif handler_call_details.method == _ERROR_DETAILS:
- return grpc.unary_unary_rpc_method_handler(
- _error_details_unary_unary)
- elif handler_call_details.method == _INCONSISTENT:
- return grpc.unary_unary_rpc_method_handler(
- _inconsistent_unary_unary)
- elif handler_call_details.method == _INVALID_CODE:
- return grpc.unary_unary_rpc_method_handler(
- _invalid_code_unary_unary)
- else:
- return None
-
-
-class StatusTest(AioTestBase):
-
- async def setUp(self):
- self._server = aio.server()
- self._server.add_generic_rpc_handlers((_GenericHandler(),))
- port = self._server.add_insecure_port('[::]:0')
- await self._server.start()
-
- self._channel = aio.insecure_channel('localhost:%d' % port)
-
- async def tearDown(self):
- await self._server.stop(None)
- await self._channel.close()
-
- async def test_status_ok(self):
- call = self._channel.unary_unary(_STATUS_OK)(_REQUEST)
-
- # A successful RPC does not carry a rich status
- status = await rpc_status.aio.from_call(call)
- self.assertIs(status, None)
-
- async def test_status_not_ok(self):
- call = self._channel.unary_unary(_STATUS_NOT_OK)(_REQUEST)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
-
- self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
- # A failed RPC does not automatically generate a rich status
- status = await rpc_status.aio.from_call(call)
- self.assertIs(status, None)
-
- async def test_error_details(self):
- call = self._channel.unary_unary(_ERROR_DETAILS)(_REQUEST)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
-
- status = await rpc_status.aio.from_call(call)
- self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
- self.assertEqual(status.code, code_pb2.Code.Value('INTERNAL'))
-
- # Check if the underlying proto message is intact
- self.assertTrue(status.details[0].Is(
- error_details_pb2.DebugInfo.DESCRIPTOR))
- info = error_details_pb2.DebugInfo()
- status.details[0].Unpack(info)
- self.assertIn('_error_details_unary_unary', info.stack_entries[-1])
-
- async def test_code_message_validation(self):
- call = self._channel.unary_unary(_INCONSISTENT)(_REQUEST)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
- self.assertEqual(rpc_error.code(), grpc.StatusCode.NOT_FOUND)
-
- # Code/Message validation failed
- with self.assertRaises(ValueError):
- await rpc_status.aio.from_call(call)
-
- async def test_invalid_code(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channel.unary_unary(_INVALID_CODE)(_REQUEST)
- rpc_error = exception_context.exception
- self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN)
- # Invalid status code exception raised during conversion
- self.assertIn('Invalid status code', rpc_error.details())
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests of grpc_status with gRPC AsyncIO stack."""
+
+import logging
+import traceback
+import unittest
+
+import grpc
+from google.protobuf import any_pb2
+from google.rpc import code_pb2, error_details_pb2, status_pb2
+from grpc.experimental import aio
+
+from grpc_status import rpc_status
+from tests_aio.unit._test_base import AioTestBase
+
+_STATUS_OK = '/test/StatusOK'
+_STATUS_NOT_OK = '/test/StatusNotOk'
+_ERROR_DETAILS = '/test/ErrorDetails'
+_INCONSISTENT = '/test/Inconsistent'
+_INVALID_CODE = '/test/InvalidCode'
+
+_REQUEST = b'\x00\x00\x00'
+_RESPONSE = b'\x01\x01\x01'
+
+_GRPC_DETAILS_METADATA_KEY = 'grpc-status-details-bin'
+
+_STATUS_DETAILS = 'This is an error detail'
+_STATUS_DETAILS_ANOTHER = 'This is another error detail'
+
+
+async def _ok_unary_unary(request, servicer_context):
+ return _RESPONSE
+
+
+async def _not_ok_unary_unary(request, servicer_context):
+ await servicer_context.abort(grpc.StatusCode.INTERNAL, _STATUS_DETAILS)
+
+
+async def _error_details_unary_unary(request, servicer_context):
+ details = any_pb2.Any()
+ details.Pack(
+ error_details_pb2.DebugInfo(stack_entries=traceback.format_stack(),
+ detail='Intentionally invoked'))
+ rich_status = status_pb2.Status(
+ code=code_pb2.INTERNAL,
+ message=_STATUS_DETAILS,
+ details=[details],
+ )
+ await servicer_context.abort_with_status(rpc_status.to_status(rich_status))
+
+
+async def _inconsistent_unary_unary(request, servicer_context):
+ rich_status = status_pb2.Status(
+ code=code_pb2.INTERNAL,
+ message=_STATUS_DETAILS,
+ )
+ servicer_context.set_code(grpc.StatusCode.NOT_FOUND)
+ servicer_context.set_details(_STATUS_DETAILS_ANOTHER)
+ # The user put inconsistent status information in the trailing metadata
+ servicer_context.set_trailing_metadata(
+ ((_GRPC_DETAILS_METADATA_KEY, rich_status.SerializeToString()),))
+
+
+async def _invalid_code_unary_unary(request, servicer_context):
+ rich_status = status_pb2.Status(
+ code=42,
+ message='Invalid code',
+ )
+ await servicer_context.abort_with_status(rpc_status.to_status(rich_status))
+
+
+class _GenericHandler(grpc.GenericRpcHandler):
+
+ def service(self, handler_call_details):
+ if handler_call_details.method == _STATUS_OK:
+ return grpc.unary_unary_rpc_method_handler(_ok_unary_unary)
+ elif handler_call_details.method == _STATUS_NOT_OK:
+ return grpc.unary_unary_rpc_method_handler(_not_ok_unary_unary)
+ elif handler_call_details.method == _ERROR_DETAILS:
+ return grpc.unary_unary_rpc_method_handler(
+ _error_details_unary_unary)
+ elif handler_call_details.method == _INCONSISTENT:
+ return grpc.unary_unary_rpc_method_handler(
+ _inconsistent_unary_unary)
+ elif handler_call_details.method == _INVALID_CODE:
+ return grpc.unary_unary_rpc_method_handler(
+ _invalid_code_unary_unary)
+ else:
+ return None
+
+
+class StatusTest(AioTestBase):
+
+ async def setUp(self):
+ self._server = aio.server()
+ self._server.add_generic_rpc_handlers((_GenericHandler(),))
+ port = self._server.add_insecure_port('[::]:0')
+ await self._server.start()
+
+ self._channel = aio.insecure_channel('localhost:%d' % port)
+
+ async def tearDown(self):
+ await self._server.stop(None)
+ await self._channel.close()
+
+ async def test_status_ok(self):
+ call = self._channel.unary_unary(_STATUS_OK)(_REQUEST)
+
+ # A successful RPC does not carry a rich status
+ status = await rpc_status.aio.from_call(call)
+ self.assertIs(status, None)
+
+ async def test_status_not_ok(self):
+ call = self._channel.unary_unary(_STATUS_NOT_OK)(_REQUEST)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+ rpc_error = exception_context.exception
+
+ self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
+ # A failed RPC does not automatically generate a rich status
+ status = await rpc_status.aio.from_call(call)
+ self.assertIs(status, None)
+
+ async def test_error_details(self):
+ call = self._channel.unary_unary(_ERROR_DETAILS)(_REQUEST)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+ rpc_error = exception_context.exception
+
+ status = await rpc_status.aio.from_call(call)
+ self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
+ self.assertEqual(status.code, code_pb2.Code.Value('INTERNAL'))
+
+ # Check if the underlying proto message is intact
+ self.assertTrue(status.details[0].Is(
+ error_details_pb2.DebugInfo.DESCRIPTOR))
+ info = error_details_pb2.DebugInfo()
+ status.details[0].Unpack(info)
+ self.assertIn('_error_details_unary_unary', info.stack_entries[-1])
+
+ async def test_code_message_validation(self):
+ call = self._channel.unary_unary(_INCONSISTENT)(_REQUEST)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+ rpc_error = exception_context.exception
+ self.assertEqual(rpc_error.code(), grpc.StatusCode.NOT_FOUND)
+
+ # Code/Message validation failed
+ with self.assertRaises(ValueError):
+ await rpc_status.aio.from_call(call)
+
+ async def test_invalid_code(self):
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await self._channel.unary_unary(_INVALID_CODE)(_REQUEST)
+ rpc_error = exception_context.exception
+ self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN)
+ # Invalid status code exception raised during conversion
+ self.assertIn('Invalid status code', rpc_error.details())
+
+
+if __name__ == '__main__':
+ logging.basicConfig()
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_common.py
index 6156aa98dc..016280a152 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_common.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_common.py
@@ -1,99 +1,99 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import grpc
-from typing import AsyncIterable
-from grpc.experimental import aio
-from grpc.aio._typing import MetadatumType, MetadataKey, MetadataValue
-from grpc.aio._metadata import Metadata
-
-from tests.unit.framework.common import test_constants
-
-
-def seen_metadata(expected: Metadata, actual: Metadata):
- return not bool(set(tuple(expected)) - set(tuple(actual)))
-
-
-def seen_metadatum(expected_key: MetadataKey, expected_value: MetadataValue,
- actual: Metadata) -> bool:
- obtained = actual[expected_key]
- return obtained == expected_value
-
-
-async def block_until_certain_state(channel: aio.Channel,
- expected_state: grpc.ChannelConnectivity):
- state = channel.get_state()
- while state != expected_state:
- await channel.wait_for_state_change(state)
- state = channel.get_state()
-
-
-def inject_callbacks(call: aio.Call):
- first_callback_ran = asyncio.Event()
-
- def first_callback(call):
- # Validate that all responses have been received
- # and the call is in an end state.
- assert call.done()
- first_callback_ran.set()
-
- second_callback_ran = asyncio.Event()
-
- def second_callback(call):
- # Validate that all responses have been received
- # and the call is an end state.
- assert call.done()
- second_callback_ran.set()
-
- call.add_done_callback(first_callback)
- call.add_done_callback(second_callback)
-
- async def validation():
- await asyncio.wait_for(
- asyncio.gather(first_callback_ran.wait(),
- second_callback_ran.wait()),
- test_constants.SHORT_TIMEOUT)
-
- return validation()
-
-
-class CountingRequestIterator:
-
- def __init__(self, request_iterator):
- self.request_cnt = 0
- self._request_iterator = request_iterator
-
- async def _forward_requests(self):
- async for request in self._request_iterator:
- self.request_cnt += 1
- yield request
-
- def __aiter__(self):
- return self._forward_requests()
-
-
-class CountingResponseIterator:
-
- def __init__(self, response_iterator):
- self.response_cnt = 0
- self._response_iterator = response_iterator
-
- async def _forward_responses(self):
- async for response in self._response_iterator:
- self.response_cnt += 1
- yield response
-
- def __aiter__(self):
- return self._forward_responses()
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+import grpc
+from typing import AsyncIterable
+from grpc.experimental import aio
+from grpc.aio._typing import MetadatumType, MetadataKey, MetadataValue
+from grpc.aio._metadata import Metadata
+
+from tests.unit.framework.common import test_constants
+
+
+def seen_metadata(expected: Metadata, actual: Metadata):
+ return not bool(set(tuple(expected)) - set(tuple(actual)))
+
+
+def seen_metadatum(expected_key: MetadataKey, expected_value: MetadataValue,
+ actual: Metadata) -> bool:
+ obtained = actual[expected_key]
+ return obtained == expected_value
+
+
+async def block_until_certain_state(channel: aio.Channel,
+ expected_state: grpc.ChannelConnectivity):
+ state = channel.get_state()
+ while state != expected_state:
+ await channel.wait_for_state_change(state)
+ state = channel.get_state()
+
+
+def inject_callbacks(call: aio.Call):
+ first_callback_ran = asyncio.Event()
+
+ def first_callback(call):
+        # Validate that all responses have been received
+ # and the call is an end state.
+ assert call.done()
+ first_callback_ran.set()
+
+ second_callback_ran = asyncio.Event()
+
+ def second_callback(call):
+ # Validate that all responses have been received
+ # and the call is an end state.
+ assert call.done()
+ second_callback_ran.set()
+
+ call.add_done_callback(first_callback)
+ call.add_done_callback(second_callback)
+
+ async def validation():
+ await asyncio.wait_for(
+ asyncio.gather(first_callback_ran.wait(),
+ second_callback_ran.wait()),
+ test_constants.SHORT_TIMEOUT)
+
+ return validation()
+
+
+class CountingRequestIterator:
+
+ def __init__(self, request_iterator):
+ self.request_cnt = 0
+ self._request_iterator = request_iterator
+
+ async def _forward_requests(self):
+ async for request in self._request_iterator:
+ self.request_cnt += 1
+ yield request
+
+ def __aiter__(self):
+ return self._forward_requests()
+
+
+class CountingResponseIterator:
+
+ def __init__(self, response_iterator):
+ self.response_cnt = 0
+ self._response_iterator = response_iterator
+
+ async def _forward_responses(self):
+ async for response in self._response_iterator:
+ self.response_cnt += 1
+ yield response
+
+ def __aiter__(self):
+ return self._forward_responses()
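
A small, self-contained sketch of how the counting wrappers above are meant to be used; here CountingResponseIterator wraps a plain async generator instead of a live gRPC stream, which keeps the example runnable on its own:

import asyncio

from tests_aio.unit._common import CountingResponseIterator


async def _numbers():
    for i in range(3):
        yield i


async def main():
    counting = CountingResponseIterator(_numbers())
    async for _ in counting:
        pass
    # The wrapper forwards every item unchanged and keeps a running count.
    assert counting.response_cnt == 3


if __name__ == '__main__':
    asyncio.run(main())
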
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_constants.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_constants.py
index aed61466d0..986a6f9d84 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_constants.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_constants.py
@@ -1,16 +1,16 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-UNREACHABLE_TARGET = '0.0.0.1:1111'
-UNARY_CALL_WITH_SLEEP_VALUE = 0.2
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+UNREACHABLE_TARGET = '0.0.0.1:1111'
+UNARY_CALL_WITH_SLEEP_VALUE = 0.2
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_metadata_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_metadata_test.py
index 023613ece1..c0594cb06a 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_metadata_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_metadata_test.py
@@ -1,137 +1,137 @@
-# Copyright 2020 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests for the metadata abstraction that's used in the asynchronous driver."""
-import logging
-import unittest
-
-from grpc.experimental.aio import Metadata
-
-
-class TestTypeMetadata(unittest.TestCase):
- """Tests for the metadata type"""
-
- _DEFAULT_DATA = (("key1", "value1"), ("key2", "value2"))
- _MULTI_ENTRY_DATA = (("key1", "value1"), ("key1", "other value 1"),
- ("key2", "value2"))
-
- def test_init_metadata(self):
- test_cases = {
-            "empty": (),
- "with-single-data": self._DEFAULT_DATA,
- "with-multi-data": self._MULTI_ENTRY_DATA,
- }
- for case, args in test_cases.items():
- with self.subTest(case=case):
- metadata = Metadata(*args)
- self.assertEqual(len(metadata), len(args))
-
- def test_get_item(self):
- metadata = Metadata(("key", "value1"), ("key", "value2"),
- ("key2", "other value"))
- self.assertEqual(metadata["key"], "value1")
- self.assertEqual(metadata["key2"], "other value")
- self.assertEqual(metadata.get("key"), "value1")
- self.assertEqual(metadata.get("key2"), "other value")
-
- with self.assertRaises(KeyError):
- metadata["key not found"]
- self.assertIsNone(metadata.get("key not found"))
-
- def test_add_value(self):
- metadata = Metadata()
- metadata.add("key", "value")
- metadata.add("key", "second value")
- metadata.add("key2", "value2")
-
- self.assertEqual(metadata["key"], "value")
- self.assertEqual(metadata["key2"], "value2")
-
- def test_get_all_items(self):
- metadata = Metadata(*self._MULTI_ENTRY_DATA)
- self.assertEqual(metadata.get_all("key1"), ["value1", "other value 1"])
- self.assertEqual(metadata.get_all("key2"), ["value2"])
- self.assertEqual(metadata.get_all("non existing key"), [])
-
- def test_container(self):
- metadata = Metadata(*self._MULTI_ENTRY_DATA)
- self.assertIn("key1", metadata)
-
- def test_equals(self):
- metadata = Metadata()
- for key, value in self._DEFAULT_DATA:
- metadata.add(key, value)
- metadata2 = Metadata(*self._DEFAULT_DATA)
-
- self.assertEqual(metadata, metadata2)
- self.assertNotEqual(metadata, "foo")
-
- def test_repr(self):
- metadata = Metadata(*self._DEFAULT_DATA)
- expected = "Metadata({0!r})".format(self._DEFAULT_DATA)
- self.assertEqual(repr(metadata), expected)
-
- def test_set(self):
- metadata = Metadata(*self._MULTI_ENTRY_DATA)
- override_value = "override value"
- for _ in range(3):
- metadata["key1"] = override_value
-
- self.assertEqual(metadata["key1"], override_value)
- self.assertEqual(metadata.get_all("key1"),
- [override_value, "other value 1"])
-
- empty_metadata = Metadata()
- for _ in range(3):
- empty_metadata["key"] = override_value
-
- self.assertEqual(empty_metadata["key"], override_value)
- self.assertEqual(empty_metadata.get_all("key"), [override_value])
-
- def test_set_all(self):
- metadata = Metadata(*self._DEFAULT_DATA)
- metadata.set_all("key", ["value1", b"new value 2"])
-
- self.assertEqual(metadata["key"], "value1")
- self.assertEqual(metadata.get_all("key"), ["value1", b"new value 2"])
-
- def test_delete_values(self):
- metadata = Metadata(*self._MULTI_ENTRY_DATA)
- del metadata["key1"]
- self.assertEqual(metadata.get("key1"), "other value 1")
-
- metadata.delete_all("key1")
- self.assertNotIn("key1", metadata)
-
- metadata.delete_all("key2")
- self.assertEqual(len(metadata), 0)
-
- with self.assertRaises(KeyError):
- del metadata["other key"]
-
- def test_metadata_from_tuple(self):
- scenarios = (
- (None, Metadata()),
- (Metadata(), Metadata()),
- (self._DEFAULT_DATA, Metadata(*self._DEFAULT_DATA)),
- (self._MULTI_ENTRY_DATA, Metadata(*self._MULTI_ENTRY_DATA)),
- (Metadata(*self._DEFAULT_DATA), Metadata(*self._DEFAULT_DATA)),
- )
- for source, expected in scenarios:
- with self.subTest(raw_metadata=source, expected=expected):
- self.assertEqual(expected, Metadata.from_tuple(source))
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
+# Copyright 2020 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for the metadata abstraction that's used in the asynchronous driver."""
+import logging
+import unittest
+
+from grpc.experimental.aio import Metadata
+
+
+class TestTypeMetadata(unittest.TestCase):
+ """Tests for the metadata type"""
+
+ _DEFAULT_DATA = (("key1", "value1"), ("key2", "value2"))
+ _MULTI_ENTRY_DATA = (("key1", "value1"), ("key1", "other value 1"),
+ ("key2", "value2"))
+
+ def test_init_metadata(self):
+ test_cases = {
+            "empty": (),
+ "with-single-data": self._DEFAULT_DATA,
+ "with-multi-data": self._MULTI_ENTRY_DATA,
+ }
+ for case, args in test_cases.items():
+ with self.subTest(case=case):
+ metadata = Metadata(*args)
+ self.assertEqual(len(metadata), len(args))
+
+ def test_get_item(self):
+ metadata = Metadata(("key", "value1"), ("key", "value2"),
+ ("key2", "other value"))
+ self.assertEqual(metadata["key"], "value1")
+ self.assertEqual(metadata["key2"], "other value")
+ self.assertEqual(metadata.get("key"), "value1")
+ self.assertEqual(metadata.get("key2"), "other value")
+
+ with self.assertRaises(KeyError):
+ metadata["key not found"]
+ self.assertIsNone(metadata.get("key not found"))
+
+ def test_add_value(self):
+ metadata = Metadata()
+ metadata.add("key", "value")
+ metadata.add("key", "second value")
+ metadata.add("key2", "value2")
+
+ self.assertEqual(metadata["key"], "value")
+ self.assertEqual(metadata["key2"], "value2")
+
+ def test_get_all_items(self):
+ metadata = Metadata(*self._MULTI_ENTRY_DATA)
+ self.assertEqual(metadata.get_all("key1"), ["value1", "other value 1"])
+ self.assertEqual(metadata.get_all("key2"), ["value2"])
+ self.assertEqual(metadata.get_all("non existing key"), [])
+
+ def test_container(self):
+ metadata = Metadata(*self._MULTI_ENTRY_DATA)
+ self.assertIn("key1", metadata)
+
+ def test_equals(self):
+ metadata = Metadata()
+ for key, value in self._DEFAULT_DATA:
+ metadata.add(key, value)
+ metadata2 = Metadata(*self._DEFAULT_DATA)
+
+ self.assertEqual(metadata, metadata2)
+ self.assertNotEqual(metadata, "foo")
+
+ def test_repr(self):
+ metadata = Metadata(*self._DEFAULT_DATA)
+ expected = "Metadata({0!r})".format(self._DEFAULT_DATA)
+ self.assertEqual(repr(metadata), expected)
+
+ def test_set(self):
+ metadata = Metadata(*self._MULTI_ENTRY_DATA)
+ override_value = "override value"
+ for _ in range(3):
+ metadata["key1"] = override_value
+
+ self.assertEqual(metadata["key1"], override_value)
+ self.assertEqual(metadata.get_all("key1"),
+ [override_value, "other value 1"])
+
+ empty_metadata = Metadata()
+ for _ in range(3):
+ empty_metadata["key"] = override_value
+
+ self.assertEqual(empty_metadata["key"], override_value)
+ self.assertEqual(empty_metadata.get_all("key"), [override_value])
+
+ def test_set_all(self):
+ metadata = Metadata(*self._DEFAULT_DATA)
+ metadata.set_all("key", ["value1", b"new value 2"])
+
+ self.assertEqual(metadata["key"], "value1")
+ self.assertEqual(metadata.get_all("key"), ["value1", b"new value 2"])
+
+ def test_delete_values(self):
+ metadata = Metadata(*self._MULTI_ENTRY_DATA)
+ del metadata["key1"]
+ self.assertEqual(metadata.get("key1"), "other value 1")
+
+ metadata.delete_all("key1")
+ self.assertNotIn("key1", metadata)
+
+ metadata.delete_all("key2")
+ self.assertEqual(len(metadata), 0)
+
+ with self.assertRaises(KeyError):
+ del metadata["other key"]
+
+ def test_metadata_from_tuple(self):
+ scenarios = (
+ (None, Metadata()),
+ (Metadata(), Metadata()),
+ (self._DEFAULT_DATA, Metadata(*self._DEFAULT_DATA)),
+ (self._MULTI_ENTRY_DATA, Metadata(*self._MULTI_ENTRY_DATA)),
+ (Metadata(*self._DEFAULT_DATA), Metadata(*self._DEFAULT_DATA)),
+ )
+ for source, expected in scenarios:
+ with self.subTest(raw_metadata=source, expected=expected):
+ self.assertEqual(expected, Metadata.from_tuple(source))
+
+
+if __name__ == '__main__':
+ logging.basicConfig()
+ unittest.main(verbosity=2)
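
The behaviour exercised above amounts to a small multi-map style API on aio.Metadata; a condensed sketch of the same operations, using only calls the test itself verifies:

from grpc.experimental.aio import Metadata

md = Metadata(('key', 'value'))
md.add('key', 'second value')            # duplicate keys are allowed
assert md['key'] == 'value'              # item lookup returns the first entry
assert md.get_all('key') == ['value', 'second value']

md['key'] = 'override'                   # assignment replaces only the first entry
assert md.get_all('key') == ['override', 'second value']

md.delete_all('key')                     # removes every entry for the key
assert 'key' not in md
assert Metadata.from_tuple(None) == Metadata()
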
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py
index 3011d9f869..ec5f2112da 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py
@@ -12,55 +12,55 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import logging
-import functools
+import logging
+import functools
import asyncio
-from typing import Callable
+from typing import Callable
import unittest
from grpc.experimental import aio
-__all__ = 'AioTestBase'
+__all__ = 'AioTestBase'
-_COROUTINE_FUNCTION_ALLOWLIST = ['setUp', 'tearDown']
-
-
-def _async_to_sync_decorator(f: Callable, loop: asyncio.AbstractEventLoop):
-
- @functools.wraps(f)
- def wrapper(*args, **kwargs):
- return loop.run_until_complete(f(*args, **kwargs))
-
- return wrapper
-
-
-def _get_default_loop(debug=True):
- try:
- loop = asyncio.get_event_loop()
- except:
- loop = asyncio.new_event_loop()
- asyncio.set_event_loop(loop)
- finally:
- loop.set_debug(debug)
- return loop
-
-
-# NOTE(gnossen) this test class can also be implemented with a metaclass.
+_COROUTINE_FUNCTION_ALLOWLIST = ['setUp', 'tearDown']
+
+
+def _async_to_sync_decorator(f: Callable, loop: asyncio.AbstractEventLoop):
+
+ @functools.wraps(f)
+ def wrapper(*args, **kwargs):
+ return loop.run_until_complete(f(*args, **kwargs))
+
+ return wrapper
+
+
+def _get_default_loop(debug=True):
+ try:
+ loop = asyncio.get_event_loop()
+ except:
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+ finally:
+ loop.set_debug(debug)
+ return loop
+
+
+# NOTE(gnossen) this test class can also be implemented with a metaclass.
class AioTestBase(unittest.TestCase):
-    # NOTE(lidi) We need to pick a loop for the entire testing phase; otherwise
-    # new loops get created in new threads, which leads to deadlock.
- _TEST_LOOP = _get_default_loop()
+    # NOTE(lidi) We need to pick a loop for the entire testing phase; otherwise
+    # new loops get created in new threads, which leads to deadlock.
+ _TEST_LOOP = _get_default_loop()
@property
def loop(self):
- return self._TEST_LOOP
-
- def __getattribute__(self, name):
- """Overrides the loading logic to support coroutine functions."""
- attr = super().__getattribute__(name)
-
- # If possible, converts the coroutine into a sync function.
- if name.startswith('test_') or name in _COROUTINE_FUNCTION_ALLOWLIST:
- if asyncio.iscoroutinefunction(attr):
- return _async_to_sync_decorator(attr, self._TEST_LOOP)
- # For other attributes, let them pass.
- return attr
+ return self._TEST_LOOP
+
+ def __getattribute__(self, name):
+ """Overrides the loading logic to support coroutine functions."""
+ attr = super().__getattribute__(name)
+
+ # If possible, converts the coroutine into a sync function.
+ if name.startswith('test_') or name in _COROUTINE_FUNCTION_ALLOWLIST:
+ if asyncio.iscoroutinefunction(attr):
+ return _async_to_sync_decorator(attr, self._TEST_LOOP)
+ # For other attributes, let them pass.
+ return attr
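
In practice the base class above lets test cases be written as plain coroutines: __getattribute__ wraps any test_* method (plus setUp/tearDown) that is a coroutine function so the standard unittest runner drives it on the shared loop. A minimal sketch of a test module built on it:

import asyncio
import unittest

from tests_aio.unit._test_base import AioTestBase


class ExampleTest(AioTestBase):

    async def test_shared_loop(self):
        # Executed via loop.run_until_complete() by the wrapper above.
        await asyncio.sleep(0)
        self.assertFalse(self.loop.is_closed())


if __name__ == '__main__':
    unittest.main(verbosity=2)
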
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py
index cc68739d11..5e5081a38d 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py
@@ -12,132 +12,132 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import asyncio
-import datetime
+import asyncio
+import datetime
-import grpc
+import grpc
from grpc.experimental import aio
-from tests.unit import resources
-
-from src.proto.grpc.testing import empty_pb2, messages_pb2, test_pb2_grpc
-from tests_aio.unit import _constants
-
-_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
-_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
-
-
-async def _maybe_echo_metadata(servicer_context):
- """Copies metadata from request to response if it is present."""
- invocation_metadata = dict(servicer_context.invocation_metadata())
- if _INITIAL_METADATA_KEY in invocation_metadata:
- initial_metadatum = (_INITIAL_METADATA_KEY,
- invocation_metadata[_INITIAL_METADATA_KEY])
- await servicer_context.send_initial_metadata((initial_metadatum,))
- if _TRAILING_METADATA_KEY in invocation_metadata:
- trailing_metadatum = (_TRAILING_METADATA_KEY,
- invocation_metadata[_TRAILING_METADATA_KEY])
- servicer_context.set_trailing_metadata((trailing_metadatum,))
-
-
-async def _maybe_echo_status(request: messages_pb2.SimpleRequest,
- servicer_context):
-    """Echoes the RPC status if demanded by the request."""
- if request.HasField('response_status'):
- await servicer_context.abort(request.response_status.code,
- request.response_status.message)
-
-
-class TestServiceServicer(test_pb2_grpc.TestServiceServicer):
-
- async def UnaryCall(self, request, context):
- await _maybe_echo_metadata(context)
- await _maybe_echo_status(request, context)
- return messages_pb2.SimpleResponse(
- payload=messages_pb2.Payload(type=messages_pb2.COMPRESSABLE,
- body=b'\x00' * request.response_size))
-
- async def EmptyCall(self, request, context):
- return empty_pb2.Empty()
-
- async def StreamingOutputCall(
- self, request: messages_pb2.StreamingOutputCallRequest,
- unused_context):
- for response_parameters in request.response_parameters:
- if response_parameters.interval_us != 0:
- await asyncio.sleep(
- datetime.timedelta(microseconds=response_parameters.
- interval_us).total_seconds())
- yield messages_pb2.StreamingOutputCallResponse(
- payload=messages_pb2.Payload(type=request.response_type,
- body=b'\x00' *
- response_parameters.size))
-
-    # The following methods are extra ones that are registered programmatically
-    # when the server is instantiated. They are not provided by
-    # the proto file.
- async def UnaryCallWithSleep(self, unused_request, unused_context):
- await asyncio.sleep(_constants.UNARY_CALL_WITH_SLEEP_VALUE)
- return messages_pb2.SimpleResponse()
-
- async def StreamingInputCall(self, request_async_iterator, unused_context):
- aggregate_size = 0
- async for request in request_async_iterator:
- if request.payload is not None and request.payload.body:
- aggregate_size += len(request.payload.body)
- return messages_pb2.StreamingInputCallResponse(
- aggregated_payload_size=aggregate_size)
-
- async def FullDuplexCall(self, request_async_iterator, context):
- await _maybe_echo_metadata(context)
- async for request in request_async_iterator:
- await _maybe_echo_status(request, context)
- for response_parameters in request.response_parameters:
- if response_parameters.interval_us != 0:
- await asyncio.sleep(
- datetime.timedelta(microseconds=response_parameters.
- interval_us).total_seconds())
- yield messages_pb2.StreamingOutputCallResponse(
- payload=messages_pb2.Payload(type=request.payload.type,
- body=b'\x00' *
- response_parameters.size))
-
-
-def _create_extra_generic_handler(servicer: TestServiceServicer):
-    # Programmatically add extra methods, not provided by the proto file,
-    # that are used during the tests
- rpc_method_handlers = {
- 'UnaryCallWithSleep':
- grpc.unary_unary_rpc_method_handler(
- servicer.UnaryCallWithSleep,
- request_deserializer=messages_pb2.SimpleRequest.FromString,
- response_serializer=messages_pb2.SimpleResponse.
- SerializeToString)
- }
- return grpc.method_handlers_generic_handler('grpc.testing.TestService',
- rpc_method_handlers)
-
-
-async def start_test_server(port=0,
- secure=False,
- server_credentials=None,
- interceptors=None):
- server = aio.server(options=(('grpc.so_reuseport', 0),),
- interceptors=interceptors)
- servicer = TestServiceServicer()
- test_pb2_grpc.add_TestServiceServicer_to_server(servicer, server)
-
- server.add_generic_rpc_handlers((_create_extra_generic_handler(servicer),))
-
- if secure:
- if server_credentials is None:
- server_credentials = grpc.ssl_server_credentials([
- (resources.private_key(), resources.certificate_chain())
- ])
- port = server.add_secure_port('[::]:%d' % port, server_credentials)
- else:
- port = server.add_insecure_port('[::]:%d' % port)
-
+from tests.unit import resources
+
+from src.proto.grpc.testing import empty_pb2, messages_pb2, test_pb2_grpc
+from tests_aio.unit import _constants
+
+_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
+_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
+
+
+async def _maybe_echo_metadata(servicer_context):
+ """Copies metadata from request to response if it is present."""
+ invocation_metadata = dict(servicer_context.invocation_metadata())
+ if _INITIAL_METADATA_KEY in invocation_metadata:
+ initial_metadatum = (_INITIAL_METADATA_KEY,
+ invocation_metadata[_INITIAL_METADATA_KEY])
+ await servicer_context.send_initial_metadata((initial_metadatum,))
+ if _TRAILING_METADATA_KEY in invocation_metadata:
+ trailing_metadatum = (_TRAILING_METADATA_KEY,
+ invocation_metadata[_TRAILING_METADATA_KEY])
+ servicer_context.set_trailing_metadata((trailing_metadatum,))
+
+
+async def _maybe_echo_status(request: messages_pb2.SimpleRequest,
+ servicer_context):
+    """Echoes the RPC status if demanded by the request."""
+ if request.HasField('response_status'):
+ await servicer_context.abort(request.response_status.code,
+ request.response_status.message)
+
+
+class TestServiceServicer(test_pb2_grpc.TestServiceServicer):
+
+ async def UnaryCall(self, request, context):
+ await _maybe_echo_metadata(context)
+ await _maybe_echo_status(request, context)
+ return messages_pb2.SimpleResponse(
+ payload=messages_pb2.Payload(type=messages_pb2.COMPRESSABLE,
+ body=b'\x00' * request.response_size))
+
+ async def EmptyCall(self, request, context):
+ return empty_pb2.Empty()
+
+ async def StreamingOutputCall(
+ self, request: messages_pb2.StreamingOutputCallRequest,
+ unused_context):
+ for response_parameters in request.response_parameters:
+ if response_parameters.interval_us != 0:
+ await asyncio.sleep(
+ datetime.timedelta(microseconds=response_parameters.
+ interval_us).total_seconds())
+ yield messages_pb2.StreamingOutputCallResponse(
+ payload=messages_pb2.Payload(type=request.response_type,
+ body=b'\x00' *
+ response_parameters.size))
+
+    # The following methods are extra ones that are registered programmatically
+    # when the server is instantiated. They are not provided by
+    # the proto file.
+ async def UnaryCallWithSleep(self, unused_request, unused_context):
+ await asyncio.sleep(_constants.UNARY_CALL_WITH_SLEEP_VALUE)
+ return messages_pb2.SimpleResponse()
+
+ async def StreamingInputCall(self, request_async_iterator, unused_context):
+ aggregate_size = 0
+ async for request in request_async_iterator:
+ if request.payload is not None and request.payload.body:
+ aggregate_size += len(request.payload.body)
+ return messages_pb2.StreamingInputCallResponse(
+ aggregated_payload_size=aggregate_size)
+
+ async def FullDuplexCall(self, request_async_iterator, context):
+ await _maybe_echo_metadata(context)
+ async for request in request_async_iterator:
+ await _maybe_echo_status(request, context)
+ for response_parameters in request.response_parameters:
+ if response_parameters.interval_us != 0:
+ await asyncio.sleep(
+ datetime.timedelta(microseconds=response_parameters.
+ interval_us).total_seconds())
+ yield messages_pb2.StreamingOutputCallResponse(
+ payload=messages_pb2.Payload(type=request.payload.type,
+ body=b'\x00' *
+ response_parameters.size))
+
+
+def _create_extra_generic_handler(servicer: TestServiceServicer):
+    # Programmatically add extra methods, not provided by the proto file,
+    # that are used during the tests
+ rpc_method_handlers = {
+ 'UnaryCallWithSleep':
+ grpc.unary_unary_rpc_method_handler(
+ servicer.UnaryCallWithSleep,
+ request_deserializer=messages_pb2.SimpleRequest.FromString,
+ response_serializer=messages_pb2.SimpleResponse.
+ SerializeToString)
+ }
+ return grpc.method_handlers_generic_handler('grpc.testing.TestService',
+ rpc_method_handlers)
+
+
+async def start_test_server(port=0,
+ secure=False,
+ server_credentials=None,
+ interceptors=None):
+ server = aio.server(options=(('grpc.so_reuseport', 0),),
+ interceptors=interceptors)
+ servicer = TestServiceServicer()
+ test_pb2_grpc.add_TestServiceServicer_to_server(servicer, server)
+
+ server.add_generic_rpc_handlers((_create_extra_generic_handler(servicer),))
+
+ if secure:
+ if server_credentials is None:
+ server_credentials = grpc.ssl_server_credentials([
+ (resources.private_key(), resources.certificate_chain())
+ ])
+ port = server.add_secure_port('[::]:%d' % port, server_credentials)
+ else:
+ port = server.add_insecure_port('[::]:%d' % port)
+
await server.start()
-
+
     # NOTE(lidizheng) returning the server to prevent it from being deallocated
return 'localhost:%d' % port, server
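
A short sketch of how the helper above is typically driven outside a test class; it relies only on names defined in this file, the generated test stubs, and the zero-filled payload behaviour of UnaryCall shown above:

import asyncio

from grpc.experimental import aio
from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
from tests_aio.unit._test_server import start_test_server


async def main():
    address, server = await start_test_server()
    async with aio.insecure_channel(address) as channel:
        stub = test_pb2_grpc.TestServiceStub(channel)
        response = await stub.UnaryCall(
            messages_pb2.SimpleRequest(response_size=3))
        # UnaryCall answers with a zero-filled payload of the requested size.
        assert len(response.payload.body) == 3
    await server.stop(None)


if __name__ == '__main__':
    asyncio.run(main())
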
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/abort_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/abort_test.py
index 3bc5c474cb..828b6884df 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/abort_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/abort_test.py
@@ -1,151 +1,151 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import logging
-import unittest
-import time
-import gc
-
-import grpc
-from grpc.experimental import aio
-from tests_aio.unit._test_base import AioTestBase
-from tests.unit.framework.common import test_constants
-
-_UNARY_UNARY_ABORT = '/test/UnaryUnaryAbort'
-_SUPPRESS_ABORT = '/test/SuppressAbort'
-_REPLACE_ABORT = '/test/ReplaceAbort'
-_ABORT_AFTER_REPLY = '/test/AbortAfterReply'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-_NUM_STREAM_RESPONSES = 5
-
-_ABORT_CODE = grpc.StatusCode.RESOURCE_EXHAUSTED
-_ABORT_DETAILS = 'Dummy error details'
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- @staticmethod
- async def _unary_unary_abort(unused_request, context):
- await context.abort(_ABORT_CODE, _ABORT_DETAILS)
- raise RuntimeError('This line should not be executed')
-
- @staticmethod
- async def _suppress_abort(unused_request, context):
- try:
- await context.abort(_ABORT_CODE, _ABORT_DETAILS)
- except aio.AbortError as e:
- pass
- return _RESPONSE
-
- @staticmethod
- async def _replace_abort(unused_request, context):
- try:
- await context.abort(_ABORT_CODE, _ABORT_DETAILS)
- except aio.AbortError as e:
- await context.abort(grpc.StatusCode.INVALID_ARGUMENT,
- 'Override abort!')
-
- @staticmethod
- async def _abort_after_reply(unused_request, context):
- yield _RESPONSE
- await context.abort(_ABORT_CODE, _ABORT_DETAILS)
- raise RuntimeError('This line should not be executed')
-
- def service(self, handler_details):
- if handler_details.method == _UNARY_UNARY_ABORT:
- return grpc.unary_unary_rpc_method_handler(self._unary_unary_abort)
- if handler_details.method == _SUPPRESS_ABORT:
- return grpc.unary_unary_rpc_method_handler(self._suppress_abort)
- if handler_details.method == _REPLACE_ABORT:
- return grpc.unary_unary_rpc_method_handler(self._replace_abort)
- if handler_details.method == _ABORT_AFTER_REPLY:
- return grpc.unary_stream_rpc_method_handler(self._abort_after_reply)
-
-
-async def _start_test_server():
- server = aio.server()
- port = server.add_insecure_port('[::]:0')
- server.add_generic_rpc_handlers((_GenericHandler(),))
- await server.start()
- return 'localhost:%d' % port, server
-
-
-class TestAbort(AioTestBase):
-
- async def setUp(self):
- address, self._server = await _start_test_server()
- self._channel = aio.insecure_channel(address)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_unary_unary_abort(self):
- method = self._channel.unary_unary(_UNARY_UNARY_ABORT)
- call = method(_REQUEST)
-
- self.assertEqual(_ABORT_CODE, await call.code())
- self.assertEqual(_ABORT_DETAILS, await call.details())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(_ABORT_CODE, rpc_error.code())
- self.assertEqual(_ABORT_DETAILS, rpc_error.details())
-
- async def test_suppress_abort(self):
- method = self._channel.unary_unary(_SUPPRESS_ABORT)
- call = method(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(_ABORT_CODE, rpc_error.code())
- self.assertEqual(_ABORT_DETAILS, rpc_error.details())
-
- async def test_replace_abort(self):
- method = self._channel.unary_unary(_REPLACE_ABORT)
- call = method(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(_ABORT_CODE, rpc_error.code())
- self.assertEqual(_ABORT_DETAILS, rpc_error.details())
-
- async def test_abort_after_reply(self):
- method = self._channel.unary_stream(_ABORT_AFTER_REPLY)
- call = method(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.read()
- await call.read()
-
- rpc_error = exception_context.exception
- self.assertEqual(_ABORT_CODE, rpc_error.code())
- self.assertEqual(_ABORT_DETAILS, rpc_error.details())
-
- self.assertEqual(_ABORT_CODE, await call.code())
- self.assertEqual(_ABORT_DETAILS, await call.details())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2019 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+import logging
+import unittest
+import time
+import gc
+
+import grpc
+from grpc.experimental import aio
+from tests_aio.unit._test_base import AioTestBase
+from tests.unit.framework.common import test_constants
+
+_UNARY_UNARY_ABORT = '/test/UnaryUnaryAbort'
+_SUPPRESS_ABORT = '/test/SuppressAbort'
+_REPLACE_ABORT = '/test/ReplaceAbort'
+_ABORT_AFTER_REPLY = '/test/AbortAfterReply'
+
+_REQUEST = b'\x00\x00\x00'
+_RESPONSE = b'\x01\x01\x01'
+_NUM_STREAM_RESPONSES = 5
+
+_ABORT_CODE = grpc.StatusCode.RESOURCE_EXHAUSTED
+_ABORT_DETAILS = 'Dummy error details'
+
+
+class _GenericHandler(grpc.GenericRpcHandler):
+
+ @staticmethod
+ async def _unary_unary_abort(unused_request, context):
+ await context.abort(_ABORT_CODE, _ABORT_DETAILS)
+ raise RuntimeError('This line should not be executed')
+
+ @staticmethod
+ async def _suppress_abort(unused_request, context):
+ try:
+ await context.abort(_ABORT_CODE, _ABORT_DETAILS)
+ except aio.AbortError as e:
+ pass
+ return _RESPONSE
+
+ @staticmethod
+ async def _replace_abort(unused_request, context):
+ try:
+ await context.abort(_ABORT_CODE, _ABORT_DETAILS)
+ except aio.AbortError as e:
+ await context.abort(grpc.StatusCode.INVALID_ARGUMENT,
+ 'Override abort!')
+
+ @staticmethod
+ async def _abort_after_reply(unused_request, context):
+ yield _RESPONSE
+ await context.abort(_ABORT_CODE, _ABORT_DETAILS)
+ raise RuntimeError('This line should not be executed')
+
+ def service(self, handler_details):
+ if handler_details.method == _UNARY_UNARY_ABORT:
+ return grpc.unary_unary_rpc_method_handler(self._unary_unary_abort)
+ if handler_details.method == _SUPPRESS_ABORT:
+ return grpc.unary_unary_rpc_method_handler(self._suppress_abort)
+ if handler_details.method == _REPLACE_ABORT:
+ return grpc.unary_unary_rpc_method_handler(self._replace_abort)
+ if handler_details.method == _ABORT_AFTER_REPLY:
+ return grpc.unary_stream_rpc_method_handler(self._abort_after_reply)
+
+
+async def _start_test_server():
+ server = aio.server()
+ port = server.add_insecure_port('[::]:0')
+ server.add_generic_rpc_handlers((_GenericHandler(),))
+ await server.start()
+ return 'localhost:%d' % port, server
+
+
+class TestAbort(AioTestBase):
+
+ async def setUp(self):
+ address, self._server = await _start_test_server()
+ self._channel = aio.insecure_channel(address)
+
+ async def tearDown(self):
+ await self._channel.close()
+ await self._server.stop(None)
+
+ async def test_unary_unary_abort(self):
+ method = self._channel.unary_unary(_UNARY_UNARY_ABORT)
+ call = method(_REQUEST)
+
+ self.assertEqual(_ABORT_CODE, await call.code())
+ self.assertEqual(_ABORT_DETAILS, await call.details())
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ rpc_error = exception_context.exception
+ self.assertEqual(_ABORT_CODE, rpc_error.code())
+ self.assertEqual(_ABORT_DETAILS, rpc_error.details())
+
+ async def test_suppress_abort(self):
+ method = self._channel.unary_unary(_SUPPRESS_ABORT)
+ call = method(_REQUEST)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ rpc_error = exception_context.exception
+ self.assertEqual(_ABORT_CODE, rpc_error.code())
+ self.assertEqual(_ABORT_DETAILS, rpc_error.details())
+
+ async def test_replace_abort(self):
+ method = self._channel.unary_unary(_REPLACE_ABORT)
+ call = method(_REQUEST)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ rpc_error = exception_context.exception
+ self.assertEqual(_ABORT_CODE, rpc_error.code())
+ self.assertEqual(_ABORT_DETAILS, rpc_error.details())
+
+ async def test_abort_after_reply(self):
+ method = self._channel.unary_stream(_ABORT_AFTER_REPLY)
+ call = method(_REQUEST)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call.read()
+ await call.read()
+
+ rpc_error = exception_context.exception
+ self.assertEqual(_ABORT_CODE, rpc_error.code())
+ self.assertEqual(_ABORT_DETAILS, rpc_error.details())
+
+ self.assertEqual(_ABORT_CODE, await call.code())
+ self.assertEqual(_ABORT_DETAILS, await call.details())
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
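
The handlers above rely on context.abort() raising aio.AbortError internally, which is why the code after each abort is unreachable unless the error is caught and the status replaced. A minimal sketch of the plain abort pattern with an illustrative method path (not one of the paths registered above):

import grpc
from grpc.experimental import aio

_DENIED = '/test/Denied'  # illustrative method path


async def _deny(unused_request, context: aio.ServicerContext):
    # abort() raises aio.AbortError, so the line below never executes.
    await context.abort(grpc.StatusCode.PERMISSION_DENIED, 'not allowed')
    raise RuntimeError('This line should not be executed')


class _DenyHandler(grpc.GenericRpcHandler):

    def service(self, handler_details):
        if handler_details.method == _DENIED:
            return grpc.unary_unary_rpc_method_handler(_deny)
        return None


async def start_deny_server():
    server = aio.server()
    port = server.add_insecure_port('[::]:0')
    server.add_generic_rpc_handlers((_DenyHandler(),))
    await server.start()
    return 'localhost:%d' % port, server
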
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/aio_rpc_error_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/aio_rpc_error_test.py
index 49532e8b04..b7b18e08f6 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/aio_rpc_error_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/aio_rpc_error_test.py
@@ -1,52 +1,52 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests AioRpcError class."""
-
-import logging
-import unittest
-
-import grpc
-
-from grpc.experimental import aio
-from grpc.aio._call import AioRpcError
-from tests_aio.unit._test_base import AioTestBase
-
-_TEST_INITIAL_METADATA = aio.Metadata(
- ('initial metadata key', 'initial metadata value'))
-_TEST_TRAILING_METADATA = aio.Metadata(
- ('trailing metadata key', 'trailing metadata value'))
-_TEST_DEBUG_ERROR_STRING = '{This is a debug string}'
-
-
-class TestAioRpcError(unittest.TestCase):
-
- def test_attributes(self):
- aio_rpc_error = AioRpcError(grpc.StatusCode.CANCELLED,
- initial_metadata=_TEST_INITIAL_METADATA,
- trailing_metadata=_TEST_TRAILING_METADATA,
- details="details",
- debug_error_string=_TEST_DEBUG_ERROR_STRING)
- self.assertEqual(aio_rpc_error.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(aio_rpc_error.details(), 'details')
- self.assertEqual(aio_rpc_error.initial_metadata(),
- _TEST_INITIAL_METADATA)
- self.assertEqual(aio_rpc_error.trailing_metadata(),
- _TEST_TRAILING_METADATA)
- self.assertEqual(aio_rpc_error.debug_error_string(),
- _TEST_DEBUG_ERROR_STRING)
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
+# Copyright 2019 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests AioRpcError class."""
+
+import logging
+import unittest
+
+import grpc
+
+from grpc.experimental import aio
+from grpc.aio._call import AioRpcError
+from tests_aio.unit._test_base import AioTestBase
+
+_TEST_INITIAL_METADATA = aio.Metadata(
+ ('initial metadata key', 'initial metadata value'))
+_TEST_TRAILING_METADATA = aio.Metadata(
+ ('trailing metadata key', 'trailing metadata value'))
+_TEST_DEBUG_ERROR_STRING = '{This is a debug string}'
+
+
+class TestAioRpcError(unittest.TestCase):
+
+ def test_attributes(self):
+ aio_rpc_error = AioRpcError(grpc.StatusCode.CANCELLED,
+ initial_metadata=_TEST_INITIAL_METADATA,
+ trailing_metadata=_TEST_TRAILING_METADATA,
+ details="details",
+ debug_error_string=_TEST_DEBUG_ERROR_STRING)
+ self.assertEqual(aio_rpc_error.code(), grpc.StatusCode.CANCELLED)
+ self.assertEqual(aio_rpc_error.details(), 'details')
+ self.assertEqual(aio_rpc_error.initial_metadata(),
+ _TEST_INITIAL_METADATA)
+ self.assertEqual(aio_rpc_error.trailing_metadata(),
+ _TEST_TRAILING_METADATA)
+ self.assertEqual(aio_rpc_error.debug_error_string(),
+ _TEST_DEBUG_ERROR_STRING)
+
+
+if __name__ == '__main__':
+ logging.basicConfig()
+ unittest.main(verbosity=2)
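
On the client side an AioRpcError is normally obtained by catching it around an awaited call rather than constructing it directly; a sketch using the deliberately unreachable target from _constants.py:

import asyncio

import grpc
from grpc.experimental import aio
from tests_aio.unit._constants import UNREACHABLE_TARGET


async def main():
    async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
        try:
            await channel.unary_unary('/test/UnaryUnary')(b'\x00')
        except aio.AioRpcError as rpc_error:
            # Nothing listens on the target, so the RPC fails with UNAVAILABLE.
            assert rpc_error.code() == grpc.StatusCode.UNAVAILABLE
            print(rpc_error.details())


if __name__ == '__main__':
    asyncio.run(main())
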
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/auth_context_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/auth_context_test.py
index 42a60abde8..fb30371468 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/auth_context_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/auth_context_test.py
@@ -1,194 +1,194 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Porting auth context tests from sync stack."""
-
-import pickle
-import unittest
-import logging
-
-import grpc
-from grpc.experimental import aio
-from grpc.experimental import session_cache
-import six
-
-from tests.unit import resources
-from tests_aio.unit._test_base import AioTestBase
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x00\x00\x00'
-
-_UNARY_UNARY = '/test/UnaryUnary'
-
-_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
-_CLIENT_IDS = (
- b'*.test.google.fr',
- b'waterzooi.test.google.be',
- b'*.test.youtube.com',
- b'192.168.1.3',
-)
-_ID = 'id'
-_ID_KEY = 'id_key'
-_AUTH_CTX = 'auth_ctx'
-
-_PRIVATE_KEY = resources.private_key()
-_CERTIFICATE_CHAIN = resources.certificate_chain()
-_TEST_ROOT_CERTIFICATES = resources.test_root_certificates()
-_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),)
-_PROPERTY_OPTIONS = ((
- 'grpc.ssl_target_name_override',
- _SERVER_HOST_OVERRIDE,
-),)
-
-
-async def handle_unary_unary(unused_request: bytes,
- servicer_context: aio.ServicerContext):
- return pickle.dumps({
- _ID: servicer_context.peer_identities(),
- _ID_KEY: servicer_context.peer_identity_key(),
- _AUTH_CTX: servicer_context.auth_context()
- })
-
-
-class TestAuthContext(AioTestBase):
-
- async def test_insecure(self):
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = aio.server()
- server.add_generic_rpc_handlers((handler,))
- port = server.add_insecure_port('[::]:0')
- await server.start()
-
- async with aio.insecure_channel('localhost:%d' % port) as channel:
- response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- await server.stop(None)
-
- auth_data = pickle.loads(response)
- self.assertIsNone(auth_data[_ID])
- self.assertIsNone(auth_data[_ID_KEY])
- self.assertDictEqual({}, auth_data[_AUTH_CTX])
-
- async def test_secure_no_cert(self):
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = aio.server()
- server.add_generic_rpc_handlers((handler,))
- server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
- port = server.add_secure_port('[::]:0', server_cred)
- await server.start()
-
- channel_creds = grpc.ssl_channel_credentials(
- root_certificates=_TEST_ROOT_CERTIFICATES)
- channel = aio.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=_PROPERTY_OPTIONS)
- response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- await channel.close()
- await server.stop(None)
-
- auth_data = pickle.loads(response)
- self.assertIsNone(auth_data[_ID])
- self.assertIsNone(auth_data[_ID_KEY])
- self.assertDictEqual(
- {
- 'security_level': [b'TSI_PRIVACY_AND_INTEGRITY'],
- 'transport_security_type': [b'ssl'],
- 'ssl_session_reused': [b'false'],
- }, auth_data[_AUTH_CTX])
-
- async def test_secure_client_cert(self):
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = aio.server()
- server.add_generic_rpc_handlers((handler,))
- server_cred = grpc.ssl_server_credentials(
- _SERVER_CERTS,
- root_certificates=_TEST_ROOT_CERTIFICATES,
- require_client_auth=True)
- port = server.add_secure_port('[::]:0', server_cred)
- await server.start()
-
- channel_creds = grpc.ssl_channel_credentials(
- root_certificates=_TEST_ROOT_CERTIFICATES,
- private_key=_PRIVATE_KEY,
- certificate_chain=_CERTIFICATE_CHAIN)
- channel = aio.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=_PROPERTY_OPTIONS)
-
- response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- await channel.close()
- await server.stop(None)
-
- auth_data = pickle.loads(response)
- auth_ctx = auth_data[_AUTH_CTX]
- self.assertCountEqual(_CLIENT_IDS, auth_data[_ID])
- self.assertEqual('x509_subject_alternative_name', auth_data[_ID_KEY])
- self.assertSequenceEqual([b'ssl'], auth_ctx['transport_security_type'])
- self.assertSequenceEqual([b'*.test.google.com'],
- auth_ctx['x509_common_name'])
-
- async def _do_one_shot_client_rpc(self, channel_creds, channel_options,
- port, expect_ssl_session_reused):
- channel = aio.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=channel_options)
- response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- auth_data = pickle.loads(response)
- self.assertEqual(expect_ssl_session_reused,
- auth_data[_AUTH_CTX]['ssl_session_reused'])
- await channel.close()
-
- async def test_session_resumption(self):
- # Set up a secure server
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = aio.server()
- server.add_generic_rpc_handlers((handler,))
- server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
- port = server.add_secure_port('[::]:0', server_cred)
- await server.start()
-
- # Create a cache for TLS session tickets
- cache = session_cache.ssl_session_cache_lru(1)
- channel_creds = grpc.ssl_channel_credentials(
- root_certificates=_TEST_ROOT_CERTIFICATES)
- channel_options = _PROPERTY_OPTIONS + (
- ('grpc.ssl_session_cache', cache),)
-
- # Initial connection has no session to resume
- await self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port,
- expect_ssl_session_reused=[b'false'])
-
- # Subsequent connections resume sessions
- await self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port,
- expect_ssl_session_reused=[b'true'])
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main()
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Porting auth context tests from sync stack."""
+
+import pickle
+import unittest
+import logging
+
+import grpc
+from grpc.experimental import aio
+from grpc.experimental import session_cache
+import six
+
+from tests.unit import resources
+from tests_aio.unit._test_base import AioTestBase
+
+_REQUEST = b'\x00\x00\x00'
+_RESPONSE = b'\x00\x00\x00'
+
+_UNARY_UNARY = '/test/UnaryUnary'
+
+_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
+_CLIENT_IDS = (
+ b'*.test.google.fr',
+ b'waterzooi.test.google.be',
+ b'*.test.youtube.com',
+ b'192.168.1.3',
+)
+_ID = 'id'
+_ID_KEY = 'id_key'
+_AUTH_CTX = 'auth_ctx'
+
+_PRIVATE_KEY = resources.private_key()
+_CERTIFICATE_CHAIN = resources.certificate_chain()
+_TEST_ROOT_CERTIFICATES = resources.test_root_certificates()
+_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),)
+_PROPERTY_OPTIONS = ((
+ 'grpc.ssl_target_name_override',
+ _SERVER_HOST_OVERRIDE,
+),)
+
+
+async def handle_unary_unary(unused_request: bytes,
+ servicer_context: aio.ServicerContext):
+ return pickle.dumps({
+ _ID: servicer_context.peer_identities(),
+ _ID_KEY: servicer_context.peer_identity_key(),
+ _AUTH_CTX: servicer_context.auth_context()
+ })
+
+
+class TestAuthContext(AioTestBase):
+
+ async def test_insecure(self):
+ handler = grpc.method_handlers_generic_handler('test', {
+ 'UnaryUnary':
+ grpc.unary_unary_rpc_method_handler(handle_unary_unary)
+ })
+ server = aio.server()
+ server.add_generic_rpc_handlers((handler,))
+ port = server.add_insecure_port('[::]:0')
+ await server.start()
+
+ async with aio.insecure_channel('localhost:%d' % port) as channel:
+ response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
+ await server.stop(None)
+
+ auth_data = pickle.loads(response)
+ self.assertIsNone(auth_data[_ID])
+ self.assertIsNone(auth_data[_ID_KEY])
+ self.assertDictEqual({}, auth_data[_AUTH_CTX])
+
+ async def test_secure_no_cert(self):
+ handler = grpc.method_handlers_generic_handler('test', {
+ 'UnaryUnary':
+ grpc.unary_unary_rpc_method_handler(handle_unary_unary)
+ })
+ server = aio.server()
+ server.add_generic_rpc_handlers((handler,))
+ server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
+ port = server.add_secure_port('[::]:0', server_cred)
+ await server.start()
+
+ channel_creds = grpc.ssl_channel_credentials(
+ root_certificates=_TEST_ROOT_CERTIFICATES)
+ channel = aio.secure_channel('localhost:{}'.format(port),
+ channel_creds,
+ options=_PROPERTY_OPTIONS)
+ response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
+ await channel.close()
+ await server.stop(None)
+
+ auth_data = pickle.loads(response)
+ self.assertIsNone(auth_data[_ID])
+ self.assertIsNone(auth_data[_ID_KEY])
+ self.assertDictEqual(
+ {
+ 'security_level': [b'TSI_PRIVACY_AND_INTEGRITY'],
+ 'transport_security_type': [b'ssl'],
+ 'ssl_session_reused': [b'false'],
+ }, auth_data[_AUTH_CTX])
+
+ async def test_secure_client_cert(self):
+ handler = grpc.method_handlers_generic_handler('test', {
+ 'UnaryUnary':
+ grpc.unary_unary_rpc_method_handler(handle_unary_unary)
+ })
+ server = aio.server()
+ server.add_generic_rpc_handlers((handler,))
+ server_cred = grpc.ssl_server_credentials(
+ _SERVER_CERTS,
+ root_certificates=_TEST_ROOT_CERTIFICATES,
+ require_client_auth=True)
+ port = server.add_secure_port('[::]:0', server_cred)
+ await server.start()
+
+ channel_creds = grpc.ssl_channel_credentials(
+ root_certificates=_TEST_ROOT_CERTIFICATES,
+ private_key=_PRIVATE_KEY,
+ certificate_chain=_CERTIFICATE_CHAIN)
+ channel = aio.secure_channel('localhost:{}'.format(port),
+ channel_creds,
+ options=_PROPERTY_OPTIONS)
+
+ response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
+ await channel.close()
+ await server.stop(None)
+
+ auth_data = pickle.loads(response)
+ auth_ctx = auth_data[_AUTH_CTX]
+ self.assertCountEqual(_CLIENT_IDS, auth_data[_ID])
+ self.assertEqual('x509_subject_alternative_name', auth_data[_ID_KEY])
+ self.assertSequenceEqual([b'ssl'], auth_ctx['transport_security_type'])
+ self.assertSequenceEqual([b'*.test.google.com'],
+ auth_ctx['x509_common_name'])
+
+ async def _do_one_shot_client_rpc(self, channel_creds, channel_options,
+ port, expect_ssl_session_reused):
+ channel = aio.secure_channel('localhost:{}'.format(port),
+ channel_creds,
+ options=channel_options)
+ response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
+ auth_data = pickle.loads(response)
+ self.assertEqual(expect_ssl_session_reused,
+ auth_data[_AUTH_CTX]['ssl_session_reused'])
+ await channel.close()
+
+ async def test_session_resumption(self):
+ # Set up a secure server
+ handler = grpc.method_handlers_generic_handler('test', {
+ 'UnaryUnary':
+ grpc.unary_unary_rpc_method_handler(handle_unary_unary)
+ })
+ server = aio.server()
+ server.add_generic_rpc_handlers((handler,))
+ server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
+ port = server.add_secure_port('[::]:0', server_cred)
+ await server.start()
+
+ # Create a cache for TLS session tickets
+ cache = session_cache.ssl_session_cache_lru(1)
+ channel_creds = grpc.ssl_channel_credentials(
+ root_certificates=_TEST_ROOT_CERTIFICATES)
+ channel_options = _PROPERTY_OPTIONS + (
+ ('grpc.ssl_session_cache', cache),)
+
+ # Initial connection has no session to resume
+ await self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port,
+ expect_ssl_session_reused=[b'false'])
+
+ # Subsequent connections resume sessions
+ await self._do_one_shot_client_rpc(channel_creds,
+ channel_options,
+ port,
+ expect_ssl_session_reused=[b'true'])
+ await server.stop(None)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main()
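
A condensed sketch of the secure-channel setup these tests repeat: client credentials built from the bundled test root certificate plus the grpc.ssl_target_name_override option, since the test certificates are issued for foo.test.google.fr rather than localhost:

import grpc
from grpc.experimental import aio
from tests.unit import resources


def make_secure_channel(port: int) -> aio.Channel:
    channel_creds = grpc.ssl_channel_credentials(
        root_certificates=resources.test_root_certificates())
    return aio.secure_channel(
        'localhost:{}'.format(port),
        channel_creds,
        options=(('grpc.ssl_target_name_override', 'foo.test.google.fr'),))
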
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py
index b3b389a59f..1961226fa6 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py
@@ -11,804 +11,804 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Tests behavior of the Call classes."""
-
+"""Tests behavior of the Call classes."""
+
import asyncio
import logging
import unittest
-import datetime
+import datetime
import grpc
-from grpc.experimental import aio
+from grpc.experimental import aio
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-from tests_aio.unit._test_base import AioTestBase
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-
-_SHORT_TIMEOUT_S = datetime.timedelta(seconds=1).total_seconds()
-
-_NUM_STREAM_RESPONSES = 5
-_RESPONSE_PAYLOAD_SIZE = 42
-_REQUEST_PAYLOAD_SIZE = 7
-_LOCAL_CANCEL_DETAILS_EXPECTATION = 'Locally cancelled by application!'
-_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
-_INFINITE_INTERVAL_US = 2**31 - 1
-
-
-class _MulticallableTestMixin():
-
- async def setUp(self):
- address, self._server = await start_test_server()
- self._channel = aio.insecure_channel(address)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
-
-class TestUnaryUnaryCall(_MulticallableTestMixin, AioTestBase):
-
- async def test_call_to_string(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- self.assertTrue(str(call) is not None)
- self.assertTrue(repr(call) is not None)
-
- await call
-
- self.assertTrue(str(call) is not None)
- self.assertTrue(repr(call) is not None)
-
- async def test_call_ok(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- self.assertFalse(call.done())
-
- response = await call
-
- self.assertTrue(call.done())
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
-        # The response is cached at the call object level; awaiting the call
-        # again returns the same response
- response_retry = await call
- self.assertIs(response, response_retry)
-
- async def test_call_rpc_error(self):
- async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- call = stub.UnaryCall(messages_pb2.SimpleRequest())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
-
- async def test_call_code_awaitable(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_call_details_awaitable(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- self.assertEqual('', await call.details())
-
- async def test_call_initial_metadata_awaitable(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- self.assertEqual(aio.Metadata(), await call.initial_metadata())
-
- async def test_call_trailing_metadata_awaitable(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- self.assertEqual(aio.Metadata(), await call.trailing_metadata())
-
- async def test_call_initial_metadata_cancelable(self):
- coro_started = asyncio.Event()
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- async def coro():
- coro_started.set()
- await call.initial_metadata()
-
- task = self.loop.create_task(coro())
- await coro_started.wait()
- task.cancel()
-
- # Test that initial metadata can still be asked thought
- # a cancellation happened with the previous task
- self.assertEqual(aio.Metadata(), await call.initial_metadata())
-
- async def test_call_initial_metadata_multiple_waiters(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- async def coro():
- return await call.initial_metadata()
-
- task1 = self.loop.create_task(coro())
- task2 = self.loop.create_task(coro())
-
- await call
- expected = [aio.Metadata() for _ in range(2)]
- self.assertEqual(expected, await asyncio.gather(*[task1, task2]))
-
- async def test_call_code_cancelable(self):
- coro_started = asyncio.Event()
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- async def coro():
- coro_started.set()
- await call.code()
-
- task = self.loop.create_task(coro())
- await coro_started.wait()
- task.cancel()
-
- # Test that code can still be asked thought
- # a cancellation happened with the previous task
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_call_code_multiple_waiters(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- async def coro():
- return await call.code()
-
- task1 = self.loop.create_task(coro())
- task2 = self.loop.create_task(coro())
-
- await call
-
- self.assertEqual([grpc.StatusCode.OK, grpc.StatusCode.OK], await
- asyncio.gather(task1, task2))
-
- async def test_cancel_unary_unary(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- self.assertFalse(call.cancelled())
-
- self.assertTrue(call.cancel())
- self.assertFalse(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- # The info in the RpcError should match the info in Call object.
- self.assertTrue(call.cancelled())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.details(),
- 'Locally cancelled by application!')
-
- async def test_cancel_unary_unary_in_task(self):
- coro_started = asyncio.Event()
- call = self._stub.EmptyCall(messages_pb2.SimpleRequest())
-
- async def another_coro():
- coro_started.set()
- await call
-
- task = self.loop.create_task(another_coro())
- await coro_started.wait()
-
- self.assertFalse(task.done())
- task.cancel()
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- async def test_passing_credentials_fails_over_insecure_channel(self):
- call_credentials = grpc.composite_call_credentials(
- grpc.access_token_call_credentials("abc"),
- grpc.access_token_call_credentials("def"),
- )
- with self.assertRaisesRegex(
- aio.UsageError,
- "Call credentials are only valid on secure channels"):
- self._stub.UnaryCall(messages_pb2.SimpleRequest(),
- credentials=call_credentials)
-
-
-class TestUnaryStreamCall(_MulticallableTestMixin, AioTestBase):
-
- async def test_call_rpc_error(self):
- channel = aio.insecure_channel(UNREACHABLE_TARGET)
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- async for response in call:
- pass
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
- await channel.close()
-
- async def test_cancel_unary_stream(self):
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
- self.assertFalse(call.cancelled())
-
- response = await call.read()
- self.assertIs(type(response), messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertTrue(call.cancel())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
- self.assertEqual(_LOCAL_CANCEL_DETAILS_EXPECTATION, await
- call.details())
- self.assertFalse(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call.read()
- self.assertTrue(call.cancelled())
-
- async def test_multiple_cancel_unary_stream(self):
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
- self.assertFalse(call.cancelled())
-
- response = await call.read()
- self.assertIs(type(response), messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertTrue(call.cancel())
- self.assertFalse(call.cancel())
- self.assertFalse(call.cancel())
- self.assertFalse(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call.read()
-
- async def test_early_cancel_unary_stream(self):
- """Test cancellation before receiving messages."""
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertFalse(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call.read()
-
- self.assertTrue(call.cancelled())
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
- self.assertEqual(_LOCAL_CANCEL_DETAILS_EXPECTATION, await
- call.details())
-
- async def test_late_cancel_unary_stream(self):
- """Test cancellation after received all messages."""
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- # After all messages received, it is possible that the final state
- # is received or on its way. It's basically a data race, so our
- # expectation here is do not crash :)
- call.cancel()
- self.assertIn(await call.code(),
- [grpc.StatusCode.OK, grpc.StatusCode.CANCELLED])
-
- async def test_too_many_reads_unary_stream(self):
- """Test calling read after received all messages fails."""
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
- self.assertIs(await call.read(), aio.EOF)
-
- # After the RPC is finished, further reads will lead to exception.
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertIs(await call.read(), aio.EOF)
-
- async def test_unary_stream_async_generator(self):
- """Sunny day test case for unary_stream."""
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
- self.assertFalse(call.cancelled())
-
- async for response in call:
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_cancel_unary_stream_in_task_using_read(self):
- coro_started = asyncio.Event()
-
- # Configs the server method to block forever
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_INFINITE_INTERVAL_US,
- ))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- async def another_coro():
- coro_started.set()
- await call.read()
-
- task = self.loop.create_task(another_coro())
- await coro_started.wait()
-
- self.assertFalse(task.done())
- task.cancel()
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- async def test_cancel_unary_stream_in_task_using_async_for(self):
- coro_started = asyncio.Event()
-
- # Configs the server method to block forever
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_INFINITE_INTERVAL_US,
- ))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- async def another_coro():
- coro_started.set()
- async for _ in call:
- pass
-
- task = self.loop.create_task(another_coro())
- await coro_started.wait()
-
- self.assertFalse(task.done())
- task.cancel()
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- async def test_time_remaining(self):
- request = messages_pb2.StreamingOutputCallRequest()
- # First message comes back immediately
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
- # Second message comes back after a unit of wait time
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- call = self._stub.StreamingOutputCall(request,
- timeout=_SHORT_TIMEOUT_S * 2)
-
- response = await call.read()
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- # Should be around the same as the timeout
- remained_time = call.time_remaining()
- self.assertGreater(remained_time, _SHORT_TIMEOUT_S * 3 / 2)
- self.assertLess(remained_time, _SHORT_TIMEOUT_S * 5 / 2)
-
- response = await call.read()
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- # Should be around the timeout minus a unit of wait time
- remained_time = call.time_remaining()
- self.assertGreater(remained_time, _SHORT_TIMEOUT_S / 2)
- self.assertLess(remained_time, _SHORT_TIMEOUT_S * 3 / 2)
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
-
-class TestStreamUnaryCall(_MulticallableTestMixin, AioTestBase):
-
- async def test_cancel_stream_unary(self):
- call = self._stub.StreamingInputCall()
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- # Sends out requests
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
-
- await call.done_writing()
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- async def test_early_cancel_stream_unary(self):
- call = self._stub.StreamingInputCall()
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
-
- with self.assertRaises(asyncio.InvalidStateError):
- await call.write(messages_pb2.StreamingInputCallRequest())
-
- # Should be no-op
- await call.done_writing()
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- async def test_write_after_done_writing(self):
- call = self._stub.StreamingInputCall()
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- # Sends out requests
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
-
- # Should be no-op
- await call.done_writing()
-
- with self.assertRaises(asyncio.InvalidStateError):
- await call.write(messages_pb2.StreamingInputCallRequest())
-
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_error_in_async_generator(self):
- # Server will pause between responses
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- # We expect the request iterator to receive the exception
- request_iterator_received_the_exception = asyncio.Event()
-
- async def request_iterator():
- with self.assertRaises(asyncio.CancelledError):
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
- await asyncio.sleep(_SHORT_TIMEOUT_S)
- request_iterator_received_the_exception.set()
-
- call = self._stub.StreamingInputCall(request_iterator())
-
- # Cancel the RPC after at least one response
- async def cancel_later():
- await asyncio.sleep(_SHORT_TIMEOUT_S * 2)
- call.cancel()
-
- cancel_later_task = self.loop.create_task(cancel_later())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- await request_iterator_received_the_exception.wait()
-
- # No failures in the cancel later task!
- await cancel_later_task
-
- async def test_normal_iterable_requests(self):
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
- requests = [request] * _NUM_STREAM_RESPONSES
-
- # Sends out requests
- call = self._stub.StreamingInputCall(requests)
-
- # RPC should succeed
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_call_rpc_error(self):
- async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # The error should be raised automatically without any traffic.
- call = stub.StreamingInputCall()
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
-
- async def test_timeout(self):
- call = self._stub.StreamingInputCall(timeout=_SHORT_TIMEOUT_S)
-
- # The error should be raised automatically without any traffic.
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, await call.code())
-
-
-# Prepares the request that stream in a ping-pong manner.
-_STREAM_OUTPUT_REQUEST_ONE_RESPONSE = messages_pb2.StreamingOutputCallRequest()
-_STREAM_OUTPUT_REQUEST_ONE_RESPONSE.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
-
-class TestStreamStreamCall(_MulticallableTestMixin, AioTestBase):
-
- async def test_cancel(self):
- # Invokes the actual RPC
- call = self._stub.FullDuplexCall()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
- response = await call.read()
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_cancel_with_pending_read(self):
- call = self._stub.FullDuplexCall()
-
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_cancel_with_ongoing_read(self):
- call = self._stub.FullDuplexCall()
- coro_started = asyncio.Event()
-
- async def read_coro():
- coro_started.set()
- await call.read()
-
- read_task = self.loop.create_task(read_coro())
- await coro_started.wait()
- self.assertFalse(read_task.done())
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_early_cancel(self):
- call = self._stub.FullDuplexCall()
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_cancel_after_done_writing(self):
- call = self._stub.FullDuplexCall()
- await call.done_writing()
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_late_cancel(self):
- call = self._stub.FullDuplexCall()
- await call.done_writing()
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- # Cancels the RPC
- self.assertTrue(call.done())
- self.assertFalse(call.cancelled())
- self.assertFalse(call.cancel())
- self.assertFalse(call.cancelled())
-
- # Status is still OK
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_async_generator(self):
-
- async def request_generator():
- yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
- yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
-
- call = self._stub.FullDuplexCall(request_generator())
- async for response in call:
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_too_many_reads(self):
-
- async def request_generator():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
-
- call = self._stub.FullDuplexCall(request_generator())
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
- self.assertIs(await call.read(), aio.EOF)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- # After the RPC finished, the read should also produce EOF
- self.assertIs(await call.read(), aio.EOF)
-
- async def test_read_write_after_done_writing(self):
- call = self._stub.FullDuplexCall()
-
- # Writes two requests, and pending two requests
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
- await call.done_writing()
-
- # Further write should fail
- with self.assertRaises(asyncio.InvalidStateError):
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
-
- # But read should be unaffected
- response = await call.read()
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
- response = await call.read()
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_error_in_async_generator(self):
- # Server will pause between responses
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- # We expect the request iterator to receive the exception
- request_iterator_received_the_exception = asyncio.Event()
-
- async def request_iterator():
- with self.assertRaises(asyncio.CancelledError):
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
- await asyncio.sleep(_SHORT_TIMEOUT_S)
- request_iterator_received_the_exception.set()
-
- call = self._stub.FullDuplexCall(request_iterator())
-
- # Cancel the RPC after at least one response
- async def cancel_later():
- await asyncio.sleep(_SHORT_TIMEOUT_S * 2)
- call.cancel()
-
- cancel_later_task = self.loop.create_task(cancel_later())
-
- with self.assertRaises(asyncio.CancelledError):
- async for response in call:
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- await request_iterator_received_the_exception.wait()
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
- # No failures in the cancel later task!
- await cancel_later_task
-
- async def test_normal_iterable_requests(self):
- requests = [_STREAM_OUTPUT_REQUEST_ONE_RESPONSE] * _NUM_STREAM_RESPONSES
-
- call = self._stub.FullDuplexCall(iter(requests))
- async for response in call:
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
-
+from tests_aio.unit._constants import UNREACHABLE_TARGET
+
+_SHORT_TIMEOUT_S = datetime.timedelta(seconds=1).total_seconds()
+
+_NUM_STREAM_RESPONSES = 5
+_RESPONSE_PAYLOAD_SIZE = 42
+_REQUEST_PAYLOAD_SIZE = 7
+_LOCAL_CANCEL_DETAILS_EXPECTATION = 'Locally cancelled by application!'
+_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
+_INFINITE_INTERVAL_US = 2**31 - 1
+
+
+class _MulticallableTestMixin():
+
+ async def setUp(self):
+ address, self._server = await start_test_server()
+ self._channel = aio.insecure_channel(address)
+ self._stub = test_pb2_grpc.TestServiceStub(self._channel)
+
+ async def tearDown(self):
+ await self._channel.close()
+ await self._server.stop(None)
+
+
+class TestUnaryUnaryCall(_MulticallableTestMixin, AioTestBase):
+
+ async def test_call_to_string(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ self.assertTrue(str(call) is not None)
+ self.assertTrue(repr(call) is not None)
+
+ await call
+
+ self.assertTrue(str(call) is not None)
+ self.assertTrue(repr(call) is not None)
+
+ async def test_call_ok(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ self.assertFalse(call.done())
+
+ response = await call
+
+ self.assertTrue(call.done())
+ self.assertIsInstance(response, messages_pb2.SimpleResponse)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+        # The response is cached at the call object level; awaiting the
+        # call again returns the same response object.
+ response_retry = await call
+ self.assertIs(response, response_retry)
+
+ async def test_call_rpc_error(self):
+ async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ call = stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE,
+ exception_context.exception.code())
+
+ self.assertTrue(call.done())
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
+
+ async def test_call_code_awaitable(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_call_details_awaitable(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+ self.assertEqual('', await call.details())
+
+ async def test_call_initial_metadata_awaitable(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+ self.assertEqual(aio.Metadata(), await call.initial_metadata())
+
+ async def test_call_trailing_metadata_awaitable(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+ self.assertEqual(aio.Metadata(), await call.trailing_metadata())
+
+ async def test_call_initial_metadata_cancelable(self):
+ coro_started = asyncio.Event()
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ async def coro():
+ coro_started.set()
+ await call.initial_metadata()
+
+ task = self.loop.create_task(coro())
+ await coro_started.wait()
+ task.cancel()
+
+        # Test that initial metadata can still be fetched even though
+        # the previous task was cancelled
+ self.assertEqual(aio.Metadata(), await call.initial_metadata())
+
+ async def test_call_initial_metadata_multiple_waiters(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ async def coro():
+ return await call.initial_metadata()
+
+ task1 = self.loop.create_task(coro())
+ task2 = self.loop.create_task(coro())
+
+ await call
+ expected = [aio.Metadata() for _ in range(2)]
+ self.assertEqual(expected, await asyncio.gather(*[task1, task2]))
+
+ async def test_call_code_cancelable(self):
+ coro_started = asyncio.Event()
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ async def coro():
+ coro_started.set()
+ await call.code()
+
+ task = self.loop.create_task(coro())
+ await coro_started.wait()
+ task.cancel()
+
+        # Test that the status code can still be fetched even though
+        # the previous task was cancelled
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_call_code_multiple_waiters(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ async def coro():
+ return await call.code()
+
+ task1 = self.loop.create_task(coro())
+ task2 = self.loop.create_task(coro())
+
+ await call
+
+ self.assertEqual([grpc.StatusCode.OK, grpc.StatusCode.OK], await
+ asyncio.gather(task1, task2))
+
+ async def test_cancel_unary_unary(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ self.assertFalse(call.cancelled())
+
+ self.assertTrue(call.cancel())
+ self.assertFalse(call.cancel())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ # The info in the RpcError should match the info in Call object.
+ self.assertTrue(call.cancelled())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ self.assertEqual(await call.details(),
+ 'Locally cancelled by application!')
+
+ async def test_cancel_unary_unary_in_task(self):
+ coro_started = asyncio.Event()
+ call = self._stub.EmptyCall(messages_pb2.SimpleRequest())
+
+ async def another_coro():
+ coro_started.set()
+ await call
+
+ task = self.loop.create_task(another_coro())
+ await coro_started.wait()
+
+ self.assertFalse(task.done())
+ task.cancel()
+
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task
+
+ async def test_passing_credentials_fails_over_insecure_channel(self):
+ call_credentials = grpc.composite_call_credentials(
+ grpc.access_token_call_credentials("abc"),
+ grpc.access_token_call_credentials("def"),
+ )
+ with self.assertRaisesRegex(
+ aio.UsageError,
+ "Call credentials are only valid on secure channels"):
+ self._stub.UnaryCall(messages_pb2.SimpleRequest(),
+ credentials=call_credentials)
+
+
+class TestUnaryStreamCall(_MulticallableTestMixin, AioTestBase):
+
+ async def test_call_rpc_error(self):
+ channel = aio.insecure_channel(UNREACHABLE_TARGET)
+ request = messages_pb2.StreamingOutputCallRequest()
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.StreamingOutputCall(request)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ async for response in call:
+ pass
+
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE,
+ exception_context.exception.code())
+
+ self.assertTrue(call.done())
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
+ await channel.close()
+
+ async def test_cancel_unary_stream(self):
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE,
+ interval_us=_RESPONSE_INTERVAL_US,
+ ))
+
+ # Invokes the actual RPC
+ call = self._stub.StreamingOutputCall(request)
+ self.assertFalse(call.cancelled())
+
+ response = await call.read()
+ self.assertIs(type(response), messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertTrue(call.cancel())
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+ self.assertEqual(_LOCAL_CANCEL_DETAILS_EXPECTATION, await
+ call.details())
+ self.assertFalse(call.cancel())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call.read()
+ self.assertTrue(call.cancelled())
+
+ async def test_multiple_cancel_unary_stream(self):
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE,
+ interval_us=_RESPONSE_INTERVAL_US,
+ ))
+
+ # Invokes the actual RPC
+ call = self._stub.StreamingOutputCall(request)
+ self.assertFalse(call.cancelled())
+
+ response = await call.read()
+ self.assertIs(type(response), messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertTrue(call.cancel())
+ self.assertFalse(call.cancel())
+ self.assertFalse(call.cancel())
+ self.assertFalse(call.cancel())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call.read()
+
+ async def test_early_cancel_unary_stream(self):
+ """Test cancellation before receiving messages."""
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE,
+ interval_us=_RESPONSE_INTERVAL_US,
+ ))
+
+ # Invokes the actual RPC
+ call = self._stub.StreamingOutputCall(request)
+
+ self.assertFalse(call.cancelled())
+ self.assertTrue(call.cancel())
+ self.assertFalse(call.cancel())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call.read()
+
+ self.assertTrue(call.cancelled())
+
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+ self.assertEqual(_LOCAL_CANCEL_DETAILS_EXPECTATION, await
+ call.details())
+
+ async def test_late_cancel_unary_stream(self):
+ """Test cancellation after received all messages."""
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
+
+ # Invokes the actual RPC
+ call = self._stub.StreamingOutputCall(request)
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ response = await call.read()
+ self.assertIs(type(response),
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+        # After all messages are received, the final status may already have
+        # arrived or still be on its way. It's basically a data race, so the
+        # only expectation here is that we don't crash :)
+ call.cancel()
+ self.assertIn(await call.code(),
+ [grpc.StatusCode.OK, grpc.StatusCode.CANCELLED])
+
+ async def test_too_many_reads_unary_stream(self):
+ """Test calling read after received all messages fails."""
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
+
+ # Invokes the actual RPC
+ call = self._stub.StreamingOutputCall(request)
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ response = await call.read()
+ self.assertIs(type(response),
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+ self.assertIs(await call.read(), aio.EOF)
+
+        # After the RPC has finished, further reads keep returning EOF.
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertIs(await call.read(), aio.EOF)
+
+ async def test_unary_stream_async_generator(self):
+ """Sunny day test case for unary_stream."""
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
+
+ # Invokes the actual RPC
+ call = self._stub.StreamingOutputCall(request)
+ self.assertFalse(call.cancelled())
+
+ async for response in call:
+ self.assertIs(type(response),
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_cancel_unary_stream_in_task_using_read(self):
+ coro_started = asyncio.Event()
+
+        # Configures the server method to block forever
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE,
+ interval_us=_INFINITE_INTERVAL_US,
+ ))
+
+ # Invokes the actual RPC
+ call = self._stub.StreamingOutputCall(request)
+
+ async def another_coro():
+ coro_started.set()
+ await call.read()
+
+ task = self.loop.create_task(another_coro())
+ await coro_started.wait()
+
+ self.assertFalse(task.done())
+ task.cancel()
+
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task
+
+ async def test_cancel_unary_stream_in_task_using_async_for(self):
+ coro_started = asyncio.Event()
+
+        # Configures the server method to block forever
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE,
+ interval_us=_INFINITE_INTERVAL_US,
+ ))
+
+ # Invokes the actual RPC
+ call = self._stub.StreamingOutputCall(request)
+
+ async def another_coro():
+ coro_started.set()
+ async for _ in call:
+ pass
+
+ task = self.loop.create_task(another_coro())
+ await coro_started.wait()
+
+ self.assertFalse(task.done())
+ task.cancel()
+
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await task
+
+ async def test_time_remaining(self):
+ request = messages_pb2.StreamingOutputCallRequest()
+ # First message comes back immediately
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
+ # Second message comes back after a unit of wait time
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE,
+ interval_us=_RESPONSE_INTERVAL_US,
+ ))
+
+ call = self._stub.StreamingOutputCall(request,
+ timeout=_SHORT_TIMEOUT_S * 2)
+
+ response = await call.read()
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ # Should be around the same as the timeout
+ remained_time = call.time_remaining()
+ self.assertGreater(remained_time, _SHORT_TIMEOUT_S * 3 / 2)
+ self.assertLess(remained_time, _SHORT_TIMEOUT_S * 5 / 2)
+
+ response = await call.read()
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ # Should be around the timeout minus a unit of wait time
+ remained_time = call.time_remaining()
+ self.assertGreater(remained_time, _SHORT_TIMEOUT_S / 2)
+ self.assertLess(remained_time, _SHORT_TIMEOUT_S * 3 / 2)
+
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+
+class TestStreamUnaryCall(_MulticallableTestMixin, AioTestBase):
+
+ async def test_cancel_stream_unary(self):
+ call = self._stub.StreamingInputCall()
+
+ # Prepares the request
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ # Sends out requests
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+
+ # Cancels the RPC
+ self.assertFalse(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertTrue(call.cancel())
+ self.assertTrue(call.cancelled())
+
+ await call.done_writing()
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ async def test_early_cancel_stream_unary(self):
+ call = self._stub.StreamingInputCall()
+
+ # Cancels the RPC
+ self.assertFalse(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertTrue(call.cancel())
+ self.assertTrue(call.cancelled())
+
+ with self.assertRaises(asyncio.InvalidStateError):
+ await call.write(messages_pb2.StreamingInputCallRequest())
+
+ # Should be no-op
+ await call.done_writing()
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ async def test_write_after_done_writing(self):
+ call = self._stub.StreamingInputCall()
+
+ # Prepares the request
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ # Sends out requests
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+
+ # Should be no-op
+ await call.done_writing()
+
+ with self.assertRaises(asyncio.InvalidStateError):
+ await call.write(messages_pb2.StreamingInputCallRequest())
+
+ response = await call
+ self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
+ self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_error_in_async_generator(self):
+ # Server will pause between responses
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE,
+ interval_us=_RESPONSE_INTERVAL_US,
+ ))
+
+ # We expect the request iterator to receive the exception
+ request_iterator_received_the_exception = asyncio.Event()
+
+ async def request_iterator():
+ with self.assertRaises(asyncio.CancelledError):
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield request
+ await asyncio.sleep(_SHORT_TIMEOUT_S)
+ request_iterator_received_the_exception.set()
+
+ call = self._stub.StreamingInputCall(request_iterator())
+
+ # Cancel the RPC after at least one response
+ async def cancel_later():
+ await asyncio.sleep(_SHORT_TIMEOUT_S * 2)
+ call.cancel()
+
+ cancel_later_task = self.loop.create_task(cancel_later())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ await request_iterator_received_the_exception.wait()
+
+ # No failures in the cancel later task!
+ await cancel_later_task
+
+ async def test_normal_iterable_requests(self):
+ # Prepares the request
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+ requests = [request] * _NUM_STREAM_RESPONSES
+
+ # Sends out requests
+ call = self._stub.StreamingInputCall(requests)
+
+ # RPC should succeed
+ response = await call
+ self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
+ self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_call_rpc_error(self):
+ async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ # The error should be raised automatically without any traffic.
+ call = stub.StreamingInputCall()
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE,
+ exception_context.exception.code())
+
+ self.assertTrue(call.done())
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
+
+ async def test_timeout(self):
+ call = self._stub.StreamingInputCall(timeout=_SHORT_TIMEOUT_S)
+
+ # The error should be raised automatically without any traffic.
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
+ self.assertTrue(call.done())
+ self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, await call.code())
+
+
+# Prepares a request that streams in a ping-pong manner.
+_STREAM_OUTPUT_REQUEST_ONE_RESPONSE = messages_pb2.StreamingOutputCallRequest()
+_STREAM_OUTPUT_REQUEST_ONE_RESPONSE.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+
+class TestStreamStreamCall(_MulticallableTestMixin, AioTestBase):
+
+ async def test_cancel(self):
+ # Invokes the actual RPC
+ call = self._stub.FullDuplexCall()
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
+ response = await call.read()
+ self.assertIsInstance(response,
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ # Cancels the RPC
+ self.assertFalse(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertTrue(call.cancel())
+ self.assertTrue(call.cancelled())
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+
+ async def test_cancel_with_pending_read(self):
+ call = self._stub.FullDuplexCall()
+
+ await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
+
+ # Cancels the RPC
+ self.assertFalse(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertTrue(call.cancel())
+ self.assertTrue(call.cancelled())
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+
+ async def test_cancel_with_ongoing_read(self):
+ call = self._stub.FullDuplexCall()
+ coro_started = asyncio.Event()
+
+ async def read_coro():
+ coro_started.set()
+ await call.read()
+
+ read_task = self.loop.create_task(read_coro())
+ await coro_started.wait()
+ self.assertFalse(read_task.done())
+
+ # Cancels the RPC
+ self.assertFalse(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertTrue(call.cancel())
+ self.assertTrue(call.cancelled())
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+
+ async def test_early_cancel(self):
+ call = self._stub.FullDuplexCall()
+
+ # Cancels the RPC
+ self.assertFalse(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertTrue(call.cancel())
+ self.assertTrue(call.cancelled())
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+
+ async def test_cancel_after_done_writing(self):
+ call = self._stub.FullDuplexCall()
+ await call.done_writing()
+
+ # Cancels the RPC
+ self.assertFalse(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertTrue(call.cancel())
+ self.assertTrue(call.cancelled())
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+
+ async def test_late_cancel(self):
+ call = self._stub.FullDuplexCall()
+ await call.done_writing()
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ # Cancels the RPC
+ self.assertTrue(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertFalse(call.cancel())
+ self.assertFalse(call.cancelled())
+
+ # Status is still OK
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_async_generator(self):
+
+ async def request_generator():
+ yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
+ yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
+
+ call = self._stub.FullDuplexCall(request_generator())
+ async for response in call:
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_too_many_reads(self):
+
+ async def request_generator():
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
+
+ call = self._stub.FullDuplexCall(request_generator())
+ for _ in range(_NUM_STREAM_RESPONSES):
+ response = await call.read()
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+ self.assertIs(await call.read(), aio.EOF)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+        # After the RPC has finished, read() keeps producing EOF
+ self.assertIs(await call.read(), aio.EOF)
+
+ async def test_read_write_after_done_writing(self):
+ call = self._stub.FullDuplexCall()
+
+        # Writes two requests and leaves two responses pending to be read
+ await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
+ await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
+ await call.done_writing()
+
+ # Further write should fail
+ with self.assertRaises(asyncio.InvalidStateError):
+ await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
+
+ # But read should be unaffected
+ response = await call.read()
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+ response = await call.read()
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_error_in_async_generator(self):
+ # Server will pause between responses
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE,
+ interval_us=_RESPONSE_INTERVAL_US,
+ ))
+
+ # We expect the request iterator to receive the exception
+ request_iterator_received_the_exception = asyncio.Event()
+
+ async def request_iterator():
+ with self.assertRaises(asyncio.CancelledError):
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield request
+ await asyncio.sleep(_SHORT_TIMEOUT_S)
+ request_iterator_received_the_exception.set()
+
+ call = self._stub.FullDuplexCall(request_iterator())
+
+ # Cancel the RPC after at least one response
+ async def cancel_later():
+ await asyncio.sleep(_SHORT_TIMEOUT_S * 2)
+ call.cancel()
+
+ cancel_later_task = self.loop.create_task(cancel_later())
+
+ with self.assertRaises(asyncio.CancelledError):
+ async for response in call:
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
+ len(response.payload.body))
+
+ await request_iterator_received_the_exception.wait()
+
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+ # No failures in the cancel later task!
+ await cancel_later_task
+
+ async def test_normal_iterable_requests(self):
+ requests = [_STREAM_OUTPUT_REQUEST_ONE_RESPONSE] * _NUM_STREAM_RESPONSES
+
+ call = self._stub.FullDuplexCall(iter(requests))
+ async for response in call:
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+
if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
+ logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
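Several tests in the file above repeat one cancellation pattern: await the call in a separate task, wait on an asyncio.Event until that task has started, cancel the task, then assert the RPC ended up CANCELLED. A stripped-down sketch of that pattern, assuming `stub` is an already-constructed aio TestService stub (hypothetical here):

    import asyncio
    import grpc
    from src.proto.grpc.testing import messages_pb2

    async def cancel_call_via_task(stub):
        coro_started = asyncio.Event()
        call = stub.UnaryCall(messages_pb2.SimpleRequest())

        async def waiter():
            coro_started.set()
            await call  # interrupted when the surrounding task is cancelled

        task = asyncio.get_running_loop().create_task(waiter())
        await coro_started.wait()
        task.cancel()

        # Cancelling the awaiting task also cancels the underlying RPC.
        assert await call.code() == grpc.StatusCode.CANCELLED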
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_argument_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_argument_test.py
index ccf23d5cdf..8bf2dc8b1f 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_argument_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_argument_test.py
@@ -1,176 +1,176 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior around the Core channel arguments."""
-
-import asyncio
-import logging
-import platform
-import random
-import errno
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-from tests.unit.framework import common
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_RANDOM_SEED = 42
-
-_ENABLE_REUSE_PORT = 'SO_REUSEPORT enabled'
-_DISABLE_REUSE_PORT = 'SO_REUSEPORT disabled'
-_SOCKET_OPT_SO_REUSEPORT = 'grpc.so_reuseport'
-_OPTIONS = (
- (_ENABLE_REUSE_PORT, ((_SOCKET_OPT_SO_REUSEPORT, 1),)),
- (_DISABLE_REUSE_PORT, ((_SOCKET_OPT_SO_REUSEPORT, 0),)),
-)
-
-_NUM_SERVER_CREATED = 5
-
-_GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH = 'grpc.max_receive_message_length'
-_MAX_MESSAGE_LENGTH = 1024
-
-_ADDRESS_TOKEN_ERRNO = errno.EADDRINUSE, errno.ENOSR
-
-
-class _TestPointerWrapper(object):
-
- def __int__(self):
- return 123456
-
-
-_TEST_CHANNEL_ARGS = (
- ('arg1', b'bytes_val'),
- ('arg2', 'str_val'),
- ('arg3', 1),
- (b'arg4', 'str_val'),
- ('arg6', _TestPointerWrapper()),
-)
-
-_INVALID_TEST_CHANNEL_ARGS = [
- {
- 'foo': 'bar'
- },
- (('key',),),
- 'str',
-]
-
-
-async def test_if_reuse_port_enabled(server: aio.Server):
- port = server.add_insecure_port('localhost:0')
- await server.start()
-
- try:
- with common.bound_socket(
- bind_address='localhost',
- port=port,
- listen=False,
- ) as (unused_host, bound_port):
- assert bound_port == port
- except OSError as e:
- if e.errno in _ADDRESS_TOKEN_ERRNO:
- return False
- else:
- logging.exception(e)
- raise
- else:
- return True
-
-
-class TestChannelArgument(AioTestBase):
-
- async def setUp(self):
- random.seed(_RANDOM_SEED)
-
- @unittest.skipIf(platform.system() == 'Windows',
- 'SO_REUSEPORT only available in Linux-like OS.')
- async def test_server_so_reuse_port_is_set_properly(self):
-
- async def test_body():
- fact, options = random.choice(_OPTIONS)
- server = aio.server(options=options)
- try:
- result = await test_if_reuse_port_enabled(server)
- if fact == _ENABLE_REUSE_PORT and not result:
- self.fail(
- 'Enabled reuse port in options, but not observed in socket'
- )
- elif fact == _DISABLE_REUSE_PORT and result:
- self.fail(
- 'Disabled reuse port in options, but observed in socket'
- )
- finally:
- await server.stop(None)
-
- # Creating a lot of servers concurrently
- await asyncio.gather(*(test_body() for _ in range(_NUM_SERVER_CREATED)))
-
- async def test_client(self):
- # Do not segfault, or raise exception!
- channel = aio.insecure_channel('[::]:0', options=_TEST_CHANNEL_ARGS)
- await channel.close()
-
- async def test_server(self):
- # Do not segfault, or raise exception!
- server = aio.server(options=_TEST_CHANNEL_ARGS)
- await server.stop(None)
-
- async def test_invalid_client_args(self):
- for invalid_arg in _INVALID_TEST_CHANNEL_ARGS:
- self.assertRaises((ValueError, TypeError),
- aio.insecure_channel,
- '[::]:0',
- options=invalid_arg)
-
- async def test_max_message_length_applied(self):
- address, server = await start_test_server()
-
- async with aio.insecure_channel(
- address,
- options=((_GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH,
- _MAX_MESSAGE_LENGTH),)) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- request = messages_pb2.StreamingOutputCallRequest()
- # First request will pass
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_MAX_MESSAGE_LENGTH // 2,))
- # Second request should fail
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_MAX_MESSAGE_LENGTH * 2,))
-
- call = stub.StreamingOutputCall(request)
-
- response = await call.read()
- self.assertEqual(_MAX_MESSAGE_LENGTH // 2,
- len(response.payload.body))
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.read()
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED,
- rpc_error.code())
- self.assertIn(str(_MAX_MESSAGE_LENGTH), rpc_error.details())
-
- self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, await
- call.code())
-
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2019 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests behavior around the Core channel arguments."""
+
+import asyncio
+import logging
+import platform
+import random
+import errno
+import unittest
+
+import grpc
+from grpc.experimental import aio
+
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from tests.unit.framework import common
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._test_server import start_test_server
+
+_RANDOM_SEED = 42
+
+_ENABLE_REUSE_PORT = 'SO_REUSEPORT enabled'
+_DISABLE_REUSE_PORT = 'SO_REUSEPORT disabled'
+_SOCKET_OPT_SO_REUSEPORT = 'grpc.so_reuseport'
+_OPTIONS = (
+ (_ENABLE_REUSE_PORT, ((_SOCKET_OPT_SO_REUSEPORT, 1),)),
+ (_DISABLE_REUSE_PORT, ((_SOCKET_OPT_SO_REUSEPORT, 0),)),
+)
+
+_NUM_SERVER_CREATED = 5
+
+_GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH = 'grpc.max_receive_message_length'
+_MAX_MESSAGE_LENGTH = 1024
+
+_ADDRESS_TOKEN_ERRNO = errno.EADDRINUSE, errno.ENOSR
+
+
+class _TestPointerWrapper(object):
+
+ def __int__(self):
+ return 123456
+
+
+_TEST_CHANNEL_ARGS = (
+ ('arg1', b'bytes_val'),
+ ('arg2', 'str_val'),
+ ('arg3', 1),
+ (b'arg4', 'str_val'),
+ ('arg6', _TestPointerWrapper()),
+)
+
+_INVALID_TEST_CHANNEL_ARGS = [
+ {
+ 'foo': 'bar'
+ },
+ (('key',),),
+ 'str',
+]
+
+
+async def test_if_reuse_port_enabled(server: aio.Server):
+ port = server.add_insecure_port('localhost:0')
+ await server.start()
+
+ try:
+ with common.bound_socket(
+ bind_address='localhost',
+ port=port,
+ listen=False,
+ ) as (unused_host, bound_port):
+ assert bound_port == port
+ except OSError as e:
+ if e.errno in _ADDRESS_TOKEN_ERRNO:
+ return False
+ else:
+ logging.exception(e)
+ raise
+ else:
+ return True
+
+
+class TestChannelArgument(AioTestBase):
+
+ async def setUp(self):
+ random.seed(_RANDOM_SEED)
+
+ @unittest.skipIf(platform.system() == 'Windows',
+                     'SO_REUSEPORT is only available on Linux-like OSes.')
+ async def test_server_so_reuse_port_is_set_properly(self):
+
+ async def test_body():
+ fact, options = random.choice(_OPTIONS)
+ server = aio.server(options=options)
+ try:
+ result = await test_if_reuse_port_enabled(server)
+ if fact == _ENABLE_REUSE_PORT and not result:
+ self.fail(
+ 'Enabled reuse port in options, but not observed in socket'
+ )
+ elif fact == _DISABLE_REUSE_PORT and result:
+ self.fail(
+ 'Disabled reuse port in options, but observed in socket'
+ )
+ finally:
+ await server.stop(None)
+
+ # Creating a lot of servers concurrently
+ await asyncio.gather(*(test_body() for _ in range(_NUM_SERVER_CREATED)))
+
+ async def test_client(self):
+ # Do not segfault, or raise exception!
+ channel = aio.insecure_channel('[::]:0', options=_TEST_CHANNEL_ARGS)
+ await channel.close()
+
+ async def test_server(self):
+ # Do not segfault, or raise exception!
+ server = aio.server(options=_TEST_CHANNEL_ARGS)
+ await server.stop(None)
+
+ async def test_invalid_client_args(self):
+ for invalid_arg in _INVALID_TEST_CHANNEL_ARGS:
+ self.assertRaises((ValueError, TypeError),
+ aio.insecure_channel,
+ '[::]:0',
+ options=invalid_arg)
+
+ async def test_max_message_length_applied(self):
+ address, server = await start_test_server()
+
+ async with aio.insecure_channel(
+ address,
+ options=((_GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH,
+ _MAX_MESSAGE_LENGTH),)) as channel:
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ request = messages_pb2.StreamingOutputCallRequest()
+ # First request will pass
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_MAX_MESSAGE_LENGTH // 2,))
+ # Second request should fail
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_MAX_MESSAGE_LENGTH * 2,))
+
+ call = stub.StreamingOutputCall(request)
+
+ response = await call.read()
+ self.assertEqual(_MAX_MESSAGE_LENGTH // 2,
+ len(response.payload.body))
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call.read()
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED,
+ rpc_error.code())
+ self.assertIn(str(_MAX_MESSAGE_LENGTH), rpc_error.details())
+
+ self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, await
+ call.code())
+
+ await server.stop(None)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
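The test file above drives two Core channel arguments, grpc.so_reuseport and grpc.max_receive_message_length. As a minimal sketch of how those options are passed outside the test harness (assuming only that grpcio with the asyncio API is installed; the localhost target and the 1 KiB limit are illustrative, not taken from this change):

import asyncio

from grpc.experimental import aio


async def main():
    options = (
        ('grpc.so_reuseport', 0),                   # disable SO_REUSEPORT on the server socket
        ('grpc.max_receive_message_length', 1024),  # reject inbound messages larger than 1 KiB
    )
    server = aio.server(options=options)
    port = server.add_insecure_port('localhost:0')  # 0 lets the OS pick a free port
    await server.start()
    async with aio.insecure_channel(f'localhost:{port}', options=options) as channel:
        await channel.channel_ready()               # wait until the connection is READY
    await server.stop(None)


asyncio.run(main())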
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_ready_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_ready_test.py
index 391df3a612..75e4703d86 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_ready_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_ready_test.py
@@ -1,69 +1,69 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Testing the channel_ready function."""
-
-import asyncio
-import gc
-import logging
-import socket
-import time
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests.unit.framework.common import get_socket, test_constants
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-
-class TestChannelReady(AioTestBase):
-
- async def setUp(self):
- address, self._port, self._socket = get_socket(
- listen=False, sock_options=(socket.SO_REUSEADDR,))
- self._channel = aio.insecure_channel(f"{address}:{self._port}")
- self._socket.close()
-
- async def tearDown(self):
- await self._channel.close()
-
- async def test_channel_ready_success(self):
- # Start `channel_ready` as another Task
- channel_ready_task = self.loop.create_task(
- self._channel.channel_ready())
-
- # Wait for TRANSIENT_FAILURE
- await _common.block_until_certain_state(
- self._channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE)
-
- try:
- # Start the server
- _, server = await start_test_server(port=self._port)
-
- # The RPC should recover itself
- await channel_ready_task
- finally:
- await server.stop(None)
-
- async def test_channel_ready_blocked(self):
- with self.assertRaises(asyncio.TimeoutError):
- await asyncio.wait_for(self._channel.channel_ready(),
- test_constants.SHORT_TIMEOUT)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Testing the channel_ready function."""
+
+import asyncio
+import gc
+import logging
+import socket
+import time
+import unittest
+
+import grpc
+from grpc.experimental import aio
+
+from tests.unit.framework.common import get_socket, test_constants
+from tests_aio.unit import _common
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._test_server import start_test_server
+
+
+class TestChannelReady(AioTestBase):
+
+ async def setUp(self):
+ address, self._port, self._socket = get_socket(
+ listen=False, sock_options=(socket.SO_REUSEADDR,))
+ self._channel = aio.insecure_channel(f"{address}:{self._port}")
+ self._socket.close()
+
+ async def tearDown(self):
+ await self._channel.close()
+
+ async def test_channel_ready_success(self):
+ # Start `channel_ready` as another Task
+ channel_ready_task = self.loop.create_task(
+ self._channel.channel_ready())
+
+ # Wait for TRANSIENT_FAILURE
+ await _common.block_until_certain_state(
+ self._channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE)
+
+ try:
+ # Start the server
+ _, server = await start_test_server(port=self._port)
+
+ # The RPC should recover itself
+ await channel_ready_task
+ finally:
+ await server.stop(None)
+
+ async def test_channel_ready_blocked(self):
+ with self.assertRaises(asyncio.TimeoutError):
+ await asyncio.wait_for(self._channel.channel_ready(),
+ test_constants.SHORT_TIMEOUT)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
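The pattern above (awaiting channel_ready() under asyncio.wait_for) is also the usual way to bound connection establishment in application code. A minimal sketch, assuming a grpc.aio channel to a hypothetical target; the helper name and the 5-second budget are not part of this change:

import asyncio

from grpc.experimental import aio


async def wait_until_ready(target: str, timeout: float) -> bool:
    # Returns True if the channel reaches READY within `timeout` seconds.
    async with aio.insecure_channel(target) as channel:
        try:
            await asyncio.wait_for(channel.channel_ready(), timeout)
            return True
        except asyncio.TimeoutError:
            return False


# Example: asyncio.run(wait_until_ready('localhost:50051', 5.0))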
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py
index d09a4c193e..58cd555491 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py
@@ -11,220 +11,220 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Tests behavior of the grpc.aio.Channel class."""
-
+"""Tests behavior of the grpc.aio.Channel class."""
+
import logging
-import os
+import os
import unittest
import grpc
-from grpc.experimental import aio
+from grpc.experimental import aio
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
from tests.unit.framework.common import test_constants
-from tests_aio.unit._constants import (UNARY_CALL_WITH_SLEEP_VALUE,
- UNREACHABLE_TARGET)
-from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._constants import (UNARY_CALL_WITH_SLEEP_VALUE,
+ UNREACHABLE_TARGET)
+from tests_aio.unit._test_base import AioTestBase
from tests_aio.unit._test_server import start_test_server
_UNARY_CALL_METHOD = '/grpc.testing.TestService/UnaryCall'
-_UNARY_CALL_METHOD_WITH_SLEEP = '/grpc.testing.TestService/UnaryCallWithSleep'
-_STREAMING_OUTPUT_CALL_METHOD = '/grpc.testing.TestService/StreamingOutputCall'
+_UNARY_CALL_METHOD_WITH_SLEEP = '/grpc.testing.TestService/UnaryCallWithSleep'
+_STREAMING_OUTPUT_CALL_METHOD = '/grpc.testing.TestService/StreamingOutputCall'
-_INVOCATION_METADATA = (
- ('x-grpc-test-echo-initial', 'initial-md-value'),
- ('x-grpc-test-echo-trailing-bin', b'\x00\x02'),
-)
+_INVOCATION_METADATA = (
+ ('x-grpc-test-echo-initial', 'initial-md-value'),
+ ('x-grpc-test-echo-trailing-bin', b'\x00\x02'),
+)
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
+_NUM_STREAM_RESPONSES = 5
+_REQUEST_PAYLOAD_SIZE = 7
+_RESPONSE_PAYLOAD_SIZE = 42
-class TestChannel(AioTestBase):
+class TestChannel(AioTestBase):
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
+ async def setUp(self):
+ self._server_target, self._server = await start_test_server()
- async def tearDown(self):
- await self._server.stop(None)
+ async def tearDown(self):
+ await self._server.stop(None)
- async def test_async_context(self):
- async with aio.insecure_channel(self._server_target) as channel:
- hi = channel.unary_unary(
- _UNARY_CALL_METHOD,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- await hi(messages_pb2.SimpleRequest())
+ async def test_async_context(self):
+ async with aio.insecure_channel(self._server_target) as channel:
+ hi = channel.unary_unary(
+ _UNARY_CALL_METHOD,
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ await hi(messages_pb2.SimpleRequest())
- async def test_unary_unary(self):
- async with aio.insecure_channel(self._server_target) as channel:
+ async def test_unary_unary(self):
+ async with aio.insecure_channel(self._server_target) as channel:
hi = channel.unary_unary(
_UNARY_CALL_METHOD,
request_serializer=messages_pb2.SimpleRequest.SerializeToString,
response_deserializer=messages_pb2.SimpleResponse.FromString)
response = await hi(messages_pb2.SimpleRequest())
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
-
- async def test_unary_call_times_out(self):
- async with aio.insecure_channel(self._server_target) as channel:
- hi = channel.unary_unary(
- _UNARY_CALL_METHOD_WITH_SLEEP,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString,
- )
-
- with self.assertRaises(grpc.RpcError) as exception_context:
- await hi(messages_pb2.SimpleRequest(),
- timeout=UNARY_CALL_WITH_SLEEP_VALUE / 2)
-
- _, details = grpc.StatusCode.DEADLINE_EXCEEDED.value # pylint: disable=unused-variable
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED,
- exception_context.exception.code())
- self.assertEqual(details.title(),
- exception_context.exception.details())
- self.assertIsNotNone(exception_context.exception.initial_metadata())
- self.assertIsNotNone(
- exception_context.exception.trailing_metadata())
-
- @unittest.skipIf(os.name == 'nt',
- 'TODO: https://github.com/grpc/grpc/issues/21658')
- async def test_unary_call_does_not_times_out(self):
- async with aio.insecure_channel(self._server_target) as channel:
- hi = channel.unary_unary(
- _UNARY_CALL_METHOD_WITH_SLEEP,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString,
- )
-
- call = hi(messages_pb2.SimpleRequest(),
- timeout=UNARY_CALL_WITH_SLEEP_VALUE * 5)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_unary_stream(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- # Invokes the actual RPC
- call = stub.StreamingOutputCall(request)
-
- # Validates the responses
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- await channel.close()
-
- async def test_stream_unary_using_write(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Invokes the actual RPC
- call = stub.StreamingInputCall()
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- # Sends out requests
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- await call.done_writing()
-
- # Validates the responses
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- await channel.close()
-
- async def test_stream_unary_using_async_gen(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- # Invokes the actual RPC
- call = stub.StreamingInputCall(gen())
-
- # Validates the responses
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- await channel.close()
-
- async def test_stream_stream_using_read_write(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Invokes the actual RPC
- call = stub.FullDuplexCall()
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- await call.done_writing()
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
- await channel.close()
-
- async def test_stream_stream_using_async_gen(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- # Invokes the actual RPC
- call = stub.FullDuplexCall(gen())
-
- async for response in call:
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
- await channel.close()
-
-
+ self.assertIsInstance(response, messages_pb2.SimpleResponse)
+
+ async def test_unary_call_times_out(self):
+ async with aio.insecure_channel(self._server_target) as channel:
+ hi = channel.unary_unary(
+ _UNARY_CALL_METHOD_WITH_SLEEP,
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString,
+ )
+
+ with self.assertRaises(grpc.RpcError) as exception_context:
+ await hi(messages_pb2.SimpleRequest(),
+ timeout=UNARY_CALL_WITH_SLEEP_VALUE / 2)
+
+ _, details = grpc.StatusCode.DEADLINE_EXCEEDED.value # pylint: disable=unused-variable
+ self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED,
+ exception_context.exception.code())
+ self.assertEqual(details.title(),
+ exception_context.exception.details())
+ self.assertIsNotNone(exception_context.exception.initial_metadata())
+ self.assertIsNotNone(
+ exception_context.exception.trailing_metadata())
+
+ @unittest.skipIf(os.name == 'nt',
+ 'TODO: https://github.com/grpc/grpc/issues/21658')
+ async def test_unary_call_does_not_times_out(self):
+ async with aio.insecure_channel(self._server_target) as channel:
+ hi = channel.unary_unary(
+ _UNARY_CALL_METHOD_WITH_SLEEP,
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString,
+ )
+
+ call = hi(messages_pb2.SimpleRequest(),
+ timeout=UNARY_CALL_WITH_SLEEP_VALUE * 5)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_unary_stream(self):
+ channel = aio.insecure_channel(self._server_target)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ # Invokes the actual RPC
+ call = stub.StreamingOutputCall(request)
+
+ # Validates the responses
+ response_cnt = 0
+ async for response in call:
+ response_cnt += 1
+ self.assertIs(type(response),
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ await channel.close()
+
+ async def test_stream_unary_using_write(self):
+ channel = aio.insecure_channel(self._server_target)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ # Invokes the actual RPC
+ call = stub.StreamingInputCall()
+
+ # Prepares the request
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ # Sends out requests
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+ await call.done_writing()
+
+ # Validates the responses
+ response = await call
+ self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
+ self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ await channel.close()
+
+ async def test_stream_unary_using_async_gen(self):
+ channel = aio.insecure_channel(self._server_target)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ # Prepares the request
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ async def gen():
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield request
+
+ # Invokes the actual RPC
+ call = stub.StreamingInputCall(gen())
+
+ # Validates the responses
+ response = await call
+ self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
+ self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ await channel.close()
+
+ async def test_stream_stream_using_read_write(self):
+ channel = aio.insecure_channel(self._server_target)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ # Invokes the actual RPC
+ call = stub.FullDuplexCall()
+
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+ response = await call.read()
+ self.assertIsInstance(response,
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ await call.done_writing()
+
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+ await channel.close()
+
+ async def test_stream_stream_using_async_gen(self):
+ channel = aio.insecure_channel(self._server_target)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ async def gen():
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield request
+
+ # Invokes the actual RPC
+ call = stub.FullDuplexCall(gen())
+
+ async for response in call:
+ self.assertIsInstance(response,
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+ await channel.close()
+
+
if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
+ logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
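The tests above build multicallables directly with channel.unary_unary and explicit protobuf (de)serializers. A minimal sketch of the same API without protobufs, assuming a hypothetical server at localhost:50051 exposing /demo.Echo/UnaryEcho that accepts and returns raw bytes (none of these names come from this commit):

import asyncio

from grpc.experimental import aio


async def unary_echo(payload: bytes) -> bytes:
    async with aio.insecure_channel('localhost:50051') as channel:
        # With no request_serializer/response_deserializer, the multicallable
        # sends and receives raw bytes.
        echo = channel.unary_unary('/demo.Echo/UnaryEcho')
        return await echo(payload, timeout=5.0)


# Example: asyncio.run(unary_echo(b'ping'))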
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_stream_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_stream_interceptor_test.py
index ccf5855de1..ce6a7bc04d 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_stream_interceptor_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_stream_interceptor_test.py
@@ -1,202 +1,202 @@
-# Copyright 2020 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import unittest
-
-import grpc
-
-from grpc.experimental import aio
-from tests_aio.unit._common import CountingResponseIterator, CountingRequestIterator
-from tests_aio.unit._test_server import start_test_server
-from tests_aio.unit._test_base import AioTestBase
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-
-_NUM_STREAM_RESPONSES = 5
-_NUM_STREAM_REQUESTS = 5
-_RESPONSE_PAYLOAD_SIZE = 7
-
-
-class _StreamStreamInterceptorEmpty(aio.StreamStreamClientInterceptor):
-
- async def intercept_stream_stream(self, continuation, client_call_details,
- request_iterator):
- return await continuation(client_call_details, request_iterator)
-
- def assert_in_final_state(self, test: unittest.TestCase):
- pass
-
-
-class _StreamStreamInterceptorWithRequestAndResponseIterator(
- aio.StreamStreamClientInterceptor):
-
- async def intercept_stream_stream(self, continuation, client_call_details,
- request_iterator):
- self.request_iterator = CountingRequestIterator(request_iterator)
- call = await continuation(client_call_details, self.request_iterator)
- self.response_iterator = CountingResponseIterator(call)
- return self.response_iterator
-
- def assert_in_final_state(self, test: unittest.TestCase):
- test.assertEqual(_NUM_STREAM_REQUESTS,
- self.request_iterator.request_cnt)
- test.assertEqual(_NUM_STREAM_RESPONSES,
- self.response_iterator.response_cnt)
-
-
-class TestStreamStreamClientInterceptor(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_intercepts(self):
-
- for interceptor_class in (
- _StreamStreamInterceptorEmpty,
- _StreamStreamInterceptorWithRequestAndResponseIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE))
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.FullDuplexCall(request_iterator())
-
- await call.wait_for_connection()
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIsInstance(
- response, messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_intercepts_using_write_and_read(self):
- for interceptor_class in (
- _StreamStreamInterceptorEmpty,
- _StreamStreamInterceptorWithRequestAndResponseIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE))
-
- call = stub.FullDuplexCall()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- self.assertIsInstance(
- response, messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- await call.done_writing()
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_multiple_interceptors_request_iterator(self):
- for interceptor_class in (
- _StreamStreamInterceptorEmpty,
- _StreamStreamInterceptorWithRequestAndResponseIterator):
-
- with self.subTest(name=interceptor_class):
-
- interceptors = [interceptor_class(), interceptor_class()]
- channel = aio.insecure_channel(self._server_target,
- interceptors=interceptors)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE))
-
- call = stub.FullDuplexCall()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- self.assertIsInstance(
- response, messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- await call.done_writing()
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- for interceptor in interceptors:
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+import unittest
+
+import grpc
+
+from grpc.experimental import aio
+from tests_aio.unit._common import CountingResponseIterator, CountingRequestIterator
+from tests_aio.unit._test_server import start_test_server
+from tests_aio.unit._test_base import AioTestBase
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+
+_NUM_STREAM_RESPONSES = 5
+_NUM_STREAM_REQUESTS = 5
+_RESPONSE_PAYLOAD_SIZE = 7
+
+
+class _StreamStreamInterceptorEmpty(aio.StreamStreamClientInterceptor):
+
+ async def intercept_stream_stream(self, continuation, client_call_details,
+ request_iterator):
+ return await continuation(client_call_details, request_iterator)
+
+ def assert_in_final_state(self, test: unittest.TestCase):
+ pass
+
+
+class _StreamStreamInterceptorWithRequestAndResponseIterator(
+ aio.StreamStreamClientInterceptor):
+
+ async def intercept_stream_stream(self, continuation, client_call_details,
+ request_iterator):
+ self.request_iterator = CountingRequestIterator(request_iterator)
+ call = await continuation(client_call_details, self.request_iterator)
+ self.response_iterator = CountingResponseIterator(call)
+ return self.response_iterator
+
+ def assert_in_final_state(self, test: unittest.TestCase):
+ test.assertEqual(_NUM_STREAM_REQUESTS,
+ self.request_iterator.request_cnt)
+ test.assertEqual(_NUM_STREAM_RESPONSES,
+ self.response_iterator.response_cnt)
+
+
+class TestStreamStreamClientInterceptor(AioTestBase):
+
+ async def setUp(self):
+ self._server_target, self._server = await start_test_server()
+
+ async def tearDown(self):
+ await self._server.stop(None)
+
+ async def test_intercepts(self):
+
+ for interceptor_class in (
+ _StreamStreamInterceptorEmpty,
+ _StreamStreamInterceptorWithRequestAndResponseIterator):
+
+ with self.subTest(name=interceptor_class):
+ interceptor = interceptor_class()
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[interceptor])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE))
+
+ async def request_iterator():
+ for _ in range(_NUM_STREAM_REQUESTS):
+ yield request
+
+ call = stub.FullDuplexCall(request_iterator())
+
+ await call.wait_for_connection()
+
+ response_cnt = 0
+ async for response in call:
+ response_cnt += 1
+ self.assertIsInstance(
+ response, messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
+ len(response.payload.body))
+
+ self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+ self.assertEqual(await call.details(), '')
+ self.assertEqual(await call.debug_error_string(), '')
+ self.assertEqual(call.cancel(), False)
+ self.assertEqual(call.cancelled(), False)
+ self.assertEqual(call.done(), True)
+
+ interceptor.assert_in_final_state(self)
+
+ await channel.close()
+
+ async def test_intercepts_using_write_and_read(self):
+ for interceptor_class in (
+ _StreamStreamInterceptorEmpty,
+ _StreamStreamInterceptorWithRequestAndResponseIterator):
+
+ with self.subTest(name=interceptor_class):
+ interceptor = interceptor_class()
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[interceptor])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE))
+
+ call = stub.FullDuplexCall()
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+ response = await call.read()
+ self.assertIsInstance(
+ response, messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
+ len(response.payload.body))
+
+ await call.done_writing()
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+ self.assertEqual(await call.details(), '')
+ self.assertEqual(await call.debug_error_string(), '')
+ self.assertEqual(call.cancel(), False)
+ self.assertEqual(call.cancelled(), False)
+ self.assertEqual(call.done(), True)
+
+ interceptor.assert_in_final_state(self)
+
+ await channel.close()
+
+ async def test_multiple_interceptors_request_iterator(self):
+ for interceptor_class in (
+ _StreamStreamInterceptorEmpty,
+ _StreamStreamInterceptorWithRequestAndResponseIterator):
+
+ with self.subTest(name=interceptor_class):
+
+ interceptors = [interceptor_class(), interceptor_class()]
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=interceptors)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(
+ size=_RESPONSE_PAYLOAD_SIZE))
+
+ call = stub.FullDuplexCall()
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+ response = await call.read()
+ self.assertIsInstance(
+ response, messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
+ len(response.payload.body))
+
+ await call.done_writing()
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+ self.assertEqual(await call.details(), '')
+ self.assertEqual(await call.debug_error_string(), '')
+ self.assertEqual(call.cancel(), False)
+ self.assertEqual(call.cancelled(), False)
+ self.assertEqual(call.done(), True)
+
+ for interceptor in interceptors:
+ interceptor.assert_in_final_state(self)
+
+ await channel.close()
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
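The interceptors above either pass the call through untouched or wrap the request and response iterators to count messages. A minimal pass-through variant of the same aio.StreamStreamClientInterceptor shape, with an added log line that is illustrative and not part of this commit:

import logging

from grpc.experimental import aio


class LoggingStreamStreamInterceptor(aio.StreamStreamClientInterceptor):

    async def intercept_stream_stream(self, continuation, client_call_details,
                                      request_iterator):
        # Record the invoked method, then delegate to the next handler unchanged.
        logging.info('stream-stream RPC to %s', client_call_details.method)
        return await continuation(client_call_details, request_iterator)


# Usage: aio.insecure_channel(target, interceptors=[LoggingStreamStreamInterceptor()])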
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_unary_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_unary_interceptor_test.py
index 0c07e2ce74..b9a04af00d 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_unary_interceptor_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_unary_interceptor_test.py
@@ -1,517 +1,517 @@
-# Copyright 2020 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import asyncio
-import logging
-import unittest
-import datetime
-
-import grpc
-
-from grpc.experimental import aio
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-from tests_aio.unit._common import inject_callbacks
-from tests_aio.unit._common import CountingRequestIterator
-from tests_aio.unit._test_server import start_test_server
-from tests_aio.unit._test_base import AioTestBase
-from tests.unit.framework.common import test_constants
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-
-_SHORT_TIMEOUT_S = 1.0
-
-_NUM_STREAM_REQUESTS = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
-
-
-class _StreamUnaryInterceptorEmpty(aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation, client_call_details,
- request_iterator):
- return await continuation(client_call_details, request_iterator)
-
- def assert_in_final_state(self, test: unittest.TestCase):
- pass
-
-
-class _StreamUnaryInterceptorWithRequestIterator(
- aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation, client_call_details,
- request_iterator):
- self.request_iterator = CountingRequestIterator(request_iterator)
- call = await continuation(client_call_details, self.request_iterator)
- return call
-
- def assert_in_final_state(self, test: unittest.TestCase):
- test.assertEqual(_NUM_STREAM_REQUESTS,
- self.request_iterator.request_cnt)
-
-
-class TestStreamUnaryClientInterceptor(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_intercepts(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- response = await call
-
- self.assertEqual(_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_intercepts_using_write(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- call = stub.StreamingInputCall()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- await call.done_writing()
-
- response = await call
-
- self.assertEqual(_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_add_done_callback_interceptor_task_not_finished(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- validation = inject_callbacks(call)
-
- response = await call
-
- await validation
-
- await channel.close()
-
- async def test_add_done_callback_interceptor_task_finished(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- response = await call
-
- validation = inject_callbacks(call)
-
- await validation
-
- await channel.close()
-
- async def test_multiple_interceptors_request_iterator(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
-
- interceptors = [interceptor_class(), interceptor_class()]
- channel = aio.insecure_channel(self._server_target,
- interceptors=interceptors)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- response = await call
-
- self.assertEqual(_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- for interceptor in interceptors:
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_intercepts_request_iterator_rpc_error(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- channel = aio.insecure_channel(
- UNREACHABLE_TARGET, interceptors=[interceptor_class()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- # When there is an error the request iterator is no longer
- # consumed.
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
-
- await channel.close()
-
- async def test_intercepts_request_iterator_rpc_error_using_write(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- channel = aio.insecure_channel(
- UNREACHABLE_TARGET, interceptors=[interceptor_class()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- call = stub.StreamingInputCall()
-
- # When there is an error during the write, exception is raised.
- with self.assertRaises(asyncio.InvalidStateError):
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
-
- await channel.close()
-
- async def test_cancel_before_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation,
- client_call_details,
- request_iterator):
- interceptor_reached.set()
- await wait_for_ever
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- call = stub.StreamingInputCall()
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- # When there is an error during the write, exception is raised.
- with self.assertRaises(asyncio.InvalidStateError):
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
- await channel.close()
-
- async def test_cancel_after_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation,
- client_call_details,
- request_iterator):
- call = await continuation(client_call_details, request_iterator)
- interceptor_reached.set()
- await wait_for_ever
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- call = stub.StreamingInputCall()
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- # When there is an error during the write, exception is raised.
- with self.assertRaises(asyncio.InvalidStateError):
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
- await channel.close()
-
- async def test_cancel_while_writing(self):
- # Test cancelation before making any write or after doing at least 1
- for num_writes_before_cancel in (0, 1):
- with self.subTest(name="Num writes before cancel: {}".format(
- num_writes_before_cancel)):
-
- channel = aio.insecure_channel(
- UNREACHABLE_TARGET,
- interceptors=[_StreamUnaryInterceptorWithRequestIterator()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- call = stub.StreamingInputCall()
-
- with self.assertRaises(asyncio.InvalidStateError):
- for i in range(_NUM_STREAM_REQUESTS):
- if i == num_writes_before_cancel:
- self.assertTrue(call.cancel())
- await call.write(request)
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
-
- await channel.close()
-
- async def test_cancel_by_the_interceptor(self):
-
- class Interceptor(aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation,
- client_call_details,
- request_iterator):
- call = await continuation(client_call_details, request_iterator)
- call.cancel()
- return call
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- call = stub.StreamingInputCall()
-
- with self.assertRaises(asyncio.InvalidStateError):
- for i in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
-
- await channel.close()
-
- async def test_exception_raised_by_interceptor(self):
-
- class InterceptorException(Exception):
- pass
-
- class Interceptor(aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation,
- client_call_details,
- request_iterator):
- raise InterceptorException
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- call = stub.StreamingInputCall()
-
- with self.assertRaises(InterceptorException):
- for i in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- with self.assertRaises(InterceptorException):
- await call
-
- await channel.close()
-
- async def test_intercepts_prohibit_mixing_style(self):
- channel = aio.insecure_channel(
- self._server_target, interceptors=[_StreamUnaryInterceptorEmpty()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- with self.assertRaises(grpc._cython.cygrpc.UsageError):
- await call.write(request)
-
- with self.assertRaises(grpc._cython.cygrpc.UsageError):
- await call.done_writing()
-
- await channel.close()
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import asyncio
+import logging
+import unittest
+import datetime
+
+import grpc
+
+from grpc.experimental import aio
+from tests_aio.unit._constants import UNREACHABLE_TARGET
+from tests_aio.unit._common import inject_callbacks
+from tests_aio.unit._common import CountingRequestIterator
+from tests_aio.unit._test_server import start_test_server
+from tests_aio.unit._test_base import AioTestBase
+from tests.unit.framework.common import test_constants
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+
+_SHORT_TIMEOUT_S = 1.0
+
+_NUM_STREAM_REQUESTS = 5
+_REQUEST_PAYLOAD_SIZE = 7
+_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
+
+
+class _StreamUnaryInterceptorEmpty(aio.StreamUnaryClientInterceptor):
+
+ async def intercept_stream_unary(self, continuation, client_call_details,
+ request_iterator):
+ return await continuation(client_call_details, request_iterator)
+
+ def assert_in_final_state(self, test: unittest.TestCase):
+ pass
+
+
+class _StreamUnaryInterceptorWithRequestIterator(
+ aio.StreamUnaryClientInterceptor):
+
+ async def intercept_stream_unary(self, continuation, client_call_details,
+ request_iterator):
+ self.request_iterator = CountingRequestIterator(request_iterator)
+ call = await continuation(client_call_details, self.request_iterator)
+ return call
+
+ def assert_in_final_state(self, test: unittest.TestCase):
+ test.assertEqual(_NUM_STREAM_REQUESTS,
+ self.request_iterator.request_cnt)
+
+
+class TestStreamUnaryClientInterceptor(AioTestBase):
+
+ async def setUp(self):
+ self._server_target, self._server = await start_test_server()
+
+ async def tearDown(self):
+ await self._server.stop(None)
+
+ async def test_intercepts(self):
+ for interceptor_class in (_StreamUnaryInterceptorEmpty,
+ _StreamUnaryInterceptorWithRequestIterator):
+
+ with self.subTest(name=interceptor_class):
+ interceptor = interceptor_class()
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[interceptor])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' *
+ _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(
+ payload=payload)
+
+ async def request_iterator():
+ for _ in range(_NUM_STREAM_REQUESTS):
+ yield request
+
+ call = stub.StreamingInputCall(request_iterator())
+
+ response = await call
+
+ self.assertEqual(_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+ self.assertEqual(await call.details(), '')
+ self.assertEqual(await call.debug_error_string(), '')
+ self.assertEqual(call.cancel(), False)
+ self.assertEqual(call.cancelled(), False)
+ self.assertEqual(call.done(), True)
+
+ interceptor.assert_in_final_state(self)
+
+ await channel.close()
+
+ async def test_intercepts_using_write(self):
+ for interceptor_class in (_StreamUnaryInterceptorEmpty,
+ _StreamUnaryInterceptorWithRequestIterator):
+
+ with self.subTest(name=interceptor_class):
+ interceptor = interceptor_class()
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[interceptor])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' *
+ _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(
+ payload=payload)
+
+ call = stub.StreamingInputCall()
+
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(request)
+
+ await call.done_writing()
+
+ response = await call
+
+ self.assertEqual(_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+ self.assertEqual(await call.details(), '')
+ self.assertEqual(await call.debug_error_string(), '')
+ self.assertEqual(call.cancel(), False)
+ self.assertEqual(call.cancelled(), False)
+ self.assertEqual(call.done(), True)
+
+ interceptor.assert_in_final_state(self)
+
+ await channel.close()
+
+ async def test_add_done_callback_interceptor_task_not_finished(self):
+ for interceptor_class in (_StreamUnaryInterceptorEmpty,
+ _StreamUnaryInterceptorWithRequestIterator):
+
+ with self.subTest(name=interceptor_class):
+ interceptor = interceptor_class()
+
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[interceptor])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' *
+ _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(
+ payload=payload)
+
+ async def request_iterator():
+ for _ in range(_NUM_STREAM_REQUESTS):
+ yield request
+
+ call = stub.StreamingInputCall(request_iterator())
+
+ validation = inject_callbacks(call)
+
+ response = await call
+
+ await validation
+
+ await channel.close()
+
+ async def test_add_done_callback_interceptor_task_finished(self):
+ for interceptor_class in (_StreamUnaryInterceptorEmpty,
+ _StreamUnaryInterceptorWithRequestIterator):
+
+ with self.subTest(name=interceptor_class):
+ interceptor = interceptor_class()
+
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[interceptor])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' *
+ _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(
+ payload=payload)
+
+ async def request_iterator():
+ for _ in range(_NUM_STREAM_REQUESTS):
+ yield request
+
+ call = stub.StreamingInputCall(request_iterator())
+
+ response = await call
+
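+                # Register the done callbacks after the call has finished.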
+ validation = inject_callbacks(call)
+
+ await validation
+
+ await channel.close()
+
+ async def test_multiple_interceptors_request_iterator(self):
+ for interceptor_class in (_StreamUnaryInterceptorEmpty,
+ _StreamUnaryInterceptorWithRequestIterator):
+
+ with self.subTest(name=interceptor_class):
+
+ interceptors = [interceptor_class(), interceptor_class()]
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=interceptors)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' *
+ _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(
+ payload=payload)
+
+ async def request_iterator():
+ for _ in range(_NUM_STREAM_REQUESTS):
+ yield request
+
+ call = stub.StreamingInputCall(request_iterator())
+
+ response = await call
+
+ self.assertEqual(_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+ self.assertEqual(await call.details(), '')
+ self.assertEqual(await call.debug_error_string(), '')
+ self.assertEqual(call.cancel(), False)
+ self.assertEqual(call.cancelled(), False)
+ self.assertEqual(call.done(), True)
+
+ for interceptor in interceptors:
+ interceptor.assert_in_final_state(self)
+
+ await channel.close()
+
+ async def test_intercepts_request_iterator_rpc_error(self):
+ for interceptor_class in (_StreamUnaryInterceptorEmpty,
+ _StreamUnaryInterceptorWithRequestIterator):
+
+ with self.subTest(name=interceptor_class):
+ channel = aio.insecure_channel(
+ UNREACHABLE_TARGET, interceptors=[interceptor_class()])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' *
+ _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(
+ payload=payload)
+
+                # When there is an error, the request iterator is no longer
+                # consumed.
+ async def request_iterator():
+ for _ in range(_NUM_STREAM_REQUESTS):
+ yield request
+
+ call = stub.StreamingInputCall(request_iterator())
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE,
+ exception_context.exception.code())
+ self.assertTrue(call.done())
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
+
+ await channel.close()
+
+ async def test_intercepts_request_iterator_rpc_error_using_write(self):
+ for interceptor_class in (_StreamUnaryInterceptorEmpty,
+ _StreamUnaryInterceptorWithRequestIterator):
+
+ with self.subTest(name=interceptor_class):
+ channel = aio.insecure_channel(
+ UNREACHABLE_TARGET, interceptors=[interceptor_class()])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' *
+ _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(
+ payload=payload)
+
+ call = stub.StreamingInputCall()
+
+                # When a write fails, an exception is raised.
+ with self.assertRaises(asyncio.InvalidStateError):
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(request)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE,
+ exception_context.exception.code())
+ self.assertTrue(call.done())
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
+
+ await channel.close()
+
+ async def test_cancel_before_rpc(self):
+
+ interceptor_reached = asyncio.Event()
+ wait_for_ever = self.loop.create_future()
+
+ class Interceptor(aio.StreamUnaryClientInterceptor):
+
+ async def intercept_stream_unary(self, continuation,
+ client_call_details,
+ request_iterator):
+ interceptor_reached.set()
+ await wait_for_ever
+
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ call = stub.StreamingInputCall()
+
+ self.assertFalse(call.cancelled())
+ self.assertFalse(call.done())
+
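+        # Wait until the interceptor is blocked, then cancel the call before
+        # the underlying RPC has started.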
+ await interceptor_reached.wait()
+ self.assertTrue(call.cancel())
+
+        # Writing to the cancelled call raises an exception.
+ with self.assertRaises(asyncio.InvalidStateError):
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(request)
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ self.assertEqual(await call.initial_metadata(), None)
+ self.assertEqual(await call.trailing_metadata(), None)
+ await channel.close()
+
+ async def test_cancel_after_rpc(self):
+
+ interceptor_reached = asyncio.Event()
+ wait_for_ever = self.loop.create_future()
+
+ class Interceptor(aio.StreamUnaryClientInterceptor):
+
+ async def intercept_stream_unary(self, continuation,
+ client_call_details,
+ request_iterator):
+ call = await continuation(client_call_details, request_iterator)
+ interceptor_reached.set()
+ await wait_for_ever
+
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ call = stub.StreamingInputCall()
+
+ self.assertFalse(call.cancelled())
+ self.assertFalse(call.done())
+
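+        # The interceptor has already started the underlying RPC and is now
+        # blocked; cancel the call while it waits.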
+ await interceptor_reached.wait()
+ self.assertTrue(call.cancel())
+
+        # Writing to the cancelled call raises an exception.
+ with self.assertRaises(asyncio.InvalidStateError):
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(request)
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ self.assertEqual(await call.initial_metadata(), None)
+ self.assertEqual(await call.trailing_metadata(), None)
+ await channel.close()
+
+ async def test_cancel_while_writing(self):
+        # Test cancellation before any write and after at least one write.
+ for num_writes_before_cancel in (0, 1):
+ with self.subTest(name="Num writes before cancel: {}".format(
+ num_writes_before_cancel)):
+
+ channel = aio.insecure_channel(
+ UNREACHABLE_TARGET,
+ interceptors=[_StreamUnaryInterceptorWithRequestIterator()])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' *
+ _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(
+ payload=payload)
+
+ call = stub.StreamingInputCall()
+
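+                # After the call is cancelled inside the loop, the next
+                # write() fails with InvalidStateError.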
+ with self.assertRaises(asyncio.InvalidStateError):
+ for i in range(_NUM_STREAM_REQUESTS):
+ if i == num_writes_before_cancel:
+ self.assertTrue(call.cancel())
+ await call.write(request)
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+
+ await channel.close()
+
+ async def test_cancel_by_the_interceptor(self):
+
+ class Interceptor(aio.StreamUnaryClientInterceptor):
+
+ async def intercept_stream_unary(self, continuation,
+ client_call_details,
+ request_iterator):
+ call = await continuation(client_call_details, request_iterator)
+ call.cancel()
+ return call
+
+ channel = aio.insecure_channel(UNREACHABLE_TARGET,
+ interceptors=[Interceptor()])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ call = stub.StreamingInputCall()
+
+ with self.assertRaises(asyncio.InvalidStateError):
+ for i in range(_NUM_STREAM_REQUESTS):
+ await call.write(request)
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+
+ await channel.close()
+
+ async def test_exception_raised_by_interceptor(self):
+
+ class InterceptorException(Exception):
+ pass
+
+ class Interceptor(aio.StreamUnaryClientInterceptor):
+
+ async def intercept_stream_unary(self, continuation,
+ client_call_details,
+ request_iterator):
+ raise InterceptorException
+
+ channel = aio.insecure_channel(UNREACHABLE_TARGET,
+ interceptors=[Interceptor()])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ call = stub.StreamingInputCall()
+
+ with self.assertRaises(InterceptorException):
+ for i in range(_NUM_STREAM_REQUESTS):
+ await call.write(request)
+
+ with self.assertRaises(InterceptorException):
+ await call
+
+ await channel.close()
+
+ async def test_intercepts_prohibit_mixing_style(self):
+ channel = aio.insecure_channel(
+ self._server_target, interceptors=[_StreamUnaryInterceptorEmpty()])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ async def request_iterator():
+ for _ in range(_NUM_STREAM_REQUESTS):
+ yield request
+
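+        # The call uses an explicit request iterator, so mixing in the
+        # write()/done_writing() API below is rejected.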
+ call = stub.StreamingInputCall(request_iterator())
+
+ with self.assertRaises(grpc._cython.cygrpc.UsageError):
+ await call.write(request)
+
+ with self.assertRaises(grpc._cython.cygrpc.UsageError):
+ await call.done_writing()
+
+ await channel.close()
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_stream_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_stream_interceptor_test.py
index 51b10c4bd6..fd542fd16e 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_stream_interceptor_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_stream_interceptor_test.py
@@ -1,395 +1,395 @@
-# Copyright 2020 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import asyncio
-import logging
-import unittest
-import datetime
-
-import grpc
-
-from grpc.experimental import aio
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-from tests_aio.unit._common import inject_callbacks
-from tests_aio.unit._common import CountingResponseIterator
-from tests_aio.unit._test_server import start_test_server
-from tests_aio.unit._test_base import AioTestBase
-from tests.unit.framework.common import test_constants
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-
-_SHORT_TIMEOUT_S = 1.0
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 7
-_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
-
-
-class _UnaryStreamInterceptorEmpty(aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation, client_call_details,
- request):
- return await continuation(client_call_details, request)
-
- def assert_in_final_state(self, test: unittest.TestCase):
- pass
-
-
-class _UnaryStreamInterceptorWithResponseIterator(
- aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation, client_call_details,
- request):
- call = await continuation(client_call_details, request)
- self.response_iterator = CountingResponseIterator(call)
- return self.response_iterator
-
- def assert_in_final_state(self, test: unittest.TestCase):
- test.assertEqual(_NUM_STREAM_RESPONSES,
- self.response_iterator.response_cnt)
-
-
-class TestUnaryStreamClientInterceptor(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_intercepts(self):
- for interceptor_class in (_UnaryStreamInterceptorEmpty,
- _UnaryStreamInterceptorWithResponseIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend([
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
- ] * _NUM_STREAM_RESPONSES)
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- await call.wait_for_connection()
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_add_done_callback_interceptor_task_not_finished(self):
- for interceptor_class in (_UnaryStreamInterceptorEmpty,
- _UnaryStreamInterceptorWithResponseIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend([
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
- ] * _NUM_STREAM_RESPONSES)
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- validation = inject_callbacks(call)
-
- async for response in call:
- pass
-
- await validation
-
- await channel.close()
-
- async def test_add_done_callback_interceptor_task_finished(self):
- for interceptor_class in (_UnaryStreamInterceptorEmpty,
- _UnaryStreamInterceptorWithResponseIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend([
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
- ] * _NUM_STREAM_RESPONSES)
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
-                # This ensures that the callbacks will be registered
-                # with the intercepted call rather than saved in the
-                # pending state list.
- await call.wait_for_connection()
-
- validation = inject_callbacks(call)
-
- async for response in call:
- pass
-
- await validation
-
- await channel.close()
-
- async def test_response_iterator_using_read(self):
- interceptor = _UnaryStreamInterceptorWithResponseIterator()
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend(
- [messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)] *
- _NUM_STREAM_RESPONSES)
-
- call = stub.StreamingOutputCall(request)
-
- response_cnt = 0
-        for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- response_cnt += 1
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
- self.assertEqual(interceptor.response_iterator.response_cnt,
- _NUM_STREAM_RESPONSES)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- await channel.close()
-
- async def test_multiple_interceptors_response_iterator(self):
- for interceptor_class in (_UnaryStreamInterceptorEmpty,
- _UnaryStreamInterceptorWithResponseIterator):
-
- with self.subTest(name=interceptor_class):
-
- interceptors = [interceptor_class(), interceptor_class()]
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=interceptors)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend([
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
- ] * _NUM_STREAM_RESPONSES)
-
- call = stub.StreamingOutputCall(request)
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- await channel.close()
-
- async def test_intercepts_response_iterator_rpc_error(self):
- for interceptor_class in (_UnaryStreamInterceptorEmpty,
- _UnaryStreamInterceptorWithResponseIterator):
-
- with self.subTest(name=interceptor_class):
-
- channel = aio.insecure_channel(
- UNREACHABLE_TARGET, interceptors=[interceptor_class()])
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- async for response in call:
- pass
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
- await channel.close()
-
- async def test_cancel_before_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation,
- client_call_details, request):
- interceptor_reached.set()
- await wait_for_ever
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- async for response in call:
- pass
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
- await channel.close()
-
- async def test_cancel_after_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- interceptor_reached.set()
- await wait_for_ever
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- async for response in call:
- pass
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
- await channel.close()
-
- async def test_cancel_consuming_response_iterator(self):
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend(
- [messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)] *
- _NUM_STREAM_RESPONSES)
-
- channel = aio.insecure_channel(
- self._server_target,
- interceptors=[_UnaryStreamInterceptorWithResponseIterator()])
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- with self.assertRaises(asyncio.CancelledError):
- async for response in call:
- call.cancel()
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- await channel.close()
-
- async def test_cancel_by_the_interceptor(self):
-
- class Interceptor(aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- call.cancel()
- return call
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- with self.assertRaises(asyncio.CancelledError):
- async for response in call:
- pass
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- await channel.close()
-
- async def test_exception_raised_by_interceptor(self):
-
- class InterceptorException(Exception):
- pass
-
- class Interceptor(aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation,
- client_call_details, request):
- raise InterceptorException
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- with self.assertRaises(InterceptorException):
- async for response in call:
- pass
-
- await channel.close()
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import asyncio
+import logging
+import unittest
+import datetime
+
+import grpc
+
+from grpc.experimental import aio
+from tests_aio.unit._constants import UNREACHABLE_TARGET
+from tests_aio.unit._common import inject_callbacks
+from tests_aio.unit._common import CountingResponseIterator
+from tests_aio.unit._test_server import start_test_server
+from tests_aio.unit._test_base import AioTestBase
+from tests.unit.framework.common import test_constants
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+
+_SHORT_TIMEOUT_S = 1.0
+
+_NUM_STREAM_RESPONSES = 5
+_REQUEST_PAYLOAD_SIZE = 7
+_RESPONSE_PAYLOAD_SIZE = 7
+_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
+
+
+class _UnaryStreamInterceptorEmpty(aio.UnaryStreamClientInterceptor):
+
+ async def intercept_unary_stream(self, continuation, client_call_details,
+ request):
+ return await continuation(client_call_details, request)
+
+ def assert_in_final_state(self, test: unittest.TestCase):
+ pass
+
+
+class _UnaryStreamInterceptorWithResponseIterator(
+ aio.UnaryStreamClientInterceptor):
+
+ async def intercept_unary_stream(self, continuation, client_call_details,
+ request):
+ call = await continuation(client_call_details, request)
+ self.response_iterator = CountingResponseIterator(call)
+ return self.response_iterator
+
+ def assert_in_final_state(self, test: unittest.TestCase):
+ test.assertEqual(_NUM_STREAM_RESPONSES,
+ self.response_iterator.response_cnt)
+
+
+class TestUnaryStreamClientInterceptor(AioTestBase):
+
+ async def setUp(self):
+ self._server_target, self._server = await start_test_server()
+
+ async def tearDown(self):
+ await self._server.stop(None)
+
+ async def test_intercepts(self):
+ for interceptor_class in (_UnaryStreamInterceptorEmpty,
+ _UnaryStreamInterceptorWithResponseIterator):
+
+ with self.subTest(name=interceptor_class):
+ interceptor = interceptor_class()
+
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.extend([
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
+ ] * _NUM_STREAM_RESPONSES)
+
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[interceptor])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.StreamingOutputCall(request)
+
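+                # Wait for the underlying RPC to start before consuming
+                # the responses.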
+ await call.wait_for_connection()
+
+ response_cnt = 0
+ async for response in call:
+ response_cnt += 1
+ self.assertIs(type(response),
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
+ len(response.payload.body))
+
+ self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+ self.assertEqual(await call.details(), '')
+ self.assertEqual(await call.debug_error_string(), '')
+ self.assertEqual(call.cancel(), False)
+ self.assertEqual(call.cancelled(), False)
+ self.assertEqual(call.done(), True)
+
+ interceptor.assert_in_final_state(self)
+
+ await channel.close()
+
+ async def test_add_done_callback_interceptor_task_not_finished(self):
+ for interceptor_class in (_UnaryStreamInterceptorEmpty,
+ _UnaryStreamInterceptorWithResponseIterator):
+
+ with self.subTest(name=interceptor_class):
+ interceptor = interceptor_class()
+
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.extend([
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
+ ] * _NUM_STREAM_RESPONSES)
+
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[interceptor])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.StreamingOutputCall(request)
+
+ validation = inject_callbacks(call)
+
+ async for response in call:
+ pass
+
+ await validation
+
+ await channel.close()
+
+ async def test_add_done_callback_interceptor_task_finished(self):
+ for interceptor_class in (_UnaryStreamInterceptorEmpty,
+ _UnaryStreamInterceptorWithResponseIterator):
+
+ with self.subTest(name=interceptor_class):
+ interceptor = interceptor_class()
+
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.extend([
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
+ ] * _NUM_STREAM_RESPONSES)
+
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[interceptor])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.StreamingOutputCall(request)
+
+                # This ensures that the callbacks will be registered
+                # with the intercepted call rather than saved in the
+                # pending state list.
+ await call.wait_for_connection()
+
+ validation = inject_callbacks(call)
+
+ async for response in call:
+ pass
+
+ await validation
+
+ await channel.close()
+
+ async def test_response_iterator_using_read(self):
+ interceptor = _UnaryStreamInterceptorWithResponseIterator()
+
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=[interceptor])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.extend(
+ [messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)] *
+ _NUM_STREAM_RESPONSES)
+
+ call = stub.StreamingOutputCall(request)
+
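+        # Consume the responses with explicit read() calls instead of
+        # async iteration.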
+ response_cnt = 0
+        for _ in range(_NUM_STREAM_RESPONSES):
+ response = await call.read()
+ response_cnt += 1
+ self.assertIs(type(response),
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
+ self.assertEqual(interceptor.response_iterator.response_cnt,
+ _NUM_STREAM_RESPONSES)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ await channel.close()
+
+ async def test_multiple_interceptors_response_iterator(self):
+ for interceptor_class in (_UnaryStreamInterceptorEmpty,
+ _UnaryStreamInterceptorWithResponseIterator):
+
+ with self.subTest(name=interceptor_class):
+
+ interceptors = [interceptor_class(), interceptor_class()]
+
+ channel = aio.insecure_channel(self._server_target,
+ interceptors=interceptors)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.extend([
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
+ ] * _NUM_STREAM_RESPONSES)
+
+ call = stub.StreamingOutputCall(request)
+
+ response_cnt = 0
+ async for response in call:
+ response_cnt += 1
+ self.assertIs(type(response),
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
+ len(response.payload.body))
+
+ self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ await channel.close()
+
+ async def test_intercepts_response_iterator_rpc_error(self):
+ for interceptor_class in (_UnaryStreamInterceptorEmpty,
+ _UnaryStreamInterceptorWithResponseIterator):
+
+ with self.subTest(name=interceptor_class):
+
+ channel = aio.insecure_channel(
+ UNREACHABLE_TARGET, interceptors=[interceptor_class()])
+ request = messages_pb2.StreamingOutputCallRequest()
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.StreamingOutputCall(request)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ async for response in call:
+ pass
+
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE,
+ exception_context.exception.code())
+
+ self.assertTrue(call.done())
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
+ await channel.close()
+
+ async def test_cancel_before_rpc(self):
+
+ interceptor_reached = asyncio.Event()
+ wait_for_ever = self.loop.create_future()
+
+ class Interceptor(aio.UnaryStreamClientInterceptor):
+
+ async def intercept_unary_stream(self, continuation,
+ client_call_details, request):
+ interceptor_reached.set()
+ await wait_for_ever
+
+ channel = aio.insecure_channel(UNREACHABLE_TARGET,
+ interceptors=[Interceptor()])
+ request = messages_pb2.StreamingOutputCallRequest()
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.StreamingOutputCall(request)
+
+ self.assertFalse(call.cancelled())
+ self.assertFalse(call.done())
+
+ await interceptor_reached.wait()
+ self.assertTrue(call.cancel())
+
+ with self.assertRaises(asyncio.CancelledError):
+ async for response in call:
+ pass
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ self.assertEqual(await call.initial_metadata(), None)
+ self.assertEqual(await call.trailing_metadata(), None)
+ await channel.close()
+
+ async def test_cancel_after_rpc(self):
+
+ interceptor_reached = asyncio.Event()
+ wait_for_ever = self.loop.create_future()
+
+ class Interceptor(aio.UnaryStreamClientInterceptor):
+
+ async def intercept_unary_stream(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ interceptor_reached.set()
+ await wait_for_ever
+
+ channel = aio.insecure_channel(UNREACHABLE_TARGET,
+ interceptors=[Interceptor()])
+ request = messages_pb2.StreamingOutputCallRequest()
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.StreamingOutputCall(request)
+
+ self.assertFalse(call.cancelled())
+ self.assertFalse(call.done())
+
+ await interceptor_reached.wait()
+ self.assertTrue(call.cancel())
+
+ with self.assertRaises(asyncio.CancelledError):
+ async for response in call:
+ pass
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ self.assertEqual(await call.initial_metadata(), None)
+ self.assertEqual(await call.trailing_metadata(), None)
+ await channel.close()
+
+ async def test_cancel_consuming_response_iterator(self):
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.extend(
+ [messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)] *
+ _NUM_STREAM_RESPONSES)
+
+ channel = aio.insecure_channel(
+ self._server_target,
+ interceptors=[_UnaryStreamInterceptorWithResponseIterator()])
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.StreamingOutputCall(request)
+
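+        # Cancelling while consuming the response iterator raises
+        # CancelledError.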
+ with self.assertRaises(asyncio.CancelledError):
+ async for response in call:
+ call.cancel()
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ await channel.close()
+
+ async def test_cancel_by_the_interceptor(self):
+
+ class Interceptor(aio.UnaryStreamClientInterceptor):
+
+ async def intercept_unary_stream(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ call.cancel()
+ return call
+
+ channel = aio.insecure_channel(UNREACHABLE_TARGET,
+ interceptors=[Interceptor()])
+ request = messages_pb2.StreamingOutputCallRequest()
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.StreamingOutputCall(request)
+
+ with self.assertRaises(asyncio.CancelledError):
+ async for response in call:
+ pass
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ await channel.close()
+
+ async def test_exception_raised_by_interceptor(self):
+
+ class InterceptorException(Exception):
+ pass
+
+ class Interceptor(aio.UnaryStreamClientInterceptor):
+
+ async def intercept_unary_stream(self, continuation,
+ client_call_details, request):
+ raise InterceptorException
+
+ channel = aio.insecure_channel(UNREACHABLE_TARGET,
+ interceptors=[Interceptor()])
+ request = messages_pb2.StreamingOutputCallRequest()
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.StreamingOutputCall(request)
+
+ with self.assertRaises(InterceptorException):
+ async for response in call:
+ pass
+
+ await channel.close()
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_unary_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_unary_interceptor_test.py
index 12af6629ac..e64daec7df 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_unary_interceptor_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_unary_interceptor_test.py
@@ -1,699 +1,699 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import asyncio
-import logging
-import unittest
-
-import grpc
-
-from grpc.experimental import aio
-from tests_aio.unit._test_server import start_test_server, _INITIAL_METADATA_KEY, _TRAILING_METADATA_KEY
-from tests_aio.unit import _constants
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-
-_LOCAL_CANCEL_DETAILS_EXPECTATION = 'Locally cancelled by application!'
-_INITIAL_METADATA_TO_INJECT = aio.Metadata(
- (_INITIAL_METADATA_KEY, 'extra info'),
- (_TRAILING_METADATA_KEY, b'\x13\x37'),
-)
-_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED = 1.0
-
-
-class TestUnaryUnaryClientInterceptor(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- def test_invalid_interceptor(self):
-
- class InvalidInterceptor:
- """Just an invalid Interceptor"""
-
- with self.assertRaises(ValueError):
- aio.insecure_channel("", interceptors=[InvalidInterceptor()])
-
- async def test_executed_right_order(self):
-
- interceptors_executed = []
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
- """Interceptor used for testing if the interceptor is being called"""
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- interceptors_executed.append(self)
- call = await continuation(client_call_details, request)
- return call
-
- interceptors = [Interceptor() for i in range(2)]
-
- async with aio.insecure_channel(self._server_target,
- interceptors=interceptors) as channel:
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
-
- # Check that all interceptors were executed, and were executed
- # in the right order.
- self.assertSequenceEqual(interceptors_executed, interceptors)
-
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
-
- @unittest.expectedFailure
- # TODO(https://github.com/grpc/grpc/issues/20144) Once metadata support is
- # implemented in the client-side, this test must be implemented.
- def test_modify_metadata(self):
- raise NotImplementedError()
-
- @unittest.expectedFailure
- # TODO(https://github.com/grpc/grpc/issues/20532) Once credentials support is
- # implemented in the client-side, this test must be implemented.
- def test_modify_credentials(self):
- raise NotImplementedError()
-
- async def test_status_code_Ok(self):
-
- class StatusCodeOkInterceptor(aio.UnaryUnaryClientInterceptor):
- """Interceptor used for observing status code Ok returned by the RPC"""
-
- def __init__(self):
- self.status_code_Ok_observed = False
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- code = await call.code()
- if code == grpc.StatusCode.OK:
- self.status_code_Ok_observed = True
-
- return call
-
- interceptor = StatusCodeOkInterceptor()
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[interceptor]) as channel:
-
-            # When there is no error, StatusCode.OK must be observed.
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- await multicallable(messages_pb2.SimpleRequest())
-
- self.assertTrue(interceptor.status_code_Ok_observed)
-
- async def test_add_timeout(self):
-
- class TimeoutInterceptor(aio.UnaryUnaryClientInterceptor):
- """Interceptor used for adding a timeout to the RPC"""
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- new_client_call_details = aio.ClientCallDetails(
- method=client_call_details.method,
- timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2,
- metadata=client_call_details.metadata,
- credentials=client_call_details.credentials,
- wait_for_ready=client_call_details.wait_for_ready)
- return await continuation(new_client_call_details, request)
-
- interceptor = TimeoutInterceptor()
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[interceptor]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCallWithSleep',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- call = multicallable(messages_pb2.SimpleRequest())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertEqual(exception_context.exception.code(),
- grpc.StatusCode.DEADLINE_EXCEEDED)
-
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, await
- call.code())
-
- async def test_retry(self):
-
- class RetryInterceptor(aio.UnaryUnaryClientInterceptor):
- """Simulates a Retry Interceptor which ends up by making
- two RPC calls."""
-
- def __init__(self):
- self.calls = []
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
-
- new_client_call_details = aio.ClientCallDetails(
- method=client_call_details.method,
- timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2,
- metadata=client_call_details.metadata,
- credentials=client_call_details.credentials,
- wait_for_ready=client_call_details.wait_for_ready)
-
- try:
- call = await continuation(new_client_call_details, request)
- await call
- except grpc.RpcError:
- pass
-
- self.calls.append(call)
-
- new_client_call_details = aio.ClientCallDetails(
- method=client_call_details.method,
- timeout=None,
- metadata=client_call_details.metadata,
- credentials=client_call_details.credentials,
- wait_for_ready=client_call_details.wait_for_ready)
-
- call = await continuation(new_client_call_details, request)
- self.calls.append(call)
- return call
-
- interceptor = RetryInterceptor()
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[interceptor]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCallWithSleep',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- call = multicallable(messages_pb2.SimpleRequest())
-
- await call
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
-            # Check that two calls were made: the first one finishing with
-            # a deadline error and the second one finishing OK.
- self.assertEqual(len(interceptor.calls), 2)
- self.assertEqual(await interceptor.calls[0].code(),
- grpc.StatusCode.DEADLINE_EXCEEDED)
- self.assertEqual(await interceptor.calls[1].code(),
- grpc.StatusCode.OK)
-
- async def test_rpcresponse(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
- """Raw responses are seen as reegular calls"""
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- response = await call
- return call
-
- class ResponseInterceptor(aio.UnaryUnaryClientInterceptor):
- """Return a raw response"""
- response = messages_pb2.SimpleResponse()
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- return ResponseInterceptor.response
-
- interceptor, interceptor_response = Interceptor(), ResponseInterceptor()
-
- async with aio.insecure_channel(
- self._server_target,
- interceptors=[interceptor, interceptor_response]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
-
- # Check that the response returned is the one returned by the
- # interceptor
- self.assertEqual(id(response), id(ResponseInterceptor.response))
-
- # Check all of the UnaryUnaryCallResponse attributes
- self.assertTrue(call.done())
- self.assertFalse(call.cancel())
- self.assertFalse(call.cancelled())
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
- self.assertEqual(await call.debug_error_string(), None)
-
-
-class TestInterceptedUnaryUnaryCall(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_call_ok(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
-
- self.assertTrue(call.done())
- self.assertFalse(call.cancelled())
- self.assertEqual(type(response), messages_pb2.SimpleResponse)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
-
- async def test_call_ok_awaited(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- await call
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
-
- self.assertTrue(call.done())
- self.assertFalse(call.cancelled())
- self.assertEqual(type(response), messages_pb2.SimpleResponse)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
-
- async def test_call_rpc_error(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCallWithSleep',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- call = multicallable(
- messages_pb2.SimpleRequest(),
- timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertTrue(call.done())
- self.assertFalse(call.cancelled())
- self.assertEqual(await call.code(),
- grpc.StatusCode.DEADLINE_EXCEEDED)
- self.assertEqual(await call.details(), 'Deadline Exceeded')
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
-
- async def test_call_rpc_error_awaited(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- await call
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCallWithSleep',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- call = multicallable(
- messages_pb2.SimpleRequest(),
- timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertTrue(call.done())
- self.assertFalse(call.cancelled())
- self.assertEqual(await call.code(),
- grpc.StatusCode.DEADLINE_EXCEEDED)
- self.assertEqual(await call.details(), 'Deadline Exceeded')
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
-
- async def test_cancel_before_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- interceptor_reached.set()
- await wait_for_ever
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.details(),
- _LOCAL_CANCEL_DETAILS_EXPECTATION)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
-
- async def test_cancel_after_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- await call
- interceptor_reached.set()
- await wait_for_ever
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.details(),
- _LOCAL_CANCEL_DETAILS_EXPECTATION)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
-
- async def test_cancel_inside_interceptor_after_rpc_awaiting(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- call.cancel()
- await call
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.details(),
- _LOCAL_CANCEL_DETAILS_EXPECTATION)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
-
- async def test_cancel_inside_interceptor_after_rpc_not_awaiting(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- call.cancel()
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.details(),
- _LOCAL_CANCEL_DETAILS_EXPECTATION)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(
- await call.trailing_metadata(), aio.Metadata(),
- "When the raw response is None, empty metadata is returned")
-
- async def test_initial_metadata_modification(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- new_metadata = aio.Metadata(*client_call_details.metadata,
- *_INITIAL_METADATA_TO_INJECT)
- new_details = aio.ClientCallDetails(
- method=client_call_details.method,
- timeout=client_call_details.timeout,
- metadata=new_metadata,
- credentials=client_call_details.credentials,
- wait_for_ready=client_call_details.wait_for_ready,
- )
- return await continuation(new_details, request)
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.UnaryCall(messages_pb2.SimpleRequest())
-
- # Expected to see the echoed initial metadata
- self.assertTrue(
- _common.seen_metadatum(
- expected_key=_INITIAL_METADATA_KEY,
- expected_value=_INITIAL_METADATA_TO_INJECT[
- _INITIAL_METADATA_KEY],
- actual=await call.initial_metadata(),
- ))
- # Expected to see the echoed trailing metadata
- self.assertTrue(
- _common.seen_metadatum(
- expected_key=_TRAILING_METADATA_KEY,
- expected_value=_INITIAL_METADATA_TO_INJECT[
- _TRAILING_METADATA_KEY],
- actual=await call.trailing_metadata(),
- ))
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_add_done_callback_before_finishes(self):
- called = asyncio.Event()
- interceptor_can_continue = asyncio.Event()
-
- def callback(call):
- called.set()
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
-
- await interceptor_can_continue.wait()
- call = await continuation(client_call_details, request)
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- call.add_done_callback(callback)
- interceptor_can_continue.set()
- await call
-
- try:
- await asyncio.wait_for(
- called.wait(),
- timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED)
- except:
- self.fail("Callback was not called")
-
- async def test_add_done_callback_after_finishes(self):
- called = asyncio.Event()
-
- def callback(call):
- called.set()
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
-
- call = await continuation(client_call_details, request)
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- await call
-
- call.add_done_callback(callback)
-
- try:
- await asyncio.wait_for(
- called.wait(),
- timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED)
- except:
- self.fail("Callback was not called")
-
- async def test_add_done_callback_after_finishes_before_await(self):
- called = asyncio.Event()
-
- def callback(call):
- called.set()
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
-
- call = await continuation(client_call_details, request)
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- call.add_done_callback(callback)
-
- await call
-
- try:
- await asyncio.wait_for(
- called.wait(),
- timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED)
- except:
- self.fail("Callback was not called")
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2019 The gRPC Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import asyncio
+import logging
+import unittest
+
+import grpc
+
+from grpc.experimental import aio
+from tests_aio.unit._test_server import start_test_server, _INITIAL_METADATA_KEY, _TRAILING_METADATA_KEY
+from tests_aio.unit import _constants
+from tests_aio.unit import _common
+from tests_aio.unit._test_base import AioTestBase
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+
+_LOCAL_CANCEL_DETAILS_EXPECTATION = 'Locally cancelled by application!'
+_INITIAL_METADATA_TO_INJECT = aio.Metadata(
+ (_INITIAL_METADATA_KEY, 'extra info'),
+ (_TRAILING_METADATA_KEY, b'\x13\x37'),
+)
+_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED = 1.0
+
+
+class TestUnaryUnaryClientInterceptor(AioTestBase):
+
+ async def setUp(self):
+ self._server_target, self._server = await start_test_server()
+
+ async def tearDown(self):
+ await self._server.stop(None)
+
+ def test_invalid_interceptor(self):
+
+ class InvalidInterceptor:
+ """Just an invalid Interceptor"""
+
+ with self.assertRaises(ValueError):
+ aio.insecure_channel("", interceptors=[InvalidInterceptor()])
+
+ async def test_executed_right_order(self):
+
+ interceptors_executed = []
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+ """Interceptor used for testing if the interceptor is being called"""
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ interceptors_executed.append(self)
+ call = await continuation(client_call_details, request)
+ return call
+
+ interceptors = [Interceptor() for i in range(2)]
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=interceptors) as channel:
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+ response = await call
+
+ # Check that all interceptors were executed, and were executed
+ # in the right order.
+ self.assertSequenceEqual(interceptors_executed, interceptors)
+
+ self.assertIsInstance(response, messages_pb2.SimpleResponse)
+
+ @unittest.expectedFailure
+ # TODO(https://github.com/grpc/grpc/issues/20144) Once metadata support is
+ # implemented on the client side, this test must be implemented.
+ def test_modify_metadata(self):
+ raise NotImplementedError()
+
+ @unittest.expectedFailure
+ # TODO(https://github.com/grpc/grpc/issues/20532) Once credentials support is
+ # implemented on the client side, this test must be implemented.
+ def test_modify_credentials(self):
+ raise NotImplementedError()
+
+ async def test_status_code_Ok(self):
+
+ class StatusCodeOkInterceptor(aio.UnaryUnaryClientInterceptor):
+ """Interceptor used for observing status code Ok returned by the RPC"""
+
+ def __init__(self):
+ self.status_code_Ok_observed = False
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ code = await call.code()
+ if code == grpc.StatusCode.OK:
+ self.status_code_Ok_observed = True
+
+ return call
+
+ interceptor = StatusCodeOkInterceptor()
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[interceptor]) as channel:
+
+ # When no error occurs, StatusCode.OK must be observed
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+
+ await multicallable(messages_pb2.SimpleRequest())
+
+ self.assertTrue(interceptor.status_code_Ok_observed)
+
+ async def test_add_timeout(self):
+
+ class TimeoutInterceptor(aio.UnaryUnaryClientInterceptor):
+ """Interceptor used for adding a timeout to the RPC"""
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ new_client_call_details = aio.ClientCallDetails(
+ method=client_call_details.method,
+ timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2,
+ metadata=client_call_details.metadata,
+ credentials=client_call_details.credentials,
+ wait_for_ready=client_call_details.wait_for_ready)
+ return await continuation(new_client_call_details, request)
+
+ interceptor = TimeoutInterceptor()
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[interceptor]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCallWithSleep',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+
+ call = multicallable(messages_pb2.SimpleRequest())
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ self.assertEqual(exception_context.exception.code(),
+ grpc.StatusCode.DEADLINE_EXCEEDED)
+
+ self.assertTrue(call.done())
+ self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, await
+ call.code())
+
+ async def test_retry(self):
+
+ class RetryInterceptor(aio.UnaryUnaryClientInterceptor):
+ """Simulates a Retry Interceptor which ends up by making
+ two RPC calls."""
+
+ def __init__(self):
+ self.calls = []
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+
+ new_client_call_details = aio.ClientCallDetails(
+ method=client_call_details.method,
+ timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2,
+ metadata=client_call_details.metadata,
+ credentials=client_call_details.credentials,
+ wait_for_ready=client_call_details.wait_for_ready)
+
+ try:
+ call = await continuation(new_client_call_details, request)
+ await call
+ except grpc.RpcError:
+ pass
+
+ self.calls.append(call)
+
+ new_client_call_details = aio.ClientCallDetails(
+ method=client_call_details.method,
+ timeout=None,
+ metadata=client_call_details.metadata,
+ credentials=client_call_details.credentials,
+ wait_for_ready=client_call_details.wait_for_ready)
+
+ call = await continuation(new_client_call_details, request)
+ self.calls.append(call)
+ return call
+
+ interceptor = RetryInterceptor()
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[interceptor]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCallWithSleep',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+
+ call = multicallable(messages_pb2.SimpleRequest())
+
+ await call
+
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ # Check that two calls were made: the first finishing with a
+ # deadline exceeded and the second finishing OK.
+ self.assertEqual(len(interceptor.calls), 2)
+ self.assertEqual(await interceptor.calls[0].code(),
+ grpc.StatusCode.DEADLINE_EXCEEDED)
+ self.assertEqual(await interceptor.calls[1].code(),
+ grpc.StatusCode.OK)
+
+ async def test_rpcresponse(self):
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+ """Raw responses are seen as reegular calls"""
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ response = await call
+ return call
+
+ class ResponseInterceptor(aio.UnaryUnaryClientInterceptor):
+ """Return a raw response"""
+ response = messages_pb2.SimpleResponse()
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ return ResponseInterceptor.response
+
+ interceptor, interceptor_response = Interceptor(), ResponseInterceptor()
+
+ async with aio.insecure_channel(
+ self._server_target,
+ interceptors=[interceptor, interceptor_response]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+
+ call = multicallable(messages_pb2.SimpleRequest())
+ response = await call
+
+ # Check that the response returned is the one returned by the
+ # interceptor
+ self.assertEqual(id(response), id(ResponseInterceptor.response))
+
+ # Check all of the UnaryUnaryCallResponse attributes
+ self.assertTrue(call.done())
+ self.assertFalse(call.cancel())
+ self.assertFalse(call.cancelled())
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertEqual(await call.details(), '')
+ self.assertEqual(await call.initial_metadata(), None)
+ self.assertEqual(await call.trailing_metadata(), None)
+ self.assertEqual(await call.debug_error_string(), None)
+
+
+class TestInterceptedUnaryUnaryCall(AioTestBase):
+
+ async def setUp(self):
+ self._server_target, self._server = await start_test_server()
+
+ async def tearDown(self):
+ await self._server.stop(None)
+
+ async def test_call_ok(self):
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ return call
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+ response = await call
+
+ self.assertTrue(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertEqual(type(response), messages_pb2.SimpleResponse)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertEqual(await call.details(), '')
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+
+ async def test_call_ok_awaited(self):
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ await call
+ return call
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+ response = await call
+
+ self.assertTrue(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertEqual(type(response), messages_pb2.SimpleResponse)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ self.assertEqual(await call.details(), '')
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+
+ async def test_call_rpc_error(self):
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ return call
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCallWithSleep',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+
+ call = multicallable(
+ messages_pb2.SimpleRequest(),
+ timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ self.assertTrue(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertEqual(await call.code(),
+ grpc.StatusCode.DEADLINE_EXCEEDED)
+ self.assertEqual(await call.details(), 'Deadline Exceeded')
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+
+ async def test_call_rpc_error_awaited(self):
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ await call
+ return call
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCallWithSleep',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+
+ call = multicallable(
+ messages_pb2.SimpleRequest(),
+ timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ self.assertTrue(call.done())
+ self.assertFalse(call.cancelled())
+ self.assertEqual(await call.code(),
+ grpc.StatusCode.DEADLINE_EXCEEDED)
+ self.assertEqual(await call.details(), 'Deadline Exceeded')
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(await call.trailing_metadata(), aio.Metadata())
+
+ async def test_cancel_before_rpc(self):
+
+ interceptor_reached = asyncio.Event()
+ wait_for_ever = self.loop.create_future()
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ interceptor_reached.set()
+ await wait_for_ever
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+
+ self.assertFalse(call.cancelled())
+ self.assertFalse(call.done())
+
+ await interceptor_reached.wait()
+ self.assertTrue(call.cancel())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ self.assertEqual(await call.details(),
+ _LOCAL_CANCEL_DETAILS_EXPECTATION)
+ self.assertEqual(await call.initial_metadata(), None)
+ self.assertEqual(await call.trailing_metadata(), None)
+
+ async def test_cancel_after_rpc(self):
+
+ interceptor_reached = asyncio.Event()
+ wait_for_ever = self.loop.create_future()
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ await call
+ interceptor_reached.set()
+ await wait_for_ever
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+
+ self.assertFalse(call.cancelled())
+ self.assertFalse(call.done())
+
+ await interceptor_reached.wait()
+ self.assertTrue(call.cancel())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ self.assertEqual(await call.details(),
+ _LOCAL_CANCEL_DETAILS_EXPECTATION)
+ self.assertEqual(await call.initial_metadata(), None)
+ self.assertEqual(await call.trailing_metadata(), None)
+
+ async def test_cancel_inside_interceptor_after_rpc_awaiting(self):
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ call.cancel()
+ await call
+ return call
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ self.assertEqual(await call.details(),
+ _LOCAL_CANCEL_DETAILS_EXPECTATION)
+ self.assertEqual(await call.initial_metadata(), None)
+ self.assertEqual(await call.trailing_metadata(), None)
+
+ async def test_cancel_inside_interceptor_after_rpc_not_awaiting(self):
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ call = await continuation(client_call_details, request)
+ call.cancel()
+ return call
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+
+ with self.assertRaises(asyncio.CancelledError):
+ await call
+
+ self.assertTrue(call.cancelled())
+ self.assertTrue(call.done())
+ self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
+ self.assertEqual(await call.details(),
+ _LOCAL_CANCEL_DETAILS_EXPECTATION)
+ self.assertEqual(await call.initial_metadata(), aio.Metadata())
+ self.assertEqual(
+ await call.trailing_metadata(), aio.Metadata(),
+ "When the raw response is None, empty metadata is returned")
+
+ async def test_initial_metadata_modification(self):
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+ new_metadata = aio.Metadata(*client_call_details.metadata,
+ *_INITIAL_METADATA_TO_INJECT)
+ new_details = aio.ClientCallDetails(
+ method=client_call_details.method,
+ timeout=client_call_details.timeout,
+ metadata=new_metadata,
+ credentials=client_call_details.credentials,
+ wait_for_ready=client_call_details.wait_for_ready,
+ )
+ return await continuation(new_details, request)
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ call = stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ # Expected to see the echoed initial metadata
+ self.assertTrue(
+ _common.seen_metadatum(
+ expected_key=_INITIAL_METADATA_KEY,
+ expected_value=_INITIAL_METADATA_TO_INJECT[
+ _INITIAL_METADATA_KEY],
+ actual=await call.initial_metadata(),
+ ))
+ # Expected to see the echoed trailing metadata
+ self.assertTrue(
+ _common.seen_metadatum(
+ expected_key=_TRAILING_METADATA_KEY,
+ expected_value=_INITIAL_METADATA_TO_INJECT[
+ _TRAILING_METADATA_KEY],
+ actual=await call.trailing_metadata(),
+ ))
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_add_done_callback_before_finishes(self):
+ called = asyncio.Event()
+ interceptor_can_continue = asyncio.Event()
+
+ def callback(call):
+ called.set()
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+
+ await interceptor_can_continue.wait()
+ call = await continuation(client_call_details, request)
+ return call
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+ call.add_done_callback(callback)
+ interceptor_can_continue.set()
+ await call
+
+ try:
+ await asyncio.wait_for(
+ called.wait(),
+ timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED)
+ except:
+ self.fail("Callback was not called")
+
+ async def test_add_done_callback_after_finishes(self):
+ called = asyncio.Event()
+
+ def callback(call):
+ called.set()
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+
+ call = await continuation(client_call_details, request)
+ return call
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+
+ await call
+
+ call.add_done_callback(callback)
+
+ try:
+ await asyncio.wait_for(
+ called.wait(),
+ timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED)
+ except:
+ self.fail("Callback was not called")
+
+ async def test_add_done_callback_after_finishes_before_await(self):
+ called = asyncio.Event()
+
+ def callback(call):
+ called.set()
+
+ class Interceptor(aio.UnaryUnaryClientInterceptor):
+
+ async def intercept_unary_unary(self, continuation,
+ client_call_details, request):
+
+ call = await continuation(client_call_details, request)
+ return call
+
+ async with aio.insecure_channel(self._server_target,
+ interceptors=[Interceptor()
+ ]) as channel:
+
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+
+ call.add_done_callback(callback)
+
+ await call
+
+ try:
+ await asyncio.wait_for(
+ called.wait(),
+ timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED)
+ except:
+ self.fail("Callback was not called")
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/close_channel_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/close_channel_test.py
index 6163fefa49..20543e95bf 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/close_channel_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/close_channel_test.py
@@ -1,138 +1,138 @@
-# Copyright 2020 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior of closing a grpc.aio.Channel."""
-
-import asyncio
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-from grpc.aio import _base_call
-
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_UNARY_CALL_METHOD_WITH_SLEEP = '/grpc.testing.TestService/UnaryCallWithSleep'
-_LONG_TIMEOUT_THAT_SHOULD_NOT_EXPIRE = 60
-
-
-class TestCloseChannel(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_graceful_close(self):
- channel = aio.insecure_channel(self._server_target)
- UnaryCallWithSleep = channel.unary_unary(
- _UNARY_CALL_METHOD_WITH_SLEEP,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString,
- )
-
- call = UnaryCallWithSleep(messages_pb2.SimpleRequest())
-
- await channel.close(grace=_LONG_TIMEOUT_THAT_SHOULD_NOT_EXPIRE)
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_none_graceful_close(self):
- channel = aio.insecure_channel(self._server_target)
- UnaryCallWithSleep = channel.unary_unary(
- _UNARY_CALL_METHOD_WITH_SLEEP,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString,
- )
-
- call = UnaryCallWithSleep(messages_pb2.SimpleRequest())
-
- await channel.close(None)
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_close_unary_unary(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- calls = [stub.UnaryCall(messages_pb2.SimpleRequest()) for _ in range(2)]
-
- await channel.close()
-
- for call in calls:
- self.assertTrue(call.cancelled())
-
- async def test_close_unary_stream(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- request = messages_pb2.StreamingOutputCallRequest()
- calls = [stub.StreamingOutputCall(request) for _ in range(2)]
-
- await channel.close()
-
- for call in calls:
- self.assertTrue(call.cancelled())
-
- async def test_close_stream_unary(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- calls = [stub.StreamingInputCall() for _ in range(2)]
-
- await channel.close()
-
- for call in calls:
- self.assertTrue(call.cancelled())
-
- async def test_close_stream_stream(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- calls = [stub.FullDuplexCall() for _ in range(2)]
-
- await channel.close()
-
- for call in calls:
- self.assertTrue(call.cancelled())
-
- async def test_close_async_context(self):
- async with aio.insecure_channel(self._server_target) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
- calls = [
- stub.UnaryCall(messages_pb2.SimpleRequest()) for _ in range(2)
- ]
-
- for call in calls:
- self.assertTrue(call.cancelled())
-
- async def test_channel_isolation(self):
- async with aio.insecure_channel(self._server_target) as channel1:
- async with aio.insecure_channel(self._server_target) as channel2:
- stub1 = test_pb2_grpc.TestServiceStub(channel1)
- stub2 = test_pb2_grpc.TestServiceStub(channel2)
-
- call1 = stub1.UnaryCall(messages_pb2.SimpleRequest())
- call2 = stub2.UnaryCall(messages_pb2.SimpleRequest())
-
- self.assertFalse(call1.cancelled())
- self.assertTrue(call2.cancelled())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests behavior of closing a grpc.aio.Channel."""
+
+import asyncio
+import logging
+import unittest
+
+import grpc
+from grpc.experimental import aio
+from grpc.aio import _base_call
+
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._test_server import start_test_server
+
+_UNARY_CALL_METHOD_WITH_SLEEP = '/grpc.testing.TestService/UnaryCallWithSleep'
+_LONG_TIMEOUT_THAT_SHOULD_NOT_EXPIRE = 60
+
+
+class TestCloseChannel(AioTestBase):
+
+ async def setUp(self):
+ self._server_target, self._server = await start_test_server()
+
+ async def tearDown(self):
+ await self._server.stop(None)
+
+ async def test_graceful_close(self):
+ channel = aio.insecure_channel(self._server_target)
+ UnaryCallWithSleep = channel.unary_unary(
+ _UNARY_CALL_METHOD_WITH_SLEEP,
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString,
+ )
+
+ call = UnaryCallWithSleep(messages_pb2.SimpleRequest())
+
+ await channel.close(grace=_LONG_TIMEOUT_THAT_SHOULD_NOT_EXPIRE)
+
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_none_graceful_close(self):
+ channel = aio.insecure_channel(self._server_target)
+ UnaryCallWithSleep = channel.unary_unary(
+ _UNARY_CALL_METHOD_WITH_SLEEP,
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString,
+ )
+
+ call = UnaryCallWithSleep(messages_pb2.SimpleRequest())
+
+ await channel.close(None)
+
+ self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
+
+ async def test_close_unary_unary(self):
+ channel = aio.insecure_channel(self._server_target)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ calls = [stub.UnaryCall(messages_pb2.SimpleRequest()) for _ in range(2)]
+
+ await channel.close()
+
+ for call in calls:
+ self.assertTrue(call.cancelled())
+
+ async def test_close_unary_stream(self):
+ channel = aio.insecure_channel(self._server_target)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ request = messages_pb2.StreamingOutputCallRequest()
+ calls = [stub.StreamingOutputCall(request) for _ in range(2)]
+
+ await channel.close()
+
+ for call in calls:
+ self.assertTrue(call.cancelled())
+
+ async def test_close_stream_unary(self):
+ channel = aio.insecure_channel(self._server_target)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ calls = [stub.StreamingInputCall() for _ in range(2)]
+
+ await channel.close()
+
+ for call in calls:
+ self.assertTrue(call.cancelled())
+
+ async def test_close_stream_stream(self):
+ channel = aio.insecure_channel(self._server_target)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ calls = [stub.FullDuplexCall() for _ in range(2)]
+
+ await channel.close()
+
+ for call in calls:
+ self.assertTrue(call.cancelled())
+
+ async def test_close_async_context(self):
+ async with aio.insecure_channel(self._server_target) as channel:
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ calls = [
+ stub.UnaryCall(messages_pb2.SimpleRequest()) for _ in range(2)
+ ]
+
+ for call in calls:
+ self.assertTrue(call.cancelled())
+
+ async def test_channel_isolation(self):
+ async with aio.insecure_channel(self._server_target) as channel1:
+ async with aio.insecure_channel(self._server_target) as channel2:
+ stub1 = test_pb2_grpc.TestServiceStub(channel1)
+ stub2 = test_pb2_grpc.TestServiceStub(channel2)
+
+ call1 = stub1.UnaryCall(messages_pb2.SimpleRequest())
+ call2 = stub2.UnaryCall(messages_pb2.SimpleRequest())
+
+ self.assertFalse(call1.cancelled())
+ self.assertTrue(call2.cancelled())
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compatibility_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compatibility_test.py
index ae404c5a64..0bb3a3acc8 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compatibility_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compatibility_test.py
@@ -1,380 +1,380 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Testing the compatibility between AsyncIO stack and the old stack."""
-
-import asyncio
-import logging
-import os
-import random
-import threading
-import unittest
-from concurrent.futures import ThreadPoolExecutor
-from typing import Callable, Iterable, Sequence, Tuple
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-from tests.unit.framework.common import test_constants
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import TestServiceServicer, start_test_server
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-_REQUEST = b'\x03\x07'
-_ADHOC_METHOD = '/test/AdHoc'
-
-
-def _unique_options() -> Sequence[Tuple[str, float]]:
- return (('iv', random.random()),)
-
-
-class _AdhocGenericHandler(grpc.GenericRpcHandler):
- _handler: grpc.RpcMethodHandler
-
- def __init__(self):
- self._handler = None
-
- def set_adhoc_handler(self, handler: grpc.RpcMethodHandler):
- self._handler = handler
-
- def service(self, handler_call_details):
- if handler_call_details.method == _ADHOC_METHOD:
- return self._handler
- else:
- return None
-
-
-@unittest.skipIf(
- os.environ.get('GRPC_ASYNCIO_ENGINE', '').lower() == 'custom_io_manager',
- 'Compatible mode needs POLLER completion queue.')
-class TestCompatibility(AioTestBase):
-
- async def setUp(self):
- self._async_server = aio.server(
- options=(('grpc.so_reuseport', 0),),
- migration_thread_pool=ThreadPoolExecutor())
-
- test_pb2_grpc.add_TestServiceServicer_to_server(TestServiceServicer(),
- self._async_server)
- self._adhoc_handlers = _AdhocGenericHandler()
- self._async_server.add_generic_rpc_handlers((self._adhoc_handlers,))
-
- port = self._async_server.add_insecure_port('[::]:0')
- address = 'localhost:%d' % port
- await self._async_server.start()
-
- # Create async stub
- self._async_channel = aio.insecure_channel(address,
- options=_unique_options())
- self._async_stub = test_pb2_grpc.TestServiceStub(self._async_channel)
-
- # Create sync stub
- self._sync_channel = grpc.insecure_channel(address,
- options=_unique_options())
- self._sync_stub = test_pb2_grpc.TestServiceStub(self._sync_channel)
-
- async def tearDown(self):
- self._sync_channel.close()
- await self._async_channel.close()
- await self._async_server.stop(None)
-
- async def _run_in_another_thread(self, func: Callable[[], None]):
- work_done = asyncio.Event(loop=self.loop)
-
- def thread_work():
- func()
- self.loop.call_soon_threadsafe(work_done.set)
-
- thread = threading.Thread(target=thread_work, daemon=True)
- thread.start()
- await work_done.wait()
- thread.join()
-
- async def test_unary_unary(self):
- # Calling async API in this thread
- await self._async_stub.UnaryCall(messages_pb2.SimpleRequest(),
- timeout=test_constants.LONG_TIMEOUT)
-
- # Calling sync API in a different thread
- def sync_work() -> None:
- response, call = self._sync_stub.UnaryCall.with_call(
- messages_pb2.SimpleRequest(),
- timeout=test_constants.LONG_TIMEOUT)
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
- self.assertEqual(grpc.StatusCode.OK, call.code())
-
- await self._run_in_another_thread(sync_work)
-
- async def test_unary_stream(self):
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- # Calling async API in this thread
- call = self._async_stub.StreamingOutputCall(request)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.read()
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- # Calling sync API in a different thread
- def sync_work() -> None:
- response_iterator = self._sync_stub.StreamingOutputCall(request)
- for response in response_iterator:
- assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
- self.assertEqual(grpc.StatusCode.OK, response_iterator.code())
-
- await self._run_in_another_thread(sync_work)
-
- async def test_stream_unary(self):
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- # Calling async API in this thread
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- response = await self._async_stub.StreamingInputCall(gen())
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- # Calling sync API in a different thread
- def sync_work() -> None:
- response = self._sync_stub.StreamingInputCall(
- iter([request] * _NUM_STREAM_RESPONSES))
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- await self._run_in_another_thread(sync_work)
-
- async def test_stream_stream(self):
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- # Calling async API in this thread
- call = self._async_stub.FullDuplexCall()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
-
- await call.done_writing()
- assert await call.code() == grpc.StatusCode.OK
-
- # Calling sync API in a different thread
- def sync_work() -> None:
- response_iterator = self._sync_stub.FullDuplexCall(iter([request]))
- for response in response_iterator:
- assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
- self.assertEqual(grpc.StatusCode.OK, response_iterator.code())
-
- await self._run_in_another_thread(sync_work)
-
- async def test_server(self):
-
- class GenericHandlers(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- return grpc.unary_unary_rpc_method_handler(lambda x, _: x)
-
- # It's fine to instantiate a server object in the event loop thread.
- # The server will spawn its own serving thread.
- server = grpc.server(ThreadPoolExecutor(),
- handlers=(GenericHandlers(),))
- port = server.add_insecure_port('localhost:0')
- server.start()
-
- def sync_work() -> None:
- for _ in range(100):
- with grpc.insecure_channel('localhost:%d' % port) as channel:
- response = channel.unary_unary('/test/test')(b'\x07\x08')
- self.assertEqual(response, b'\x07\x08')
-
- await self._run_in_another_thread(sync_work)
-
- async def test_many_loop(self):
- address, server = await start_test_server()
-
- # Run another loop in another thread
- def sync_work():
-
- async def async_work():
- # Create async stub
- async_channel = aio.insecure_channel(address,
- options=_unique_options())
- async_stub = test_pb2_grpc.TestServiceStub(async_channel)
-
- call = async_stub.UnaryCall(messages_pb2.SimpleRequest())
- response = await call
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- loop = asyncio.new_event_loop()
- loop.run_until_complete(async_work())
-
- await self._run_in_another_thread(sync_work)
- await server.stop(None)
-
- async def test_sync_unary_unary_success(self):
-
- @grpc.unary_unary_rpc_method_handler
- def echo_unary_unary(request: bytes, unused_context):
- return request
-
- self._adhoc_handlers.set_adhoc_handler(echo_unary_unary)
- response = await self._async_channel.unary_unary(_ADHOC_METHOD)(_REQUEST
- )
- self.assertEqual(_REQUEST, response)
-
- async def test_sync_unary_unary_metadata(self):
- metadata = (('unique', 'key-42'),)
-
- @grpc.unary_unary_rpc_method_handler
- def metadata_unary_unary(request: bytes, context: grpc.ServicerContext):
- context.send_initial_metadata(metadata)
- return request
-
- self._adhoc_handlers.set_adhoc_handler(metadata_unary_unary)
- call = self._async_channel.unary_unary(_ADHOC_METHOD)(_REQUEST)
- self.assertTrue(
- _common.seen_metadata(aio.Metadata(*metadata), await
- call.initial_metadata()))
-
- async def test_sync_unary_unary_abort(self):
-
- @grpc.unary_unary_rpc_method_handler
- def abort_unary_unary(request: bytes, context: grpc.ServicerContext):
- context.abort(grpc.StatusCode.INTERNAL, 'Test')
-
- self._adhoc_handlers.set_adhoc_handler(abort_unary_unary)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._async_channel.unary_unary(_ADHOC_METHOD)(_REQUEST)
- self.assertEqual(grpc.StatusCode.INTERNAL,
- exception_context.exception.code())
-
- async def test_sync_unary_unary_set_code(self):
-
- @grpc.unary_unary_rpc_method_handler
- def set_code_unary_unary(request: bytes, context: grpc.ServicerContext):
- context.set_code(grpc.StatusCode.INTERNAL)
-
- self._adhoc_handlers.set_adhoc_handler(set_code_unary_unary)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._async_channel.unary_unary(_ADHOC_METHOD)(_REQUEST)
- self.assertEqual(grpc.StatusCode.INTERNAL,
- exception_context.exception.code())
-
- async def test_sync_unary_stream_success(self):
-
- @grpc.unary_stream_rpc_method_handler
- def echo_unary_stream(request: bytes, unused_context):
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- self._adhoc_handlers.set_adhoc_handler(echo_unary_stream)
- call = self._async_channel.unary_stream(_ADHOC_METHOD)(_REQUEST)
- async for response in call:
- self.assertEqual(_REQUEST, response)
-
- async def test_sync_unary_stream_error(self):
-
- @grpc.unary_stream_rpc_method_handler
- def error_unary_stream(request: bytes, unused_context):
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
- raise RuntimeError('Test')
-
- self._adhoc_handlers.set_adhoc_handler(error_unary_stream)
- call = self._async_channel.unary_stream(_ADHOC_METHOD)(_REQUEST)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- async for response in call:
- self.assertEqual(_REQUEST, response)
- self.assertEqual(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
- async def test_sync_stream_unary_success(self):
-
- @grpc.stream_unary_rpc_method_handler
- def echo_stream_unary(request_iterator: Iterable[bytes],
- unused_context):
- self.assertEqual(len(list(request_iterator)), _NUM_STREAM_RESPONSES)
- return _REQUEST
-
- self._adhoc_handlers.set_adhoc_handler(echo_stream_unary)
- request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
- response = await self._async_channel.stream_unary(_ADHOC_METHOD)(
- request_iterator)
- self.assertEqual(_REQUEST, response)
-
- async def test_sync_stream_unary_error(self):
-
- @grpc.stream_unary_rpc_method_handler
- def echo_stream_unary(request_iterator: Iterable[bytes],
- unused_context):
- self.assertEqual(len(list(request_iterator)), _NUM_STREAM_RESPONSES)
- raise RuntimeError('Test')
-
- self._adhoc_handlers.set_adhoc_handler(echo_stream_unary)
- request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- response = await self._async_channel.stream_unary(_ADHOC_METHOD)(
- request_iterator)
- self.assertEqual(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
- async def test_sync_stream_stream_success(self):
-
- @grpc.stream_stream_rpc_method_handler
- def echo_stream_stream(request_iterator: Iterable[bytes],
- unused_context):
- for request in request_iterator:
- yield request
-
- self._adhoc_handlers.set_adhoc_handler(echo_stream_stream)
- request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
- call = self._async_channel.stream_stream(_ADHOC_METHOD)(
- request_iterator)
- async for response in call:
- self.assertEqual(_REQUEST, response)
-
- async def test_sync_stream_stream_error(self):
-
- @grpc.stream_stream_rpc_method_handler
- def echo_stream_stream(request_iterator: Iterable[bytes],
- unused_context):
- for request in request_iterator:
- yield request
- raise RuntimeError('test')
-
- self._adhoc_handlers.set_adhoc_handler(echo_stream_stream)
- request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
- call = self._async_channel.stream_stream(_ADHOC_METHOD)(
- request_iterator)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- async for response in call:
- self.assertEqual(_REQUEST, response)
- self.assertEqual(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Testing the compatibility between AsyncIO stack and the old stack."""
+
+import asyncio
+import logging
+import os
+import random
+import threading
+import unittest
+from concurrent.futures import ThreadPoolExecutor
+from typing import Callable, Iterable, Sequence, Tuple
+
+import grpc
+from grpc.experimental import aio
+
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from tests.unit.framework.common import test_constants
+from tests_aio.unit import _common
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._test_server import TestServiceServicer, start_test_server
+
+_NUM_STREAM_RESPONSES = 5
+_REQUEST_PAYLOAD_SIZE = 7
+_RESPONSE_PAYLOAD_SIZE = 42
+_REQUEST = b'\x03\x07'
+_ADHOC_METHOD = '/test/AdHoc'
+
+
+def _unique_options() -> Sequence[Tuple[str, float]]:
+ return (('iv', random.random()),)
+
+
+class _AdhocGenericHandler(grpc.GenericRpcHandler):
+ _handler: grpc.RpcMethodHandler
+
+ def __init__(self):
+ self._handler = None
+
+ def set_adhoc_handler(self, handler: grpc.RpcMethodHandler):
+ self._handler = handler
+
+ def service(self, handler_call_details):
+ if handler_call_details.method == _ADHOC_METHOD:
+ return self._handler
+ else:
+ return None
+
+
+@unittest.skipIf(
+ os.environ.get('GRPC_ASYNCIO_ENGINE', '').lower() == 'custom_io_manager',
+ 'Compatible mode needs POLLER completion queue.')
+class TestCompatibility(AioTestBase):
+
+ async def setUp(self):
+ self._async_server = aio.server(
+ options=(('grpc.so_reuseport', 0),),
+ migration_thread_pool=ThreadPoolExecutor())
+
+ test_pb2_grpc.add_TestServiceServicer_to_server(TestServiceServicer(),
+ self._async_server)
+ self._adhoc_handlers = _AdhocGenericHandler()
+ self._async_server.add_generic_rpc_handlers((self._adhoc_handlers,))
+
+ port = self._async_server.add_insecure_port('[::]:0')
+ address = 'localhost:%d' % port
+ await self._async_server.start()
+
+ # Create async stub
+ self._async_channel = aio.insecure_channel(address,
+ options=_unique_options())
+ self._async_stub = test_pb2_grpc.TestServiceStub(self._async_channel)
+
+ # Create sync stub
+ self._sync_channel = grpc.insecure_channel(address,
+ options=_unique_options())
+ self._sync_stub = test_pb2_grpc.TestServiceStub(self._sync_channel)
+
+ async def tearDown(self):
+ self._sync_channel.close()
+ await self._async_channel.close()
+ await self._async_server.stop(None)
+
+ async def _run_in_another_thread(self, func: Callable[[], None]):
+ work_done = asyncio.Event(loop=self.loop)
+
+ def thread_work():
+ func()
+ self.loop.call_soon_threadsafe(work_done.set)
+
+ thread = threading.Thread(target=thread_work, daemon=True)
+ thread.start()
+ await work_done.wait()
+ thread.join()
+
+ async def test_unary_unary(self):
+ # Calling async API in this thread
+ await self._async_stub.UnaryCall(messages_pb2.SimpleRequest(),
+ timeout=test_constants.LONG_TIMEOUT)
+
+ # Calling sync API in a different thread
+ def sync_work() -> None:
+ response, call = self._sync_stub.UnaryCall.with_call(
+ messages_pb2.SimpleRequest(),
+ timeout=test_constants.LONG_TIMEOUT)
+ self.assertIsInstance(response, messages_pb2.SimpleResponse)
+ self.assertEqual(grpc.StatusCode.OK, call.code())
+
+ await self._run_in_another_thread(sync_work)
+
+ async def test_unary_stream(self):
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ # Calling async API in this thread
+ call = self._async_stub.StreamingOutputCall(request)
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.read()
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ # Calling sync API in a different thread
+ def sync_work() -> None:
+ response_iterator = self._sync_stub.StreamingOutputCall(request)
+ for response in response_iterator:
+ assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
+ self.assertEqual(grpc.StatusCode.OK, response_iterator.code())
+
+ await self._run_in_another_thread(sync_work)
+
+ async def test_stream_unary(self):
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ # Calling async API in this thread
+ async def gen():
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield request
+
+ response = await self._async_stub.StreamingInputCall(gen())
+ self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+
+ # Calling sync API in a different thread
+ def sync_work() -> None:
+ response = self._sync_stub.StreamingInputCall(
+ iter([request] * _NUM_STREAM_RESPONSES))
+ self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+
+ await self._run_in_another_thread(sync_work)
+
+ async def test_stream_stream(self):
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ # Calling async API in this thread
+ call = self._async_stub.FullDuplexCall()
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+ response = await call.read()
+ assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
+
+ await call.done_writing()
+ assert await call.code() == grpc.StatusCode.OK
+
+ # Calling sync API in a different thread
+ def sync_work() -> None:
+ response_iterator = self._sync_stub.FullDuplexCall(iter([request]))
+ for response in response_iterator:
+ assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
+ self.assertEqual(grpc.StatusCode.OK, response_iterator.code())
+
+ await self._run_in_another_thread(sync_work)
+
+ async def test_server(self):
+
+ class GenericHandlers(grpc.GenericRpcHandler):
+
+ def service(self, handler_call_details):
+ return grpc.unary_unary_rpc_method_handler(lambda x, _: x)
+
+ # It's fine to instantiate a server object in the event loop thread.
+ # The server will spawn its own serving thread.
+ server = grpc.server(ThreadPoolExecutor(),
+ handlers=(GenericHandlers(),))
+ port = server.add_insecure_port('localhost:0')
+ server.start()
+
+ def sync_work() -> None:
+ for _ in range(100):
+ with grpc.insecure_channel('localhost:%d' % port) as channel:
+ response = channel.unary_unary('/test/test')(b'\x07\x08')
+ self.assertEqual(response, b'\x07\x08')
+
+ await self._run_in_another_thread(sync_work)
+
+ async def test_many_loop(self):
+ address, server = await start_test_server()
+
+ # Run another loop in another thread
+ def sync_work():
+
+ async def async_work():
+ # Create async stub
+ async_channel = aio.insecure_channel(address,
+ options=_unique_options())
+ async_stub = test_pb2_grpc.TestServiceStub(async_channel)
+
+ call = async_stub.UnaryCall(messages_pb2.SimpleRequest())
+ response = await call
+ self.assertIsInstance(response, messages_pb2.SimpleResponse)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ loop = asyncio.new_event_loop()
+ loop.run_until_complete(async_work())
+
+ await self._run_in_another_thread(sync_work)
+ await server.stop(None)
+
+ async def test_sync_unary_unary_success(self):
+
+ @grpc.unary_unary_rpc_method_handler
+ def echo_unary_unary(request: bytes, unused_context):
+ return request
+
+ self._adhoc_handlers.set_adhoc_handler(echo_unary_unary)
+ response = await self._async_channel.unary_unary(_ADHOC_METHOD)(_REQUEST
+ )
+ self.assertEqual(_REQUEST, response)
+
+ async def test_sync_unary_unary_metadata(self):
+ metadata = (('unique', 'key-42'),)
+
+ @grpc.unary_unary_rpc_method_handler
+ def metadata_unary_unary(request: bytes, context: grpc.ServicerContext):
+ context.send_initial_metadata(metadata)
+ return request
+
+ self._adhoc_handlers.set_adhoc_handler(metadata_unary_unary)
+ call = self._async_channel.unary_unary(_ADHOC_METHOD)(_REQUEST)
+ self.assertTrue(
+ _common.seen_metadata(aio.Metadata(*metadata), await
+ call.initial_metadata()))
+
+ async def test_sync_unary_unary_abort(self):
+
+ @grpc.unary_unary_rpc_method_handler
+ def abort_unary_unary(request: bytes, context: grpc.ServicerContext):
+ context.abort(grpc.StatusCode.INTERNAL, 'Test')
+
+ self._adhoc_handlers.set_adhoc_handler(abort_unary_unary)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await self._async_channel.unary_unary(_ADHOC_METHOD)(_REQUEST)
+ self.assertEqual(grpc.StatusCode.INTERNAL,
+ exception_context.exception.code())
+
+ async def test_sync_unary_unary_set_code(self):
+
+ @grpc.unary_unary_rpc_method_handler
+ def set_code_unary_unary(request: bytes, context: grpc.ServicerContext):
+ context.set_code(grpc.StatusCode.INTERNAL)
+
+ self._adhoc_handlers.set_adhoc_handler(set_code_unary_unary)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await self._async_channel.unary_unary(_ADHOC_METHOD)(_REQUEST)
+ self.assertEqual(grpc.StatusCode.INTERNAL,
+ exception_context.exception.code())
+
+ async def test_sync_unary_stream_success(self):
+
+ @grpc.unary_stream_rpc_method_handler
+ def echo_unary_stream(request: bytes, unused_context):
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield request
+
+ self._adhoc_handlers.set_adhoc_handler(echo_unary_stream)
+ call = self._async_channel.unary_stream(_ADHOC_METHOD)(_REQUEST)
+ async for response in call:
+ self.assertEqual(_REQUEST, response)
+
+ async def test_sync_unary_stream_error(self):
+
+ @grpc.unary_stream_rpc_method_handler
+ def error_unary_stream(request: bytes, unused_context):
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield request
+ raise RuntimeError('Test')
+
+ self._adhoc_handlers.set_adhoc_handler(error_unary_stream)
+ call = self._async_channel.unary_stream(_ADHOC_METHOD)(_REQUEST)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ async for response in call:
+ self.assertEqual(_REQUEST, response)
+ self.assertEqual(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+
+ async def test_sync_stream_unary_success(self):
+
+ @grpc.stream_unary_rpc_method_handler
+ def echo_stream_unary(request_iterator: Iterable[bytes],
+ unused_context):
+ self.assertEqual(len(list(request_iterator)), _NUM_STREAM_RESPONSES)
+ return _REQUEST
+
+ self._adhoc_handlers.set_adhoc_handler(echo_stream_unary)
+ request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
+ response = await self._async_channel.stream_unary(_ADHOC_METHOD)(
+ request_iterator)
+ self.assertEqual(_REQUEST, response)
+
+ async def test_sync_stream_unary_error(self):
+
+ @grpc.stream_unary_rpc_method_handler
+ def echo_stream_unary(request_iterator: Iterable[bytes],
+ unused_context):
+ self.assertEqual(len(list(request_iterator)), _NUM_STREAM_RESPONSES)
+ raise RuntimeError('Test')
+
+ self._adhoc_handlers.set_adhoc_handler(echo_stream_unary)
+ request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ response = await self._async_channel.stream_unary(_ADHOC_METHOD)(
+ request_iterator)
+ self.assertEqual(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+
+ async def test_sync_stream_stream_success(self):
+
+ @grpc.stream_stream_rpc_method_handler
+ def echo_stream_stream(request_iterator: Iterable[bytes],
+ unused_context):
+ for request in request_iterator:
+ yield request
+
+ self._adhoc_handlers.set_adhoc_handler(echo_stream_stream)
+ request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
+ call = self._async_channel.stream_stream(_ADHOC_METHOD)(
+ request_iterator)
+ async for response in call:
+ self.assertEqual(_REQUEST, response)
+
+ async def test_sync_stream_stream_error(self):
+
+ @grpc.stream_stream_rpc_method_handler
+ def echo_stream_stream(request_iterator: Iterable[bytes],
+ unused_context):
+ for request in request_iterator:
+ yield request
+ raise RuntimeError('test')
+
+ self._adhoc_handlers.set_adhoc_handler(echo_stream_stream)
+ request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
+ call = self._async_channel.stream_stream(_ADHOC_METHOD)(
+ request_iterator)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ async for response in call:
+ self.assertEqual(_REQUEST, response)
+ self.assertEqual(grpc.StatusCode.UNKNOWN,
+ exception_context.exception.code())
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
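
The tests above drive a blocking grpc.server from asyncio by handing each synchronous stub call to a worker thread. A minimal standalone sketch of that pattern; the echo handler and the '/demo/Echo' method path are illustrative only:

import asyncio
from concurrent.futures import ThreadPoolExecutor

import grpc


class _EchoHandler(grpc.GenericRpcHandler):

    def service(self, handler_call_details):
        # Serve every method with a unary-unary echo of the request bytes.
        return grpc.unary_unary_rpc_method_handler(lambda request, _: request)


async def main():
    # The sync server spawns its own serving threads, so creating and starting
    # it inside the event loop thread is safe.
    server = grpc.server(ThreadPoolExecutor(), handlers=(_EchoHandler(),))
    port = server.add_insecure_port('localhost:0')
    server.start()

    def blocking_call():
        # Illustrative method path; the generic handler accepts any method.
        with grpc.insecure_channel('localhost:%d' % port) as channel:
            return channel.unary_unary('/demo/Echo')(b'\x07\x08')

    # Keep the event loop responsive by running the blocking call in a thread.
    response = await asyncio.get_running_loop().run_in_executor(
        None, blocking_call)
    assert response == b'\x07\x08'
    server.stop(None)


if __name__ == '__main__':
    asyncio.run(main())
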
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compression_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compression_test.py
index b9f2c8dd97..9d93885ea2 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compression_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compression_test.py
@@ -1,196 +1,196 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior around the compression mechanism."""
-
-import asyncio
-import logging
-import platform
-import random
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit import _common
-
-_GZIP_CHANNEL_ARGUMENT = ('grpc.default_compression_algorithm', 2)
-_GZIP_DISABLED_CHANNEL_ARGUMENT = ('grpc.compression_enabled_algorithms_bitset',
- 3)
-_DEFLATE_DISABLED_CHANNEL_ARGUMENT = (
- 'grpc.compression_enabled_algorithms_bitset', 5)
-
-_TEST_UNARY_UNARY = '/test/TestUnaryUnary'
-_TEST_SET_COMPRESSION = '/test/TestSetCompression'
-_TEST_DISABLE_COMPRESSION_UNARY = '/test/TestDisableCompressionUnary'
-_TEST_DISABLE_COMPRESSION_STREAM = '/test/TestDisableCompressionStream'
-
-_REQUEST = b'\x01' * 100
-_RESPONSE = b'\x02' * 100
-
-
-async def _test_unary_unary(unused_request, unused_context):
- return _RESPONSE
-
-
-async def _test_set_compression(unused_request_iterator, context):
- assert _REQUEST == await context.read()
- context.set_compression(grpc.Compression.Deflate)
- await context.write(_RESPONSE)
- try:
- context.set_compression(grpc.Compression.Deflate)
- except RuntimeError:
- # NOTE(lidiz) Testing if the servicer context raises exception when
- # the set_compression method is called after initial_metadata sent.
- # After the initial_metadata sent, the server-side has no control over
- # which compression algorithm it should use.
- pass
- else:
- raise ValueError(
- 'Expecting exceptions if set_compression is not effective')
-
-
-async def _test_disable_compression_unary(request, context):
- assert _REQUEST == request
- context.set_compression(grpc.Compression.Deflate)
- context.disable_next_message_compression()
- return _RESPONSE
-
-
-async def _test_disable_compression_stream(unused_request_iterator, context):
- assert _REQUEST == await context.read()
- context.set_compression(grpc.Compression.Deflate)
- await context.write(_RESPONSE)
- context.disable_next_message_compression()
- await context.write(_RESPONSE)
- await context.write(_RESPONSE)
-
-
-_ROUTING_TABLE = {
- _TEST_UNARY_UNARY:
- grpc.unary_unary_rpc_method_handler(_test_unary_unary),
- _TEST_SET_COMPRESSION:
- grpc.stream_stream_rpc_method_handler(_test_set_compression),
- _TEST_DISABLE_COMPRESSION_UNARY:
- grpc.unary_unary_rpc_method_handler(_test_disable_compression_unary),
- _TEST_DISABLE_COMPRESSION_STREAM:
- grpc.stream_stream_rpc_method_handler(_test_disable_compression_stream),
-}
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- return _ROUTING_TABLE.get(handler_call_details.method)
-
-
-async def _start_test_server(options=None):
- server = aio.server(options=options)
- port = server.add_insecure_port('[::]:0')
- server.add_generic_rpc_handlers((_GenericHandler(),))
- await server.start()
- return f'localhost:{port}', server
-
-
-class TestCompression(AioTestBase):
-
- async def setUp(self):
- server_options = (_GZIP_DISABLED_CHANNEL_ARGUMENT,)
- self._address, self._server = await _start_test_server(server_options)
- self._channel = aio.insecure_channel(self._address)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_channel_level_compression_baned_compression(self):
- # GZIP is disabled, this call should fail
- async with aio.insecure_channel(
- self._address, compression=grpc.Compression.Gzip) as channel:
- multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
- call = multicallable(_REQUEST)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
-
- async def test_channel_level_compression_allowed_compression(self):
- # Deflate is allowed, this call should succeed
- async with aio.insecure_channel(
- self._address, compression=grpc.Compression.Deflate) as channel:
- multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
- call = multicallable(_REQUEST)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_client_call_level_compression_baned_compression(self):
- multicallable = self._channel.unary_unary(_TEST_UNARY_UNARY)
-
- # GZIP is disabled, this call should fail
- call = multicallable(_REQUEST, compression=grpc.Compression.Gzip)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
-
- async def test_client_call_level_compression_allowed_compression(self):
- multicallable = self._channel.unary_unary(_TEST_UNARY_UNARY)
-
- # Deflate is allowed, this call should succeed
- call = multicallable(_REQUEST, compression=grpc.Compression.Deflate)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_server_call_level_compression(self):
- multicallable = self._channel.stream_stream(_TEST_SET_COMPRESSION)
- call = multicallable()
- await call.write(_REQUEST)
- await call.done_writing()
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_server_disable_compression_unary(self):
- multicallable = self._channel.unary_unary(
- _TEST_DISABLE_COMPRESSION_UNARY)
- call = multicallable(_REQUEST)
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_server_disable_compression_stream(self):
- multicallable = self._channel.stream_stream(
- _TEST_DISABLE_COMPRESSION_STREAM)
- call = multicallable()
- await call.write(_REQUEST)
- await call.done_writing()
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_server_default_compression_algorithm(self):
- server = aio.server(compression=grpc.Compression.Deflate)
- port = server.add_insecure_port('[::]:0')
- server.add_generic_rpc_handlers((_GenericHandler(),))
- await server.start()
-
- async with aio.insecure_channel(f'localhost:{port}') as channel:
- multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
- call = multicallable(_REQUEST)
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests behavior around the compression mechanism."""
+
+import asyncio
+import logging
+import platform
+import random
+import unittest
+
+import grpc
+from grpc.experimental import aio
+
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit import _common
+
+_GZIP_CHANNEL_ARGUMENT = ('grpc.default_compression_algorithm', 2)
+_GZIP_DISABLED_CHANNEL_ARGUMENT = ('grpc.compression_enabled_algorithms_bitset',
+ 3)
+_DEFLATE_DISABLED_CHANNEL_ARGUMENT = (
+ 'grpc.compression_enabled_algorithms_bitset', 5)
+
+_TEST_UNARY_UNARY = '/test/TestUnaryUnary'
+_TEST_SET_COMPRESSION = '/test/TestSetCompression'
+_TEST_DISABLE_COMPRESSION_UNARY = '/test/TestDisableCompressionUnary'
+_TEST_DISABLE_COMPRESSION_STREAM = '/test/TestDisableCompressionStream'
+
+_REQUEST = b'\x01' * 100
+_RESPONSE = b'\x02' * 100
+
+
+async def _test_unary_unary(unused_request, unused_context):
+ return _RESPONSE
+
+
+async def _test_set_compression(unused_request_iterator, context):
+ assert _REQUEST == await context.read()
+ context.set_compression(grpc.Compression.Deflate)
+ await context.write(_RESPONSE)
+ try:
+ context.set_compression(grpc.Compression.Deflate)
+ except RuntimeError:
+        # NOTE(lidiz) Testing that the servicer context raises an exception
+        # when set_compression is called after the initial metadata has been
+        # sent. Once the initial metadata is sent, the server side has no
+        # control over which compression algorithm to use.
+ pass
+ else:
+ raise ValueError(
+ 'Expecting exceptions if set_compression is not effective')
+
+
+async def _test_disable_compression_unary(request, context):
+ assert _REQUEST == request
+ context.set_compression(grpc.Compression.Deflate)
+ context.disable_next_message_compression()
+ return _RESPONSE
+
+
+async def _test_disable_compression_stream(unused_request_iterator, context):
+ assert _REQUEST == await context.read()
+ context.set_compression(grpc.Compression.Deflate)
+ await context.write(_RESPONSE)
+ context.disable_next_message_compression()
+ await context.write(_RESPONSE)
+ await context.write(_RESPONSE)
+
+
+_ROUTING_TABLE = {
+ _TEST_UNARY_UNARY:
+ grpc.unary_unary_rpc_method_handler(_test_unary_unary),
+ _TEST_SET_COMPRESSION:
+ grpc.stream_stream_rpc_method_handler(_test_set_compression),
+ _TEST_DISABLE_COMPRESSION_UNARY:
+ grpc.unary_unary_rpc_method_handler(_test_disable_compression_unary),
+ _TEST_DISABLE_COMPRESSION_STREAM:
+ grpc.stream_stream_rpc_method_handler(_test_disable_compression_stream),
+}
+
+
+class _GenericHandler(grpc.GenericRpcHandler):
+
+ def service(self, handler_call_details):
+ return _ROUTING_TABLE.get(handler_call_details.method)
+
+
+async def _start_test_server(options=None):
+ server = aio.server(options=options)
+ port = server.add_insecure_port('[::]:0')
+ server.add_generic_rpc_handlers((_GenericHandler(),))
+ await server.start()
+ return f'localhost:{port}', server
+
+
+class TestCompression(AioTestBase):
+
+ async def setUp(self):
+ server_options = (_GZIP_DISABLED_CHANNEL_ARGUMENT,)
+ self._address, self._server = await _start_test_server(server_options)
+ self._channel = aio.insecure_channel(self._address)
+
+ async def tearDown(self):
+ await self._channel.close()
+ await self._server.stop(None)
+
+ async def test_channel_level_compression_baned_compression(self):
+        # GZIP is disabled, so this call should fail
+ async with aio.insecure_channel(
+ self._address, compression=grpc.Compression.Gzip) as channel:
+ multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
+ call = multicallable(_REQUEST)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
+
+ async def test_channel_level_compression_allowed_compression(self):
+        # Deflate is allowed, so this call should succeed
+ async with aio.insecure_channel(
+ self._address, compression=grpc.Compression.Deflate) as channel:
+ multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
+ call = multicallable(_REQUEST)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_client_call_level_compression_baned_compression(self):
+ multicallable = self._channel.unary_unary(_TEST_UNARY_UNARY)
+
+        # GZIP is disabled, so this call should fail
+ call = multicallable(_REQUEST, compression=grpc.Compression.Gzip)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
+
+ async def test_client_call_level_compression_allowed_compression(self):
+ multicallable = self._channel.unary_unary(_TEST_UNARY_UNARY)
+
+        # Deflate is allowed, so this call should succeed
+ call = multicallable(_REQUEST, compression=grpc.Compression.Deflate)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_server_call_level_compression(self):
+ multicallable = self._channel.stream_stream(_TEST_SET_COMPRESSION)
+ call = multicallable()
+ await call.write(_REQUEST)
+ await call.done_writing()
+ self.assertEqual(_RESPONSE, await call.read())
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_server_disable_compression_unary(self):
+ multicallable = self._channel.unary_unary(
+ _TEST_DISABLE_COMPRESSION_UNARY)
+ call = multicallable(_REQUEST)
+ self.assertEqual(_RESPONSE, await call)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_server_disable_compression_stream(self):
+ multicallable = self._channel.stream_stream(
+ _TEST_DISABLE_COMPRESSION_STREAM)
+ call = multicallable()
+ await call.write(_REQUEST)
+ await call.done_writing()
+ self.assertEqual(_RESPONSE, await call.read())
+ self.assertEqual(_RESPONSE, await call.read())
+ self.assertEqual(_RESPONSE, await call.read())
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_server_default_compression_algorithm(self):
+ server = aio.server(compression=grpc.Compression.Deflate)
+ port = server.add_insecure_port('[::]:0')
+ server.add_generic_rpc_handlers((_GenericHandler(),))
+ await server.start()
+
+ async with aio.insecure_channel(f'localhost:{port}') as channel:
+ multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
+ call = multicallable(_REQUEST)
+ self.assertEqual(_RESPONSE, await call)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ await server.stop(None)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
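
The compression channel arguments above select algorithms by bit position, apparently following grpc.Compression (NoCompression=0, Deflate=1, Gzip=2): a bitset of 3 (0b011) leaves gzip disabled and 5 (0b101) disables deflate. A short sketch of passing the same option to a client channel; the target, method, and request below are caller-supplied placeholders:

import grpc
from grpc.experimental import aio

# Same channel argument as _GZIP_DISABLED_CHANNEL_ARGUMENT above (bit layout
# inferred from the constants in the test, not from documented API).
_GZIP_DISABLED = ('grpc.compression_enabled_algorithms_bitset', 0b011)


async def call_with_deflate(target: str, method: str, request: bytes) -> bytes:
    # Channel-level default compression; individual calls may still override
    # it via the `compression=` argument of the multicallable.
    async with aio.insecure_channel(
            target,
            options=(_GZIP_DISABLED,),
            compression=grpc.Compression.Deflate) as channel:
        return await channel.unary_unary(method)(request)
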
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/connectivity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/connectivity_test.py
index bb577ec588..7f98329070 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/connectivity_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/connectivity_test.py
@@ -1,112 +1,112 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior of the connectivity state."""
-
-import asyncio
-import logging
-import threading
-import time
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests.unit.framework.common import test_constants
-from tests_aio.unit import _common
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-
-class TestConnectivityState(AioTestBase):
-
- async def setUp(self):
- self._server_address, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_unavailable_backend(self):
- async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
- self.assertEqual(grpc.ChannelConnectivity.IDLE,
- channel.get_state(False))
- self.assertEqual(grpc.ChannelConnectivity.IDLE,
- channel.get_state(True))
-
- # Should not time out
- await asyncio.wait_for(
- _common.block_until_certain_state(
- channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE),
- test_constants.SHORT_TIMEOUT)
-
- async def test_normal_backend(self):
- async with aio.insecure_channel(self._server_address) as channel:
- current_state = channel.get_state(True)
- self.assertEqual(grpc.ChannelConnectivity.IDLE, current_state)
-
- # Should not time out
- await asyncio.wait_for(
- _common.block_until_certain_state(
- channel, grpc.ChannelConnectivity.READY),
- test_constants.SHORT_TIMEOUT)
-
- async def test_timeout(self):
- async with aio.insecure_channel(self._server_address) as channel:
- self.assertEqual(grpc.ChannelConnectivity.IDLE,
- channel.get_state(False))
-
- # If timed out, the function should return None.
- with self.assertRaises(asyncio.TimeoutError):
- await asyncio.wait_for(
- _common.block_until_certain_state(
- channel, grpc.ChannelConnectivity.READY),
- test_constants.SHORT_TIMEOUT)
-
- async def test_shutdown(self):
- channel = aio.insecure_channel(self._server_address)
-
- self.assertEqual(grpc.ChannelConnectivity.IDLE,
- channel.get_state(False))
-
- # Waiting for changes in a separate coroutine
- wait_started = asyncio.Event()
-
- async def a_pending_wait():
- wait_started.set()
- await channel.wait_for_state_change(grpc.ChannelConnectivity.IDLE)
-
- pending_task = self.loop.create_task(a_pending_wait())
- await wait_started.wait()
-
- await channel.close()
-
- self.assertEqual(grpc.ChannelConnectivity.SHUTDOWN,
- channel.get_state(True))
-
- self.assertEqual(grpc.ChannelConnectivity.SHUTDOWN,
- channel.get_state(False))
-
- # Make sure there isn't any exception in the task
- await pending_task
-
- # It can raise exceptions since it is an usage error, but it should not
- # segfault or abort.
- with self.assertRaises(aio.UsageError):
- await channel.wait_for_state_change(
- grpc.ChannelConnectivity.SHUTDOWN)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2019 The gRPC Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests behavior of the connectivity state."""
+
+import asyncio
+import logging
+import threading
+import time
+import unittest
+
+import grpc
+from grpc.experimental import aio
+
+from tests.unit.framework.common import test_constants
+from tests_aio.unit import _common
+from tests_aio.unit._constants import UNREACHABLE_TARGET
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._test_server import start_test_server
+
+
+class TestConnectivityState(AioTestBase):
+
+ async def setUp(self):
+ self._server_address, self._server = await start_test_server()
+
+ async def tearDown(self):
+ await self._server.stop(None)
+
+ async def test_unavailable_backend(self):
+ async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
+ self.assertEqual(grpc.ChannelConnectivity.IDLE,
+ channel.get_state(False))
+ self.assertEqual(grpc.ChannelConnectivity.IDLE,
+ channel.get_state(True))
+
+ # Should not time out
+ await asyncio.wait_for(
+ _common.block_until_certain_state(
+ channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE),
+ test_constants.SHORT_TIMEOUT)
+
+ async def test_normal_backend(self):
+ async with aio.insecure_channel(self._server_address) as channel:
+ current_state = channel.get_state(True)
+ self.assertEqual(grpc.ChannelConnectivity.IDLE, current_state)
+
+ # Should not time out
+ await asyncio.wait_for(
+ _common.block_until_certain_state(
+ channel, grpc.ChannelConnectivity.READY),
+ test_constants.SHORT_TIMEOUT)
+
+ async def test_timeout(self):
+ async with aio.insecure_channel(self._server_address) as channel:
+ self.assertEqual(grpc.ChannelConnectivity.IDLE,
+ channel.get_state(False))
+
+            # The channel never becomes READY here, so wait_for should time out.
+ with self.assertRaises(asyncio.TimeoutError):
+ await asyncio.wait_for(
+ _common.block_until_certain_state(
+ channel, grpc.ChannelConnectivity.READY),
+ test_constants.SHORT_TIMEOUT)
+
+ async def test_shutdown(self):
+ channel = aio.insecure_channel(self._server_address)
+
+ self.assertEqual(grpc.ChannelConnectivity.IDLE,
+ channel.get_state(False))
+
+ # Waiting for changes in a separate coroutine
+ wait_started = asyncio.Event()
+
+ async def a_pending_wait():
+ wait_started.set()
+ await channel.wait_for_state_change(grpc.ChannelConnectivity.IDLE)
+
+ pending_task = self.loop.create_task(a_pending_wait())
+ await wait_started.wait()
+
+ await channel.close()
+
+ self.assertEqual(grpc.ChannelConnectivity.SHUTDOWN,
+ channel.get_state(True))
+
+ self.assertEqual(grpc.ChannelConnectivity.SHUTDOWN,
+ channel.get_state(False))
+
+ # Make sure there isn't any exception in the task
+ await pending_task
+
+        # It can raise exceptions since it is a usage error, but it should not
+ # segfault or abort.
+ with self.assertRaises(aio.UsageError):
+ await channel.wait_for_state_change(
+ grpc.ChannelConnectivity.SHUTDOWN)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
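
The connectivity tests lean on a helper that blocks until the channel reaches a given state. One plausible implementation, using only the get_state/wait_for_state_change calls shown above; the actual _common.block_until_certain_state may differ in detail:

import grpc
from grpc.experimental import aio


async def block_until_state(channel: aio.Channel,
                            expected: grpc.ChannelConnectivity) -> None:
    # try_to_connect=True nudges an IDLE channel to start connecting.
    state = channel.get_state(try_to_connect=True)
    while state != expected:
        # Suspends until the state moves away from the last observed value.
        await channel.wait_for_state_change(state)
        state = channel.get_state()
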
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/context_peer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/context_peer_test.py
index e53d771a93..ea5f4621af 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/context_peer_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/context_peer_test.py
@@ -1,65 +1,65 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Testing the server context ability to access peer info."""
-
-import asyncio
-import logging
-import os
-import unittest
-from typing import Callable, Iterable, Sequence, Tuple
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-from tests.unit.framework.common import test_constants
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import TestServiceServicer, start_test_server
-
-_REQUEST = b'\x03\x07'
-_TEST_METHOD = '/test/UnaryUnary'
-
-
-class TestContextPeer(AioTestBase):
-
- async def test_peer(self):
-
- @grpc.unary_unary_rpc_method_handler
- async def check_peer_unary_unary(request: bytes,
- context: aio.ServicerContext):
- self.assertEqual(_REQUEST, request)
- # The peer address could be ipv4 or ipv6
- self.assertIn('ip', context.peer())
- return request
-
- # Creates a server
- server = aio.server()
- handlers = grpc.method_handlers_generic_handler(
- 'test', {'UnaryUnary': check_peer_unary_unary})
- server.add_generic_rpc_handlers((handlers,))
- port = server.add_insecure_port('[::]:0')
- await server.start()
-
- # Creates a channel
- async with aio.insecure_channel('localhost:%d' % port) as channel:
- response = await channel.unary_unary(_TEST_METHOD)(_REQUEST)
- self.assertEqual(_REQUEST, response)
-
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Testing the server context ability to access peer info."""
+
+import asyncio
+import logging
+import os
+import unittest
+from typing import Callable, Iterable, Sequence, Tuple
+
+import grpc
+from grpc.experimental import aio
+
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from tests.unit.framework.common import test_constants
+from tests_aio.unit import _common
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._test_server import TestServiceServicer, start_test_server
+
+_REQUEST = b'\x03\x07'
+_TEST_METHOD = '/test/UnaryUnary'
+
+
+class TestContextPeer(AioTestBase):
+
+ async def test_peer(self):
+
+ @grpc.unary_unary_rpc_method_handler
+ async def check_peer_unary_unary(request: bytes,
+ context: aio.ServicerContext):
+ self.assertEqual(_REQUEST, request)
+ # The peer address could be ipv4 or ipv6
+ self.assertIn('ip', context.peer())
+ return request
+
+ # Creates a server
+ server = aio.server()
+ handlers = grpc.method_handlers_generic_handler(
+ 'test', {'UnaryUnary': check_peer_unary_unary})
+ server.add_generic_rpc_handlers((handlers,))
+ port = server.add_insecure_port('[::]:0')
+ await server.start()
+
+ # Creates a channel
+ async with aio.insecure_channel('localhost:%d' % port) as channel:
+ response = await channel.unary_unary(_TEST_METHOD)(_REQUEST)
+ self.assertEqual(_REQUEST, response)
+
+ await server.stop(None)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
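
The peer assertion above holds because context.peer() returns a transport-prefixed address such as 'ipv4:127.0.0.1:53421' (or an 'ipv6:...' form), and both spellings contain 'ip'. A small illustrative handler that splits that string; it is not part of the test suite:

import logging

import grpc
from grpc.experimental import aio


@grpc.unary_unary_rpc_method_handler
async def log_peer_unary_unary(request: bytes,
                               context: aio.ServicerContext) -> bytes:
    # E.g. 'ipv4:127.0.0.1:53421' -> ('ipv4', '127.0.0.1:53421').
    transport, _, address = context.peer().partition(':')
    logging.info('Request from %s peer at %s', transport, address)
    return request
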
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/done_callback_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/done_callback_test.py
index f4916f52ec..481bafd567 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/done_callback_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/done_callback_test.py
@@ -1,124 +1,124 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Testing the done callbacks mechanism."""
-
-import asyncio
-import logging
-import unittest
-import time
-import gc
-
-import grpc
-from grpc.experimental import aio
-from tests_aio.unit._common import inject_callbacks
-from tests_aio.unit._test_base import AioTestBase
-from tests.unit.framework.common import test_constants
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-from tests_aio.unit._test_server import start_test_server
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-
-
-class TestDoneCallback(AioTestBase):
-
- async def setUp(self):
- address, self._server = await start_test_server()
- self._channel = aio.insecure_channel(address)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_add_after_done(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- validation = inject_callbacks(call)
- await validation
-
- async def test_unary_unary(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- validation = inject_callbacks(call)
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- await validation
-
- async def test_unary_stream(self):
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- call = self._stub.StreamingOutputCall(request)
- validation = inject_callbacks(call)
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- await validation
-
- async def test_stream_unary(self):
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- call = self._stub.StreamingInputCall(gen())
- validation = inject_callbacks(call)
-
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- await validation
-
- async def test_stream_stream(self):
- call = self._stub.FullDuplexCall()
- validation = inject_callbacks(call)
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- await call.done_writing()
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
- await validation
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Testing the done callbacks mechanism."""
+
+import asyncio
+import logging
+import unittest
+import time
+import gc
+
+import grpc
+from grpc.experimental import aio
+from tests_aio.unit._common import inject_callbacks
+from tests_aio.unit._test_base import AioTestBase
+from tests.unit.framework.common import test_constants
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from tests_aio.unit._test_server import start_test_server
+
+_NUM_STREAM_RESPONSES = 5
+_REQUEST_PAYLOAD_SIZE = 7
+_RESPONSE_PAYLOAD_SIZE = 42
+
+
+class TestDoneCallback(AioTestBase):
+
+ async def setUp(self):
+ address, self._server = await start_test_server()
+ self._channel = aio.insecure_channel(address)
+ self._stub = test_pb2_grpc.TestServiceStub(self._channel)
+
+ async def tearDown(self):
+ await self._channel.close()
+ await self._server.stop(None)
+
+ async def test_add_after_done(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ validation = inject_callbacks(call)
+ await validation
+
+ async def test_unary_unary(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+ validation = inject_callbacks(call)
+
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ await validation
+
+ async def test_unary_stream(self):
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ call = self._stub.StreamingOutputCall(request)
+ validation = inject_callbacks(call)
+
+ response_cnt = 0
+ async for response in call:
+ response_cnt += 1
+ self.assertIsInstance(response,
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ await validation
+
+ async def test_stream_unary(self):
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ async def gen():
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield request
+
+ call = self._stub.StreamingInputCall(gen())
+ validation = inject_callbacks(call)
+
+ response = await call
+ self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
+ self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ await validation
+
+ async def test_stream_stream(self):
+ call = self._stub.FullDuplexCall()
+ validation = inject_callbacks(call)
+
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+ response = await call.read()
+ self.assertIsInstance(response,
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ await call.done_writing()
+
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+ await validation
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
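
The done-callback tests register callbacks that are invoked with the finished call object, whether they are added before or after termination. A minimal client-side sketch, assuming a TestService-style stub and request like the ones used above:

import asyncio

from grpc.experimental import aio


async def unary_call_with_done_callback(stub, request):
    # `stub` and `request` stand in for the test suite's TestServiceStub and
    # SimpleRequest; any aio stub/request pair would do.
    finished = asyncio.Event()

    def _on_done(call: aio.Call) -> None:
        # Fires once the RPC reaches a terminal state; call.done() is True.
        assert call.done()
        finished.set()

    call = stub.UnaryCall(request)
    call.add_done_callback(_on_done)
    response = await call
    await finished.wait()
    return response
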
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py
index 701e6f194b..b9183a22c7 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py
@@ -14,20 +14,20 @@
import logging
import unittest
-
-class TestInit(unittest.TestCase):
- def test_grpc(self):
- import grpc # pylint: disable=wrong-import-position
- channel = grpc.aio.insecure_channel('dummy')
- self.assertIsInstance(channel, grpc.aio.Channel)
+class TestInit(unittest.TestCase):
+
+ def test_grpc(self):
+ import grpc # pylint: disable=wrong-import-position
+ channel = grpc.aio.insecure_channel('dummy')
+ self.assertIsInstance(channel, grpc.aio.Channel)
+
+ def test_grpc_dot_aio(self):
+ import grpc.aio # pylint: disable=wrong-import-position
+ channel = grpc.aio.insecure_channel('dummy')
+ self.assertIsInstance(channel, grpc.aio.Channel)
- def test_grpc_dot_aio(self):
- import grpc.aio # pylint: disable=wrong-import-position
- channel = grpc.aio.insecure_channel('dummy')
- self.assertIsInstance(channel, grpc.aio.Channel)
-
if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
+ logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/metadata_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/metadata_test.py
index e49f8ef220..2261446b3e 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/metadata_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/metadata_test.py
@@ -1,297 +1,297 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior around the metadata mechanism."""
-
-import asyncio
-import logging
-import platform
-import random
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit import _common
-
-_TEST_CLIENT_TO_SERVER = '/test/TestClientToServer'
-_TEST_SERVER_TO_CLIENT = '/test/TestServerToClient'
-_TEST_TRAILING_METADATA = '/test/TestTrailingMetadata'
-_TEST_ECHO_INITIAL_METADATA = '/test/TestEchoInitialMetadata'
-_TEST_GENERIC_HANDLER = '/test/TestGenericHandler'
-_TEST_UNARY_STREAM = '/test/TestUnaryStream'
-_TEST_STREAM_UNARY = '/test/TestStreamUnary'
-_TEST_STREAM_STREAM = '/test/TestStreamStream'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-
-_INITIAL_METADATA_FROM_CLIENT_TO_SERVER = aio.Metadata(
- ('client-to-server', 'question'),
- ('client-to-server-bin', b'\x07\x07\x07'),
-)
-_INITIAL_METADATA_FROM_SERVER_TO_CLIENT = aio.Metadata(
- ('server-to-client', 'answer'),
- ('server-to-client-bin', b'\x06\x06\x06'),
-)
-_TRAILING_METADATA = aio.Metadata(
- ('a-trailing-metadata', 'stack-trace'),
- ('a-trailing-metadata-bin', b'\x05\x05\x05'),
-)
-_INITIAL_METADATA_FOR_GENERIC_HANDLER = aio.Metadata(
- ('a-must-have-key', 'secret'),)
-
-_INVALID_METADATA_TEST_CASES = (
- (
- TypeError,
- ((42, 42),),
- ),
- (
- TypeError,
- (({}, {}),),
- ),
- (
- TypeError,
- ((None, {}),),
- ),
- (
- TypeError,
- (({}, {}),),
- ),
- (
- TypeError,
- (('normal', object()),),
- ),
-)
-
-
-class _TestGenericHandlerForMethods(grpc.GenericRpcHandler):
-
- def __init__(self):
- self._routing_table = {
- _TEST_CLIENT_TO_SERVER:
- grpc.unary_unary_rpc_method_handler(self._test_client_to_server
- ),
- _TEST_SERVER_TO_CLIENT:
- grpc.unary_unary_rpc_method_handler(self._test_server_to_client
- ),
- _TEST_TRAILING_METADATA:
- grpc.unary_unary_rpc_method_handler(self._test_trailing_metadata
- ),
- _TEST_UNARY_STREAM:
- grpc.unary_stream_rpc_method_handler(self._test_unary_stream),
- _TEST_STREAM_UNARY:
- grpc.stream_unary_rpc_method_handler(self._test_stream_unary),
- _TEST_STREAM_STREAM:
- grpc.stream_stream_rpc_method_handler(self._test_stream_stream),
- }
-
- @staticmethod
- async def _test_client_to_server(request, context):
- assert _REQUEST == request
- assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
- context.invocation_metadata())
- return _RESPONSE
-
- @staticmethod
- async def _test_server_to_client(request, context):
- assert _REQUEST == request
- await context.send_initial_metadata(
- _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
- return _RESPONSE
-
- @staticmethod
- async def _test_trailing_metadata(request, context):
- assert _REQUEST == request
- context.set_trailing_metadata(_TRAILING_METADATA)
- return _RESPONSE
-
- @staticmethod
- async def _test_unary_stream(request, context):
- assert _REQUEST == request
- assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
- context.invocation_metadata())
- await context.send_initial_metadata(
- _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
- yield _RESPONSE
- context.set_trailing_metadata(_TRAILING_METADATA)
-
- @staticmethod
- async def _test_stream_unary(request_iterator, context):
- assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
- context.invocation_metadata())
- await context.send_initial_metadata(
- _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
-
- async for request in request_iterator:
- assert _REQUEST == request
-
- context.set_trailing_metadata(_TRAILING_METADATA)
- return _RESPONSE
-
- @staticmethod
- async def _test_stream_stream(request_iterator, context):
- assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
- context.invocation_metadata())
- await context.send_initial_metadata(
- _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
-
- async for request in request_iterator:
- assert _REQUEST == request
-
- yield _RESPONSE
- context.set_trailing_metadata(_TRAILING_METADATA)
-
- def service(self, handler_call_details):
- return self._routing_table.get(handler_call_details.method)
-
-
-class _TestGenericHandlerItself(grpc.GenericRpcHandler):
-
- @staticmethod
- async def _method(request, unused_context):
- assert _REQUEST == request
- return _RESPONSE
-
- def service(self, handler_call_details):
- assert _common.seen_metadata(_INITIAL_METADATA_FOR_GENERIC_HANDLER,
- handler_call_details.invocation_metadata)
- return grpc.unary_unary_rpc_method_handler(self._method)
-
-
-async def _start_test_server():
- server = aio.server()
- port = server.add_insecure_port('[::]:0')
- server.add_generic_rpc_handlers((
- _TestGenericHandlerForMethods(),
- _TestGenericHandlerItself(),
- ))
- await server.start()
- return 'localhost:%d' % port, server
-
-
-class TestMetadata(AioTestBase):
-
- async def setUp(self):
- address, self._server = await _start_test_server()
- self._client = aio.insecure_channel(address)
-
- async def tearDown(self):
- await self._client.close()
- await self._server.stop(None)
-
- async def test_from_client_to_server(self):
- multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
- call = multicallable(_REQUEST,
- metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_from_server_to_client(self):
- multicallable = self._client.unary_unary(_TEST_SERVER_TO_CLIENT)
- call = multicallable(_REQUEST)
-
- self.assertEqual(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
- call.initial_metadata())
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_trailing_metadata(self):
- multicallable = self._client.unary_unary(_TEST_TRAILING_METADATA)
- call = multicallable(_REQUEST)
- self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_from_client_to_server_with_list(self):
- multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
- call = multicallable(
- _REQUEST, metadata=list(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)) # pytype: disable=wrong-arg-types
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- @unittest.skipIf(platform.system() == 'Windows',
- 'https://github.com/grpc/grpc/issues/21943')
- async def test_invalid_metadata(self):
- multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
- for exception_type, metadata in _INVALID_METADATA_TEST_CASES:
- with self.subTest(metadata=metadata):
- with self.assertRaises(exception_type):
- call = multicallable(_REQUEST, metadata=metadata)
- await call
-
- async def test_generic_handler(self):
- multicallable = self._client.unary_unary(_TEST_GENERIC_HANDLER)
- call = multicallable(_REQUEST,
- metadata=_INITIAL_METADATA_FOR_GENERIC_HANDLER)
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_unary_stream(self):
- multicallable = self._client.unary_stream(_TEST_UNARY_STREAM)
- call = multicallable(_REQUEST,
- metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
-
- self.assertTrue(
- _common.seen_metadata(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
- call.initial_metadata()))
-
- self.assertSequenceEqual([_RESPONSE],
- [request async for request in call])
-
- self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_stream_unary(self):
- multicallable = self._client.stream_unary(_TEST_STREAM_UNARY)
- call = multicallable(metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
- await call.write(_REQUEST)
- await call.done_writing()
-
- self.assertTrue(
- _common.seen_metadata(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
- call.initial_metadata()))
- self.assertEqual(_RESPONSE, await call)
-
- self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_stream_stream(self):
- multicallable = self._client.stream_stream(_TEST_STREAM_STREAM)
- call = multicallable(metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
- await call.write(_REQUEST)
- await call.done_writing()
-
- self.assertTrue(
- _common.seen_metadata(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
- call.initial_metadata()))
- self.assertSequenceEqual([_RESPONSE],
- [request async for request in call])
- self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_compatibility_with_tuple(self):
- metadata_obj = aio.Metadata(('key', '42'), ('key-2', 'value'))
- self.assertEqual(metadata_obj, tuple(metadata_obj))
- self.assertEqual(tuple(metadata_obj), metadata_obj)
-
- expected_sum = tuple(metadata_obj) + (('third', '3'),)
- self.assertEqual(expected_sum, metadata_obj + (('third', '3'),))
- self.assertEqual(expected_sum, metadata_obj + aio.Metadata(
- ('third', '3')))
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests behavior around the metadata mechanism."""
+
+import asyncio
+import logging
+import platform
+import random
+import unittest
+
+import grpc
+from grpc.experimental import aio
+
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit import _common
+
+_TEST_CLIENT_TO_SERVER = '/test/TestClientToServer'
+_TEST_SERVER_TO_CLIENT = '/test/TestServerToClient'
+_TEST_TRAILING_METADATA = '/test/TestTrailingMetadata'
+_TEST_ECHO_INITIAL_METADATA = '/test/TestEchoInitialMetadata'
+_TEST_GENERIC_HANDLER = '/test/TestGenericHandler'
+_TEST_UNARY_STREAM = '/test/TestUnaryStream'
+_TEST_STREAM_UNARY = '/test/TestStreamUnary'
+_TEST_STREAM_STREAM = '/test/TestStreamStream'
+
+_REQUEST = b'\x00\x00\x00'
+_RESPONSE = b'\x01\x01\x01'
+
+_INITIAL_METADATA_FROM_CLIENT_TO_SERVER = aio.Metadata(
+ ('client-to-server', 'question'),
+ ('client-to-server-bin', b'\x07\x07\x07'),
+)
+_INITIAL_METADATA_FROM_SERVER_TO_CLIENT = aio.Metadata(
+ ('server-to-client', 'answer'),
+ ('server-to-client-bin', b'\x06\x06\x06'),
+)
+_TRAILING_METADATA = aio.Metadata(
+ ('a-trailing-metadata', 'stack-trace'),
+ ('a-trailing-metadata-bin', b'\x05\x05\x05'),
+)
+_INITIAL_METADATA_FOR_GENERIC_HANDLER = aio.Metadata(
+ ('a-must-have-key', 'secret'),)
+
+_INVALID_METADATA_TEST_CASES = (
+ (
+ TypeError,
+ ((42, 42),),
+ ),
+ (
+ TypeError,
+ (({}, {}),),
+ ),
+ (
+ TypeError,
+ ((None, {}),),
+ ),
+ (
+ TypeError,
+ (({}, {}),),
+ ),
+ (
+ TypeError,
+ (('normal', object()),),
+ ),
+)
+
+
+class _TestGenericHandlerForMethods(grpc.GenericRpcHandler):
+
+ def __init__(self):
+ self._routing_table = {
+ _TEST_CLIENT_TO_SERVER:
+ grpc.unary_unary_rpc_method_handler(self._test_client_to_server
+ ),
+ _TEST_SERVER_TO_CLIENT:
+ grpc.unary_unary_rpc_method_handler(self._test_server_to_client
+ ),
+ _TEST_TRAILING_METADATA:
+ grpc.unary_unary_rpc_method_handler(self._test_trailing_metadata
+ ),
+ _TEST_UNARY_STREAM:
+ grpc.unary_stream_rpc_method_handler(self._test_unary_stream),
+ _TEST_STREAM_UNARY:
+ grpc.stream_unary_rpc_method_handler(self._test_stream_unary),
+ _TEST_STREAM_STREAM:
+ grpc.stream_stream_rpc_method_handler(self._test_stream_stream),
+ }
+
+ @staticmethod
+ async def _test_client_to_server(request, context):
+ assert _REQUEST == request
+ assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
+ context.invocation_metadata())
+ return _RESPONSE
+
+ @staticmethod
+ async def _test_server_to_client(request, context):
+ assert _REQUEST == request
+ await context.send_initial_metadata(
+ _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
+ return _RESPONSE
+
+ @staticmethod
+ async def _test_trailing_metadata(request, context):
+ assert _REQUEST == request
+ context.set_trailing_metadata(_TRAILING_METADATA)
+ return _RESPONSE
+
+ @staticmethod
+ async def _test_unary_stream(request, context):
+ assert _REQUEST == request
+ assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
+ context.invocation_metadata())
+ await context.send_initial_metadata(
+ _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
+ yield _RESPONSE
+ context.set_trailing_metadata(_TRAILING_METADATA)
+
+ @staticmethod
+ async def _test_stream_unary(request_iterator, context):
+ assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
+ context.invocation_metadata())
+ await context.send_initial_metadata(
+ _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
+
+ async for request in request_iterator:
+ assert _REQUEST == request
+
+ context.set_trailing_metadata(_TRAILING_METADATA)
+ return _RESPONSE
+
+ @staticmethod
+ async def _test_stream_stream(request_iterator, context):
+ assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
+ context.invocation_metadata())
+ await context.send_initial_metadata(
+ _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
+
+ async for request in request_iterator:
+ assert _REQUEST == request
+
+ yield _RESPONSE
+ context.set_trailing_metadata(_TRAILING_METADATA)
+
+ def service(self, handler_call_details):
+ return self._routing_table.get(handler_call_details.method)
+
+
+class _TestGenericHandlerItself(grpc.GenericRpcHandler):
+
+ @staticmethod
+ async def _method(request, unused_context):
+ assert _REQUEST == request
+ return _RESPONSE
+
+ def service(self, handler_call_details):
+ assert _common.seen_metadata(_INITIAL_METADATA_FOR_GENERIC_HANDLER,
+ handler_call_details.invocation_metadata)
+ return grpc.unary_unary_rpc_method_handler(self._method)
+
+
+async def _start_test_server():
+ server = aio.server()
+ port = server.add_insecure_port('[::]:0')
+ server.add_generic_rpc_handlers((
+ _TestGenericHandlerForMethods(),
+ _TestGenericHandlerItself(),
+ ))
+ await server.start()
+ return 'localhost:%d' % port, server
+
+
+class TestMetadata(AioTestBase):
+
+ async def setUp(self):
+ address, self._server = await _start_test_server()
+ self._client = aio.insecure_channel(address)
+
+ async def tearDown(self):
+ await self._client.close()
+ await self._server.stop(None)
+
+ async def test_from_client_to_server(self):
+ multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
+ call = multicallable(_REQUEST,
+ metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
+ self.assertEqual(_RESPONSE, await call)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_from_server_to_client(self):
+ multicallable = self._client.unary_unary(_TEST_SERVER_TO_CLIENT)
+ call = multicallable(_REQUEST)
+
+ self.assertEqual(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
+ call.initial_metadata())
+ self.assertEqual(_RESPONSE, await call)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_trailing_metadata(self):
+ multicallable = self._client.unary_unary(_TEST_TRAILING_METADATA)
+ call = multicallable(_REQUEST)
+ self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
+ self.assertEqual(_RESPONSE, await call)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_from_client_to_server_with_list(self):
+ multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
+ call = multicallable(
+ _REQUEST, metadata=list(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)) # pytype: disable=wrong-arg-types
+ self.assertEqual(_RESPONSE, await call)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ @unittest.skipIf(platform.system() == 'Windows',
+ 'https://github.com/grpc/grpc/issues/21943')
+ async def test_invalid_metadata(self):
+ multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
+ for exception_type, metadata in _INVALID_METADATA_TEST_CASES:
+ with self.subTest(metadata=metadata):
+ with self.assertRaises(exception_type):
+ call = multicallable(_REQUEST, metadata=metadata)
+ await call
+
+ async def test_generic_handler(self):
+ multicallable = self._client.unary_unary(_TEST_GENERIC_HANDLER)
+ call = multicallable(_REQUEST,
+ metadata=_INITIAL_METADATA_FOR_GENERIC_HANDLER)
+ self.assertEqual(_RESPONSE, await call)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_unary_stream(self):
+ multicallable = self._client.unary_stream(_TEST_UNARY_STREAM)
+ call = multicallable(_REQUEST,
+ metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
+
+ self.assertTrue(
+ _common.seen_metadata(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
+ call.initial_metadata()))
+
+ self.assertSequenceEqual([_RESPONSE],
+ [request async for request in call])
+
+ self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_stream_unary(self):
+ multicallable = self._client.stream_unary(_TEST_STREAM_UNARY)
+ call = multicallable(metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
+ await call.write(_REQUEST)
+ await call.done_writing()
+
+ self.assertTrue(
+ _common.seen_metadata(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
+ call.initial_metadata()))
+ self.assertEqual(_RESPONSE, await call)
+
+ self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_stream_stream(self):
+ multicallable = self._client.stream_stream(_TEST_STREAM_STREAM)
+ call = multicallable(metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
+ await call.write(_REQUEST)
+ await call.done_writing()
+
+ self.assertTrue(
+ _common.seen_metadata(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
+ call.initial_metadata()))
+ self.assertSequenceEqual([_RESPONSE],
+ [request async for request in call])
+ self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_compatibility_with_tuple(self):
+ metadata_obj = aio.Metadata(('key', '42'), ('key-2', 'value'))
+ self.assertEqual(metadata_obj, tuple(metadata_obj))
+ self.assertEqual(tuple(metadata_obj), metadata_obj)
+
+ expected_sum = tuple(metadata_obj) + (('third', '3'),)
+ self.assertEqual(expected_sum, metadata_obj + (('third', '3'),))
+ self.assertEqual(expected_sum, metadata_obj + aio.Metadata(
+ ('third', '3')))
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
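
The client-side metadata flow exercised by TestMetadata above boils down to a minimal sketch like the one below (separate from the diff itself). The address, method path, and metadata key are hypothetical placeholders; the awaited accessors mirror the assertions in the tests.

    import asyncio

    from grpc.experimental import aio


    async def inspect_metadata(target: str, method: str) -> None:
        # Attach client-to-server metadata, then read back what the server set.
        async with aio.insecure_channel(target) as channel:
            multicallable = channel.unary_unary(method)
            call = multicallable(b'\x00\x00\x00',
                                 metadata=aio.Metadata(('client-md', 'value')))
            print(await call.initial_metadata())   # set via context.send_initial_metadata()
            print(await call)                      # the response message
            print(await call.trailing_metadata())  # set via context.set_trailing_metadata()
            print(await call.code())               # grpc.StatusCode.OK on success


    # Hypothetical address and method path:
    # asyncio.run(inspect_metadata('localhost:50051', '/test/SomeUnaryUnary'))
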
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/outside_init_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/outside_init_test.py
index 0d710ef0f2..879796cf0f 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/outside_init_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/outside_init_test.py
@@ -1,74 +1,74 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior around the metadata mechanism."""
-
-import asyncio
-import logging
-import unittest
-from grpc.experimental import aio
-import grpc
-
-from tests_aio.unit._test_server import start_test_server
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-
-_NUM_OF_LOOPS = 50
-
-
-class TestOutsideInit(unittest.TestCase):
-
-    def test_behavior_outside_asyncio(self):
-        # Ensures non-AsyncIO objects can be instantiated.
-        channel_creds = grpc.ssl_channel_credentials()
-
-        # Ensures the AsyncIO APIs do not raise when used outside of AsyncIO.
-        # NOTE(lidiz) This behavior is relied upon by the GAPIC generator, and required
- # by test frameworks like pytest. In test frameworks, objects shared
- # across cases need to be created outside of AsyncIO coroutines.
- aio.insecure_channel('')
- aio.secure_channel('', channel_creds)
- aio.server()
- aio.init_grpc_aio()
- aio.shutdown_grpc_aio()
-
- def test_multi_ephemeral_loops(self):
- # Initializes AIO module outside. It's part of the test. We especially
- # want to ensure the closing of the default loop won't cause deadlocks.
- aio.init_grpc_aio()
-
- async def ping_pong():
- address, server = await start_test_server()
- channel = aio.insecure_channel(address)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- await stub.UnaryCall(messages_pb2.SimpleRequest())
-
- await channel.close()
- await server.stop(None)
-
- for i in range(_NUM_OF_LOOPS):
- old_loop = asyncio.get_event_loop()
- old_loop.close()
-
- loop = asyncio.new_event_loop()
- loop.set_debug(True)
- asyncio.set_event_loop(loop)
-
- loop.run_until_complete(ping_pong())
-
- aio.shutdown_grpc_aio()
-
-
-if __name__ == "__main__":
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests behavior around the metadata mechanism."""
+
+import asyncio
+import logging
+import unittest
+from grpc.experimental import aio
+import grpc
+
+from tests_aio.unit._test_server import start_test_server
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+
+_NUM_OF_LOOPS = 50
+
+
+class TestOutsideInit(unittest.TestCase):
+
+    def test_behavior_outside_asyncio(self):
+        # Ensures non-AsyncIO objects can be instantiated.
+        channel_creds = grpc.ssl_channel_credentials()
+
+        # Ensures the AsyncIO APIs do not raise when used outside of AsyncIO.
+        # NOTE(lidiz) This behavior is relied upon by the GAPIC generator, and required
+ # by test frameworks like pytest. In test frameworks, objects shared
+ # across cases need to be created outside of AsyncIO coroutines.
+ aio.insecure_channel('')
+ aio.secure_channel('', channel_creds)
+ aio.server()
+ aio.init_grpc_aio()
+ aio.shutdown_grpc_aio()
+
+ def test_multi_ephemeral_loops(self):
+ # Initializes AIO module outside. It's part of the test. We especially
+ # want to ensure the closing of the default loop won't cause deadlocks.
+ aio.init_grpc_aio()
+
+ async def ping_pong():
+ address, server = await start_test_server()
+ channel = aio.insecure_channel(address)
+ stub = test_pb2_grpc.TestServiceStub(channel)
+
+ await stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ await channel.close()
+ await server.stop(None)
+
+ for i in range(_NUM_OF_LOOPS):
+ old_loop = asyncio.get_event_loop()
+ old_loop.close()
+
+ loop = asyncio.new_event_loop()
+ loop.set_debug(True)
+ asyncio.set_event_loop(loop)
+
+ loop.run_until_complete(ping_pong())
+
+ aio.shutdown_grpc_aio()
+
+
+if __name__ == "__main__":
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
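
The ephemeral-loop pattern guarded by test_multi_ephemeral_loops condenses to the sketch below; the target address and loop count are hypothetical placeholders, and the body only opens and closes a channel rather than issuing a real RPC.

    import asyncio

    from grpc.experimental import aio

    aio.init_grpc_aio()  # called outside of any event loop, as in the test above


    async def probe(target: str) -> None:
        # Open and close a channel inside whichever loop is currently running.
        channel = aio.insecure_channel(target)
        await channel.close()


    for _ in range(3):
        old_loop = asyncio.get_event_loop()
        old_loop.close()
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(probe('localhost:50051'))  # hypothetical target

    aio.shutdown_grpc_aio()
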
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/secure_call_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/secure_call_test.py
index a63f84c0d0..7efaddd607 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/secure_call_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/secure_call_test.py
@@ -1,130 +1,130 @@
-# Copyright 2020 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests the behaviour of the Call classes under a secure channel."""
-
-import unittest
-import logging
-
-import grpc
-from grpc.experimental import aio
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-from tests.unit import resources
-
-_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
-_NUM_STREAM_RESPONSES = 5
-_RESPONSE_PAYLOAD_SIZE = 42
-
-
-class _SecureCallMixin:
- """A Mixin to run the call tests over a secure channel."""
-
- async def setUp(self):
- server_credentials = grpc.ssl_server_credentials([
- (resources.private_key(), resources.certificate_chain())
- ])
- channel_credentials = grpc.ssl_channel_credentials(
- resources.test_root_certificates())
-
- self._server_address, self._server = await start_test_server(
- secure=True, server_credentials=server_credentials)
- channel_options = ((
- 'grpc.ssl_target_name_override',
- _SERVER_HOST_OVERRIDE,
- ),)
- self._channel = aio.secure_channel(self._server_address,
- channel_credentials, channel_options)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
-
-class TestUnaryUnarySecureCall(_SecureCallMixin, AioTestBase):
- """unary_unary Calls made over a secure channel."""
-
- async def test_call_ok_over_secure_channel(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- response = await call
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_call_with_credentials(self):
- call_credentials = grpc.composite_call_credentials(
- grpc.access_token_call_credentials("abc"),
- grpc.access_token_call_credentials("def"),
- )
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest(),
- credentials=call_credentials)
- response = await call
-
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
-
-
-class TestUnaryStreamSecureCall(_SecureCallMixin, AioTestBase):
- """unary_stream calls over a secure channel"""
-
- async def test_unary_stream_async_generator_secure(self):
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,)
- for _ in range(_NUM_STREAM_RESPONSES))
- call_credentials = grpc.composite_call_credentials(
- grpc.access_token_call_credentials("abc"),
- grpc.access_token_call_credentials("def"),
- )
- call = self._stub.StreamingOutputCall(request,
- credentials=call_credentials)
-
- async for response in call:
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(len(response.payload.body), _RESPONSE_PAYLOAD_SIZE)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
-
-# Prepares the request that streams in a ping-pong manner.
-_STREAM_OUTPUT_REQUEST_ONE_RESPONSE = messages_pb2.StreamingOutputCallRequest()
-_STREAM_OUTPUT_REQUEST_ONE_RESPONSE.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
-
-class TestStreamStreamSecureCall(_SecureCallMixin, AioTestBase):
- _STREAM_ITERATIONS = 2
-
- async def test_async_generator_secure_channel(self):
-
- async def request_generator():
- for _ in range(self._STREAM_ITERATIONS):
- yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
-
- call_credentials = grpc.composite_call_credentials(
- grpc.access_token_call_credentials("abc"),
- grpc.access_token_call_credentials("def"),
- )
-
- call = self._stub.FullDuplexCall(request_generator(),
- credentials=call_credentials)
- async for response in call:
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests the behaviour of the Call classes under a secure channel."""
+
+import unittest
+import logging
+
+import grpc
+from grpc.experimental import aio
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._test_server import start_test_server
+from tests.unit import resources
+
+_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
+_NUM_STREAM_RESPONSES = 5
+_RESPONSE_PAYLOAD_SIZE = 42
+
+
+class _SecureCallMixin:
+ """A Mixin to run the call tests over a secure channel."""
+
+ async def setUp(self):
+ server_credentials = grpc.ssl_server_credentials([
+ (resources.private_key(), resources.certificate_chain())
+ ])
+ channel_credentials = grpc.ssl_channel_credentials(
+ resources.test_root_certificates())
+
+ self._server_address, self._server = await start_test_server(
+ secure=True, server_credentials=server_credentials)
+ channel_options = ((
+ 'grpc.ssl_target_name_override',
+ _SERVER_HOST_OVERRIDE,
+ ),)
+ self._channel = aio.secure_channel(self._server_address,
+ channel_credentials, channel_options)
+ self._stub = test_pb2_grpc.TestServiceStub(self._channel)
+
+ async def tearDown(self):
+ await self._channel.close()
+ await self._server.stop(None)
+
+
+class TestUnaryUnarySecureCall(_SecureCallMixin, AioTestBase):
+ """unary_unary Calls made over a secure channel."""
+
+ async def test_call_ok_over_secure_channel(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+ response = await call
+ self.assertIsInstance(response, messages_pb2.SimpleResponse)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_call_with_credentials(self):
+ call_credentials = grpc.composite_call_credentials(
+ grpc.access_token_call_credentials("abc"),
+ grpc.access_token_call_credentials("def"),
+ )
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest(),
+ credentials=call_credentials)
+ response = await call
+
+ self.assertIsInstance(response, messages_pb2.SimpleResponse)
+
+
+class TestUnaryStreamSecureCall(_SecureCallMixin, AioTestBase):
+ """unary_stream calls over a secure channel"""
+
+ async def test_unary_stream_async_generator_secure(self):
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.extend(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,)
+ for _ in range(_NUM_STREAM_RESPONSES))
+ call_credentials = grpc.composite_call_credentials(
+ grpc.access_token_call_credentials("abc"),
+ grpc.access_token_call_credentials("def"),
+ )
+ call = self._stub.StreamingOutputCall(request,
+ credentials=call_credentials)
+
+ async for response in call:
+ self.assertIsInstance(response,
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(len(response.payload.body), _RESPONSE_PAYLOAD_SIZE)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+
+# Prepares the request that streams in a ping-pong manner.
+_STREAM_OUTPUT_REQUEST_ONE_RESPONSE = messages_pb2.StreamingOutputCallRequest()
+_STREAM_OUTPUT_REQUEST_ONE_RESPONSE.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+
+class TestStreamStreamSecureCall(_SecureCallMixin, AioTestBase):
+ _STREAM_ITERATIONS = 2
+
+ async def test_async_generator_secure_channel(self):
+
+ async def request_generator():
+ for _ in range(self._STREAM_ITERATIONS):
+ yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
+
+ call_credentials = grpc.composite_call_credentials(
+ grpc.access_token_call_credentials("abc"),
+ grpc.access_token_call_credentials("def"),
+ )
+
+ call = self._stub.FullDuplexCall(request_generator(),
+ credentials=call_credentials)
+ async for response in call:
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
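
On the client side, the secure-channel setup driven by _SecureCallMixin reduces to a sketch like the following. The target, root certificate bytes, token, and method path are hypothetical placeholders; the ssl_target_name_override value mirrors _SERVER_HOST_OVERRIDE above.

    import grpc
    from grpc.experimental import aio


    async def secure_unary_call(target: str, root_pem: bytes) -> bytes:
        channel_creds = grpc.ssl_channel_credentials(root_certificates=root_pem)
        call_creds = grpc.access_token_call_credentials('my-token')  # hypothetical token
        # Matches the host name baked into the test certificates above.
        options = (('grpc.ssl_target_name_override', 'foo.test.google.fr'),)
        async with aio.secure_channel(target, channel_creds, options) as channel:
            multicallable = channel.unary_unary('/test/SomeUnaryUnary')  # hypothetical method
            return await multicallable(b'\x00', credentials=call_creds)
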
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_interceptor_test.py
index e4133c431f..d891ecdb77 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_interceptor_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_interceptor_test.py
@@ -1,330 +1,330 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test the functionality of server interceptors."""
-
-import asyncio
-import functools
-import logging
-import unittest
-from typing import Any, Awaitable, Callable, Tuple
-
-import grpc
-from grpc.experimental import aio, wrap_server_method_handler
-
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-
-
-class _LoggingInterceptor(aio.ServerInterceptor):
-
- def __init__(self, tag: str, record: list) -> None:
- self.tag = tag
- self.record = record
-
- async def intercept_service(
- self, continuation: Callable[[grpc.HandlerCallDetails], Awaitable[
- grpc.RpcMethodHandler]],
- handler_call_details: grpc.HandlerCallDetails
- ) -> grpc.RpcMethodHandler:
- self.record.append(self.tag + ':intercept_service')
- return await continuation(handler_call_details)
-
-
-class _GenericInterceptor(aio.ServerInterceptor):
-
- def __init__(self, fn: Callable[[
- Callable[[grpc.HandlerCallDetails], Awaitable[grpc.
- RpcMethodHandler]],
- grpc.HandlerCallDetails
- ], Any]) -> None:
- self._fn = fn
-
- async def intercept_service(
- self, continuation: Callable[[grpc.HandlerCallDetails], Awaitable[
- grpc.RpcMethodHandler]],
- handler_call_details: grpc.HandlerCallDetails
- ) -> grpc.RpcMethodHandler:
- return await self._fn(continuation, handler_call_details)
-
-
-def _filter_server_interceptor(condition: Callable,
- interceptor: aio.ServerInterceptor
- ) -> aio.ServerInterceptor:
-
- async def intercept_service(
- continuation: Callable[[grpc.HandlerCallDetails], Awaitable[
- grpc.RpcMethodHandler]],
- handler_call_details: grpc.HandlerCallDetails
- ) -> grpc.RpcMethodHandler:
- if condition(handler_call_details):
- return await interceptor.intercept_service(continuation,
- handler_call_details)
- return await continuation(handler_call_details)
-
- return _GenericInterceptor(intercept_service)
-
-
-class _CacheInterceptor(aio.ServerInterceptor):
- """An interceptor that caches response based on request message."""
-
- def __init__(self, cache_store=None):
- self.cache_store = cache_store or {}
-
- async def intercept_service(
- self, continuation: Callable[[grpc.HandlerCallDetails], Awaitable[
- grpc.RpcMethodHandler]],
- handler_call_details: grpc.HandlerCallDetails
- ) -> grpc.RpcMethodHandler:
- # Get the actual handler
- handler = await continuation(handler_call_details)
-
- # Only intercept unary call RPCs
- if handler and (handler.request_streaming or # pytype: disable=attribute-error
- handler.response_streaming): # pytype: disable=attribute-error
- return handler
-
- def wrapper(behavior: Callable[
- [messages_pb2.SimpleRequest, aio.
- ServicerContext], messages_pb2.SimpleResponse]):
-
- @functools.wraps(behavior)
- async def wrapper(request: messages_pb2.SimpleRequest,
- context: aio.ServicerContext
- ) -> messages_pb2.SimpleResponse:
- if request.response_size not in self.cache_store:
- self.cache_store[request.response_size] = await behavior(
- request, context)
- return self.cache_store[request.response_size]
-
- return wrapper
-
- return wrap_server_method_handler(wrapper, handler)
-
-
-async def _create_server_stub_pair(
- *interceptors: aio.ServerInterceptor
-) -> Tuple[aio.Server, test_pb2_grpc.TestServiceStub]:
- """Creates a server-stub pair with given interceptors.
-
- Returning the server object to protect it from being garbage collected.
- """
- server_target, server = await start_test_server(interceptors=interceptors)
- channel = aio.insecure_channel(server_target)
- return server, test_pb2_grpc.TestServiceStub(channel)
-
-
-class TestServerInterceptor(AioTestBase):
-
- async def test_invalid_interceptor(self):
-
- class InvalidInterceptor:
- """Just an invalid Interceptor"""
-
- with self.assertRaises(ValueError):
- server_target, _ = await start_test_server(
- interceptors=(InvalidInterceptor(),))
-
- async def test_executed_right_order(self):
- record = []
- server_target, _ = await start_test_server(interceptors=(
- _LoggingInterceptor('log1', record),
- _LoggingInterceptor('log2', record),
- ))
-
- async with aio.insecure_channel(server_target) as channel:
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
-
- # Check that all interceptors were executed, and were executed
- # in the right order.
- self.assertSequenceEqual([
- 'log1:intercept_service',
- 'log2:intercept_service',
- ], record)
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
-
- async def test_response_ok(self):
- record = []
- server_target, _ = await start_test_server(
- interceptors=(_LoggingInterceptor('log1', record),))
-
- async with aio.insecure_channel(server_target) as channel:
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
- code = await call.code()
-
- self.assertSequenceEqual(['log1:intercept_service'], record)
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
- self.assertEqual(code, grpc.StatusCode.OK)
-
- async def test_apply_different_interceptors_by_metadata(self):
- record = []
- conditional_interceptor = _filter_server_interceptor(
- lambda x: ('secret', '42') in x.invocation_metadata,
- _LoggingInterceptor('log3', record))
- server_target, _ = await start_test_server(interceptors=(
- _LoggingInterceptor('log1', record),
- conditional_interceptor,
- _LoggingInterceptor('log2', record),
- ))
-
- async with aio.insecure_channel(server_target) as channel:
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- metadata = aio.Metadata(('key', 'value'),)
- call = multicallable(messages_pb2.SimpleRequest(),
- metadata=metadata)
- await call
- self.assertSequenceEqual([
- 'log1:intercept_service',
- 'log2:intercept_service',
- ], record)
-
- record.clear()
- metadata = aio.Metadata(('key', 'value'), ('secret', '42'))
- call = multicallable(messages_pb2.SimpleRequest(),
- metadata=metadata)
- await call
- self.assertSequenceEqual([
- 'log1:intercept_service',
- 'log3:intercept_service',
- 'log2:intercept_service',
- ], record)
-
- async def test_response_caching(self):
- # Prepares a preset value to help testing
- interceptor = _CacheInterceptor({
- 42:
- messages_pb2.SimpleResponse(payload=messages_pb2.Payload(
- body=b'\x42'))
- })
-
- # Constructs a server with the cache interceptor
- server, stub = await _create_server_stub_pair(interceptor)
-
- # Tests if the cache store is used
- response = await stub.UnaryCall(
- messages_pb2.SimpleRequest(response_size=42))
- self.assertEqual(1, len(interceptor.cache_store[42].payload.body))
- self.assertEqual(interceptor.cache_store[42], response)
-
- # Tests response can be cached
- response = await stub.UnaryCall(
- messages_pb2.SimpleRequest(response_size=1337))
- self.assertEqual(1337, len(interceptor.cache_store[1337].payload.body))
- self.assertEqual(interceptor.cache_store[1337], response)
- response = await stub.UnaryCall(
- messages_pb2.SimpleRequest(response_size=1337))
- self.assertEqual(interceptor.cache_store[1337], response)
-
- async def test_interceptor_unary_stream(self):
- record = []
- server, stub = await _create_server_stub_pair(
- _LoggingInterceptor('log_unary_stream', record))
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
-
-        # Invokes the actual RPC
- call = stub.StreamingOutputCall(request)
-
- # Ensures the RPC goes fine
- async for response in call:
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- self.assertSequenceEqual([
- 'log_unary_stream:intercept_service',
- ], record)
-
- async def test_interceptor_stream_unary(self):
- record = []
- server, stub = await _create_server_stub_pair(
- _LoggingInterceptor('log_stream_unary', record))
-
- # Invokes the actual RPC
- call = stub.StreamingInputCall()
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- # Sends out requests
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- await call.done_writing()
-
- # Validates the responses
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- self.assertSequenceEqual([
- 'log_stream_unary:intercept_service',
- ], record)
-
- async def test_interceptor_stream_stream(self):
- record = []
- server, stub = await _create_server_stub_pair(
- _LoggingInterceptor('log_stream_stream', record))
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- # Invokes the actual RPC
- call = stub.StreamingInputCall(gen())
-
- # Validates the responses
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- self.assertSequenceEqual([
- 'log_stream_stream:intercept_service',
- ], record)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test the functionality of server interceptors."""
+
+import asyncio
+import functools
+import logging
+import unittest
+from typing import Any, Awaitable, Callable, Tuple
+
+import grpc
+from grpc.experimental import aio, wrap_server_method_handler
+
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._test_server import start_test_server
+
+_NUM_STREAM_RESPONSES = 5
+_REQUEST_PAYLOAD_SIZE = 7
+_RESPONSE_PAYLOAD_SIZE = 42
+
+
+class _LoggingInterceptor(aio.ServerInterceptor):
+
+ def __init__(self, tag: str, record: list) -> None:
+ self.tag = tag
+ self.record = record
+
+ async def intercept_service(
+ self, continuation: Callable[[grpc.HandlerCallDetails], Awaitable[
+ grpc.RpcMethodHandler]],
+ handler_call_details: grpc.HandlerCallDetails
+ ) -> grpc.RpcMethodHandler:
+ self.record.append(self.tag + ':intercept_service')
+ return await continuation(handler_call_details)
+
+
+class _GenericInterceptor(aio.ServerInterceptor):
+
+ def __init__(self, fn: Callable[[
+ Callable[[grpc.HandlerCallDetails], Awaitable[grpc.
+ RpcMethodHandler]],
+ grpc.HandlerCallDetails
+ ], Any]) -> None:
+ self._fn = fn
+
+ async def intercept_service(
+ self, continuation: Callable[[grpc.HandlerCallDetails], Awaitable[
+ grpc.RpcMethodHandler]],
+ handler_call_details: grpc.HandlerCallDetails
+ ) -> grpc.RpcMethodHandler:
+ return await self._fn(continuation, handler_call_details)
+
+
+def _filter_server_interceptor(condition: Callable,
+ interceptor: aio.ServerInterceptor
+ ) -> aio.ServerInterceptor:
+
+ async def intercept_service(
+ continuation: Callable[[grpc.HandlerCallDetails], Awaitable[
+ grpc.RpcMethodHandler]],
+ handler_call_details: grpc.HandlerCallDetails
+ ) -> grpc.RpcMethodHandler:
+ if condition(handler_call_details):
+ return await interceptor.intercept_service(continuation,
+ handler_call_details)
+ return await continuation(handler_call_details)
+
+ return _GenericInterceptor(intercept_service)
+
+
+class _CacheInterceptor(aio.ServerInterceptor):
+ """An interceptor that caches response based on request message."""
+
+ def __init__(self, cache_store=None):
+ self.cache_store = cache_store or {}
+
+ async def intercept_service(
+ self, continuation: Callable[[grpc.HandlerCallDetails], Awaitable[
+ grpc.RpcMethodHandler]],
+ handler_call_details: grpc.HandlerCallDetails
+ ) -> grpc.RpcMethodHandler:
+ # Get the actual handler
+ handler = await continuation(handler_call_details)
+
+ # Only intercept unary call RPCs
+ if handler and (handler.request_streaming or # pytype: disable=attribute-error
+ handler.response_streaming): # pytype: disable=attribute-error
+ return handler
+
+ def wrapper(behavior: Callable[
+ [messages_pb2.SimpleRequest, aio.
+ ServicerContext], messages_pb2.SimpleResponse]):
+
+ @functools.wraps(behavior)
+ async def wrapper(request: messages_pb2.SimpleRequest,
+ context: aio.ServicerContext
+ ) -> messages_pb2.SimpleResponse:
+ if request.response_size not in self.cache_store:
+ self.cache_store[request.response_size] = await behavior(
+ request, context)
+ return self.cache_store[request.response_size]
+
+ return wrapper
+
+ return wrap_server_method_handler(wrapper, handler)
+
+
+async def _create_server_stub_pair(
+ *interceptors: aio.ServerInterceptor
+) -> Tuple[aio.Server, test_pb2_grpc.TestServiceStub]:
+ """Creates a server-stub pair with given interceptors.
+
+ Returning the server object to protect it from being garbage collected.
+ """
+ server_target, server = await start_test_server(interceptors=interceptors)
+ channel = aio.insecure_channel(server_target)
+ return server, test_pb2_grpc.TestServiceStub(channel)
+
+
+class TestServerInterceptor(AioTestBase):
+
+ async def test_invalid_interceptor(self):
+
+ class InvalidInterceptor:
+ """Just an invalid Interceptor"""
+
+ with self.assertRaises(ValueError):
+ server_target, _ = await start_test_server(
+ interceptors=(InvalidInterceptor(),))
+
+ async def test_executed_right_order(self):
+ record = []
+ server_target, _ = await start_test_server(interceptors=(
+ _LoggingInterceptor('log1', record),
+ _LoggingInterceptor('log2', record),
+ ))
+
+ async with aio.insecure_channel(server_target) as channel:
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+ response = await call
+
+ # Check that all interceptors were executed, and were executed
+ # in the right order.
+ self.assertSequenceEqual([
+ 'log1:intercept_service',
+ 'log2:intercept_service',
+ ], record)
+ self.assertIsInstance(response, messages_pb2.SimpleResponse)
+
+ async def test_response_ok(self):
+ record = []
+ server_target, _ = await start_test_server(
+ interceptors=(_LoggingInterceptor('log1', record),))
+
+ async with aio.insecure_channel(server_target) as channel:
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+ call = multicallable(messages_pb2.SimpleRequest())
+ response = await call
+ code = await call.code()
+
+ self.assertSequenceEqual(['log1:intercept_service'], record)
+ self.assertIsInstance(response, messages_pb2.SimpleResponse)
+ self.assertEqual(code, grpc.StatusCode.OK)
+
+ async def test_apply_different_interceptors_by_metadata(self):
+ record = []
+ conditional_interceptor = _filter_server_interceptor(
+ lambda x: ('secret', '42') in x.invocation_metadata,
+ _LoggingInterceptor('log3', record))
+ server_target, _ = await start_test_server(interceptors=(
+ _LoggingInterceptor('log1', record),
+ conditional_interceptor,
+ _LoggingInterceptor('log2', record),
+ ))
+
+ async with aio.insecure_channel(server_target) as channel:
+ multicallable = channel.unary_unary(
+ '/grpc.testing.TestService/UnaryCall',
+ request_serializer=messages_pb2.SimpleRequest.SerializeToString,
+ response_deserializer=messages_pb2.SimpleResponse.FromString)
+
+ metadata = aio.Metadata(('key', 'value'),)
+ call = multicallable(messages_pb2.SimpleRequest(),
+ metadata=metadata)
+ await call
+ self.assertSequenceEqual([
+ 'log1:intercept_service',
+ 'log2:intercept_service',
+ ], record)
+
+ record.clear()
+ metadata = aio.Metadata(('key', 'value'), ('secret', '42'))
+ call = multicallable(messages_pb2.SimpleRequest(),
+ metadata=metadata)
+ await call
+ self.assertSequenceEqual([
+ 'log1:intercept_service',
+ 'log3:intercept_service',
+ 'log2:intercept_service',
+ ], record)
+
+ async def test_response_caching(self):
+ # Prepares a preset value to help testing
+ interceptor = _CacheInterceptor({
+ 42:
+ messages_pb2.SimpleResponse(payload=messages_pb2.Payload(
+ body=b'\x42'))
+ })
+
+ # Constructs a server with the cache interceptor
+ server, stub = await _create_server_stub_pair(interceptor)
+
+ # Tests if the cache store is used
+ response = await stub.UnaryCall(
+ messages_pb2.SimpleRequest(response_size=42))
+ self.assertEqual(1, len(interceptor.cache_store[42].payload.body))
+ self.assertEqual(interceptor.cache_store[42], response)
+
+ # Tests response can be cached
+ response = await stub.UnaryCall(
+ messages_pb2.SimpleRequest(response_size=1337))
+ self.assertEqual(1337, len(interceptor.cache_store[1337].payload.body))
+ self.assertEqual(interceptor.cache_store[1337], response)
+ response = await stub.UnaryCall(
+ messages_pb2.SimpleRequest(response_size=1337))
+ self.assertEqual(interceptor.cache_store[1337], response)
+
+ async def test_interceptor_unary_stream(self):
+ record = []
+ server, stub = await _create_server_stub_pair(
+ _LoggingInterceptor('log_unary_stream', record))
+
+ # Prepares the request
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
+
+        # Invokes the actual RPC
+ call = stub.StreamingOutputCall(request)
+
+ # Ensures the RPC goes fine
+ async for response in call:
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ self.assertSequenceEqual([
+ 'log_unary_stream:intercept_service',
+ ], record)
+
+ async def test_interceptor_stream_unary(self):
+ record = []
+ server, stub = await _create_server_stub_pair(
+ _LoggingInterceptor('log_stream_unary', record))
+
+ # Invokes the actual RPC
+ call = stub.StreamingInputCall()
+
+ # Prepares the request
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ # Sends out requests
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+ await call.done_writing()
+
+ # Validates the responses
+ response = await call
+ self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
+ self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ self.assertSequenceEqual([
+ 'log_stream_unary:intercept_service',
+ ], record)
+
+ async def test_interceptor_stream_stream(self):
+ record = []
+ server, stub = await _create_server_stub_pair(
+ _LoggingInterceptor('log_stream_stream', record))
+
+ # Prepares the request
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ async def gen():
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield request
+
+ # Invokes the actual RPC
+ call = stub.StreamingInputCall(gen())
+
+ # Validates the responses
+ response = await call
+ self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
+ self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ self.assertSequenceEqual([
+ 'log_stream_stream:intercept_service',
+ ], record)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
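
A server-side interceptor of the kind exercised above can be sketched as follows; the interceptor body and port are illustrative placeholders, and real handlers would still need to be registered before starting the server.

    from grpc.experimental import aio


    class LoggingInterceptor(aio.ServerInterceptor):
        """Records each invoked method name before delegating to the real handler."""

        async def intercept_service(self, continuation, handler_call_details):
            print('intercepting', handler_call_details.method)
            return await continuation(handler_call_details)


    async def serve() -> None:
        server = aio.server(interceptors=(LoggingInterceptor(),))
        server.add_insecure_port('[::]:0')
        # Register generic handlers or generated servicers here before starting.
        await server.start()
        await server.wait_for_termination()
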
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py
index 95d50bee7b..61d1edd523 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py
@@ -13,90 +13,90 @@
# limitations under the License.
import asyncio
-import gc
+import gc
import logging
-import socket
-import time
+import socket
+import time
import unittest
import grpc
from grpc.experimental import aio
-
-from tests.unit import resources
-from tests.unit.framework.common import test_constants
+
+from tests.unit import resources
+from tests.unit.framework.common import test_constants
from tests_aio.unit._test_base import AioTestBase
_SIMPLE_UNARY_UNARY = '/test/SimpleUnaryUnary'
_BLOCK_FOREVER = '/test/BlockForever'
_BLOCK_BRIEFLY = '/test/BlockBriefly'
-_UNARY_STREAM_ASYNC_GEN = '/test/UnaryStreamAsyncGen'
-_UNARY_STREAM_READER_WRITER = '/test/UnaryStreamReaderWriter'
-_UNARY_STREAM_EVILLY_MIXED = '/test/UnaryStreamEvillyMixed'
-_STREAM_UNARY_ASYNC_GEN = '/test/StreamUnaryAsyncGen'
-_STREAM_UNARY_READER_WRITER = '/test/StreamUnaryReaderWriter'
-_STREAM_UNARY_EVILLY_MIXED = '/test/StreamUnaryEvillyMixed'
-_STREAM_STREAM_ASYNC_GEN = '/test/StreamStreamAsyncGen'
-_STREAM_STREAM_READER_WRITER = '/test/StreamStreamReaderWriter'
-_STREAM_STREAM_EVILLY_MIXED = '/test/StreamStreamEvillyMixed'
-_UNIMPLEMENTED_METHOD = '/test/UnimplementedMethod'
-_ERROR_IN_STREAM_STREAM = '/test/ErrorInStreamStream'
-_ERROR_WITHOUT_RAISE_IN_UNARY_UNARY = '/test/ErrorWithoutRaiseInUnaryUnary'
-_ERROR_WITHOUT_RAISE_IN_STREAM_STREAM = '/test/ErrorWithoutRaiseInStreamStream'
+_UNARY_STREAM_ASYNC_GEN = '/test/UnaryStreamAsyncGen'
+_UNARY_STREAM_READER_WRITER = '/test/UnaryStreamReaderWriter'
+_UNARY_STREAM_EVILLY_MIXED = '/test/UnaryStreamEvillyMixed'
+_STREAM_UNARY_ASYNC_GEN = '/test/StreamUnaryAsyncGen'
+_STREAM_UNARY_READER_WRITER = '/test/StreamUnaryReaderWriter'
+_STREAM_UNARY_EVILLY_MIXED = '/test/StreamUnaryEvillyMixed'
+_STREAM_STREAM_ASYNC_GEN = '/test/StreamStreamAsyncGen'
+_STREAM_STREAM_READER_WRITER = '/test/StreamStreamReaderWriter'
+_STREAM_STREAM_EVILLY_MIXED = '/test/StreamStreamEvillyMixed'
+_UNIMPLEMENTED_METHOD = '/test/UnimplementedMethod'
+_ERROR_IN_STREAM_STREAM = '/test/ErrorInStreamStream'
+_ERROR_WITHOUT_RAISE_IN_UNARY_UNARY = '/test/ErrorWithoutRaiseInUnaryUnary'
+_ERROR_WITHOUT_RAISE_IN_STREAM_STREAM = '/test/ErrorWithoutRaiseInStreamStream'
_REQUEST = b'\x00\x00\x00'
_RESPONSE = b'\x01\x01\x01'
-_NUM_STREAM_REQUESTS = 3
-_NUM_STREAM_RESPONSES = 5
+_NUM_STREAM_REQUESTS = 3
+_NUM_STREAM_RESPONSES = 5
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self):
self._called = asyncio.get_event_loop().create_future()
- self._routing_table = {
- _SIMPLE_UNARY_UNARY:
- grpc.unary_unary_rpc_method_handler(self._unary_unary),
- _BLOCK_FOREVER:
- grpc.unary_unary_rpc_method_handler(self._block_forever),
- _BLOCK_BRIEFLY:
- grpc.unary_unary_rpc_method_handler(self._block_briefly),
- _UNARY_STREAM_ASYNC_GEN:
- grpc.unary_stream_rpc_method_handler(
- self._unary_stream_async_gen),
- _UNARY_STREAM_READER_WRITER:
- grpc.unary_stream_rpc_method_handler(
- self._unary_stream_reader_writer),
- _UNARY_STREAM_EVILLY_MIXED:
- grpc.unary_stream_rpc_method_handler(
- self._unary_stream_evilly_mixed),
- _STREAM_UNARY_ASYNC_GEN:
- grpc.stream_unary_rpc_method_handler(
- self._stream_unary_async_gen),
- _STREAM_UNARY_READER_WRITER:
- grpc.stream_unary_rpc_method_handler(
- self._stream_unary_reader_writer),
- _STREAM_UNARY_EVILLY_MIXED:
- grpc.stream_unary_rpc_method_handler(
- self._stream_unary_evilly_mixed),
- _STREAM_STREAM_ASYNC_GEN:
- grpc.stream_stream_rpc_method_handler(
- self._stream_stream_async_gen),
- _STREAM_STREAM_READER_WRITER:
- grpc.stream_stream_rpc_method_handler(
- self._stream_stream_reader_writer),
- _STREAM_STREAM_EVILLY_MIXED:
- grpc.stream_stream_rpc_method_handler(
- self._stream_stream_evilly_mixed),
- _ERROR_IN_STREAM_STREAM:
- grpc.stream_stream_rpc_method_handler(
- self._error_in_stream_stream),
- _ERROR_WITHOUT_RAISE_IN_UNARY_UNARY:
- grpc.unary_unary_rpc_method_handler(
- self._error_without_raise_in_unary_unary),
- _ERROR_WITHOUT_RAISE_IN_STREAM_STREAM:
- grpc.stream_stream_rpc_method_handler(
- self._error_without_raise_in_stream_stream),
- }
+ self._routing_table = {
+ _SIMPLE_UNARY_UNARY:
+ grpc.unary_unary_rpc_method_handler(self._unary_unary),
+ _BLOCK_FOREVER:
+ grpc.unary_unary_rpc_method_handler(self._block_forever),
+ _BLOCK_BRIEFLY:
+ grpc.unary_unary_rpc_method_handler(self._block_briefly),
+ _UNARY_STREAM_ASYNC_GEN:
+ grpc.unary_stream_rpc_method_handler(
+ self._unary_stream_async_gen),
+ _UNARY_STREAM_READER_WRITER:
+ grpc.unary_stream_rpc_method_handler(
+ self._unary_stream_reader_writer),
+ _UNARY_STREAM_EVILLY_MIXED:
+ grpc.unary_stream_rpc_method_handler(
+ self._unary_stream_evilly_mixed),
+ _STREAM_UNARY_ASYNC_GEN:
+ grpc.stream_unary_rpc_method_handler(
+ self._stream_unary_async_gen),
+ _STREAM_UNARY_READER_WRITER:
+ grpc.stream_unary_rpc_method_handler(
+ self._stream_unary_reader_writer),
+ _STREAM_UNARY_EVILLY_MIXED:
+ grpc.stream_unary_rpc_method_handler(
+ self._stream_unary_evilly_mixed),
+ _STREAM_STREAM_ASYNC_GEN:
+ grpc.stream_stream_rpc_method_handler(
+ self._stream_stream_async_gen),
+ _STREAM_STREAM_READER_WRITER:
+ grpc.stream_stream_rpc_method_handler(
+ self._stream_stream_reader_writer),
+ _STREAM_STREAM_EVILLY_MIXED:
+ grpc.stream_stream_rpc_method_handler(
+ self._stream_stream_evilly_mixed),
+ _ERROR_IN_STREAM_STREAM:
+ grpc.stream_stream_rpc_method_handler(
+ self._error_in_stream_stream),
+ _ERROR_WITHOUT_RAISE_IN_UNARY_UNARY:
+ grpc.unary_unary_rpc_method_handler(
+ self._error_without_raise_in_unary_unary),
+ _ERROR_WITHOUT_RAISE_IN_STREAM_STREAM:
+ grpc.stream_stream_rpc_method_handler(
+ self._error_without_raise_in_stream_stream),
+ }
@staticmethod
async def _unary_unary(unused_request, unused_context):
@@ -105,92 +105,92 @@ class _GenericHandler(grpc.GenericRpcHandler):
async def _block_forever(self, unused_request, unused_context):
await asyncio.get_event_loop().create_future()
- async def _block_briefly(self, unused_request, unused_context):
+ async def _block_briefly(self, unused_request, unused_context):
await asyncio.sleep(test_constants.SHORT_TIMEOUT / 2)
return _RESPONSE
- async def _unary_stream_async_gen(self, unused_request, unused_context):
- for _ in range(_NUM_STREAM_RESPONSES):
- yield _RESPONSE
-
- async def _unary_stream_reader_writer(self, unused_request, context):
- for _ in range(_NUM_STREAM_RESPONSES):
- await context.write(_RESPONSE)
-
- async def _unary_stream_evilly_mixed(self, unused_request, context):
- yield _RESPONSE
- for _ in range(_NUM_STREAM_RESPONSES - 1):
- await context.write(_RESPONSE)
-
- async def _stream_unary_async_gen(self, request_iterator, unused_context):
- request_count = 0
- async for request in request_iterator:
- assert _REQUEST == request
- request_count += 1
- assert _NUM_STREAM_REQUESTS == request_count
- return _RESPONSE
-
- async def _stream_unary_reader_writer(self, unused_request, context):
- for _ in range(_NUM_STREAM_REQUESTS):
- assert _REQUEST == await context.read()
- return _RESPONSE
-
- async def _stream_unary_evilly_mixed(self, request_iterator, context):
- assert _REQUEST == await context.read()
- request_count = 0
- async for request in request_iterator:
- assert _REQUEST == request
- request_count += 1
- assert _NUM_STREAM_REQUESTS - 1 == request_count
- return _RESPONSE
-
- async def _stream_stream_async_gen(self, request_iterator, unused_context):
- request_count = 0
- async for request in request_iterator:
- assert _REQUEST == request
- request_count += 1
- assert _NUM_STREAM_REQUESTS == request_count
-
- for _ in range(_NUM_STREAM_RESPONSES):
- yield _RESPONSE
-
- async def _stream_stream_reader_writer(self, unused_request, context):
- for _ in range(_NUM_STREAM_REQUESTS):
- assert _REQUEST == await context.read()
- for _ in range(_NUM_STREAM_RESPONSES):
- await context.write(_RESPONSE)
-
- async def _stream_stream_evilly_mixed(self, request_iterator, context):
- assert _REQUEST == await context.read()
- request_count = 0
- async for request in request_iterator:
- assert _REQUEST == request
- request_count += 1
- assert _NUM_STREAM_REQUESTS - 1 == request_count
-
- yield _RESPONSE
- for _ in range(_NUM_STREAM_RESPONSES - 1):
- await context.write(_RESPONSE)
-
- async def _error_in_stream_stream(self, request_iterator, unused_context):
- async for request in request_iterator:
- assert _REQUEST == request
- raise RuntimeError('A testing RuntimeError!')
- yield _RESPONSE
-
- async def _error_without_raise_in_unary_unary(self, request, context):
- assert _REQUEST == request
- context.set_code(grpc.StatusCode.INTERNAL)
-
- async def _error_without_raise_in_stream_stream(self, request_iterator,
- context):
- async for request in request_iterator:
- assert _REQUEST == request
- context.set_code(grpc.StatusCode.INTERNAL)
-
+ async def _unary_stream_async_gen(self, unused_request, unused_context):
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield _RESPONSE
+
+ async def _unary_stream_reader_writer(self, unused_request, context):
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await context.write(_RESPONSE)
+
+ async def _unary_stream_evilly_mixed(self, unused_request, context):
+ yield _RESPONSE
+ for _ in range(_NUM_STREAM_RESPONSES - 1):
+ await context.write(_RESPONSE)
+
+ async def _stream_unary_async_gen(self, request_iterator, unused_context):
+ request_count = 0
+ async for request in request_iterator:
+ assert _REQUEST == request
+ request_count += 1
+ assert _NUM_STREAM_REQUESTS == request_count
+ return _RESPONSE
+
+ async def _stream_unary_reader_writer(self, unused_request, context):
+ for _ in range(_NUM_STREAM_REQUESTS):
+ assert _REQUEST == await context.read()
+ return _RESPONSE
+
+ async def _stream_unary_evilly_mixed(self, request_iterator, context):
+ assert _REQUEST == await context.read()
+ request_count = 0
+ async for request in request_iterator:
+ assert _REQUEST == request
+ request_count += 1
+ assert _NUM_STREAM_REQUESTS - 1 == request_count
+ return _RESPONSE
+
+ async def _stream_stream_async_gen(self, request_iterator, unused_context):
+ request_count = 0
+ async for request in request_iterator:
+ assert _REQUEST == request
+ request_count += 1
+ assert _NUM_STREAM_REQUESTS == request_count
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield _RESPONSE
+
+ async def _stream_stream_reader_writer(self, unused_request, context):
+ for _ in range(_NUM_STREAM_REQUESTS):
+ assert _REQUEST == await context.read()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await context.write(_RESPONSE)
+
+ async def _stream_stream_evilly_mixed(self, request_iterator, context):
+ assert _REQUEST == await context.read()
+ request_count = 0
+ async for request in request_iterator:
+ assert _REQUEST == request
+ request_count += 1
+ assert _NUM_STREAM_REQUESTS - 1 == request_count
+
+ yield _RESPONSE
+ for _ in range(_NUM_STREAM_RESPONSES - 1):
+ await context.write(_RESPONSE)
+
+ async def _error_in_stream_stream(self, request_iterator, unused_context):
+ async for request in request_iterator:
+ assert _REQUEST == request
+ raise RuntimeError('A testing RuntimeError!')
+ yield _RESPONSE
+
+ async def _error_without_raise_in_unary_unary(self, request, context):
+ assert _REQUEST == request
+ context.set_code(grpc.StatusCode.INTERNAL)
+
+ async def _error_without_raise_in_stream_stream(self, request_iterator,
+ context):
+ async for request in request_iterator:
+ assert _REQUEST == request
+ context.set_code(grpc.StatusCode.INTERNAL)
+
def service(self, handler_details):
self._called.set_result(None)
- return self._routing_table.get(handler_details.method)
+ return self._routing_table.get(handler_details.method)
async def wait_for_call(self):
await self._called
@@ -207,280 +207,280 @@ async def _start_test_server():
class TestServer(AioTestBase):
- async def setUp(self):
- addr, self._server, self._generic_handler = await _start_test_server()
- self._channel = aio.insecure_channel(addr)
+ async def setUp(self):
+ addr, self._server, self._generic_handler = await _start_test_server()
+ self._channel = aio.insecure_channel(addr)
+
+ async def tearDown(self):
+ await self._channel.close()
+ await self._server.stop(None)
+
+ async def test_unary_unary(self):
+ unary_unary_call = self._channel.unary_unary(_SIMPLE_UNARY_UNARY)
+ response = await unary_unary_call(_REQUEST)
+ self.assertEqual(response, _RESPONSE)
+
+ async def test_unary_stream_async_generator(self):
+ unary_stream_call = self._channel.unary_stream(_UNARY_STREAM_ASYNC_GEN)
+ call = unary_stream_call(_REQUEST)
+
+ response_cnt = 0
+ async for response in call:
+ response_cnt += 1
+ self.assertEqual(_RESPONSE, response)
+
+ self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_unary_stream_reader_writer(self):
+ unary_stream_call = self._channel.unary_stream(
+ _UNARY_STREAM_READER_WRITER)
+ call = unary_stream_call(_REQUEST)
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ response = await call.read()
+ self.assertEqual(_RESPONSE, response)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_unary_stream_evilly_mixed(self):
+ unary_stream_call = self._channel.unary_stream(
+ _UNARY_STREAM_EVILLY_MIXED)
+ call = unary_stream_call(_REQUEST)
+
+ # Uses reader API
+ self.assertEqual(_RESPONSE, await call.read())
+
+ # Uses async generator API, mixed!
+ with self.assertRaises(aio.UsageError):
+ async for response in call:
+ self.assertEqual(_RESPONSE, response)
+
+ async def test_stream_unary_async_generator(self):
+ stream_unary_call = self._channel.stream_unary(_STREAM_UNARY_ASYNC_GEN)
+ call = stream_unary_call()
+
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(_REQUEST)
+ await call.done_writing()
+
+ response = await call
+ self.assertEqual(_RESPONSE, response)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_stream_unary_reader_writer(self):
+ stream_unary_call = self._channel.stream_unary(
+ _STREAM_UNARY_READER_WRITER)
+ call = stream_unary_call()
+
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(_REQUEST)
+ await call.done_writing()
+
+ response = await call
+ self.assertEqual(_RESPONSE, response)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_stream_unary_evilly_mixed(self):
+ stream_unary_call = self._channel.stream_unary(
+ _STREAM_UNARY_EVILLY_MIXED)
+ call = stream_unary_call()
+
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(_REQUEST)
+ await call.done_writing()
+
+ response = await call
+ self.assertEqual(_RESPONSE, response)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_stream_stream_async_generator(self):
+ stream_stream_call = self._channel.stream_stream(
+ _STREAM_STREAM_ASYNC_GEN)
+ call = stream_stream_call()
+
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(_REQUEST)
+ await call.done_writing()
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ response = await call.read()
+ self.assertEqual(_RESPONSE, response)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_stream_stream_reader_writer(self):
+ stream_stream_call = self._channel.stream_stream(
+ _STREAM_STREAM_READER_WRITER)
+ call = stream_stream_call()
+
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(_REQUEST)
+ await call.done_writing()
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ response = await call.read()
+ self.assertEqual(_RESPONSE, response)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_stream_stream_evilly_mixed(self):
+ stream_stream_call = self._channel.stream_stream(
+ _STREAM_STREAM_EVILLY_MIXED)
+ call = stream_stream_call()
+
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(_REQUEST)
+ await call.done_writing()
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ response = await call.read()
+ self.assertEqual(_RESPONSE, response)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_shutdown(self):
+ await self._server.stop(None)
+ # Ensures no SIGSEGV triggered, and ends within timeout.
+
+ async def test_shutdown_after_call(self):
+ await self._channel.unary_unary(_SIMPLE_UNARY_UNARY)(_REQUEST)
+
+ await self._server.stop(None)
+
+ async def test_graceful_shutdown_success(self):
+ call = self._channel.unary_unary(_BLOCK_BRIEFLY)(_REQUEST)
+ await self._generic_handler.wait_for_call()
+
+ shutdown_start_time = time.time()
+ await self._server.stop(test_constants.SHORT_TIMEOUT)
+ grace_period_length = time.time() - shutdown_start_time
+ self.assertGreater(grace_period_length,
+ test_constants.SHORT_TIMEOUT / 3)
+
+ # Validates the states.
+ self.assertEqual(_RESPONSE, await call)
+ self.assertTrue(call.done())
+
+ async def test_graceful_shutdown_failed(self):
+ call = self._channel.unary_unary(_BLOCK_FOREVER)(_REQUEST)
+ await self._generic_handler.wait_for_call()
+
+ await self._server.stop(test_constants.SHORT_TIMEOUT)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE,
+ exception_context.exception.code())
+
+ async def test_concurrent_graceful_shutdown(self):
+ call = self._channel.unary_unary(_BLOCK_BRIEFLY)(_REQUEST)
+ await self._generic_handler.wait_for_call()
+
+ # Expects the shortest grace period to be effective.
+ shutdown_start_time = time.time()
+ await asyncio.gather(
+ self._server.stop(test_constants.LONG_TIMEOUT),
+ self._server.stop(test_constants.SHORT_TIMEOUT),
+ self._server.stop(test_constants.LONG_TIMEOUT),
+ )
+ grace_period_length = time.time() - shutdown_start_time
+ self.assertGreater(grace_period_length,
+ test_constants.SHORT_TIMEOUT / 3)
+
+ self.assertEqual(_RESPONSE, await call)
+ self.assertTrue(call.done())
+
+ async def test_concurrent_graceful_shutdown_immediate(self):
+ call = self._channel.unary_unary(_BLOCK_FOREVER)(_REQUEST)
+ await self._generic_handler.wait_for_call()
+
+ # Expects no grace period, due to the "server.stop(None)".
+ await asyncio.gather(
+ self._server.stop(test_constants.LONG_TIMEOUT),
+ self._server.stop(None),
+ self._server.stop(test_constants.SHORT_TIMEOUT),
+ self._server.stop(test_constants.LONG_TIMEOUT),
+ )
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE,
+ exception_context.exception.code())
- async def test_unary_unary(self):
- unary_unary_call = self._channel.unary_unary(_SIMPLE_UNARY_UNARY)
- response = await unary_unary_call(_REQUEST)
- self.assertEqual(response, _RESPONSE)
+ async def test_shutdown_before_call(self):
+ await self._server.stop(None)
- async def test_unary_stream_async_generator(self):
- unary_stream_call = self._channel.unary_stream(_UNARY_STREAM_ASYNC_GEN)
- call = unary_stream_call(_REQUEST)
+ # Ensures the server is cleaned up at this point.
+ # Some proper exception should be raised.
+ with self.assertRaises(aio.AioRpcError):
+ await self._channel.unary_unary(_SIMPLE_UNARY_UNARY)(_REQUEST)
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertEqual(_RESPONSE, response)
+ async def test_unimplemented(self):
+ call = self._channel.unary_unary(_UNIMPLEMENTED_METHOD)
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call(_REQUEST)
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
- self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ async def test_shutdown_during_stream_stream(self):
+ stream_stream_call = self._channel.stream_stream(
+ _STREAM_STREAM_ASYNC_GEN)
+ call = stream_stream_call()
- async def test_unary_stream_reader_writer(self):
- unary_stream_call = self._channel.unary_stream(
- _UNARY_STREAM_READER_WRITER)
- call = unary_stream_call(_REQUEST)
+ # Don't half close the RPC yet, keep it alive.
+ await call.write(_REQUEST)
+ await self._server.stop(None)
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertEqual(_RESPONSE, response)
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
+ # No segfault
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
+ async def test_error_in_stream_stream(self):
+ stream_stream_call = self._channel.stream_stream(
+ _ERROR_IN_STREAM_STREAM)
+ call = stream_stream_call()
+
+ # Don't half close the RPC yet, keep it alive.
+ await call.write(_REQUEST)
- async def test_unary_stream_evilly_mixed(self):
- unary_stream_call = self._channel.unary_stream(
- _UNARY_STREAM_EVILLY_MIXED)
- call = unary_stream_call(_REQUEST)
+ # Don't segfault here
+ self.assertEqual(grpc.StatusCode.UNKNOWN, await call.code())
+
+ async def test_error_without_raise_in_unary_unary(self):
+ call = self._channel.unary_unary(_ERROR_WITHOUT_RAISE_IN_UNARY_UNARY)(
+ _REQUEST)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.INTERNAL, rpc_error.code())
+
+ async def test_error_without_raise_in_stream_stream(self):
+ call = self._channel.stream_stream(
+ _ERROR_WITHOUT_RAISE_IN_STREAM_STREAM)()
+
+ for _ in range(_NUM_STREAM_REQUESTS):
+ await call.write(_REQUEST)
+ await call.done_writing()
+
+ self.assertEqual(grpc.StatusCode.INTERNAL, await call.code())
+
+ async def test_port_binding_exception(self):
+ server = aio.server(options=(('grpc.so_reuseport', 0),))
+ port = server.add_insecure_port('localhost:0')
+ bind_address = "localhost:%d" % port
+
+ with self.assertRaises(RuntimeError):
+ server.add_insecure_port(bind_address)
+
+ server_credentials = grpc.ssl_server_credentials([
+ (resources.private_key(), resources.certificate_chain())
+ ])
+ with self.assertRaises(RuntimeError):
+ server.add_secure_port(bind_address, server_credentials)
- # Uses reader API
- self.assertEqual(_RESPONSE, await call.read())
- # Uses async generator API, mixed!
- with self.assertRaises(aio.UsageError):
- async for response in call:
- self.assertEqual(_RESPONSE, response)
-
- async def test_stream_unary_async_generator(self):
- stream_unary_call = self._channel.stream_unary(_STREAM_UNARY_ASYNC_GEN)
- call = stream_unary_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- response = await call
- self.assertEqual(_RESPONSE, response)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_unary_reader_writer(self):
- stream_unary_call = self._channel.stream_unary(
- _STREAM_UNARY_READER_WRITER)
- call = stream_unary_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- response = await call
- self.assertEqual(_RESPONSE, response)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_unary_evilly_mixed(self):
- stream_unary_call = self._channel.stream_unary(
- _STREAM_UNARY_EVILLY_MIXED)
- call = stream_unary_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- response = await call
- self.assertEqual(_RESPONSE, response)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_stream_async_generator(self):
- stream_stream_call = self._channel.stream_stream(
- _STREAM_STREAM_ASYNC_GEN)
- call = stream_stream_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertEqual(_RESPONSE, response)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_stream_reader_writer(self):
- stream_stream_call = self._channel.stream_stream(
- _STREAM_STREAM_READER_WRITER)
- call = stream_stream_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertEqual(_RESPONSE, response)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_stream_evilly_mixed(self):
- stream_stream_call = self._channel.stream_stream(
- _STREAM_STREAM_EVILLY_MIXED)
- call = stream_stream_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertEqual(_RESPONSE, response)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_shutdown(self):
- await self._server.stop(None)
- # Ensures no SIGSEGV triggered, and ends within timeout.
-
- async def test_shutdown_after_call(self):
- await self._channel.unary_unary(_SIMPLE_UNARY_UNARY)(_REQUEST)
-
- await self._server.stop(None)
-
- async def test_graceful_shutdown_success(self):
- call = self._channel.unary_unary(_BLOCK_BRIEFLY)(_REQUEST)
- await self._generic_handler.wait_for_call()
-
- shutdown_start_time = time.time()
- await self._server.stop(test_constants.SHORT_TIMEOUT)
- grace_period_length = time.time() - shutdown_start_time
- self.assertGreater(grace_period_length,
- test_constants.SHORT_TIMEOUT / 3)
-
- # Validates the states.
- self.assertEqual(_RESPONSE, await call)
- self.assertTrue(call.done())
-
- async def test_graceful_shutdown_failed(self):
- call = self._channel.unary_unary(_BLOCK_FOREVER)(_REQUEST)
- await self._generic_handler.wait_for_call()
-
- await self._server.stop(test_constants.SHORT_TIMEOUT)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- async def test_concurrent_graceful_shutdown(self):
- call = self._channel.unary_unary(_BLOCK_BRIEFLY)(_REQUEST)
- await self._generic_handler.wait_for_call()
-
- # Expects the shortest grace period to be effective.
- shutdown_start_time = time.time()
- await asyncio.gather(
- self._server.stop(test_constants.LONG_TIMEOUT),
- self._server.stop(test_constants.SHORT_TIMEOUT),
- self._server.stop(test_constants.LONG_TIMEOUT),
- )
- grace_period_length = time.time() - shutdown_start_time
- self.assertGreater(grace_period_length,
- test_constants.SHORT_TIMEOUT / 3)
-
- self.assertEqual(_RESPONSE, await call)
- self.assertTrue(call.done())
-
- async def test_concurrent_graceful_shutdown_immediate(self):
- call = self._channel.unary_unary(_BLOCK_FOREVER)(_REQUEST)
- await self._generic_handler.wait_for_call()
-
- # Expects no grace period, due to the "server.stop(None)".
- await asyncio.gather(
- self._server.stop(test_constants.LONG_TIMEOUT),
- self._server.stop(None),
- self._server.stop(test_constants.SHORT_TIMEOUT),
- self._server.stop(test_constants.LONG_TIMEOUT),
- )
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- async def test_shutdown_before_call(self):
- await self._server.stop(None)
-
- # Ensures the server is cleaned up at this point.
- # Some proper exception should be raised.
- with self.assertRaises(aio.AioRpcError):
- await self._channel.unary_unary(_SIMPLE_UNARY_UNARY)(_REQUEST)
-
- async def test_unimplemented(self):
- call = self._channel.unary_unary(_UNIMPLEMENTED_METHOD)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call(_REQUEST)
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
-
- async def test_shutdown_during_stream_stream(self):
- stream_stream_call = self._channel.stream_stream(
- _STREAM_STREAM_ASYNC_GEN)
- call = stream_stream_call()
-
- # Don't half close the RPC yet, keep it alive.
- await call.write(_REQUEST)
- await self._server.stop(None)
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
- # No segfault
-
- async def test_error_in_stream_stream(self):
- stream_stream_call = self._channel.stream_stream(
- _ERROR_IN_STREAM_STREAM)
- call = stream_stream_call()
-
- # Don't half close the RPC yet, keep it alive.
- await call.write(_REQUEST)
-
- # Don't segfault here
- self.assertEqual(grpc.StatusCode.UNKNOWN, await call.code())
-
- async def test_error_without_raise_in_unary_unary(self):
- call = self._channel.unary_unary(_ERROR_WITHOUT_RAISE_IN_UNARY_UNARY)(
- _REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.INTERNAL, rpc_error.code())
-
- async def test_error_without_raise_in_stream_stream(self):
- call = self._channel.stream_stream(
- _ERROR_WITHOUT_RAISE_IN_STREAM_STREAM)()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- self.assertEqual(grpc.StatusCode.INTERNAL, await call.code())
-
- async def test_port_binding_exception(self):
- server = aio.server(options=(('grpc.so_reuseport', 0),))
- port = server.add_insecure_port('localhost:0')
- bind_address = "localhost:%d" % port
-
- with self.assertRaises(RuntimeError):
- server.add_insecure_port(bind_address)
-
- server_credentials = grpc.ssl_server_credentials([
- (resources.private_key(), resources.certificate_chain())
- ])
- with self.assertRaises(RuntimeError):
- server.add_secure_port(bind_address, server_credentials)
-
-
if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
+ logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)
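
The aio server tests above pin down the shutdown semantics: stop(None) terminates immediately, a numeric grace lets in-flight RPCs drain, and concurrent stop() calls collapse to the shortest grace period. A minimal, hedged sketch of that usage (not part of this diff; the grace value is arbitrary and no handlers are registered):

    import asyncio

    from grpc.experimental import aio


    async def main():
        server = aio.server()
        server.add_insecure_port('[::]:0')  # any free port is fine for the sketch
        await server.start()

        # A numeric grace lets in-flight RPCs finish before the server goes
        # down; stop(None) would cancel them immediately instead.
        await server.stop(5.0)


    if __name__ == '__main__':
        asyncio.run(main())
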
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/timeout_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/timeout_test.py
index 8ad0af48f7..b5bcc027ec 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/timeout_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/timeout_test.py
@@ -1,178 +1,178 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior of the timeout mechanism on client side."""
-
-import asyncio
-import logging
-import platform
-import random
-import unittest
-import datetime
-
-import grpc
-from grpc.experimental import aio
-
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit import _common
-
-_SLEEP_TIME_UNIT_S = datetime.timedelta(seconds=1).total_seconds()
-
-_TEST_SLEEPY_UNARY_UNARY = '/test/Test/SleepyUnaryUnary'
-_TEST_SLEEPY_UNARY_STREAM = '/test/Test/SleepyUnaryStream'
-_TEST_SLEEPY_STREAM_UNARY = '/test/Test/SleepyStreamUnary'
-_TEST_SLEEPY_STREAM_STREAM = '/test/Test/SleepyStreamStream'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-
-
-async def _test_sleepy_unary_unary(unused_request, unused_context):
- await asyncio.sleep(_SLEEP_TIME_UNIT_S)
- return _RESPONSE
-
-
-async def _test_sleepy_unary_stream(unused_request, unused_context):
- yield _RESPONSE
- await asyncio.sleep(_SLEEP_TIME_UNIT_S)
- yield _RESPONSE
-
-
-async def _test_sleepy_stream_unary(unused_request_iterator, context):
- assert _REQUEST == await context.read()
- await asyncio.sleep(_SLEEP_TIME_UNIT_S)
- assert _REQUEST == await context.read()
- return _RESPONSE
-
-
-async def _test_sleepy_stream_stream(unused_request_iterator, context):
- assert _REQUEST == await context.read()
- await asyncio.sleep(_SLEEP_TIME_UNIT_S)
- await context.write(_RESPONSE)
-
-
-_ROUTING_TABLE = {
- _TEST_SLEEPY_UNARY_UNARY:
- grpc.unary_unary_rpc_method_handler(_test_sleepy_unary_unary),
- _TEST_SLEEPY_UNARY_STREAM:
- grpc.unary_stream_rpc_method_handler(_test_sleepy_unary_stream),
- _TEST_SLEEPY_STREAM_UNARY:
- grpc.stream_unary_rpc_method_handler(_test_sleepy_stream_unary),
- _TEST_SLEEPY_STREAM_STREAM:
- grpc.stream_stream_rpc_method_handler(_test_sleepy_stream_stream)
-}
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- return _ROUTING_TABLE.get(handler_call_details.method)
-
-
-async def _start_test_server():
- server = aio.server()
- port = server.add_insecure_port('[::]:0')
- server.add_generic_rpc_handlers((_GenericHandler(),))
- await server.start()
- return f'localhost:{port}', server
-
-
-class TestTimeout(AioTestBase):
-
- async def setUp(self):
- address, self._server = await _start_test_server()
- self._client = aio.insecure_channel(address)
- self.assertEqual(grpc.ChannelConnectivity.IDLE,
- self._client.get_state(True))
- await _common.block_until_certain_state(self._client,
- grpc.ChannelConnectivity.READY)
-
- async def tearDown(self):
- await self._client.close()
- await self._server.stop(None)
-
- async def test_unary_unary_success_with_timeout(self):
- multicallable = self._client.unary_unary(_TEST_SLEEPY_UNARY_UNARY)
- call = multicallable(_REQUEST, timeout=2 * _SLEEP_TIME_UNIT_S)
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_unary_unary_deadline_exceeded(self):
- multicallable = self._client.unary_unary(_TEST_SLEEPY_UNARY_UNARY)
- call = multicallable(_REQUEST, timeout=0.5 * _SLEEP_TIME_UNIT_S)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
-
- async def test_unary_stream_success_with_timeout(self):
- multicallable = self._client.unary_stream(_TEST_SLEEPY_UNARY_STREAM)
- call = multicallable(_REQUEST, timeout=2 * _SLEEP_TIME_UNIT_S)
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_unary_stream_deadline_exceeded(self):
- multicallable = self._client.unary_stream(_TEST_SLEEPY_UNARY_STREAM)
- call = multicallable(_REQUEST, timeout=0.5 * _SLEEP_TIME_UNIT_S)
- self.assertEqual(_RESPONSE, await call.read())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.read()
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
-
- async def test_stream_unary_success_with_timeout(self):
- multicallable = self._client.stream_unary(_TEST_SLEEPY_STREAM_UNARY)
- call = multicallable(timeout=2 * _SLEEP_TIME_UNIT_S)
- await call.write(_REQUEST)
- await call.write(_REQUEST)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_stream_unary_deadline_exceeded(self):
- multicallable = self._client.stream_unary(_TEST_SLEEPY_STREAM_UNARY)
- call = multicallable(timeout=0.5 * _SLEEP_TIME_UNIT_S)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.write(_REQUEST)
- await call.write(_REQUEST)
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
-
- async def test_stream_stream_success_with_timeout(self):
- multicallable = self._client.stream_stream(_TEST_SLEEPY_STREAM_STREAM)
- call = multicallable(timeout=2 * _SLEEP_TIME_UNIT_S)
- await call.write(_REQUEST)
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_stream_stream_deadline_exceeded(self):
- multicallable = self._client.stream_stream(_TEST_SLEEPY_STREAM_STREAM)
- call = multicallable(timeout=0.5 * _SLEEP_TIME_UNIT_S)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.write(_REQUEST)
- await call.read()
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests behavior of the timeout mechanism on client side."""
+
+import asyncio
+import logging
+import platform
+import random
+import unittest
+import datetime
+
+import grpc
+from grpc.experimental import aio
+
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit import _common
+
+_SLEEP_TIME_UNIT_S = datetime.timedelta(seconds=1).total_seconds()
+
+_TEST_SLEEPY_UNARY_UNARY = '/test/Test/SleepyUnaryUnary'
+_TEST_SLEEPY_UNARY_STREAM = '/test/Test/SleepyUnaryStream'
+_TEST_SLEEPY_STREAM_UNARY = '/test/Test/SleepyStreamUnary'
+_TEST_SLEEPY_STREAM_STREAM = '/test/Test/SleepyStreamStream'
+
+_REQUEST = b'\x00\x00\x00'
+_RESPONSE = b'\x01\x01\x01'
+
+
+async def _test_sleepy_unary_unary(unused_request, unused_context):
+ await asyncio.sleep(_SLEEP_TIME_UNIT_S)
+ return _RESPONSE
+
+
+async def _test_sleepy_unary_stream(unused_request, unused_context):
+ yield _RESPONSE
+ await asyncio.sleep(_SLEEP_TIME_UNIT_S)
+ yield _RESPONSE
+
+
+async def _test_sleepy_stream_unary(unused_request_iterator, context):
+ assert _REQUEST == await context.read()
+ await asyncio.sleep(_SLEEP_TIME_UNIT_S)
+ assert _REQUEST == await context.read()
+ return _RESPONSE
+
+
+async def _test_sleepy_stream_stream(unused_request_iterator, context):
+ assert _REQUEST == await context.read()
+ await asyncio.sleep(_SLEEP_TIME_UNIT_S)
+ await context.write(_RESPONSE)
+
+
+_ROUTING_TABLE = {
+ _TEST_SLEEPY_UNARY_UNARY:
+ grpc.unary_unary_rpc_method_handler(_test_sleepy_unary_unary),
+ _TEST_SLEEPY_UNARY_STREAM:
+ grpc.unary_stream_rpc_method_handler(_test_sleepy_unary_stream),
+ _TEST_SLEEPY_STREAM_UNARY:
+ grpc.stream_unary_rpc_method_handler(_test_sleepy_stream_unary),
+ _TEST_SLEEPY_STREAM_STREAM:
+ grpc.stream_stream_rpc_method_handler(_test_sleepy_stream_stream)
+}
+
+
+class _GenericHandler(grpc.GenericRpcHandler):
+
+ def service(self, handler_call_details):
+ return _ROUTING_TABLE.get(handler_call_details.method)
+
+
+async def _start_test_server():
+ server = aio.server()
+ port = server.add_insecure_port('[::]:0')
+ server.add_generic_rpc_handlers((_GenericHandler(),))
+ await server.start()
+ return f'localhost:{port}', server
+
+
+class TestTimeout(AioTestBase):
+
+ async def setUp(self):
+ address, self._server = await _start_test_server()
+ self._client = aio.insecure_channel(address)
+ self.assertEqual(grpc.ChannelConnectivity.IDLE,
+ self._client.get_state(True))
+ await _common.block_until_certain_state(self._client,
+ grpc.ChannelConnectivity.READY)
+
+ async def tearDown(self):
+ await self._client.close()
+ await self._server.stop(None)
+
+ async def test_unary_unary_success_with_timeout(self):
+ multicallable = self._client.unary_unary(_TEST_SLEEPY_UNARY_UNARY)
+ call = multicallable(_REQUEST, timeout=2 * _SLEEP_TIME_UNIT_S)
+ self.assertEqual(_RESPONSE, await call)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_unary_unary_deadline_exceeded(self):
+ multicallable = self._client.unary_unary(_TEST_SLEEPY_UNARY_UNARY)
+ call = multicallable(_REQUEST, timeout=0.5 * _SLEEP_TIME_UNIT_S)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call
+
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
+
+ async def test_unary_stream_success_with_timeout(self):
+ multicallable = self._client.unary_stream(_TEST_SLEEPY_UNARY_STREAM)
+ call = multicallable(_REQUEST, timeout=2 * _SLEEP_TIME_UNIT_S)
+ self.assertEqual(_RESPONSE, await call.read())
+ self.assertEqual(_RESPONSE, await call.read())
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_unary_stream_deadline_exceeded(self):
+ multicallable = self._client.unary_stream(_TEST_SLEEPY_UNARY_STREAM)
+ call = multicallable(_REQUEST, timeout=0.5 * _SLEEP_TIME_UNIT_S)
+ self.assertEqual(_RESPONSE, await call.read())
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call.read()
+
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
+
+ async def test_stream_unary_success_with_timeout(self):
+ multicallable = self._client.stream_unary(_TEST_SLEEPY_STREAM_UNARY)
+ call = multicallable(timeout=2 * _SLEEP_TIME_UNIT_S)
+ await call.write(_REQUEST)
+ await call.write(_REQUEST)
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_stream_unary_deadline_exceeded(self):
+ multicallable = self._client.stream_unary(_TEST_SLEEPY_STREAM_UNARY)
+ call = multicallable(timeout=0.5 * _SLEEP_TIME_UNIT_S)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call.write(_REQUEST)
+ await call.write(_REQUEST)
+ await call
+
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
+
+ async def test_stream_stream_success_with_timeout(self):
+ multicallable = self._client.stream_stream(_TEST_SLEEPY_STREAM_STREAM)
+ call = multicallable(timeout=2 * _SLEEP_TIME_UNIT_S)
+ await call.write(_REQUEST)
+ self.assertEqual(_RESPONSE, await call.read())
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_stream_stream_deadline_exceeded(self):
+ multicallable = self._client.stream_stream(_TEST_SLEEPY_STREAM_STREAM)
+ call = multicallable(timeout=0.5 * _SLEEP_TIME_UNIT_S)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call.write(_REQUEST)
+ await call.read()
+
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
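
timeout_test.py above drives each RPC arity with a deadline shorter than the handler's sleep and expects DEADLINE_EXCEEDED. A hedged client-side sketch of the same mechanism (not from the diff; the address and method path are placeholders for a server you would run yourself):

    import asyncio

    from grpc.experimental import aio


    async def call_with_deadline(address: str) -> None:
        async with aio.insecure_channel(address) as channel:
            multicallable = channel.unary_unary('/test/Test/SleepyUnaryUnary')
            try:
                await multicallable(b'\x00', timeout=0.5)
            except aio.AioRpcError as rpc_error:
                # DEADLINE_EXCEEDED if the handler sleeps past the timeout;
                # UNAVAILABLE if nothing is listening at the address.
                print('RPC failed with', rpc_error.code())


    if __name__ == '__main__':
        asyncio.run(call_with_deadline('localhost:50051'))
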
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_connection_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_connection_test.py
index 5d572b4d8a..cb6f798529 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_connection_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_connection_test.py
@@ -1,159 +1,159 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior of the wait for connection API on client side."""
-
-import asyncio
-import logging
-import unittest
-import datetime
-from typing import Callable, Tuple
-
-import grpc
-from grpc.experimental import aio
-
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-from tests_aio.unit import _common
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-
-_REQUEST = b'\x01\x02\x03'
-_TEST_METHOD = '/test/Test'
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-
-
-class TestWaitForConnection(AioTestBase):
- """Tests if wait_for_connection raises connectivity issue."""
-
- async def setUp(self):
- address, self._server = await start_test_server()
- self._channel = aio.insecure_channel(address)
- self._dummy_channel = aio.insecure_channel(UNREACHABLE_TARGET)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._dummy_channel.close()
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_unary_unary_ok(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- # No exception raised and no message swallowed.
- await call.wait_for_connection()
-
- response = await call
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
-
- async def test_unary_stream_ok(self):
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- call = self._stub.StreamingOutputCall(request)
-
- # No exception raised and no message swallowed.
- await call.wait_for_connection()
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_unary_ok(self):
- call = self._stub.StreamingInputCall()
-
- # No exception raised and no message swallowed.
- await call.wait_for_connection()
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- await call.done_writing()
-
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_stream_ok(self):
- call = self._stub.FullDuplexCall()
-
- # No exception raised and no message swallowed.
- await call.wait_for_connection()
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- await call.done_writing()
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_unary_unary_error(self):
- call = self._dummy_channel.unary_unary(_TEST_METHOD)(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.wait_for_connection()
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
-
- async def test_unary_stream_error(self):
- call = self._dummy_channel.unary_stream(_TEST_METHOD)(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.wait_for_connection()
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
-
- async def test_stream_unary_error(self):
- call = self._dummy_channel.stream_unary(_TEST_METHOD)()
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.wait_for_connection()
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
-
- async def test_stream_stream_error(self):
- call = self._dummy_channel.stream_stream(_TEST_METHOD)()
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.wait_for_connection()
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests behavior of the wait for connection API on client side."""
+
+import asyncio
+import logging
+import unittest
+import datetime
+from typing import Callable, Tuple
+
+import grpc
+from grpc.experimental import aio
+
+from tests_aio.unit._test_base import AioTestBase
+from tests_aio.unit._test_server import start_test_server
+from tests_aio.unit import _common
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from tests_aio.unit._constants import UNREACHABLE_TARGET
+
+_REQUEST = b'\x01\x02\x03'
+_TEST_METHOD = '/test/Test'
+
+_NUM_STREAM_RESPONSES = 5
+_REQUEST_PAYLOAD_SIZE = 7
+_RESPONSE_PAYLOAD_SIZE = 42
+
+
+class TestWaitForConnection(AioTestBase):
+ """Tests if wait_for_connection raises connectivity issue."""
+
+ async def setUp(self):
+ address, self._server = await start_test_server()
+ self._channel = aio.insecure_channel(address)
+ self._dummy_channel = aio.insecure_channel(UNREACHABLE_TARGET)
+ self._stub = test_pb2_grpc.TestServiceStub(self._channel)
+
+ async def tearDown(self):
+ await self._dummy_channel.close()
+ await self._channel.close()
+ await self._server.stop(None)
+
+ async def test_unary_unary_ok(self):
+ call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
+
+ # No exception raised and no message swallowed.
+ await call.wait_for_connection()
+
+ response = await call
+ self.assertIsInstance(response, messages_pb2.SimpleResponse)
+
+ async def test_unary_stream_ok(self):
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ call = self._stub.StreamingOutputCall(request)
+
+ # No exception raised and no message swallowed.
+ await call.wait_for_connection()
+
+ response_cnt = 0
+ async for response in call:
+ response_cnt += 1
+ self.assertIs(type(response),
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_stream_unary_ok(self):
+ call = self._stub.StreamingInputCall()
+
+ # No exception raised and no message swallowed.
+ await call.wait_for_connection()
+
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+ await call.done_writing()
+
+ response = await call
+ self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
+ self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
+ response.aggregated_payload_size)
+
+ self.assertEqual(await call.code(), grpc.StatusCode.OK)
+
+ async def test_stream_stream_ok(self):
+ call = self._stub.FullDuplexCall()
+
+ # No exception raised and no message swallowed.
+ await call.wait_for_connection()
+
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+ response = await call.read()
+ self.assertIsInstance(response,
+ messages_pb2.StreamingOutputCallResponse)
+ self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
+
+ await call.done_writing()
+
+ self.assertEqual(grpc.StatusCode.OK, await call.code())
+
+ async def test_unary_unary_error(self):
+ call = self._dummy_channel.unary_unary(_TEST_METHOD)(_REQUEST)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call.wait_for_connection()
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
+
+ async def test_unary_stream_error(self):
+ call = self._dummy_channel.unary_stream(_TEST_METHOD)(_REQUEST)
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call.wait_for_connection()
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
+
+ async def test_stream_unary_error(self):
+ call = self._dummy_channel.stream_unary(_TEST_METHOD)()
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call.wait_for_connection()
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
+
+ async def test_stream_stream_error(self):
+ call = self._dummy_channel.stream_stream(_TEST_METHOD)()
+
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await call.wait_for_connection()
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
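
wait_for_connection_test.py above checks that Call.wait_for_connection() surfaces connectivity failures without consuming any message. A hedged sketch of that API (not from the diff; 'localhost:1' and the method path stand in for an unreachable target and an arbitrary RPC):

    import asyncio

    from grpc.experimental import aio


    async def probe(target: str) -> None:
        async with aio.insecure_channel(target) as channel:
            call = channel.unary_unary('/test/Test')(b'\x00')
            try:
                # Resolves once the connection is ready without consuming a
                # message; raises AioRpcError (typically UNAVAILABLE) if the
                # target cannot be reached.
                await call.wait_for_connection()
                print('connected, final status:', await call.code())
            except aio.AioRpcError as rpc_error:
                print('connection failed:', rpc_error.code())


    if __name__ == '__main__':
        asyncio.run(probe('localhost:1'))
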
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_ready_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_ready_test.py
index a4ac5e4bd3..5bcfd54856 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_ready_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_ready_test.py
@@ -1,146 +1,146 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Testing the done callbacks mechanism."""
-
-import asyncio
-import logging
-import unittest
-import time
-import gc
-
-import grpc
-from grpc.experimental import aio
-from tests_aio.unit._test_base import AioTestBase
-from tests.unit.framework.common import test_constants
-from tests.unit.framework.common import get_socket
-from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
-from tests_aio.unit._test_server import start_test_server
-from tests_aio.unit import _common
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-
-
-async def _perform_unary_unary(stub, wait_for_ready):
- await stub.UnaryCall(messages_pb2.SimpleRequest(),
- timeout=test_constants.LONG_TIMEOUT,
- wait_for_ready=wait_for_ready)
-
-
-async def _perform_unary_stream(stub, wait_for_ready):
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- call = stub.StreamingOutputCall(request,
- timeout=test_constants.LONG_TIMEOUT,
- wait_for_ready=wait_for_ready)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.read()
- assert await call.code() == grpc.StatusCode.OK
-
-
-async def _perform_stream_unary(stub, wait_for_ready):
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- await stub.StreamingInputCall(gen(),
- timeout=test_constants.LONG_TIMEOUT,
- wait_for_ready=wait_for_ready)
-
-
-async def _perform_stream_stream(stub, wait_for_ready):
- call = stub.FullDuplexCall(timeout=test_constants.LONG_TIMEOUT,
- wait_for_ready=wait_for_ready)
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
-
- await call.done_writing()
- assert await call.code() == grpc.StatusCode.OK
-
-
-_RPC_ACTIONS = (
- _perform_unary_unary,
- _perform_unary_stream,
- _perform_stream_unary,
- _perform_stream_stream,
-)
-
-
-class TestWaitForReady(AioTestBase):
-
- async def setUp(self):
- address, self._port, self._socket = get_socket(listen=False)
- self._channel = aio.insecure_channel(f"{address}:{self._port}")
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
- self._socket.close()
-
- async def tearDown(self):
- await self._channel.close()
-
- async def _connection_fails_fast(self, wait_for_ready):
- for action in _RPC_ACTIONS:
- with self.subTest(name=action):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await action(self._stub, wait_for_ready)
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
-
- async def test_call_wait_for_ready_default(self):
- """RPC should fail immediately after connection failed."""
- await self._connection_fails_fast(None)
-
- async def test_call_wait_for_ready_disabled(self):
- """RPC should fail immediately after connection failed."""
- await self._connection_fails_fast(False)
-
- async def test_call_wait_for_ready_enabled(self):
- """RPC will wait until the connection is ready."""
- for action in _RPC_ACTIONS:
- with self.subTest(name=action.__name__):
- # Starts the RPC
- action_task = self.loop.create_task(action(self._stub, True))
-
- # Wait for TRANSIENT_FAILURE, and RPC is not aborting
- await _common.block_until_certain_state(
- self._channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE)
-
- try:
- # Start the server
- _, server = await start_test_server(port=self._port)
-
- # The RPC should recover itself
- await action_task
- finally:
- if server is not None:
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Testing the done callbacks mechanism."""
+
+import asyncio
+import logging
+import unittest
+import time
+import gc
+
+import grpc
+from grpc.experimental import aio
+from tests_aio.unit._test_base import AioTestBase
+from tests.unit.framework.common import test_constants
+from tests.unit.framework.common import get_socket
+from src.proto.grpc.testing import messages_pb2, test_pb2_grpc
+from tests_aio.unit._test_server import start_test_server
+from tests_aio.unit import _common
+
+_NUM_STREAM_RESPONSES = 5
+_REQUEST_PAYLOAD_SIZE = 7
+_RESPONSE_PAYLOAD_SIZE = 42
+
+
+async def _perform_unary_unary(stub, wait_for_ready):
+ await stub.UnaryCall(messages_pb2.SimpleRequest(),
+ timeout=test_constants.LONG_TIMEOUT,
+ wait_for_ready=wait_for_ready)
+
+
+async def _perform_unary_stream(stub, wait_for_ready):
+ request = messages_pb2.StreamingOutputCallRequest()
+ for _ in range(_NUM_STREAM_RESPONSES):
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ call = stub.StreamingOutputCall(request,
+ timeout=test_constants.LONG_TIMEOUT,
+ wait_for_ready=wait_for_ready)
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.read()
+ assert await call.code() == grpc.StatusCode.OK
+
+
+async def _perform_stream_unary(stub, wait_for_ready):
+ payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
+ request = messages_pb2.StreamingInputCallRequest(payload=payload)
+
+ async def gen():
+ for _ in range(_NUM_STREAM_RESPONSES):
+ yield request
+
+ await stub.StreamingInputCall(gen(),
+ timeout=test_constants.LONG_TIMEOUT,
+ wait_for_ready=wait_for_ready)
+
+
+async def _perform_stream_stream(stub, wait_for_ready):
+ call = stub.FullDuplexCall(timeout=test_constants.LONG_TIMEOUT,
+ wait_for_ready=wait_for_ready)
+
+ request = messages_pb2.StreamingOutputCallRequest()
+ request.response_parameters.append(
+ messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
+
+ for _ in range(_NUM_STREAM_RESPONSES):
+ await call.write(request)
+ response = await call.read()
+ assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
+
+ await call.done_writing()
+ assert await call.code() == grpc.StatusCode.OK
+
+
+_RPC_ACTIONS = (
+ _perform_unary_unary,
+ _perform_unary_stream,
+ _perform_stream_unary,
+ _perform_stream_stream,
+)
+
+
+class TestWaitForReady(AioTestBase):
+
+ async def setUp(self):
+ address, self._port, self._socket = get_socket(listen=False)
+ self._channel = aio.insecure_channel(f"{address}:{self._port}")
+ self._stub = test_pb2_grpc.TestServiceStub(self._channel)
+ self._socket.close()
+
+ async def tearDown(self):
+ await self._channel.close()
+
+ async def _connection_fails_fast(self, wait_for_ready):
+ for action in _RPC_ACTIONS:
+ with self.subTest(name=action):
+ with self.assertRaises(aio.AioRpcError) as exception_context:
+ await action(self._stub, wait_for_ready)
+ rpc_error = exception_context.exception
+ self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
+
+ async def test_call_wait_for_ready_default(self):
+ """RPC should fail immediately after connection failed."""
+ await self._connection_fails_fast(None)
+
+ async def test_call_wait_for_ready_disabled(self):
+ """RPC should fail immediately after connection failed."""
+ await self._connection_fails_fast(False)
+
+ async def test_call_wait_for_ready_enabled(self):
+ """RPC will wait until the connection is ready."""
+ for action in _RPC_ACTIONS:
+ with self.subTest(name=action.__name__):
+ # Starts the RPC
+ action_task = self.loop.create_task(action(self._stub, True))
+
+ # Wait for TRANSIENT_FAILURE, and RPC is not aborting
+ await _common.block_until_certain_state(
+ self._channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE)
+
+ try:
+ # Start the server
+ _, server = await start_test_server(port=self._port)
+
+ # The RPC should recover itself
+ await action_task
+ finally:
+ if server is not None:
+ await server.stop(None)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
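
wait_for_ready_test.py above contrasts the fail-fast default with wait_for_ready=True, where the RPC stays queued through TRANSIENT_FAILURE until a server appears. A hedged sketch of the client-side flag (not from the diff; the address, method path, and deadline are placeholders):

    import asyncio

    from grpc.experimental import aio


    async def call_when_ready(address: str) -> None:
        async with aio.insecure_channel(address) as channel:
            multicallable = channel.unary_unary('/test/Test')
            try:
                # With wait_for_ready=True the RPC is held through
                # TRANSIENT_FAILURE instead of failing fast with UNAVAILABLE;
                # the timeout still bounds the total wait.
                await multicallable(b'\x00', timeout=10, wait_for_ready=True)
                print('RPC completed')
            except aio.AioRpcError as rpc_error:
                print('RPC ended with', rpc_error.code())


    if __name__ == '__main__':
        asyncio.run(call_when_ready('localhost:50051'))
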
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/__init__.py
index c3423d83e0..6732ae8cbb 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/__init__.py
@@ -1,21 +1,21 @@
-# Copyright 2020 The gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-from tests import _loader
-from tests import _runner
-
-Loader = _loader.Loader
-Runner = _runner.Runner
+# Copyright 2020 The gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+
+from tests import _loader
+from tests import _runner
+
+Loader = _loader.Loader
+Runner = _runner.Runner
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client.py
index 4ebcfa5b21..21277a98cf 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client.py
@@ -1,348 +1,348 @@
-# Copyright 2020 The gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import logging
-import signal
-import threading
-import time
-import sys
-
-from typing import DefaultDict, Dict, List, Mapping, Set, Sequence, Tuple
-import collections
-
-from concurrent import futures
-
-import grpc
-
-from src.proto.grpc.testing import test_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import empty_pb2
-
-logger = logging.getLogger()
-console_handler = logging.StreamHandler()
-formatter = logging.Formatter(fmt='%(asctime)s: %(levelname)-8s %(message)s')
-console_handler.setFormatter(formatter)
-logger.addHandler(console_handler)
-
-_SUPPORTED_METHODS = (
- "UnaryCall",
- "EmptyCall",
-)
-
-PerMethodMetadataType = Mapping[str, Sequence[Tuple[str, str]]]
-
-
-class _StatsWatcher:
- _start: int
- _end: int
- _rpcs_needed: int
- _rpcs_by_peer: DefaultDict[str, int]
- _rpcs_by_method: DefaultDict[str, DefaultDict[str, int]]
- _no_remote_peer: int
- _lock: threading.Lock
- _condition: threading.Condition
-
- def __init__(self, start: int, end: int):
- self._start = start
- self._end = end
- self._rpcs_needed = end - start
- self._rpcs_by_peer = collections.defaultdict(int)
- self._rpcs_by_method = collections.defaultdict(
- lambda: collections.defaultdict(int))
- self._condition = threading.Condition()
- self._no_remote_peer = 0
-
- def on_rpc_complete(self, request_id: int, peer: str, method: str) -> None:
- """Records statistics for a single RPC."""
- if self._start <= request_id < self._end:
- with self._condition:
- if not peer:
- self._no_remote_peer += 1
- else:
- self._rpcs_by_peer[peer] += 1
- self._rpcs_by_method[method][peer] += 1
- self._rpcs_needed -= 1
- self._condition.notify()
-
- def await_rpc_stats_response(self, timeout_sec: int
- ) -> messages_pb2.LoadBalancerStatsResponse:
- """Blocks until a full response has been collected."""
- with self._condition:
- self._condition.wait_for(lambda: not self._rpcs_needed,
- timeout=float(timeout_sec))
- response = messages_pb2.LoadBalancerStatsResponse()
- for peer, count in self._rpcs_by_peer.items():
- response.rpcs_by_peer[peer] = count
- for method, count_by_peer in self._rpcs_by_method.items():
- for peer, count in count_by_peer.items():
- response.rpcs_by_method[method].rpcs_by_peer[peer] = count
- response.num_failures = self._no_remote_peer + self._rpcs_needed
- return response
-
-
-_global_lock = threading.Lock()
-_stop_event = threading.Event()
-_global_rpc_id: int = 0
-_watchers: Set[_StatsWatcher] = set()
-_global_server = None
-
-
-def _handle_sigint(sig, frame):
- _stop_event.set()
- _global_server.stop(None)
-
-
-class _LoadBalancerStatsServicer(test_pb2_grpc.LoadBalancerStatsServiceServicer
- ):
-
- def __init__(self):
- super(_LoadBalancerStatsServicer).__init__()
-
- def GetClientStats(self, request: messages_pb2.LoadBalancerStatsRequest,
- context: grpc.ServicerContext
- ) -> messages_pb2.LoadBalancerStatsResponse:
- logger.info("Received stats request.")
- start = None
- end = None
- watcher = None
- with _global_lock:
- start = _global_rpc_id + 1
- end = start + request.num_rpcs
- watcher = _StatsWatcher(start, end)
- _watchers.add(watcher)
- response = watcher.await_rpc_stats_response(request.timeout_sec)
- with _global_lock:
- _watchers.remove(watcher)
- logger.info("Returning stats response: {}".format(response))
- return response
-
-
-def _start_rpc(method: str, metadata: Sequence[Tuple[str, str]],
- request_id: int, stub: test_pb2_grpc.TestServiceStub,
- timeout: float,
- futures: Mapping[int, Tuple[grpc.Future, str]]) -> None:
- logger.info(f"Sending {method} request to backend: {request_id}")
- if method == "UnaryCall":
- future = stub.UnaryCall.future(messages_pb2.SimpleRequest(),
- metadata=metadata,
- timeout=timeout)
- elif method == "EmptyCall":
- future = stub.EmptyCall.future(empty_pb2.Empty(),
- metadata=metadata,
- timeout=timeout)
- else:
- raise ValueError(f"Unrecognized method '{method}'.")
- futures[request_id] = (future, method)
-
-
-def _on_rpc_done(rpc_id: int, future: grpc.Future, method: str,
- print_response: bool) -> None:
- exception = future.exception()
- hostname = ""
- if exception is not None:
- if exception.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
- logger.error(f"RPC {rpc_id} timed out")
- else:
- logger.error(exception)
- else:
- response = future.result()
- hostname = None
- for metadatum in future.initial_metadata():
- if metadatum[0] == "hostname":
- hostname = metadatum[1]
- break
- else:
- hostname = response.hostname
- if print_response:
- if future.code() == grpc.StatusCode.OK:
- logger.info("Successful response.")
- else:
- logger.info(f"RPC failed: {call}")
- with _global_lock:
- for watcher in _watchers:
- watcher.on_rpc_complete(rpc_id, hostname, method)
-
-
-def _remove_completed_rpcs(futures: Mapping[int, grpc.Future],
- print_response: bool) -> None:
- logger.debug("Removing completed RPCs")
- done = []
- for future_id, (future, method) in futures.items():
- if future.done():
- _on_rpc_done(future_id, future, method, args.print_response)
- done.append(future_id)
- for rpc_id in done:
- del futures[rpc_id]
-
-
-def _cancel_all_rpcs(futures: Mapping[int, Tuple[grpc.Future, str]]) -> None:
- logger.info("Cancelling all remaining RPCs")
- for future, _ in futures.values():
- future.cancel()
-
-
-def _run_single_channel(method: str, metadata: Sequence[Tuple[str, str]],
- qps: int, server: str, rpc_timeout_sec: int,
- print_response: bool):
- global _global_rpc_id # pylint: disable=global-statement
- duration_per_query = 1.0 / float(qps)
- with grpc.insecure_channel(server) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
- futures: Dict[int, Tuple[grpc.Future, str]] = {}
- while not _stop_event.is_set():
- request_id = None
- with _global_lock:
- request_id = _global_rpc_id
- _global_rpc_id += 1
- start = time.time()
- end = start + duration_per_query
- _start_rpc(method, metadata, request_id, stub,
- float(rpc_timeout_sec), futures)
- _remove_completed_rpcs(futures, print_response)
- logger.debug(f"Currently {len(futures)} in-flight RPCs")
- now = time.time()
- while now < end:
- time.sleep(end - now)
- now = time.time()
- _cancel_all_rpcs(futures)
-
-
-class _MethodHandle:
- """An object grouping together threads driving RPCs for a method."""
-
- _channel_threads: List[threading.Thread]
-
- def __init__(self, method: str, metadata: Sequence[Tuple[str, str]],
- num_channels: int, qps: int, server: str, rpc_timeout_sec: int,
- print_response: bool):
- """Creates and starts a group of threads running the indicated method."""
- self._channel_threads = []
- for i in range(num_channels):
- thread = threading.Thread(target=_run_single_channel,
- args=(
- method,
- metadata,
- qps,
- server,
- rpc_timeout_sec,
- print_response,
- ))
- thread.start()
- self._channel_threads.append(thread)
-
- def stop(self):
- """Joins all threads referenced by the handle."""
- for channel_thread in self._channel_threads:
- channel_thread.join()
-
-
-def _run(args: argparse.Namespace, methods: Sequence[str],
- per_method_metadata: PerMethodMetadataType) -> None:
- logger.info("Starting python xDS Interop Client.")
- global _global_server # pylint: disable=global-statement
- method_handles = []
- for method in methods:
- method_handles.append(
- _MethodHandle(method, per_method_metadata.get(method, []),
- args.num_channels, args.qps, args.server,
- args.rpc_timeout_sec, args.print_response))
- _global_server = grpc.server(futures.ThreadPoolExecutor())
- _global_server.add_insecure_port(f"0.0.0.0:{args.stats_port}")
- test_pb2_grpc.add_LoadBalancerStatsServiceServicer_to_server(
- _LoadBalancerStatsServicer(), _global_server)
- _global_server.start()
- _global_server.wait_for_termination()
- for method_handle in method_handles:
- method_handle.stop()
-
-
-def parse_metadata_arg(metadata_arg: str) -> PerMethodMetadataType:
- metadata = metadata_arg.split(",") if args.metadata else []
- per_method_metadata = collections.defaultdict(list)
- for metadatum in metadata:
- elems = metadatum.split(":")
- if len(elems) != 3:
- raise ValueError(
- f"'{metadatum}' was not in the form 'METHOD:KEY:VALUE'")
- if elems[0] not in _SUPPORTED_METHODS:
- raise ValueError(f"Unrecognized method '{elems[0]}'")
- per_method_metadata[elems[0]].append((elems[1], elems[2]))
- return per_method_metadata
-
-
-def parse_rpc_arg(rpc_arg: str) -> Sequence[str]:
- methods = rpc_arg.split(",")
- if set(methods) - set(_SUPPORTED_METHODS):
- raise ValueError("--rpc supported methods: {}".format(
- ", ".join(_SUPPORTED_METHODS)))
- return methods
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(
- description='Run Python XDS interop client.')
- parser.add_argument(
- "--num_channels",
- default=1,
- type=int,
- help="The number of channels from which to send requests.")
- parser.add_argument("--print_response",
- default=False,
- action="store_true",
- help="Write RPC response to STDOUT.")
- parser.add_argument(
- "--qps",
- default=1,
- type=int,
- help="The number of queries to send from each channel per second.")
- parser.add_argument("--rpc_timeout_sec",
- default=30,
- type=int,
- help="The per-RPC timeout in seconds.")
- parser.add_argument("--server",
- default="localhost:50051",
- help="The address of the server.")
- parser.add_argument(
- "--stats_port",
- default=50052,
- type=int,
- help="The port on which to expose the peer distribution stats service.")
- parser.add_argument('--verbose',
- help='verbose log output',
- default=False,
- action='store_true')
- parser.add_argument("--log_file",
- default=None,
- type=str,
- help="A file to log to.")
- rpc_help = "A comma-delimited list of RPC methods to run. Must be one of "
- rpc_help += ", ".join(_SUPPORTED_METHODS)
- rpc_help += "."
- parser.add_argument("--rpc", default="UnaryCall", type=str, help=rpc_help)
- metadata_help = (
- "A comma-delimited list of 3-tuples of the form " +
- "METHOD:KEY:VALUE, e.g. " +
- "EmptyCall:key1:value1,UnaryCall:key2:value2,EmptyCall:k3:v3")
- parser.add_argument("--metadata", default="", type=str, help=metadata_help)
- args = parser.parse_args()
- signal.signal(signal.SIGINT, _handle_sigint)
- if args.verbose:
- logger.setLevel(logging.DEBUG)
- if args.log_file:
- file_handler = logging.FileHandler(args.log_file, mode='a')
- file_handler.setFormatter(formatter)
- logger.addHandler(file_handler)
- _run(args, parse_rpc_arg(args.rpc), parse_metadata_arg(args.metadata))
+# Copyright 2020 The gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import signal
+import threading
+import time
+import sys
+
+from typing import DefaultDict, Dict, List, Mapping, Set, Sequence, Tuple
+import collections
+
+from concurrent import futures
+
+import grpc
+
+from src.proto.grpc.testing import test_pb2
+from src.proto.grpc.testing import test_pb2_grpc
+from src.proto.grpc.testing import messages_pb2
+from src.proto.grpc.testing import empty_pb2
+
+logger = logging.getLogger()
+console_handler = logging.StreamHandler()
+formatter = logging.Formatter(fmt='%(asctime)s: %(levelname)-8s %(message)s')
+console_handler.setFormatter(formatter)
+logger.addHandler(console_handler)
+
+_SUPPORTED_METHODS = (
+ "UnaryCall",
+ "EmptyCall",
+)
+
+PerMethodMetadataType = Mapping[str, Sequence[Tuple[str, str]]]
+
+
+class _StatsWatcher:
+ _start: int
+ _end: int
+ _rpcs_needed: int
+ _rpcs_by_peer: DefaultDict[str, int]
+ _rpcs_by_method: DefaultDict[str, DefaultDict[str, int]]
+ _no_remote_peer: int
+ _lock: threading.Lock
+ _condition: threading.Condition
+
+ def __init__(self, start: int, end: int):
+ self._start = start
+ self._end = end
+ self._rpcs_needed = end - start
+ self._rpcs_by_peer = collections.defaultdict(int)
+ self._rpcs_by_method = collections.defaultdict(
+ lambda: collections.defaultdict(int))
+ self._condition = threading.Condition()
+ self._no_remote_peer = 0
+
+ def on_rpc_complete(self, request_id: int, peer: str, method: str) -> None:
+ """Records statistics for a single RPC."""
+ if self._start <= request_id < self._end:
+ with self._condition:
+ if not peer:
+ self._no_remote_peer += 1
+ else:
+ self._rpcs_by_peer[peer] += 1
+ self._rpcs_by_method[method][peer] += 1
+ self._rpcs_needed -= 1
+ self._condition.notify()
+
+ def await_rpc_stats_response(self, timeout_sec: int
+ ) -> messages_pb2.LoadBalancerStatsResponse:
+ """Blocks until a full response has been collected."""
+ with self._condition:
+ self._condition.wait_for(lambda: not self._rpcs_needed,
+ timeout=float(timeout_sec))
+ response = messages_pb2.LoadBalancerStatsResponse()
+ for peer, count in self._rpcs_by_peer.items():
+ response.rpcs_by_peer[peer] = count
+ for method, count_by_peer in self._rpcs_by_method.items():
+ for peer, count in count_by_peer.items():
+ response.rpcs_by_method[method].rpcs_by_peer[peer] = count
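+                # RPCs that reported no peer, plus any still outstanding when the wait timed out, are counted as failures.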
+ response.num_failures = self._no_remote_peer + self._rpcs_needed
+ return response
+
+
+_global_lock = threading.Lock()
+_stop_event = threading.Event()
+_global_rpc_id: int = 0
+_watchers: Set[_StatsWatcher] = set()
+_global_server = None
+
+
+def _handle_sigint(sig, frame):
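+    # Tell the channel threads to stop issuing RPCs and shut the stats server down immediately.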
+ _stop_event.set()
+ _global_server.stop(None)
+
+
+class _LoadBalancerStatsServicer(test_pb2_grpc.LoadBalancerStatsServiceServicer
+ ):
+
+ def __init__(self):
+        super().__init__()
+
+ def GetClientStats(self, request: messages_pb2.LoadBalancerStatsRequest,
+ context: grpc.ServicerContext
+ ) -> messages_pb2.LoadBalancerStatsResponse:
+ logger.info("Received stats request.")
+ start = None
+ end = None
+ watcher = None
+ with _global_lock:
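+            # Watch the next num_rpcs request ids, i.e. the half-open window [start, end).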
+ start = _global_rpc_id + 1
+ end = start + request.num_rpcs
+ watcher = _StatsWatcher(start, end)
+ _watchers.add(watcher)
+ response = watcher.await_rpc_stats_response(request.timeout_sec)
+ with _global_lock:
+ _watchers.remove(watcher)
+ logger.info("Returning stats response: {}".format(response))
+ return response
+
+
+def _start_rpc(method: str, metadata: Sequence[Tuple[str, str]],
+ request_id: int, stub: test_pb2_grpc.TestServiceStub,
+ timeout: float,
+ futures: Mapping[int, Tuple[grpc.Future, str]]) -> None:
+ logger.info(f"Sending {method} request to backend: {request_id}")
+ if method == "UnaryCall":
+ future = stub.UnaryCall.future(messages_pb2.SimpleRequest(),
+ metadata=metadata,
+ timeout=timeout)
+ elif method == "EmptyCall":
+ future = stub.EmptyCall.future(empty_pb2.Empty(),
+ metadata=metadata,
+ timeout=timeout)
+ else:
+ raise ValueError(f"Unrecognized method '{method}'.")
+ futures[request_id] = (future, method)
+
+
+def _on_rpc_done(rpc_id: int, future: grpc.Future, method: str,
+ print_response: bool) -> None:
+ exception = future.exception()
+ hostname = ""
+ if exception is not None:
+ if exception.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
+ logger.error(f"RPC {rpc_id} timed out")
+ else:
+ logger.error(exception)
+ else:
+ response = future.result()
+ hostname = None
+ for metadatum in future.initial_metadata():
+ if metadatum[0] == "hostname":
+ hostname = metadatum[1]
+ break
+ else:
+ hostname = response.hostname
+ if print_response:
+ if future.code() == grpc.StatusCode.OK:
+ logger.info("Successful response.")
+ else:
+ logger.info(f"RPC failed: {call}")
+ with _global_lock:
+ for watcher in _watchers:
+ watcher.on_rpc_complete(rpc_id, hostname, method)
+
+
+def _remove_completed_rpcs(futures: Dict[int, Tuple[grpc.Future, str]],
+ print_response: bool) -> None:
+ logger.debug("Removing completed RPCs")
+ done = []
+ for future_id, (future, method) in futures.items():
+ if future.done():
+            _on_rpc_done(future_id, future, method, print_response)
+ done.append(future_id)
+ for rpc_id in done:
+ del futures[rpc_id]
+
+
+def _cancel_all_rpcs(futures: Mapping[int, Tuple[grpc.Future, str]]) -> None:
+ logger.info("Cancelling all remaining RPCs")
+ for future, _ in futures.values():
+ future.cancel()
+
+
+def _run_single_channel(method: str, metadata: Sequence[Tuple[str, str]],
+ qps: int, server: str, rpc_timeout_sec: int,
+ print_response: bool):
+ global _global_rpc_id # pylint: disable=global-statement
+ duration_per_query = 1.0 / float(qps)
+ with grpc.insecure_channel(server) as channel:
+ stub = test_pb2_grpc.TestServiceStub(channel)
+ futures: Dict[int, Tuple[grpc.Future, str]] = {}
+ while not _stop_event.is_set():
+ request_id = None
+ with _global_lock:
+ request_id = _global_rpc_id
+ _global_rpc_id += 1
+ start = time.time()
+ end = start + duration_per_query
+ _start_rpc(method, metadata, request_id, stub,
+ float(rpc_timeout_sec), futures)
+ _remove_completed_rpcs(futures, print_response)
+ logger.debug(f"Currently {len(futures)} in-flight RPCs")
+ now = time.time()
+ while now < end:
+ time.sleep(end - now)
+ now = time.time()
+ _cancel_all_rpcs(futures)
+
+
+class _MethodHandle:
+ """An object grouping together threads driving RPCs for a method."""
+
+ _channel_threads: List[threading.Thread]
+
+ def __init__(self, method: str, metadata: Sequence[Tuple[str, str]],
+ num_channels: int, qps: int, server: str, rpc_timeout_sec: int,
+ print_response: bool):
+ """Creates and starts a group of threads running the indicated method."""
+ self._channel_threads = []
+ for i in range(num_channels):
+ thread = threading.Thread(target=_run_single_channel,
+ args=(
+ method,
+ metadata,
+ qps,
+ server,
+ rpc_timeout_sec,
+ print_response,
+ ))
+ thread.start()
+ self._channel_threads.append(thread)
+
+ def stop(self):
+ """Joins all threads referenced by the handle."""
+ for channel_thread in self._channel_threads:
+ channel_thread.join()
+
+
+def _run(args: argparse.Namespace, methods: Sequence[str],
+ per_method_metadata: PerMethodMetadataType) -> None:
+ logger.info("Starting python xDS Interop Client.")
+ global _global_server # pylint: disable=global-statement
+ method_handles = []
+ for method in methods:
+ method_handles.append(
+ _MethodHandle(method, per_method_metadata.get(method, []),
+ args.num_channels, args.qps, args.server,
+ args.rpc_timeout_sec, args.print_response))
+ _global_server = grpc.server(futures.ThreadPoolExecutor())
+ _global_server.add_insecure_port(f"0.0.0.0:{args.stats_port}")
+ test_pb2_grpc.add_LoadBalancerStatsServiceServicer_to_server(
+ _LoadBalancerStatsServicer(), _global_server)
+ _global_server.start()
+ _global_server.wait_for_termination()
+ for method_handle in method_handles:
+ method_handle.stop()
+
+
+def parse_metadata_arg(metadata_arg: str) -> PerMethodMetadataType:
+    metadata = metadata_arg.split(",") if metadata_arg else []
+ per_method_metadata = collections.defaultdict(list)
+ for metadatum in metadata:
+ elems = metadatum.split(":")
+ if len(elems) != 3:
+ raise ValueError(
+ f"'{metadatum}' was not in the form 'METHOD:KEY:VALUE'")
+ if elems[0] not in _SUPPORTED_METHODS:
+ raise ValueError(f"Unrecognized method '{elems[0]}'")
+ per_method_metadata[elems[0]].append((elems[1], elems[2]))
+ return per_method_metadata
+
+
+def parse_rpc_arg(rpc_arg: str) -> Sequence[str]:
+ methods = rpc_arg.split(",")
+ if set(methods) - set(_SUPPORTED_METHODS):
+ raise ValueError("--rpc supported methods: {}".format(
+ ", ".join(_SUPPORTED_METHODS)))
+ return methods
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description='Run Python XDS interop client.')
+ parser.add_argument(
+ "--num_channels",
+ default=1,
+ type=int,
+ help="The number of channels from which to send requests.")
+ parser.add_argument("--print_response",
+ default=False,
+ action="store_true",
+ help="Write RPC response to STDOUT.")
+ parser.add_argument(
+ "--qps",
+ default=1,
+ type=int,
+ help="The number of queries to send from each channel per second.")
+ parser.add_argument("--rpc_timeout_sec",
+ default=30,
+ type=int,
+ help="The per-RPC timeout in seconds.")
+ parser.add_argument("--server",
+ default="localhost:50051",
+ help="The address of the server.")
+ parser.add_argument(
+ "--stats_port",
+ default=50052,
+ type=int,
+ help="The port on which to expose the peer distribution stats service.")
+ parser.add_argument('--verbose',
+ help='verbose log output',
+ default=False,
+ action='store_true')
+ parser.add_argument("--log_file",
+ default=None,
+ type=str,
+ help="A file to log to.")
+ rpc_help = "A comma-delimited list of RPC methods to run. Must be one of "
+ rpc_help += ", ".join(_SUPPORTED_METHODS)
+ rpc_help += "."
+ parser.add_argument("--rpc", default="UnaryCall", type=str, help=rpc_help)
+ metadata_help = (
+ "A comma-delimited list of 3-tuples of the form " +
+ "METHOD:KEY:VALUE, e.g. " +
+ "EmptyCall:key1:value1,UnaryCall:key2:value2,EmptyCall:k3:v3")
+ parser.add_argument("--metadata", default="", type=str, help=metadata_help)
+ args = parser.parse_args()
+ signal.signal(signal.SIGINT, _handle_sigint)
+ if args.verbose:
+ logger.setLevel(logging.DEBUG)
+ if args.log_file:
+ file_handler = logging.FileHandler(args.log_file, mode='a')
+ file_handler.setFormatter(formatter)
+ logger.addHandler(file_handler)
+ _run(args, parse_rpc_arg(args.rpc), parse_metadata_arg(args.metadata))
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/__init__.py
index 690315b5ad..f4b321fc5b 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/__init__.py
@@ -1,13 +1,13 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
+# Copyright 2019 The gRPC Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_leak_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_leak_test.py
index 9532aab645..3b3f12fa1f 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_leak_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_leak_test.py
@@ -1,98 +1,98 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""A smoke test for memory leaks on short-lived channels without close.
-
-This test doesn't guarantee all resources are cleaned if `Channel.close` is not
-explicitly invoked. The recommended way of using Channel object is using `with`
-clause, and let context manager automatically close the channel.
-"""
-
-import logging
-import os
-import resource
-import sys
-import unittest
-from concurrent.futures import ThreadPoolExecutor
-
-import grpc
-
-_TEST_METHOD = '/test/Test'
-_REQUEST = b'\x23\x33'
-_LARGE_NUM_OF_ITERATIONS = 5000
-
-# If MAX_RSS inflated more than this size, the test is failed.
-_FAIL_THRESHOLD = 25 * 1024 * 1024 # 25 MiB
-
-
-def _get_max_rss():
- return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
-
-
-def _pretty_print_bytes(x):
- if x > 1024 * 1024 * 1024:
- return "%.2f GiB" % (x / 1024.0 / 1024 / 1024)
- elif x > 1024 * 1024:
- return "%.2f MiB" % (x / 1024.0 / 1024)
- elif x > 1024:
- return "%.2f KiB" % (x / 1024.0)
- else:
- return "%d B" % x
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _TEST_METHOD:
- return grpc.unary_unary_rpc_method_handler(lambda x, _: x)
-
-
-def _start_a_test_server():
- server = grpc.server(ThreadPoolExecutor(max_workers=1),
- options=(('grpc.so_reuseport', 0),))
- server.add_generic_rpc_handlers((_GenericHandler(),))
- port = server.add_insecure_port('localhost:0')
- server.start()
- return 'localhost:%d' % port, server
-
-
-def _perform_an_rpc(address):
- channel = grpc.insecure_channel(address)
- multicallable = channel.unary_unary(_TEST_METHOD)
- response = multicallable(_REQUEST)
- assert _REQUEST == response
-
-
-class TestLeak(unittest.TestCase):
-
- def test_leak_with_single_shot_rpcs(self):
- address, server = _start_a_test_server()
-
- # Records memory before experiment.
- before = _get_max_rss()
-
- # Amplifies the leak.
- for n in range(_LARGE_NUM_OF_ITERATIONS):
- _perform_an_rpc(address)
-
- # Fails the test if memory leak detected.
- diff = _get_max_rss() - before
- if diff > _FAIL_THRESHOLD:
- self.fail("Max RSS inflated {} > {}".format(
- _pretty_print_bytes(diff),
- _pretty_print_bytes(_FAIL_THRESHOLD)))
-
-
-if __name__ == "__main__":
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A smoke test for memory leaks on short-lived channels without close.
+
+This test doesn't guarantee all resources are cleaned if `Channel.close` is not
+explicitly invoked. The recommended way of using Channel object is using `with`
+clause, and let context manager automatically close the channel.
+"""
+
+import logging
+import os
+import resource
+import sys
+import unittest
+from concurrent.futures import ThreadPoolExecutor
+
+import grpc
+
+_TEST_METHOD = '/test/Test'
+_REQUEST = b'\x23\x33'
+_LARGE_NUM_OF_ITERATIONS = 5000
+
+# If MAX_RSS inflated more than this size, the test is failed.
+_FAIL_THRESHOLD = 25 * 1024 * 1024 # 25 MiB
+
+
+def _get_max_rss():
+ return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
+
+
+def _pretty_print_bytes(x):
+ if x > 1024 * 1024 * 1024:
+ return "%.2f GiB" % (x / 1024.0 / 1024 / 1024)
+ elif x > 1024 * 1024:
+ return "%.2f MiB" % (x / 1024.0 / 1024)
+ elif x > 1024:
+ return "%.2f KiB" % (x / 1024.0)
+ else:
+ return "%d B" % x
+
+
+class _GenericHandler(grpc.GenericRpcHandler):
+
+ def service(self, handler_call_details):
+ if handler_call_details.method == _TEST_METHOD:
+ return grpc.unary_unary_rpc_method_handler(lambda x, _: x)
+
+
+def _start_a_test_server():
+ server = grpc.server(ThreadPoolExecutor(max_workers=1),
+ options=(('grpc.so_reuseport', 0),))
+ server.add_generic_rpc_handlers((_GenericHandler(),))
+ port = server.add_insecure_port('localhost:0')
+ server.start()
+ return 'localhost:%d' % port, server
+
+
+def _perform_an_rpc(address):
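+    # The channel is intentionally never closed; the surrounding test checks that this does not leak.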
+ channel = grpc.insecure_channel(address)
+ multicallable = channel.unary_unary(_TEST_METHOD)
+ response = multicallable(_REQUEST)
+ assert _REQUEST == response
+
+
+class TestLeak(unittest.TestCase):
+
+ def test_leak_with_single_shot_rpcs(self):
+ address, server = _start_a_test_server()
+
+ # Records memory before experiment.
+ before = _get_max_rss()
+
+ # Amplifies the leak.
+ for n in range(_LARGE_NUM_OF_ITERATIONS):
+ _perform_an_rpc(address)
+
+ # Fails the test if memory leak detected.
+ diff = _get_max_rss() - before
+ if diff > _FAIL_THRESHOLD:
+ self.fail("Max RSS inflated {} > {}".format(
+ _pretty_print_bytes(diff),
+ _pretty_print_bytes(_FAIL_THRESHOLD)))
+
+
+if __name__ == "__main__":
+ logging.basicConfig(level=logging.DEBUG)
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_simple_stubs_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_simple_stubs_test.py
index c891e6a01e..08d5a882eb 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_simple_stubs_test.py
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_simple_stubs_test.py
@@ -1,415 +1,415 @@
-# Copyright 2020 The gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests for Simple Stubs."""
-
-# TODO(https://github.com/grpc/grpc/issues/21965): Run under setuptools.
-
-import os
-
-_MAXIMUM_CHANNELS = 10
-
-_DEFAULT_TIMEOUT = 1.0
-
-os.environ["GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS"] = "2"
-os.environ["GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM"] = str(_MAXIMUM_CHANNELS)
-os.environ["GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS"] = str(_DEFAULT_TIMEOUT)
-
-import contextlib
-import datetime
-import inspect
-import logging
-import threading
-import unittest
-import sys
-import time
-from typing import Callable, Optional
-
-from tests.unit import test_common
-from tests.unit.framework.common import get_socket
-from tests.unit import resources
-import grpc
-import grpc.experimental
-
-_REQUEST = b"0000"
-
-_CACHE_EPOCHS = 8
-_CACHE_TRIALS = 6
-
-_SERVER_RESPONSE_COUNT = 10
-_CLIENT_REQUEST_COUNT = _SERVER_RESPONSE_COUNT
-
-_STRESS_EPOCHS = _MAXIMUM_CHANNELS * 10
-
-_UNARY_UNARY = "/test/UnaryUnary"
-_UNARY_STREAM = "/test/UnaryStream"
-_STREAM_UNARY = "/test/StreamUnary"
-_STREAM_STREAM = "/test/StreamStream"
-_BLACK_HOLE = "/test/BlackHole"
-
-
-@contextlib.contextmanager
-def _env(key: str, value: str):
- os.environ[key] = value
- yield
- del os.environ[key]
-
-
-def _unary_unary_handler(request, context):
- return request
-
-
-def _unary_stream_handler(request, context):
- for _ in range(_SERVER_RESPONSE_COUNT):
- yield request
-
-
-def _stream_unary_handler(request_iterator, context):
- request = None
- for single_request in request_iterator:
- request = single_request
- return request
-
-
-def _stream_stream_handler(request_iterator, context):
- for request in request_iterator:
- yield request
-
-
-def _black_hole_handler(request, context):
- event = threading.Event()
-
- def _on_done():
- event.set()
-
- context.add_callback(_on_done)
- while not event.is_set():
- time.sleep(0.1)
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _UNARY_UNARY:
- return grpc.unary_unary_rpc_method_handler(_unary_unary_handler)
- elif handler_call_details.method == _UNARY_STREAM:
- return grpc.unary_stream_rpc_method_handler(_unary_stream_handler)
- elif handler_call_details.method == _STREAM_UNARY:
- return grpc.stream_unary_rpc_method_handler(_stream_unary_handler)
- elif handler_call_details.method == _STREAM_STREAM:
- return grpc.stream_stream_rpc_method_handler(_stream_stream_handler)
- elif handler_call_details.method == _BLACK_HOLE:
- return grpc.unary_unary_rpc_method_handler(_black_hole_handler)
- else:
- raise NotImplementedError()
-
-
-def _time_invocation(to_time: Callable[[], None]) -> datetime.timedelta:
- start = datetime.datetime.now()
- to_time()
- return datetime.datetime.now() - start
-
-
-@contextlib.contextmanager
-def _server(credentials: Optional[grpc.ServerCredentials]):
- try:
- server = test_common.test_server()
- target = '[::]:0'
- if credentials is None:
- port = server.add_insecure_port(target)
- else:
- port = server.add_secure_port(target, credentials)
- server.add_generic_rpc_handlers((_GenericHandler(),))
- server.start()
- yield port
- finally:
- server.stop(None)
-
-
-class SimpleStubsTest(unittest.TestCase):
-
- def assert_cached(self, to_check: Callable[[str], None]) -> None:
- """Asserts that a function caches intermediate data/state.
-
- To be specific, given a function whose caching behavior is
- deterministic in the value of a supplied string, this function asserts
- that, on average, subsequent invocations of the function for a specific
- string are faster than first invocations with that same string.
-
- Args:
- to_check: A function returning nothing, that caches values based on
- an arbitrary supplied string.
- """
- initial_runs = []
- cached_runs = []
- for epoch in range(_CACHE_EPOCHS):
- runs = []
- text = str(epoch)
- for trial in range(_CACHE_TRIALS):
- runs.append(_time_invocation(lambda: to_check(text)))
- initial_runs.append(runs[0])
- cached_runs.extend(runs[1:])
- average_cold = sum((run for run in initial_runs),
- datetime.timedelta()) / len(initial_runs)
- average_warm = sum((run for run in cached_runs),
- datetime.timedelta()) / len(cached_runs)
- self.assertLess(average_warm, average_cold)
-
- def assert_eventually(self,
- predicate: Callable[[], bool],
- *,
- timeout: Optional[datetime.timedelta] = None,
- message: Optional[Callable[[], str]] = None) -> None:
- message = message or (lambda: "Proposition did not evaluate to true")
- timeout = timeout or datetime.timedelta(seconds=10)
- end = datetime.datetime.now() + timeout
- while datetime.datetime.now() < end:
- if predicate():
- break
- time.sleep(0.5)
- else:
- self.fail(message() + " after " + str(timeout))
-
- def test_unary_unary_insecure(self):
- with _server(None) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.unary_unary(
- _REQUEST,
- target,
- _UNARY_UNARY,
- channel_credentials=grpc.experimental.
- insecure_channel_credentials(),
- timeout=None)
- self.assertEqual(_REQUEST, response)
-
- def test_unary_unary_secure(self):
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.unary_unary(
- _REQUEST,
- target,
- _UNARY_UNARY,
- channel_credentials=grpc.local_channel_credentials(),
- timeout=None)
- self.assertEqual(_REQUEST, response)
-
- def test_channels_cached(self):
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- test_name = inspect.stack()[0][3]
- args = (_REQUEST, target, _UNARY_UNARY)
- kwargs = {"channel_credentials": grpc.local_channel_credentials()}
-
- def _invoke(seed: str):
- run_kwargs = dict(kwargs)
- run_kwargs["options"] = ((test_name + seed, ""),)
- grpc.experimental.unary_unary(*args, **run_kwargs)
-
- self.assert_cached(_invoke)
-
- def test_channels_evicted(self):
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.unary_unary(
- _REQUEST,
- target,
- _UNARY_UNARY,
- channel_credentials=grpc.local_channel_credentials())
- self.assert_eventually(
- lambda: grpc._simple_stubs.ChannelCache.get(
- )._test_only_channel_count() == 0,
- message=lambda:
- f"{grpc._simple_stubs.ChannelCache.get()._test_only_channel_count()} remain"
- )
-
- def test_total_channels_enforced(self):
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- for i in range(_STRESS_EPOCHS):
- # Ensure we get a new channel each time.
- options = (("foo", str(i)),)
- # Send messages at full blast.
- grpc.experimental.unary_unary(
- _REQUEST,
- target,
- _UNARY_UNARY,
- options=options,
- channel_credentials=grpc.local_channel_credentials())
- self.assert_eventually(
- lambda: grpc._simple_stubs.ChannelCache.get(
- )._test_only_channel_count() <= _MAXIMUM_CHANNELS + 1,
- message=lambda:
- f"{grpc._simple_stubs.ChannelCache.get()._test_only_channel_count()} channels remain"
- )
-
- def test_unary_stream(self):
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- for response in grpc.experimental.unary_stream(
- _REQUEST,
- target,
- _UNARY_STREAM,
- channel_credentials=grpc.local_channel_credentials()):
- self.assertEqual(_REQUEST, response)
-
- def test_stream_unary(self):
-
- def request_iter():
- for _ in range(_CLIENT_REQUEST_COUNT):
- yield _REQUEST
-
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.stream_unary(
- request_iter(),
- target,
- _STREAM_UNARY,
- channel_credentials=grpc.local_channel_credentials())
- self.assertEqual(_REQUEST, response)
-
- def test_stream_stream(self):
-
- def request_iter():
- for _ in range(_CLIENT_REQUEST_COUNT):
- yield _REQUEST
-
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- for response in grpc.experimental.stream_stream(
- request_iter(),
- target,
- _STREAM_STREAM,
- channel_credentials=grpc.local_channel_credentials()):
- self.assertEqual(_REQUEST, response)
-
- def test_default_ssl(self):
- _private_key = resources.private_key()
- _certificate_chain = resources.certificate_chain()
- _server_certs = ((_private_key, _certificate_chain),)
- _server_host_override = 'foo.test.google.fr'
- _test_root_certificates = resources.test_root_certificates()
- _property_options = ((
- 'grpc.ssl_target_name_override',
- _server_host_override,
- ),)
- cert_dir = os.path.join(os.path.dirname(resources.__file__),
- "credentials")
- cert_file = os.path.join(cert_dir, "ca.pem")
- with _env("GRPC_DEFAULT_SSL_ROOTS_FILE_PATH", cert_file):
- server_creds = grpc.ssl_server_credentials(_server_certs)
- with _server(server_creds) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.unary_unary(
- _REQUEST, target, _UNARY_UNARY, options=_property_options)
-
- def test_insecure_sugar(self):
- with _server(None) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.unary_unary(_REQUEST,
- target,
- _UNARY_UNARY,
- insecure=True)
- self.assertEqual(_REQUEST, response)
-
- def test_insecure_sugar_mutually_exclusive(self):
- with _server(None) as port:
- target = f'localhost:{port}'
- with self.assertRaises(ValueError):
- response = grpc.experimental.unary_unary(
- _REQUEST,
- target,
- _UNARY_UNARY,
- insecure=True,
- channel_credentials=grpc.local_channel_credentials())
-
- def test_default_wait_for_ready(self):
- addr, port, sock = get_socket()
- sock.close()
- target = f'{addr}:{port}'
- channel = grpc._simple_stubs.ChannelCache.get().get_channel(
- target, (), None, True, None)
- rpc_finished_event = threading.Event()
- rpc_failed_event = threading.Event()
- server = None
-
- def _on_connectivity_changed(connectivity):
- nonlocal server
- if connectivity is grpc.ChannelConnectivity.TRANSIENT_FAILURE:
- self.assertFalse(rpc_finished_event.is_set())
- self.assertFalse(rpc_failed_event.is_set())
- server = test_common.test_server()
- server.add_insecure_port(target)
- server.add_generic_rpc_handlers((_GenericHandler(),))
- server.start()
- channel.unsubscribe(_on_connectivity_changed)
- elif connectivity in (grpc.ChannelConnectivity.IDLE,
- grpc.ChannelConnectivity.CONNECTING):
- pass
- else:
- self.fail("Encountered unknown state.")
-
- channel.subscribe(_on_connectivity_changed)
-
- def _send_rpc():
- try:
- response = grpc.experimental.unary_unary(_REQUEST,
- target,
- _UNARY_UNARY,
- timeout=None,
- insecure=True)
- rpc_finished_event.set()
- except Exception as e:
- rpc_failed_event.set()
-
- t = threading.Thread(target=_send_rpc)
- t.start()
- t.join()
- self.assertFalse(rpc_failed_event.is_set())
- self.assertTrue(rpc_finished_event.is_set())
- if server is not None:
- server.stop(None)
-
- def assert_times_out(self, invocation_args):
- with _server(None) as port:
- target = f'localhost:{port}'
- with self.assertRaises(grpc.RpcError) as cm:
- response = grpc.experimental.unary_unary(_REQUEST,
- target,
- _BLACK_HOLE,
- insecure=True,
- **invocation_args)
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED,
- cm.exception.code())
-
- def test_default_timeout(self):
- not_present = object()
- wait_for_ready_values = [True, not_present]
- timeout_values = [0.5, not_present]
- cases = []
- for wait_for_ready in wait_for_ready_values:
- for timeout in timeout_values:
- case = {}
- if timeout is not not_present:
- case["timeout"] = timeout
- if wait_for_ready is not not_present:
- case["wait_for_ready"] = wait_for_ready
- cases.append(case)
-
- for case in cases:
- with self.subTest(**case):
- self.assert_times_out(case)
-
-
-if __name__ == "__main__":
- logging.basicConfig(level=logging.INFO)
- unittest.main(verbosity=2)
+# Copyright 2020 The gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for Simple Stubs."""
+
+# TODO(https://github.com/grpc/grpc/issues/21965): Run under setuptools.
+
+import os
+
+_MAXIMUM_CHANNELS = 10
+
+_DEFAULT_TIMEOUT = 1.0
+
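+# These must be set before grpc is imported below so the managed-channel settings take effect.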
+os.environ["GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS"] = "2"
+os.environ["GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM"] = str(_MAXIMUM_CHANNELS)
+os.environ["GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS"] = str(_DEFAULT_TIMEOUT)
+
+import contextlib
+import datetime
+import inspect
+import logging
+import threading
+import unittest
+import sys
+import time
+from typing import Callable, Optional
+
+from tests.unit import test_common
+from tests.unit.framework.common import get_socket
+from tests.unit import resources
+import grpc
+import grpc.experimental
+
+_REQUEST = b"0000"
+
+_CACHE_EPOCHS = 8
+_CACHE_TRIALS = 6
+
+_SERVER_RESPONSE_COUNT = 10
+_CLIENT_REQUEST_COUNT = _SERVER_RESPONSE_COUNT
+
+_STRESS_EPOCHS = _MAXIMUM_CHANNELS * 10
+
+_UNARY_UNARY = "/test/UnaryUnary"
+_UNARY_STREAM = "/test/UnaryStream"
+_STREAM_UNARY = "/test/StreamUnary"
+_STREAM_STREAM = "/test/StreamStream"
+_BLACK_HOLE = "/test/BlackHole"
+
+
+@contextlib.contextmanager
+def _env(key: str, value: str):
+ os.environ[key] = value
+ yield
+ del os.environ[key]
+
+
+def _unary_unary_handler(request, context):
+ return request
+
+
+def _unary_stream_handler(request, context):
+ for _ in range(_SERVER_RESPONSE_COUNT):
+ yield request
+
+
+def _stream_unary_handler(request_iterator, context):
+ request = None
+ for single_request in request_iterator:
+ request = single_request
+ return request
+
+
+def _stream_stream_handler(request_iterator, context):
+ for request in request_iterator:
+ yield request
+
+
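+# This handler never completes on its own; RPCs to it end only via cancellation or deadline expiry, which the timeout tests rely on.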
+def _black_hole_handler(request, context):
+ event = threading.Event()
+
+ def _on_done():
+ event.set()
+
+ context.add_callback(_on_done)
+ while not event.is_set():
+ time.sleep(0.1)
+
+
+class _GenericHandler(grpc.GenericRpcHandler):
+
+ def service(self, handler_call_details):
+ if handler_call_details.method == _UNARY_UNARY:
+ return grpc.unary_unary_rpc_method_handler(_unary_unary_handler)
+ elif handler_call_details.method == _UNARY_STREAM:
+ return grpc.unary_stream_rpc_method_handler(_unary_stream_handler)
+ elif handler_call_details.method == _STREAM_UNARY:
+ return grpc.stream_unary_rpc_method_handler(_stream_unary_handler)
+ elif handler_call_details.method == _STREAM_STREAM:
+ return grpc.stream_stream_rpc_method_handler(_stream_stream_handler)
+ elif handler_call_details.method == _BLACK_HOLE:
+ return grpc.unary_unary_rpc_method_handler(_black_hole_handler)
+ else:
+ raise NotImplementedError()
+
+
+def _time_invocation(to_time: Callable[[], None]) -> datetime.timedelta:
+ start = datetime.datetime.now()
+ to_time()
+ return datetime.datetime.now() - start
+
+
+@contextlib.contextmanager
+def _server(credentials: Optional[grpc.ServerCredentials]):
+ try:
+ server = test_common.test_server()
+ target = '[::]:0'
+ if credentials is None:
+ port = server.add_insecure_port(target)
+ else:
+ port = server.add_secure_port(target, credentials)
+ server.add_generic_rpc_handlers((_GenericHandler(),))
+ server.start()
+ yield port
+ finally:
+ server.stop(None)
+
+
+class SimpleStubsTest(unittest.TestCase):
+
+ def assert_cached(self, to_check: Callable[[str], None]) -> None:
+ """Asserts that a function caches intermediate data/state.
+
+ To be specific, given a function whose caching behavior is
+ deterministic in the value of a supplied string, this function asserts
+ that, on average, subsequent invocations of the function for a specific
+ string are faster than first invocations with that same string.
+
+ Args:
+ to_check: A function returning nothing, that caches values based on
+ an arbitrary supplied string.
+ """
+ initial_runs = []
+ cached_runs = []
+ for epoch in range(_CACHE_EPOCHS):
+ runs = []
+ text = str(epoch)
+ for trial in range(_CACHE_TRIALS):
+ runs.append(_time_invocation(lambda: to_check(text)))
+ initial_runs.append(runs[0])
+ cached_runs.extend(runs[1:])
+ average_cold = sum((run for run in initial_runs),
+ datetime.timedelta()) / len(initial_runs)
+ average_warm = sum((run for run in cached_runs),
+ datetime.timedelta()) / len(cached_runs)
+ self.assertLess(average_warm, average_cold)
+
+ def assert_eventually(self,
+ predicate: Callable[[], bool],
+ *,
+ timeout: Optional[datetime.timedelta] = None,
+ message: Optional[Callable[[], str]] = None) -> None:
+ message = message or (lambda: "Proposition did not evaluate to true")
+ timeout = timeout or datetime.timedelta(seconds=10)
+ end = datetime.datetime.now() + timeout
+ while datetime.datetime.now() < end:
+ if predicate():
+ break
+ time.sleep(0.5)
+ else:
+ self.fail(message() + " after " + str(timeout))
+
+ def test_unary_unary_insecure(self):
+ with _server(None) as port:
+ target = f'localhost:{port}'
+ response = grpc.experimental.unary_unary(
+ _REQUEST,
+ target,
+ _UNARY_UNARY,
+ channel_credentials=grpc.experimental.
+ insecure_channel_credentials(),
+ timeout=None)
+ self.assertEqual(_REQUEST, response)
+
+ def test_unary_unary_secure(self):
+ with _server(grpc.local_server_credentials()) as port:
+ target = f'localhost:{port}'
+ response = grpc.experimental.unary_unary(
+ _REQUEST,
+ target,
+ _UNARY_UNARY,
+ channel_credentials=grpc.local_channel_credentials(),
+ timeout=None)
+ self.assertEqual(_REQUEST, response)
+
+ def test_channels_cached(self):
+ with _server(grpc.local_server_credentials()) as port:
+ target = f'localhost:{port}'
+ test_name = inspect.stack()[0][3]
+ args = (_REQUEST, target, _UNARY_UNARY)
+ kwargs = {"channel_credentials": grpc.local_channel_credentials()}
+
+ def _invoke(seed: str):
+ run_kwargs = dict(kwargs)
+ run_kwargs["options"] = ((test_name + seed, ""),)
+ grpc.experimental.unary_unary(*args, **run_kwargs)
+
+ self.assert_cached(_invoke)
+
+ def test_channels_evicted(self):
+ with _server(grpc.local_server_credentials()) as port:
+ target = f'localhost:{port}'
+ response = grpc.experimental.unary_unary(
+ _REQUEST,
+ target,
+ _UNARY_UNARY,
+ channel_credentials=grpc.local_channel_credentials())
+ self.assert_eventually(
+ lambda: grpc._simple_stubs.ChannelCache.get(
+ )._test_only_channel_count() == 0,
+ message=lambda:
+ f"{grpc._simple_stubs.ChannelCache.get()._test_only_channel_count()} remain"
+ )
+
+ def test_total_channels_enforced(self):
+ with _server(grpc.local_server_credentials()) as port:
+ target = f'localhost:{port}'
+ for i in range(_STRESS_EPOCHS):
+ # Ensure we get a new channel each time.
+ options = (("foo", str(i)),)
+ # Send messages at full blast.
+ grpc.experimental.unary_unary(
+ _REQUEST,
+ target,
+ _UNARY_UNARY,
+ options=options,
+ channel_credentials=grpc.local_channel_credentials())
+ self.assert_eventually(
+ lambda: grpc._simple_stubs.ChannelCache.get(
+ )._test_only_channel_count() <= _MAXIMUM_CHANNELS + 1,
+ message=lambda:
+ f"{grpc._simple_stubs.ChannelCache.get()._test_only_channel_count()} channels remain"
+ )
+
+ def test_unary_stream(self):
+ with _server(grpc.local_server_credentials()) as port:
+ target = f'localhost:{port}'
+ for response in grpc.experimental.unary_stream(
+ _REQUEST,
+ target,
+ _UNARY_STREAM,
+ channel_credentials=grpc.local_channel_credentials()):
+ self.assertEqual(_REQUEST, response)
+
+ def test_stream_unary(self):
+
+ def request_iter():
+ for _ in range(_CLIENT_REQUEST_COUNT):
+ yield _REQUEST
+
+ with _server(grpc.local_server_credentials()) as port:
+ target = f'localhost:{port}'
+ response = grpc.experimental.stream_unary(
+ request_iter(),
+ target,
+ _STREAM_UNARY,
+ channel_credentials=grpc.local_channel_credentials())
+ self.assertEqual(_REQUEST, response)
+
+ def test_stream_stream(self):
+
+ def request_iter():
+ for _ in range(_CLIENT_REQUEST_COUNT):
+ yield _REQUEST
+
+ with _server(grpc.local_server_credentials()) as port:
+ target = f'localhost:{port}'
+ for response in grpc.experimental.stream_stream(
+ request_iter(),
+ target,
+ _STREAM_STREAM,
+ channel_credentials=grpc.local_channel_credentials()):
+ self.assertEqual(_REQUEST, response)
+
+ def test_default_ssl(self):
+ _private_key = resources.private_key()
+ _certificate_chain = resources.certificate_chain()
+ _server_certs = ((_private_key, _certificate_chain),)
+ _server_host_override = 'foo.test.google.fr'
+ _test_root_certificates = resources.test_root_certificates()
+ _property_options = ((
+ 'grpc.ssl_target_name_override',
+ _server_host_override,
+ ),)
+ cert_dir = os.path.join(os.path.dirname(resources.__file__),
+ "credentials")
+ cert_file = os.path.join(cert_dir, "ca.pem")
+ with _env("GRPC_DEFAULT_SSL_ROOTS_FILE_PATH", cert_file):
+ server_creds = grpc.ssl_server_credentials(_server_certs)
+ with _server(server_creds) as port:
+ target = f'localhost:{port}'
+ response = grpc.experimental.unary_unary(
+ _REQUEST, target, _UNARY_UNARY, options=_property_options)
+
+ def test_insecure_sugar(self):
+ with _server(None) as port:
+ target = f'localhost:{port}'
+ response = grpc.experimental.unary_unary(_REQUEST,
+ target,
+ _UNARY_UNARY,
+ insecure=True)
+ self.assertEqual(_REQUEST, response)
+
+ def test_insecure_sugar_mutually_exclusive(self):
+ with _server(None) as port:
+ target = f'localhost:{port}'
+ with self.assertRaises(ValueError):
+ response = grpc.experimental.unary_unary(
+ _REQUEST,
+ target,
+ _UNARY_UNARY,
+ insecure=True,
+ channel_credentials=grpc.local_channel_credentials())
+
+ def test_default_wait_for_ready(self):
+ addr, port, sock = get_socket()
+ sock.close()
+ target = f'{addr}:{port}'
+ channel = grpc._simple_stubs.ChannelCache.get().get_channel(
+ target, (), None, True, None)
+ rpc_finished_event = threading.Event()
+ rpc_failed_event = threading.Event()
+ server = None
+
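+        # The server is started only once the channel hits TRANSIENT_FAILURE, so the RPC can succeed only if wait_for_ready is enabled by default.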
+ def _on_connectivity_changed(connectivity):
+ nonlocal server
+ if connectivity is grpc.ChannelConnectivity.TRANSIENT_FAILURE:
+ self.assertFalse(rpc_finished_event.is_set())
+ self.assertFalse(rpc_failed_event.is_set())
+ server = test_common.test_server()
+ server.add_insecure_port(target)
+ server.add_generic_rpc_handlers((_GenericHandler(),))
+ server.start()
+ channel.unsubscribe(_on_connectivity_changed)
+ elif connectivity in (grpc.ChannelConnectivity.IDLE,
+ grpc.ChannelConnectivity.CONNECTING):
+ pass
+ else:
+ self.fail("Encountered unknown state.")
+
+ channel.subscribe(_on_connectivity_changed)
+
+ def _send_rpc():
+ try:
+ response = grpc.experimental.unary_unary(_REQUEST,
+ target,
+ _UNARY_UNARY,
+ timeout=None,
+ insecure=True)
+ rpc_finished_event.set()
+ except Exception as e:
+ rpc_failed_event.set()
+
+ t = threading.Thread(target=_send_rpc)
+ t.start()
+ t.join()
+ self.assertFalse(rpc_failed_event.is_set())
+ self.assertTrue(rpc_finished_event.is_set())
+ if server is not None:
+ server.stop(None)
+
+ def assert_times_out(self, invocation_args):
+ with _server(None) as port:
+ target = f'localhost:{port}'
+ with self.assertRaises(grpc.RpcError) as cm:
+ response = grpc.experimental.unary_unary(_REQUEST,
+ target,
+ _BLACK_HOLE,
+ insecure=True,
+ **invocation_args)
+ self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED,
+ cm.exception.code())
+
+ def test_default_timeout(self):
+ not_present = object()
+ wait_for_ready_values = [True, not_present]
+ timeout_values = [0.5, not_present]
+ cases = []
+ for wait_for_ready in wait_for_ready_values:
+ for timeout in timeout_values:
+ case = {}
+ if timeout is not not_present:
+ case["timeout"] = timeout
+ if wait_for_ready is not not_present:
+ case["wait_for_ready"] = wait_for_ready
+ cases.append(case)
+
+ for case in cases:
+ with self.subTest(**case):
+ self.assert_times_out(case)
+
+
+if __name__ == "__main__":
+ logging.basicConfig(level=logging.INFO)
+ unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/ya.make b/contrib/libs/grpc/src/python/grpcio_tests/ya.make
index 09ee7f2fb5..b0642eae34 100644
--- a/contrib/libs/grpc/src/python/grpcio_tests/ya.make
+++ b/contrib/libs/grpc/src/python/grpcio_tests/ya.make
@@ -1,13 +1,13 @@
PY3TEST()
-LICENSE(Apache-2.0)
+LICENSE(Apache-2.0)
+
+LICENSE_TEXTS(.yandex_meta/licenses.list.txt)
+
+PEERDIR(
+ contrib/libs/grpc/python
+)
-LICENSE_TEXTS(.yandex_meta/licenses.list.txt)
-
-PEERDIR(
- contrib/libs/grpc/python
-)
-
NO_LINT()
PY_SRCS(
@@ -28,7 +28,7 @@ PY_SRCS(
tests/unit/_cython/test_utilities.py
tests/unit/_exit_scenarios.py
tests/unit/_from_grpc_import_star.py
- tests/unit/_rpc_test_helpers.py
+ tests/unit/_rpc_test_helpers.py
tests/unit/_server_shutdown_scenarios.py
tests/unit/_signal_client.py
tests/unit/_tcp_proxy.py
@@ -76,7 +76,7 @@ TEST_SRCS(
tests/unit/_channel_ready_future_test.py
# FLAKY
# tests/unit/_compression_test.py
- tests/unit/_contextvars_propagation_test.py
+ tests/unit/_contextvars_propagation_test.py
tests/unit/_credentials_test.py
tests/unit/_cython/_cancel_many_calls_test.py
tests/unit/_cython/_channel_test.py
@@ -87,7 +87,7 @@ TEST_SRCS(
tests/unit/_cython/_server_test.py
tests/unit/_cython/cygrpc_test.py
tests/unit/_dns_resolver_test.py
- tests/unit/_dynamic_stubs_test.py
+ tests/unit/_dynamic_stubs_test.py
tests/unit/_empty_message_test.py
tests/unit/_error_message_encoding_test.py
tests/unit/_exit_test.py
@@ -102,8 +102,8 @@ TEST_SRCS(
tests/unit/_metadata_test.py
tests/unit/_reconnect_test.py
tests/unit/_resource_exhausted_test.py
- tests/unit/_rpc_part_1_test.py
- tests/unit/_rpc_part_2_test.py
+ tests/unit/_rpc_part_1_test.py
+ tests/unit/_rpc_part_2_test.py
tests/unit/_server_shutdown_test.py
# tests.testing
# tests/unit/_server_ssl_cert_config_test.py