author     thegeorg <thegeorg@yandex-team.com>  2022-08-22 15:16:31 +0300
committer  thegeorg <thegeorg@yandex-team.com>  2022-08-22 15:16:31 +0300
commit     0892cb5af1324d28882c8ff6fa82ed0d23140b06 (patch)
tree       cd52bc37ebd26ed30a1f12862e20aed7e6050b26 /contrib/libs
parent     11f534cfab6e582005dd74706656fb189f351507 (diff)
download   ydb-0892cb5af1324d28882c8ff6fa82ed0d23140b06.tar.gz
Markup tests with ya:explicit_data to avoid exporting metadata to github
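(For context: ya:explicit_data is a tag consumed by the ya make test machinery. The sketch below is a hypothetical illustration of how such a tag could be applied to a test module's ya.make, assuming the standard Arcadia PY3TEST()/TAG()/TEST_SRCS() macros; it is not taken from this commit, which only deletes the files listed in the diffstat.)

    PY3TEST()

    # Hypothetical example: mark the test so its data/metadata dependencies
    # are treated as explicit and are not exported to the github mirror.
    TAG(ya:explicit_data)

    TEST_SRCS(
        _sanity_test.py
    )

    END()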
Diffstat (limited to 'contrib/libs')
-rw-r--r--  contrib/libs/cctz/test/.yandex_meta/licenses.list.txt | 16
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/.yandex_meta/licenses.list.txt | 80
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/commands.py | 350
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py | 17
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/setup.py | 111
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py | 21
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py | 106
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py | 439
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py | 239
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py | 48
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/admin/test_admin.py | 60
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py | 40
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py | 480
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/csds/test_csds.py | 147
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py | 158
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py | 72
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py | 451
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py | 287
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py | 159
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py | 47
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py | 51
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py | 57
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py | 180
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py | 482
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py | 42
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py | 76
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py | 96
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py | 244
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py | 44
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py | 93
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py | 80
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py | 56
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py | 197
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py | 195
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py | 186
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py | 159
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py | 45
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py | 58
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py | 104
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py | 43
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py | 33
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py | 235
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py | 308
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py | 95
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py | 207
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py | 165
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py | 193
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py | 383
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py | 511
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py | 140
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py | 55
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/__init__.py | 21
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py | 27
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_client.py | 187
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_servicer.py | 56
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py | 46
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker.py | 59
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker_servicer.py | 372
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/channelz_servicer_test.py | 473
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py | 280
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/client.py | 61
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/local_interop_test.py | 134
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/methods.py | 458
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/server.py | 49
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/reflection_servicer_test.py | 194
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/grpc_status_test.py | 177
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_common.py | 121
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_constants.py | 19
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_metadata_test.py | 137
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py | 67
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py | 151
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/abort_test.py | 152
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/aio_rpc_error_test.py | 52
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/auth_context_test.py | 194
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py | 846
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_argument_test.py | 179
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_ready_test.py | 70
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py | 231
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_stream_interceptor_test.py | 204
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_unary_interceptor_test.py | 518
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_stream_interceptor_test.py | 396
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_unary_interceptor_test.py | 702
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/close_channel_test.py | 139
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compatibility_test.py | 367
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compression_test.py | 196
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/connectivity_test.py | 115
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/context_peer_test.py | 67
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/done_callback_test.py | 276
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py | 33
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/metadata_test.py | 325
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/outside_init_test.py | 76
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/secure_call_test.py | 132
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_interceptor_test.py | 334
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py | 592
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_time_remaining_test.py | 71
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/timeout_test.py | 178
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_connection_test.py | 160
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_ready_test.py | 151
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/_test_server.py | 61
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/close_channel_test.py | 105
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/__init__.py | 21
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client.py | 501
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_server.py | 177
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/__init__.py | 13
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_leak_test.py | 98
-rw-r--r--  contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_simple_stubs_test.py | 416
-rw-r--r--  contrib/libs/re2/re2/testing/.yandex_meta/licenses.list.txt | 69
127 files changed, 0 insertions, 19707 deletions
diff --git a/contrib/libs/cctz/test/.yandex_meta/licenses.list.txt b/contrib/libs/cctz/test/.yandex_meta/licenses.list.txt
deleted file mode 100644
index aa1605a0ac..0000000000
--- a/contrib/libs/cctz/test/.yandex_meta/licenses.list.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-====================Apache-2.0====================
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-
-====================COPYRIGHT====================
-// Copyright 2016 Google Inc. All Rights Reserved.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/.yandex_meta/licenses.list.txt b/contrib/libs/grpc/src/python/grpcio_tests/.yandex_meta/licenses.list.txt
deleted file mode 100644
index bc0e4575ef..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/.yandex_meta/licenses.list.txt
+++ /dev/null
@@ -1,80 +0,0 @@
-====================Apache-2.0====================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-====================COPYRIGHT====================
- * Copyright 2015 gRPC authors.
-
-
-====================COPYRIGHT====================
- * Copyright 2016 gRPC authors.
-
-
-====================COPYRIGHT====================
- * Copyright 2017 gRPC authors.
-
-
-====================COPYRIGHT====================
- * Copyright 2018 gRPC authors.
-
-
-====================COPYRIGHT====================
- * Copyright 2019 gRPC authors.
-
-
-====================COPYRIGHT====================
- * Copyright 2020 gRPC authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2018 The gRPC Authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2019 The gRPC Authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2019 The gRPC authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2019 the gRPC authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2020 The gRPC Authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2020 The gRPC authors.
-
-
-====================COPYRIGHT====================
-# Copyright 2021 The gRPC authors.
-
-
-====================COPYRIGHT====================
-// Copyright 2018 The gRPC Authors
-
-
-====================COPYRIGHT====================
-// Copyright 2019 The gRPC Authors
-
-
-====================COPYRIGHT====================
-// Copyright 2020 The gRPC Authors
-
-
-====================COPYRIGHT====================
-// Copyright 2021 The gRPC Authors
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/commands.py b/contrib/libs/grpc/src/python/grpcio_tests/commands.py
deleted file mode 100644
index 84331aed63..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/commands.py
+++ /dev/null
@@ -1,350 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Provides distutils command classes for the gRPC Python setup process."""
-
-from distutils import errors as _errors
-import glob
-import os
-import os.path
-import platform
-import re
-import shutil
-import sys
-
-import setuptools
-from setuptools.command import build_ext
-from setuptools.command import build_py
-from setuptools.command import easy_install
-from setuptools.command import install
-from setuptools.command import test
-
-PYTHON_STEM = os.path.dirname(os.path.abspath(__file__))
-GRPC_STEM = os.path.abspath(PYTHON_STEM + '../../../../')
-GRPC_PROTO_STEM = os.path.join(GRPC_STEM, 'src', 'proto')
-PROTO_STEM = os.path.join(PYTHON_STEM, 'src', 'proto')
-PYTHON_PROTO_TOP_LEVEL = os.path.join(PYTHON_STEM, 'src')
-
-
-class CommandError(object):
- pass
-
-
-class GatherProto(setuptools.Command):
-
- description = 'gather proto dependencies'
- user_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- pass
-
- def run(self):
- # TODO(atash) ensure that we're running from the repository directory when
- # this command is used
- try:
- shutil.rmtree(PROTO_STEM)
- except Exception as error:
- # We don't care if this command fails
- pass
- shutil.copytree(GRPC_PROTO_STEM, PROTO_STEM)
- for root, _, _ in os.walk(PYTHON_PROTO_TOP_LEVEL):
- path = os.path.join(root, '__init__.py')
- open(path, 'a').close()
-
-
-class BuildPy(build_py.build_py):
- """Custom project build command."""
-
- def run(self):
- try:
- self.run_command('build_package_protos')
- except CommandError as error:
- sys.stderr.write('warning: %s\n' % error.message)
- build_py.build_py.run(self)
-
-
-class TestLite(setuptools.Command):
- """Command to run tests without fetching or building anything."""
-
- description = 'run tests without fetching or building anything.'
- user_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- # distutils requires this override.
- pass
-
- def run(self):
- self._add_eggs_to_path()
-
- import tests
- loader = tests.Loader()
- loader.loadTestsFromNames(['tests'])
- runner = tests.Runner(dedicated_threads=True)
- result = runner.run(loader.suite)
- if not result.wasSuccessful():
- sys.exit('Test failure')
-
- def _add_eggs_to_path(self):
- """Fetch install and test requirements"""
- self.distribution.fetch_build_eggs(self.distribution.install_requires)
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
-
-
-class TestPy3Only(setuptools.Command):
- """Command to run tests for Python 3+ features.
-
- This does not include asyncio tests, which are housed in a separate
- directory.
- """
-
- description = 'run tests for py3+ features'
- user_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- pass
-
- def run(self):
- self._add_eggs_to_path()
- import tests
- loader = tests.Loader()
- loader.loadTestsFromNames(['tests_py3_only'])
- runner = tests.Runner()
- result = runner.run(loader.suite)
- if not result.wasSuccessful():
- sys.exit('Test failure')
-
- def _add_eggs_to_path(self):
- self.distribution.fetch_build_eggs(self.distribution.install_requires)
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
-
-
-class TestAio(setuptools.Command):
- """Command to run aio tests without fetching or building anything."""
-
- description = 'run aio tests without fetching or building anything.'
- user_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- pass
-
- def run(self):
- self._add_eggs_to_path()
-
- import tests
- loader = tests.Loader()
- loader.loadTestsFromNames(['tests_aio'])
- # Even without dedicated threads, the framework will somehow spawn a
- # new thread for tests to run upon. New thread doesn't have event loop
- # attached by default, so initialization is needed.
- runner = tests.Runner(dedicated_threads=False)
- result = runner.run(loader.suite)
- if not result.wasSuccessful():
- sys.exit('Test failure')
-
- def _add_eggs_to_path(self):
- """Fetch install and test requirements"""
- self.distribution.fetch_build_eggs(self.distribution.install_requires)
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
-
-
-class TestGevent(setuptools.Command):
- """Command to run tests w/gevent."""
-
- BANNED_TESTS = (
- # Fork support is not compatible with gevent
- 'fork._fork_interop_test.ForkInteropTest',
- # These tests send a lot of RPCs and are really slow on gevent. They will
- # eventually succeed, but need to dig into performance issues.
- 'unit._cython._no_messages_server_completion_queue_per_call_test.Test.test_rpcs',
- 'unit._cython._no_messages_single_server_completion_queue_test.Test.test_rpcs',
- 'unit._compression_test',
- # TODO(https://github.com/grpc/grpc/issues/16890) enable this test
- 'unit._cython._channel_test.ChannelTest.test_multiple_channels_lonely_connectivity',
- # I have no idea why this doesn't work in gevent, but it shouldn't even be
- # using the c-core
- 'testing._client_test.ClientTest.test_infinite_request_stream_real_time',
- # TODO(https://github.com/grpc/grpc/issues/15743) enable this test
- 'unit._session_cache_test.SSLSessionCacheTest.testSSLSessionCacheLRU',
- # TODO(https://github.com/grpc/grpc/issues/14789) enable this test
- 'unit._server_ssl_cert_config_test',
- # TODO(https://github.com/grpc/grpc/issues/14901) enable this test
- 'protoc_plugin._python_plugin_test.PythonPluginTest',
- 'protoc_plugin._python_plugin_test.SimpleStubsPluginTest',
- # Beta API is unsupported for gevent
- 'protoc_plugin.beta_python_plugin_test',
- 'unit.beta._beta_features_test',
- # TODO(https://github.com/grpc/grpc/issues/15411) unpin gevent version
- # This test will stuck while running higher version of gevent
- 'unit._auth_context_test.AuthContextTest.testSessionResumption',
- # TODO(https://github.com/grpc/grpc/issues/15411) enable these tests
- 'unit._channel_ready_future_test.ChannelReadyFutureTest.test_immediately_connectable_channel_connectivity',
- "unit._cython._channel_test.ChannelTest.test_single_channel_lonely_connectivity",
- 'unit._exit_test.ExitTest.test_in_flight_unary_unary_call',
- 'unit._exit_test.ExitTest.test_in_flight_unary_stream_call',
- 'unit._exit_test.ExitTest.test_in_flight_stream_unary_call',
- 'unit._exit_test.ExitTest.test_in_flight_stream_stream_call',
- 'unit._exit_test.ExitTest.test_in_flight_partial_unary_stream_call',
- 'unit._exit_test.ExitTest.test_in_flight_partial_stream_unary_call',
- 'unit._exit_test.ExitTest.test_in_flight_partial_stream_stream_call',
- # TODO(https://github.com/grpc/grpc/issues/18980): Reenable.
- 'unit._signal_handling_test.SignalHandlingTest',
- 'unit._metadata_flags_test',
- 'health_check._health_servicer_test.HealthServicerTest.test_cancelled_watch_removed_from_watch_list',
- # TODO(https://github.com/grpc/grpc/issues/17330) enable these three tests
- 'channelz._channelz_servicer_test.ChannelzServicerTest.test_many_subchannels',
- 'channelz._channelz_servicer_test.ChannelzServicerTest.test_many_subchannels_and_sockets',
- 'channelz._channelz_servicer_test.ChannelzServicerTest.test_streaming_rpc',
- # TODO(https://github.com/grpc/grpc/issues/15411) enable this test
- 'unit._cython._channel_test.ChannelTest.test_negative_deadline_connectivity',
- # TODO(https://github.com/grpc/grpc/issues/15411) enable this test
- 'unit._local_credentials_test.LocalCredentialsTest',
- # TODO(https://github.com/grpc/grpc/issues/22020) LocalCredentials
- # aren't supported with custom io managers.
- 'unit._contextvars_propagation_test',
- 'testing._time_test.StrictRealTimeTest',
- )
- BANNED_WINDOWS_TESTS = (
- # TODO(https://github.com/grpc/grpc/pull/15411) enable this test
- 'unit._dns_resolver_test.DNSResolverTest.test_connect_loopback',
- # TODO(https://github.com/grpc/grpc/pull/15411) enable this test
- 'unit._server_test.ServerTest.test_failed_port_binding_exception',
- )
- BANNED_MACOS_TESTS = (
- # TODO(https://github.com/grpc/grpc/issues/15411) enable this test
- 'unit._dynamic_stubs_test.DynamicStubTest',)
- description = 'run tests with gevent. Assumes grpc/gevent are installed'
- user_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- # distutils requires this override.
- pass
-
- def run(self):
- from gevent import monkey
- monkey.patch_all()
-
- import grpc.experimental.gevent
-
- import tests
- grpc.experimental.gevent.init_gevent()
-
- import gevent
-
- import tests
- loader = tests.Loader()
- loader.loadTestsFromNames(['tests', 'tests_gevent'])
- runner = tests.Runner()
- if sys.platform == 'win32':
- runner.skip_tests(self.BANNED_TESTS + self.BANNED_WINDOWS_TESTS)
- elif sys.platform == 'darwin':
- runner.skip_tests(self.BANNED_TESTS + self.BANNED_MACOS_TESTS)
- else:
- runner.skip_tests(self.BANNED_TESTS)
- result = gevent.spawn(runner.run, loader.suite)
- result.join()
- if not result.value.wasSuccessful():
- sys.exit('Test failure')
-
-
-class RunInterop(test.test):
-
- description = 'run interop test client/server'
- user_options = [
- ('args=', None, 'pass-thru arguments for the client/server'),
- ('client', None, 'flag indicating to run the client'),
- ('server', None, 'flag indicating to run the server'),
- ('use-asyncio', None, 'flag indicating to run the asyncio stack')
- ]
-
- def initialize_options(self):
- self.args = ''
- self.client = False
- self.server = False
- self.use_asyncio = False
-
- def finalize_options(self):
- if self.client and self.server:
- raise _errors.DistutilsOptionError(
- 'you may only specify one of client or server')
-
- def run(self):
- if self.distribution.install_requires:
- self.distribution.fetch_build_eggs(
- self.distribution.install_requires)
- if self.distribution.tests_require:
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
- if self.client:
- self.run_client()
- elif self.server:
- self.run_server()
-
- def run_server(self):
- # We import here to ensure that our setuptools parent has had a chance to
- # edit the Python system path.
- if self.use_asyncio:
- import asyncio
-
- from tests_aio.interop import server
- sys.argv[1:] = self.args.split()
- asyncio.get_event_loop().run_until_complete(server.serve())
- else:
- from tests.interop import server
- sys.argv[1:] = self.args.split()
- server.serve()
-
- def run_client(self):
- # We import here to ensure that our setuptools parent has had a chance to
- # edit the Python system path.
- from tests.interop import client
- sys.argv[1:] = self.args.split()
- client.test_interoperability()
-
-
-class RunFork(test.test):
-
- description = 'run fork test client'
- user_options = [('args=', 'a', 'pass-thru arguments for the client')]
-
- def initialize_options(self):
- self.args = ''
-
- def finalize_options(self):
- # distutils requires this override.
- pass
-
- def run(self):
- if self.distribution.install_requires:
- self.distribution.fetch_build_eggs(
- self.distribution.install_requires)
- if self.distribution.tests_require:
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
- # We import here to ensure that our setuptools parent has had a chance to
- # edit the Python system path.
- from tests.fork import client
- sys.argv[1:] = self.args.split()
- client.test_fork()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py b/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py
deleted file mode 100644
index 0a2b753cd3..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/grpc_version.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_tests/grpc_version.py.template`!!!
-
-VERSION = '1.43.2'
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/setup.py b/contrib/libs/grpc/src/python/grpcio_tests/setup.py
deleted file mode 100644
index 90f722cc27..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/setup.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""A setup module for the gRPC Python package."""
-
-import multiprocessing
-import os
-import os.path
-import sys
-
-import grpc_tools.command
-import setuptools
-
-PY3 = sys.version_info.major == 3
-
-# Ensure we're in the proper directory whether or not we're being used by pip.
-os.chdir(os.path.dirname(os.path.abspath(__file__)))
-
-# Break import-style to ensure we can actually find our in-repo dependencies.
-import commands
-import grpc_version
-
-LICENSE = 'Apache License 2.0'
-
-PACKAGE_DIRECTORIES = {
- '': '.',
-}
-
-INSTALL_REQUIRES = (
- 'coverage>=4.0', 'grpcio>={version}'.format(version=grpc_version.VERSION),
- 'grpcio-channelz>={version}'.format(version=grpc_version.VERSION),
- 'grpcio-status>={version}'.format(version=grpc_version.VERSION),
- 'grpcio-tools>={version}'.format(version=grpc_version.VERSION),
- 'grpcio-health-checking>={version}'.format(version=grpc_version.VERSION),
- 'oauth2client>=1.4.7', 'protobuf>=3.6.0', 'six>=1.10',
- 'google-auth>=1.17.2', 'requests>=2.14.2')
-
-if not PY3:
- INSTALL_REQUIRES += ('futures>=2.2.0', 'enum34>=1.0.4')
-
-COMMAND_CLASS = {
- # Run `preprocess` *before* doing any packaging!
- 'preprocess': commands.GatherProto,
- 'build_package_protos': grpc_tools.command.BuildPackageProtos,
- 'build_py': commands.BuildPy,
- 'run_fork': commands.RunFork,
- 'run_interop': commands.RunInterop,
- 'test_lite': commands.TestLite,
- 'test_gevent': commands.TestGevent,
- 'test_aio': commands.TestAio,
- 'test_py3_only': commands.TestPy3Only,
-}
-
-PACKAGE_DATA = {
- 'tests.interop': [
- 'credentials/ca.pem',
- 'credentials/server1.key',
- 'credentials/server1.pem',
- ],
- 'tests.protoc_plugin.protos.invocation_testing': ['same.proto',],
- 'tests.protoc_plugin.protos.invocation_testing.split_messages': [
- 'messages.proto',
- ],
- 'tests.protoc_plugin.protos.invocation_testing.split_services': [
- 'services.proto',
- ],
- 'tests.testing.proto': [
- 'requests.proto',
- 'services.proto',
- ],
- 'tests.unit': [
- 'credentials/ca.pem',
- 'credentials/server1.key',
- 'credentials/server1.pem',
- ],
- 'tests': ['tests.json'],
-}
-
-TEST_SUITE = 'tests'
-TEST_LOADER = 'tests:Loader'
-TEST_RUNNER = 'tests:Runner'
-TESTS_REQUIRE = INSTALL_REQUIRES
-
-PACKAGES = setuptools.find_packages('.')
-
-if __name__ == "__main__":
- multiprocessing.freeze_support()
- setuptools.setup(
- name='grpcio-tests',
- version=grpc_version.VERSION,
- license=LICENSE,
- packages=list(PACKAGES),
- package_dir=PACKAGE_DIRECTORIES,
- package_data=PACKAGE_DATA,
- install_requires=INSTALL_REQUIRES,
- cmdclass=COMMAND_CLASS,
- tests_require=TESTS_REQUIRE,
- test_suite=TEST_SUITE,
- test_loader=TEST_LOADER,
- test_runner=TEST_RUNNER,
- )
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py
deleted file mode 100644
index d2466fd022..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-from tests import _loader
-from tests import _runner
-
-Loader = _loader.Loader
-Runner = _runner.Runner
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py
deleted file mode 100644
index 80c107aa8e..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-import importlib
-import pkgutil
-import re
-import unittest
-
-import coverage
-
-TEST_MODULE_REGEX = r'^.*_test$'
-
-
-class Loader(object):
- """Test loader for setuptools test suite support.
-
- Attributes:
- suite (unittest.TestSuite): All tests collected by the loader.
- loader (unittest.TestLoader): Standard Python unittest loader to be ran per
- module discovered.
- module_matcher (re.RegexObject): A regular expression object to match
- against module names and determine whether or not the discovered module
- contributes to the test suite.
- """
-
- def __init__(self):
- self.suite = unittest.TestSuite()
- self.loader = unittest.TestLoader()
- self.module_matcher = re.compile(TEST_MODULE_REGEX)
-
- def loadTestsFromNames(self, names, module=None):
- """Function mirroring TestLoader::loadTestsFromNames, as expected by
- setuptools.setup argument `test_loader`."""
- # ensure that we capture decorators and definitions (else our coverage
- # measure unnecessarily suffers)
- coverage_context = coverage.Coverage(data_suffix=True)
- coverage_context.start()
- imported_modules = tuple(
- importlib.import_module(name) for name in names)
- for imported_module in imported_modules:
- self.visit_module(imported_module)
- for imported_module in imported_modules:
- try:
- package_paths = imported_module.__path__
- except AttributeError:
- continue
- self.walk_packages(package_paths)
- coverage_context.stop()
- coverage_context.save()
- return self.suite
-
- def walk_packages(self, package_paths):
- """Walks over the packages, dispatching `visit_module` calls.
-
- Args:
- package_paths (list): A list of paths over which to walk through modules
- along.
- """
- for importer, module_name, is_package in (
- pkgutil.walk_packages(package_paths)):
- module = importer.find_module(module_name).load_module(module_name)
- self.visit_module(module)
-
- def visit_module(self, module):
- """Visits the module, adding discovered tests to the test suite.
-
- Args:
- module (module): Module to match against self.module_matcher; if matched
- it has its tests loaded via self.loader into self.suite.
- """
- if self.module_matcher.match(module.__name__):
- module_suite = self.loader.loadTestsFromModule(module)
- self.suite.addTest(module_suite)
-
-
-def iterate_suite_cases(suite):
- """Generator over all unittest.TestCases in a unittest.TestSuite.
-
- Args:
- suite (unittest.TestSuite): Suite to iterate over in the generator.
-
- Returns:
- generator: A generator over all unittest.TestCases in `suite`.
- """
- for item in suite:
- if isinstance(item, unittest.TestSuite):
- for child_item in iterate_suite_cases(item):
- yield child_item
- elif isinstance(item, unittest.TestCase):
- yield item
- else:
- raise ValueError('unexpected suite item of type {}'.format(
- type(item)))
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py
deleted file mode 100644
index 389d5f4f96..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py
+++ /dev/null
@@ -1,439 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-import collections
-import itertools
-import traceback
-import unittest
-from xml.etree import ElementTree
-
-import coverage
-from six import moves
-
-from tests import _loader
-
-
-class CaseResult(
- collections.namedtuple('CaseResult', [
- 'id', 'name', 'kind', 'stdout', 'stderr', 'skip_reason', 'traceback'
- ])):
- """A serializable result of a single test case.
-
- Attributes:
- id (object): Any serializable object used to denote the identity of this
- test case.
- name (str or None): A human-readable name of the test case.
- kind (CaseResult.Kind): The kind of test result.
- stdout (object or None): Output on stdout, or None if nothing was captured.
- stderr (object or None): Output on stderr, or None if nothing was captured.
- skip_reason (object or None): The reason the test was skipped. Must be
- something if self.kind is CaseResult.Kind.SKIP, else None.
- traceback (object or None): The traceback of the test. Must be something if
- self.kind is CaseResult.Kind.{ERROR, FAILURE, EXPECTED_FAILURE}, else
- None.
- """
-
- class Kind(object):
- UNTESTED = 'untested'
- RUNNING = 'running'
- ERROR = 'error'
- FAILURE = 'failure'
- SUCCESS = 'success'
- SKIP = 'skip'
- EXPECTED_FAILURE = 'expected failure'
- UNEXPECTED_SUCCESS = 'unexpected success'
-
- def __new__(cls,
- id=None,
- name=None,
- kind=None,
- stdout=None,
- stderr=None,
- skip_reason=None,
- traceback=None):
- """Helper keyword constructor for the namedtuple.
-
- See this class' attributes for information on the arguments."""
- assert id is not None
- assert name is None or isinstance(name, str)
- if kind is CaseResult.Kind.UNTESTED:
- pass
- elif kind is CaseResult.Kind.RUNNING:
- pass
- elif kind is CaseResult.Kind.ERROR:
- assert traceback is not None
- elif kind is CaseResult.Kind.FAILURE:
- assert traceback is not None
- elif kind is CaseResult.Kind.SUCCESS:
- pass
- elif kind is CaseResult.Kind.SKIP:
- assert skip_reason is not None
- elif kind is CaseResult.Kind.EXPECTED_FAILURE:
- assert traceback is not None
- elif kind is CaseResult.Kind.UNEXPECTED_SUCCESS:
- pass
- else:
- assert False
- return super(cls, CaseResult).__new__(cls, id, name, kind, stdout,
- stderr, skip_reason, traceback)
-
- def updated(self,
- name=None,
- kind=None,
- stdout=None,
- stderr=None,
- skip_reason=None,
- traceback=None):
- """Get a new validated CaseResult with the fields updated.
-
- See this class' attributes for information on the arguments."""
- name = self.name if name is None else name
- kind = self.kind if kind is None else kind
- stdout = self.stdout if stdout is None else stdout
- stderr = self.stderr if stderr is None else stderr
- skip_reason = self.skip_reason if skip_reason is None else skip_reason
- traceback = self.traceback if traceback is None else traceback
- return CaseResult(id=self.id,
- name=name,
- kind=kind,
- stdout=stdout,
- stderr=stderr,
- skip_reason=skip_reason,
- traceback=traceback)
-
-
-class AugmentedResult(unittest.TestResult):
- """unittest.Result that keeps track of additional information.
-
- Uses CaseResult objects to store test-case results, providing additional
- information beyond that of the standard Python unittest library, such as
- standard output.
-
- Attributes:
- id_map (callable): A unary callable mapping unittest.TestCase objects to
- unique identifiers.
- cases (dict): A dictionary mapping from the identifiers returned by id_map
- to CaseResult objects corresponding to those IDs.
- """
-
- def __init__(self, id_map):
- """Initialize the object with an identifier mapping.
-
- Arguments:
- id_map (callable): Corresponds to the attribute `id_map`."""
- super(AugmentedResult, self).__init__()
- self.id_map = id_map
- self.cases = None
-
- def startTestRun(self):
- """See unittest.TestResult.startTestRun."""
- super(AugmentedResult, self).startTestRun()
- self.cases = dict()
-
- def startTest(self, test):
- """See unittest.TestResult.startTest."""
- super(AugmentedResult, self).startTest(test)
- case_id = self.id_map(test)
- self.cases[case_id] = CaseResult(id=case_id,
- name=test.id(),
- kind=CaseResult.Kind.RUNNING)
-
- def addError(self, test, err):
- """See unittest.TestResult.addError."""
- super(AugmentedResult, self).addError(test, err)
- case_id = self.id_map(test)
- self.cases[case_id] = self.cases[case_id].updated(
- kind=CaseResult.Kind.ERROR, traceback=err)
-
- def addFailure(self, test, err):
- """See unittest.TestResult.addFailure."""
- super(AugmentedResult, self).addFailure(test, err)
- case_id = self.id_map(test)
- self.cases[case_id] = self.cases[case_id].updated(
- kind=CaseResult.Kind.FAILURE, traceback=err)
-
- def addSuccess(self, test):
- """See unittest.TestResult.addSuccess."""
- super(AugmentedResult, self).addSuccess(test)
- case_id = self.id_map(test)
- self.cases[case_id] = self.cases[case_id].updated(
- kind=CaseResult.Kind.SUCCESS)
-
- def addSkip(self, test, reason):
- """See unittest.TestResult.addSkip."""
- super(AugmentedResult, self).addSkip(test, reason)
- case_id = self.id_map(test)
- self.cases[case_id] = self.cases[case_id].updated(
- kind=CaseResult.Kind.SKIP, skip_reason=reason)
-
- def addExpectedFailure(self, test, err):
- """See unittest.TestResult.addExpectedFailure."""
- super(AugmentedResult, self).addExpectedFailure(test, err)
- case_id = self.id_map(test)
- self.cases[case_id] = self.cases[case_id].updated(
- kind=CaseResult.Kind.EXPECTED_FAILURE, traceback=err)
-
- def addUnexpectedSuccess(self, test):
- """See unittest.TestResult.addUnexpectedSuccess."""
- super(AugmentedResult, self).addUnexpectedSuccess(test)
- case_id = self.id_map(test)
- self.cases[case_id] = self.cases[case_id].updated(
- kind=CaseResult.Kind.UNEXPECTED_SUCCESS)
-
- def set_output(self, test, stdout, stderr):
- """Set the output attributes for the CaseResult corresponding to a test.
-
- Args:
- test (unittest.TestCase): The TestCase to set the outputs of.
- stdout (str): Output from stdout to assign to self.id_map(test).
- stderr (str): Output from stderr to assign to self.id_map(test).
- """
- case_id = self.id_map(test)
- self.cases[case_id] = self.cases[case_id].updated(
- stdout=stdout.decode(), stderr=stderr.decode())
-
- def augmented_results(self, filter):
- """Convenience method to retrieve filtered case results.
-
- Args:
- filter (callable): A unary predicate to filter over CaseResult objects.
- """
- return (self.cases[case_id]
- for case_id in self.cases
- if filter(self.cases[case_id]))
-
-
-class CoverageResult(AugmentedResult):
- """Extension to AugmentedResult adding coverage.py support per test.\
-
- Attributes:
- coverage_context (coverage.Coverage): coverage.py management object.
- """
-
- def __init__(self, id_map):
- """See AugmentedResult.__init__."""
- super(CoverageResult, self).__init__(id_map=id_map)
- self.coverage_context = None
-
- def startTest(self, test):
- """See unittest.TestResult.startTest.
-
- Additionally initializes and begins code coverage tracking."""
- super(CoverageResult, self).startTest(test)
- self.coverage_context = coverage.Coverage(data_suffix=True)
- self.coverage_context.start()
-
- def stopTest(self, test):
- """See unittest.TestResult.stopTest.
-
- Additionally stops and deinitializes code coverage tracking."""
- super(CoverageResult, self).stopTest(test)
- self.coverage_context.stop()
- self.coverage_context.save()
- self.coverage_context = None
-
-
-class _Colors(object):
- """Namespaced constants for terminal color magic numbers."""
- HEADER = '\033[95m'
- INFO = '\033[94m'
- OK = '\033[92m'
- WARN = '\033[93m'
- FAIL = '\033[91m'
- BOLD = '\033[1m'
- UNDERLINE = '\033[4m'
- END = '\033[0m'
-
-
-class TerminalResult(CoverageResult):
- """Extension to CoverageResult adding basic terminal reporting."""
-
- def __init__(self, out, id_map):
- """Initialize the result object.
-
- Args:
- out (file-like): Output file to which terminal-colored live results will
- be written.
- id_map (callable): See AugmentedResult.__init__.
- """
- super(TerminalResult, self).__init__(id_map=id_map)
- self.out = out
-
- def startTestRun(self):
- """See unittest.TestResult.startTestRun."""
- super(TerminalResult, self).startTestRun()
- self.out.write(_Colors.HEADER + 'Testing gRPC Python...\n' +
- _Colors.END)
-
- def stopTestRun(self):
- """See unittest.TestResult.stopTestRun."""
- super(TerminalResult, self).stopTestRun()
- self.out.write(summary(self))
- self.out.flush()
-
- def addError(self, test, err):
- """See unittest.TestResult.addError."""
- super(TerminalResult, self).addError(test, err)
- self.out.write(_Colors.FAIL + 'ERROR {}\n'.format(test.id()) +
- _Colors.END)
- self.out.flush()
-
- def addFailure(self, test, err):
- """See unittest.TestResult.addFailure."""
- super(TerminalResult, self).addFailure(test, err)
- self.out.write(_Colors.FAIL + 'FAILURE {}\n'.format(test.id()) +
- _Colors.END)
- self.out.flush()
-
- def addSuccess(self, test):
- """See unittest.TestResult.addSuccess."""
- super(TerminalResult, self).addSuccess(test)
- self.out.write(_Colors.OK + 'SUCCESS {}\n'.format(test.id()) +
- _Colors.END)
- self.out.flush()
-
- def addSkip(self, test, reason):
- """See unittest.TestResult.addSkip."""
- super(TerminalResult, self).addSkip(test, reason)
- self.out.write(_Colors.INFO + 'SKIP {}\n'.format(test.id()) +
- _Colors.END)
- self.out.flush()
-
- def addExpectedFailure(self, test, err):
- """See unittest.TestResult.addExpectedFailure."""
- super(TerminalResult, self).addExpectedFailure(test, err)
- self.out.write(_Colors.INFO + 'FAILURE_OK {}\n'.format(test.id()) +
- _Colors.END)
- self.out.flush()
-
- def addUnexpectedSuccess(self, test):
- """See unittest.TestResult.addUnexpectedSuccess."""
- super(TerminalResult, self).addUnexpectedSuccess(test)
- self.out.write(_Colors.INFO + 'UNEXPECTED_OK {}\n'.format(test.id()) +
- _Colors.END)
- self.out.flush()
-
-
-def _traceback_string(type, value, trace):
- """Generate a descriptive string of a Python exception traceback.
-
- Args:
- type (class): The type of the exception.
- value (Exception): The value of the exception.
- trace (traceback): Traceback of the exception.
-
- Returns:
- str: Formatted exception descriptive string.
- """
- buffer = moves.cStringIO()
- traceback.print_exception(type, value, trace, file=buffer)
- return buffer.getvalue()
-
-
-def summary(result):
- """A summary string of a result object.
-
- Args:
- result (AugmentedResult): The result object to get the summary of.
-
- Returns:
- str: The summary string.
- """
- assert isinstance(result, AugmentedResult)
- untested = list(
- result.augmented_results(
- lambda case_result: case_result.kind is CaseResult.Kind.UNTESTED))
- running = list(
- result.augmented_results(
- lambda case_result: case_result.kind is CaseResult.Kind.RUNNING))
- failures = list(
- result.augmented_results(
- lambda case_result: case_result.kind is CaseResult.Kind.FAILURE))
- errors = list(
- result.augmented_results(
- lambda case_result: case_result.kind is CaseResult.Kind.ERROR))
- successes = list(
- result.augmented_results(
- lambda case_result: case_result.kind is CaseResult.Kind.SUCCESS))
- skips = list(
- result.augmented_results(
- lambda case_result: case_result.kind is CaseResult.Kind.SKIP))
- expected_failures = list(
- result.augmented_results(lambda case_result: case_result.kind is
- CaseResult.Kind.EXPECTED_FAILURE))
- unexpected_successes = list(
- result.augmented_results(lambda case_result: case_result.kind is
- CaseResult.Kind.UNEXPECTED_SUCCESS))
- running_names = [case.name for case in running]
- finished_count = (len(failures) + len(errors) + len(successes) +
- len(expected_failures) + len(unexpected_successes))
- statistics = ('{finished} tests finished:\n'
- '\t{successful} successful\n'
- '\t{unsuccessful} unsuccessful\n'
- '\t{skipped} skipped\n'
- '\t{expected_fail} expected failures\n'
- '\t{unexpected_successful} unexpected successes\n'
- 'Interrupted Tests:\n'
- '\t{interrupted}\n'.format(
- finished=finished_count,
- successful=len(successes),
- unsuccessful=(len(failures) + len(errors)),
- skipped=len(skips),
- expected_fail=len(expected_failures),
- unexpected_successful=len(unexpected_successes),
- interrupted=str(running_names)))
- tracebacks = '\n\n'.join([
- (_Colors.FAIL + '{test_name}' + _Colors.END + '\n' + _Colors.BOLD +
- 'traceback:' + _Colors.END + '\n' + '{traceback}\n' + _Colors.BOLD +
- 'stdout:' + _Colors.END + '\n' + '{stdout}\n' + _Colors.BOLD +
- 'stderr:' + _Colors.END + '\n' + '{stderr}\n').format(
- test_name=result.name,
- traceback=_traceback_string(*result.traceback),
- stdout=result.stdout,
- stderr=result.stderr)
- for result in itertools.chain(failures, errors)
- ])
- notes = 'Unexpected successes: {}\n'.format(
- [result.name for result in unexpected_successes])
- return statistics + '\nErrors/Failures: \n' + tracebacks + '\n' + notes
-
-
-def jenkins_junit_xml(result):
- """An XML tree object that when written is recognizable by Jenkins.
-
- Args:
- result (AugmentedResult): The result object to get the junit xml output of.
-
- Returns:
- ElementTree.ElementTree: The XML tree.
- """
- assert isinstance(result, AugmentedResult)
- root = ElementTree.Element('testsuites')
- suite = ElementTree.SubElement(root, 'testsuite', {
- 'name': 'Python gRPC tests',
- })
- for case in result.cases.values():
- if case.kind is CaseResult.Kind.SUCCESS:
- ElementTree.SubElement(suite, 'testcase', {
- 'name': case.name,
- })
- elif case.kind in (CaseResult.Kind.ERROR, CaseResult.Kind.FAILURE):
- case_xml = ElementTree.SubElement(suite, 'testcase', {
- 'name': case.name,
- })
- error_xml = ElementTree.SubElement(case_xml, 'error', {})
- error_xml.text = ''.format(case.stderr, case.traceback)
- return ElementTree.ElementTree(element=root)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py
deleted file mode 100644
index 92546fca49..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py
+++ /dev/null
@@ -1,239 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-import collections
-import os
-import select
-import signal
-import sys
-import tempfile
-import threading
-import time
-import unittest
-import uuid
-
-import six
-from six import moves
-
-from tests import _loader
-from tests import _result
-
-
-class CaptureFile(object):
- """A context-managed file to redirect output to a byte array.
-
- Use by invoking `start` (`__enter__`) and at some point invoking `stop`
- (`__exit__`). At any point after the initial call to `start` call `output` to
- get the current redirected output. Note that we don't currently use file
- locking, so calling `output` between calls to `start` and `stop` may muddle
- the result (you should only be doing this during a Python-handled interrupt as
- a last ditch effort to provide output to the user).
-
- Attributes:
- _redirected_fd (int): File descriptor of file to redirect writes from.
- _saved_fd (int): A copy of the original value of the redirected file
- descriptor.
- _into_file (TemporaryFile or None): File to which writes are redirected.
- Only non-None when self is started.
- """
-
- def __init__(self, fd):
- self._redirected_fd = fd
- self._saved_fd = os.dup(self._redirected_fd)
- self._into_file = None
-
- def output(self):
- """Get all output from the redirected-to file if it exists."""
- if self._into_file:
- self._into_file.seek(0)
- return bytes(self._into_file.read())
- else:
- return bytes()
-
- def start(self):
- """Start redirection of writes to the file descriptor."""
- self._into_file = tempfile.TemporaryFile()
- os.dup2(self._into_file.fileno(), self._redirected_fd)
-
- def stop(self):
- """Stop redirection of writes to the file descriptor."""
- # n.b. this dup2 call auto-closes self._redirected_fd
- os.dup2(self._saved_fd, self._redirected_fd)
-
- def write_bypass(self, value):
- """Bypass the redirection and write directly to the original file.
-
- Arguments:
- value (str): What to write to the original file.
- """
- if six.PY3 and not isinstance(value, six.binary_type):
- value = value.encode('ascii')
- if self._saved_fd is None:
- os.write(self._redirect_fd, value)
- else:
- os.write(self._saved_fd, value)
-
- def __enter__(self):
- self.start()
- return self
-
- def __exit__(self, type, value, traceback):
- self.stop()
-
- def close(self):
- """Close any resources used by self not closed by stop()."""
- os.close(self._saved_fd)
-
-
-class AugmentedCase(collections.namedtuple('AugmentedCase', ['case', 'id'])):
- """A test case with a guaranteed unique externally specified identifier.
-
- Attributes:
- case (unittest.TestCase): TestCase we're decorating with an additional
- identifier.
- id (object): Any identifier that may be considered 'unique' for testing
- purposes.
- """
-
- def __new__(cls, case, id=None):
- if id is None:
- id = uuid.uuid4()
- return super(cls, AugmentedCase).__new__(cls, case, id)
-
-
-# NOTE(lidiz) This complex wrapper is not triggering setUpClass nor
-# tearDownClass. Do not use those methods, or fix this wrapper!
-class Runner(object):
-
- def __init__(self, dedicated_threads=False):
- """Constructs the Runner object.
-
- Args:
- dedicated_threads: A bool indicates whether to spawn each unit test
- in separate thread or not.
- """
- self._skipped_tests = []
- self._dedicated_threads = dedicated_threads
-
- def skip_tests(self, tests):
- self._skipped_tests = tests
-
- def run(self, suite):
- """See setuptools' test_runner setup argument for information."""
- # only run test cases with id starting with given prefix
- testcase_filter = os.getenv('GRPC_PYTHON_TESTRUNNER_FILTER')
- filtered_cases = []
- for case in _loader.iterate_suite_cases(suite):
- if not testcase_filter or case.id().startswith(testcase_filter):
- filtered_cases.append(case)
-
- # Ensure that every test case has no collision with any other test case in
- # the augmented results.
- augmented_cases = [
- AugmentedCase(case, uuid.uuid4()) for case in filtered_cases
- ]
- case_id_by_case = dict((augmented_case.case, augmented_case.id)
- for augmented_case in augmented_cases)
- result_out = moves.cStringIO()
- result = _result.TerminalResult(
- result_out, id_map=lambda case: case_id_by_case[case])
- stdout_pipe = CaptureFile(sys.stdout.fileno())
- stderr_pipe = CaptureFile(sys.stderr.fileno())
- kill_flag = [False]
-
- def sigint_handler(signal_number, frame):
- if signal_number == signal.SIGINT:
- kill_flag[0] = True # Python 2.7 not having 'local'... :-(
- signal.signal(signal_number, signal.SIG_DFL)
-
- def fault_handler(signal_number, frame):
- stdout_pipe.write_bypass(
- 'Received fault signal {}\nstdout:\n{}\n\nstderr:{}\n'.format(
- signal_number, stdout_pipe.output(), stderr_pipe.output()))
- os._exit(1)
-
- def check_kill_self():
- if kill_flag[0]:
- stdout_pipe.write_bypass('Stopping tests short...')
- result.stopTestRun()
- stdout_pipe.write_bypass(result_out.getvalue())
- stdout_pipe.write_bypass('\ninterrupted stdout:\n{}\n'.format(
- stdout_pipe.output().decode()))
- stderr_pipe.write_bypass('\ninterrupted stderr:\n{}\n'.format(
- stderr_pipe.output().decode()))
- os._exit(1)
-
- def try_set_handler(name, handler):
- try:
- signal.signal(getattr(signal, name), handler)
- except AttributeError:
- pass
-
- try_set_handler('SIGINT', sigint_handler)
- try_set_handler('SIGBUS', fault_handler)
- try_set_handler('SIGABRT', fault_handler)
- try_set_handler('SIGFPE', fault_handler)
- try_set_handler('SIGILL', fault_handler)
- # Sometimes output will lag after a test has successfully finished; we
- # ignore such writes to our pipes.
- try_set_handler('SIGPIPE', signal.SIG_IGN)
-
- # Run the tests
- result.startTestRun()
- for augmented_case in augmented_cases:
- for skipped_test in self._skipped_tests:
- if skipped_test in augmented_case.case.id():
- break
- else:
- sys.stdout.write('Running {}\n'.format(
- augmented_case.case.id()))
- sys.stdout.flush()
- if self._dedicated_threads:
- # (Deprecated) Spawns a dedicated thread for each test case.
- case_thread = threading.Thread(
- target=augmented_case.case.run, args=(result,))
- try:
- with stdout_pipe, stderr_pipe:
- case_thread.start()
- # If the thread exits unexpectedly, stop testing.
- while case_thread.is_alive():
- check_kill_self()
- time.sleep(0)
- case_thread.join()
- except: # pylint: disable=try-except-raise
- # re-raise the exception after forcing the with-block to end
- raise
- # Records the result of the test case run.
- result.set_output(augmented_case.case, stdout_pipe.output(),
- stderr_pipe.output())
- sys.stdout.write(result_out.getvalue())
- sys.stdout.flush()
- result_out.truncate(0)
- check_kill_self()
- else:
- # Donates current thread to test case execution.
- augmented_case.case.run(result)
- result.stopTestRun()
- stdout_pipe.close()
- stderr_pipe.close()
-
- # Report results
- sys.stdout.write(result_out.getvalue())
- sys.stdout.flush()
- signal.signal(signal.SIGINT, signal.SIG_DFL)
- with open('report.xml', 'wb') as report_xml_file:
- _result.jenkins_junit_xml(result).write(report_xml_file)
- return result
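For orientation: the runner above is normally driven through setuptools' test_runner hook, but it can also be invoked directly. A minimal standalone sketch (hypothetical wiring; it assumes the runner module is importable as tests._runner, as in upstream grpcio_tests):

import os

import tests
from tests import _runner

# Optionally narrow the run to test case ids starting with a prefix.
os.environ['GRPC_PYTHON_TESTRUNNER_FILTER'] = 'tests._sanity'

# Discover the suite the same way the sanity test below does.
loader = tests.Loader()
loader.loadTestsFromNames(['tests'])

# Run on the current thread; a JUnit-style report is also written to report.xml.
runner = _runner.Runner(dedicated_threads=False)
result = runner.run(loader.suite)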
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py
deleted file mode 100644
index 5772620b60..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py
deleted file mode 100644
index 3aa92f37fb..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import pkgutil
-import unittest
-
-import six
-
-import tests
-
-
-class SanityTest(unittest.TestCase):
-
- maxDiff = 32768
-
- TEST_PKG_MODULE_NAME = 'tests'
- TEST_PKG_PATH = 'tests'
-
- def testTestsJsonUpToDate(self):
- """Autodiscovers all test suites and checks that tests.json is up to date"""
- loader = tests.Loader()
- loader.loadTestsFromNames([self.TEST_PKG_MODULE_NAME])
- test_suite_names = sorted({
- test_case_class.id().rsplit('.', 1)[0] for test_case_class in
- tests._loader.iterate_suite_cases(loader.suite)
- })
-
- tests_json_string = pkgutil.get_data(self.TEST_PKG_PATH, 'tests.json')
- tests_json = json.loads(
- tests_json_string.decode() if six.PY3 else tests_json_string)
-
- self.assertSequenceEqual(tests_json, test_suite_names)
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
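If the check above fails, tests.json simply needs to be regenerated with the same discovery logic the test uses. A hypothetical regeneration snippet (the output path is illustrative; the file lives next to the tests package's __init__.py):

import json

import tests

loader = tests.Loader()
loader.loadTestsFromNames(['tests'])
test_suite_names = sorted({
    case.id().rsplit('.', 1)[0]
    for case in tests._loader.iterate_suite_cases(loader.suite)
})

# The sanity test compares this file against the autodiscovered suite names.
with open('tests/tests.json', 'w') as tests_json_file:
    json.dump(test_suite_names, tests_json_file, indent=2)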
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/admin/test_admin.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/admin/test_admin.py
deleted file mode 100644
index adc4878282..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/admin/test_admin.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Copyright 2021 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""A test to ensure that admin services are registered correctly."""
-
-from concurrent.futures import ThreadPoolExecutor
-import logging
-import sys
-import unittest
-
-import grpc
-import grpc_admin
-from grpc_channelz.v1 import channelz_pb2
-from grpc_channelz.v1 import channelz_pb2_grpc
-from grpc_csds import csds_pb2
-from grpc_csds import csds_pb2_grpc
-
-
-@unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
-class TestAdmin(unittest.TestCase):
-
- def setUp(self):
- self._server = grpc.server(ThreadPoolExecutor())
- port = self._server.add_insecure_port('localhost:0')
- grpc_admin.add_admin_servicers(self._server)
- self._server.start()
-
- self._channel = grpc.insecure_channel('localhost:%s' % port)
-
- def tearDown(self):
- self._channel.close()
- self._server.stop(0)
-
- def test_has_csds(self):
- stub = csds_pb2_grpc.ClientStatusDiscoveryServiceStub(self._channel)
- resp = stub.FetchClientStatus(csds_pb2.ClientStatusRequest())
- # No exception raised and the response is valid
- self.assertGreater(len(resp.config), 0)
-
- def test_has_channelz(self):
- stub = channelz_pb2_grpc.ChannelzStub(self._channel)
- resp = stub.GetTopChannels(channelz_pb2.GetTopChannelsRequest())
- # No exception raised and the response is valid
- self.assertGreater(len(resp.channel), 0)
-
-
-if __name__ == "__main__":
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py
deleted file mode 100644
index 994a8e1e80..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import site
-import sys
-
-_GRPC_BAZEL_RUNTIME_ENV = "GRPC_BAZEL_RUNTIME"
-
-
-# TODO(https://github.com/bazelbuild/bazel/issues/6844) Bazel failed to
-# interpret namespace packages correctly. This monkey patch will force the
-# Python process to parse the .pth file in the sys.path to resolve namespace
-# package in the right place.
-# Analysis in depth: https://github.com/bazelbuild/rules_python/issues/55
-def sys_path_to_site_dir_hack():
- """Add valid sys.path item to site directory to parse the .pth files."""
- # Only run within our Bazel environment
- if not os.environ.get(_GRPC_BAZEL_RUNTIME_ENV):
- return
- items = []
- for item in sys.path:
- if os.path.exists(item):
- # The only difference between sys.path and a site directory is
- # whether .pth files will be parsed in it. A site directory
- # will always exist in sys.path, but not the other way around.
- items.append(item)
- for item in items:
- site.addsitedir(item)
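A brief usage sketch (hypothetical caller): the hack is a no-op unless GRPC_BAZEL_RUNTIME is set, and it must run before any namespace package is imported.

import os

from tests import bazel_namespace_package_hack

# Only takes effect inside the Bazel runtime environment.
os.environ.setdefault('GRPC_BAZEL_RUNTIME', '1')
bazel_namespace_package_hack.sys_path_to_site_dir_hack()

# Namespace packages such as grpc_channelz now resolve correctly.
from grpc_channelz.v1 import channelz  # noqa: E402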
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py
deleted file mode 100644
index 38fdfc9c5c..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2018 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py
deleted file mode 100644
index 565a79814c..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py
+++ /dev/null
@@ -1,480 +0,0 @@
-# Copyright 2018 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests of grpc_channelz.v1.channelz."""
-
-from concurrent import futures
-import sys
-import unittest
-
-import grpc
-from grpc_channelz.v1 import channelz
-from grpc_channelz.v1 import channelz_pb2
-from grpc_channelz.v1 import channelz_pb2_grpc
-
-from tests.unit import test_common
-from tests.unit.framework.common import test_constants
-
-_SUCCESSFUL_UNARY_UNARY = '/test/SuccessfulUnaryUnary'
-_FAILED_UNARY_UNARY = '/test/FailedUnaryUnary'
-_SUCCESSFUL_STREAM_STREAM = '/test/SuccessfulStreamStream'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-
-_DISABLE_REUSE_PORT = (('grpc.so_reuseport', 0),)
-_ENABLE_CHANNELZ = (('grpc.enable_channelz', 1),)
-_DISABLE_CHANNELZ = (('grpc.enable_channelz', 0),)
-
-
-def _successful_unary_unary(request, servicer_context):
- return _RESPONSE
-
-
-def _failed_unary_unary(request, servicer_context):
- servicer_context.set_code(grpc.StatusCode.INTERNAL)
- servicer_context.set_details("Channelz Test Intended Failure")
-
-
-def _successful_stream_stream(request_iterator, servicer_context):
- for _ in request_iterator:
- yield _RESPONSE
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _SUCCESSFUL_UNARY_UNARY:
- return grpc.unary_unary_rpc_method_handler(_successful_unary_unary)
- elif handler_call_details.method == _FAILED_UNARY_UNARY:
- return grpc.unary_unary_rpc_method_handler(_failed_unary_unary)
- elif handler_call_details.method == _SUCCESSFUL_STREAM_STREAM:
- return grpc.stream_stream_rpc_method_handler(
- _successful_stream_stream)
- else:
- return None
-
-
-class _ChannelServerPair(object):
-
- def __init__(self):
- # Server will enable channelz service
- self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=3),
- options=_DISABLE_REUSE_PORT +
- _ENABLE_CHANNELZ)
- port = self.server.add_insecure_port('[::]:0')
- self.server.add_generic_rpc_handlers((_GenericHandler(),))
- self.server.start()
-
- # Channel will enable channelz service...
- self.channel = grpc.insecure_channel('localhost:%d' % port,
- _ENABLE_CHANNELZ)
-
-
-def _generate_channel_server_pairs(n):
- return [_ChannelServerPair() for i in range(n)]
-
-
-def _close_channel_server_pairs(pairs):
- for pair in pairs:
- pair.server.stop(None)
- pair.channel.close()
-
-
-@unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
-class ChannelzServicerTest(unittest.TestCase):
-
- def _send_successful_unary_unary(self, idx):
- _, r = self._pairs[idx].channel.unary_unary(
- _SUCCESSFUL_UNARY_UNARY).with_call(_REQUEST)
- self.assertEqual(r.code(), grpc.StatusCode.OK)
-
- def _send_failed_unary_unary(self, idx):
- try:
- self._pairs[idx].channel.unary_unary(_FAILED_UNARY_UNARY).with_call(
- _REQUEST)
- except grpc.RpcError:
- return
- else:
- self.fail("This call supposed to fail")
-
- def _send_successful_stream_stream(self, idx):
- response_iterator = self._pairs[idx].channel.stream_stream(
- _SUCCESSFUL_STREAM_STREAM).__call__(
- iter([_REQUEST] * test_constants.STREAM_LENGTH))
- cnt = 0
- for _ in response_iterator:
- cnt += 1
- self.assertEqual(cnt, test_constants.STREAM_LENGTH)
-
- def _get_channel_id(self, idx):
- """Channel id may not be consecutive"""
- resp = self._channelz_stub.GetTopChannels(
- channelz_pb2.GetTopChannelsRequest(start_channel_id=0))
- self.assertGreater(len(resp.channel), idx)
- return resp.channel[idx].ref.channel_id
-
- def setUp(self):
- self._pairs = []
- # This server is for Channelz info fetching only;
- # it should not itself enable Channelz.
- self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=3),
- options=_DISABLE_REUSE_PORT +
- _DISABLE_CHANNELZ)
- port = self._server.add_insecure_port('[::]:0')
- channelz.add_channelz_servicer(self._server)
- self._server.start()
-
- # This channel is used to fetch Channelz info only
- # Channelz should not be enabled
- self._channel = grpc.insecure_channel('localhost:%d' % port,
- _DISABLE_CHANNELZ)
- self._channelz_stub = channelz_pb2_grpc.ChannelzStub(self._channel)
-
- def tearDown(self):
- self._server.stop(None)
- self._channel.close()
- _close_channel_server_pairs(self._pairs)
-
- def test_get_top_channels_basic(self):
- self._pairs = _generate_channel_server_pairs(1)
- resp = self._channelz_stub.GetTopChannels(
- channelz_pb2.GetTopChannelsRequest(start_channel_id=0))
- self.assertEqual(len(resp.channel), 1)
- self.assertEqual(resp.end, True)
-
- def test_get_top_channels_high_start_id(self):
- self._pairs = _generate_channel_server_pairs(1)
- resp = self._channelz_stub.GetTopChannels(
- channelz_pb2.GetTopChannelsRequest(start_channel_id=10000))
- self.assertEqual(len(resp.channel), 0)
- self.assertEqual(resp.end, True)
-
- def test_successful_request(self):
- self._pairs = _generate_channel_server_pairs(1)
- self._send_successful_unary_unary(0)
- resp = self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0)))
- self.assertEqual(resp.channel.data.calls_started, 1)
- self.assertEqual(resp.channel.data.calls_succeeded, 1)
- self.assertEqual(resp.channel.data.calls_failed, 0)
-
- def test_failed_request(self):
- self._pairs = _generate_channel_server_pairs(1)
- self._send_failed_unary_unary(0)
- resp = self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0)))
- self.assertEqual(resp.channel.data.calls_started, 1)
- self.assertEqual(resp.channel.data.calls_succeeded, 0)
- self.assertEqual(resp.channel.data.calls_failed, 1)
-
- def test_many_requests(self):
- self._pairs = _generate_channel_server_pairs(1)
- k_success = 7
- k_failed = 9
- for i in range(k_success):
- self._send_successful_unary_unary(0)
- for i in range(k_failed):
- self._send_failed_unary_unary(0)
- resp = self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0)))
- self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
- self.assertEqual(resp.channel.data.calls_succeeded, k_success)
- self.assertEqual(resp.channel.data.calls_failed, k_failed)
-
- def test_many_channel(self):
- k_channels = 4
- self._pairs = _generate_channel_server_pairs(k_channels)
- resp = self._channelz_stub.GetTopChannels(
- channelz_pb2.GetTopChannelsRequest(start_channel_id=0))
- self.assertEqual(len(resp.channel), k_channels)
-
- def test_many_requests_many_channel(self):
- k_channels = 4
- self._pairs = _generate_channel_server_pairs(k_channels)
- k_success = 11
- k_failed = 13
- for i in range(k_success):
- self._send_successful_unary_unary(0)
- self._send_successful_unary_unary(2)
- for i in range(k_failed):
- self._send_failed_unary_unary(1)
- self._send_failed_unary_unary(2)
-
- # The first channel saw only successes
- resp = self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0)))
- self.assertEqual(resp.channel.data.calls_started, k_success)
- self.assertEqual(resp.channel.data.calls_succeeded, k_success)
- self.assertEqual(resp.channel.data.calls_failed, 0)
-
- # The second channel saw only failures
- resp = self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(1)))
- self.assertEqual(resp.channel.data.calls_started, k_failed)
- self.assertEqual(resp.channel.data.calls_succeeded, 0)
- self.assertEqual(resp.channel.data.calls_failed, k_failed)
-
- # The third channel saw both successes and failures
- resp = self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(2)))
- self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
- self.assertEqual(resp.channel.data.calls_succeeded, k_success)
- self.assertEqual(resp.channel.data.calls_failed, k_failed)
-
- # The fourth channel saw nothing
- resp = self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(3)))
- self.assertEqual(resp.channel.data.calls_started, 0)
- self.assertEqual(resp.channel.data.calls_succeeded, 0)
- self.assertEqual(resp.channel.data.calls_failed, 0)
-
- def test_many_subchannels(self):
- k_channels = 4
- self._pairs = _generate_channel_server_pairs(k_channels)
- k_success = 17
- k_failed = 19
- for i in range(k_success):
- self._send_successful_unary_unary(0)
- self._send_successful_unary_unary(2)
- for i in range(k_failed):
- self._send_failed_unary_unary(1)
- self._send_failed_unary_unary(2)
-
- gtc_resp = self._channelz_stub.GetTopChannels(
- channelz_pb2.GetTopChannelsRequest(start_channel_id=0))
- self.assertEqual(len(gtc_resp.channel), k_channels)
- for i in range(k_channels):
- # If no call was performed on the channel, there shouldn't be any subchannel
- if gtc_resp.channel[i].data.calls_started == 0:
- self.assertEqual(len(gtc_resp.channel[i].subchannel_ref), 0)
- continue
-
- # Otherwise, the subchannel should exist
- self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0)
- gsc_resp = self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=gtc_resp.channel[i].subchannel_ref[0].
- subchannel_id))
- self.assertEqual(gtc_resp.channel[i].data.calls_started,
- gsc_resp.subchannel.data.calls_started)
- self.assertEqual(gtc_resp.channel[i].data.calls_succeeded,
- gsc_resp.subchannel.data.calls_succeeded)
- self.assertEqual(gtc_resp.channel[i].data.calls_failed,
- gsc_resp.subchannel.data.calls_failed)
-
- def test_server_basic(self):
- self._pairs = _generate_channel_server_pairs(1)
- resp = self._channelz_stub.GetServers(
- channelz_pb2.GetServersRequest(start_server_id=0))
- self.assertEqual(len(resp.server), 1)
-
- def test_get_one_server(self):
- self._pairs = _generate_channel_server_pairs(1)
- gss_resp = self._channelz_stub.GetServers(
- channelz_pb2.GetServersRequest(start_server_id=0))
- self.assertEqual(len(gss_resp.server), 1)
- gs_resp = self._channelz_stub.GetServer(
- channelz_pb2.GetServerRequest(
- server_id=gss_resp.server[0].ref.server_id))
- self.assertEqual(gss_resp.server[0].ref.server_id,
- gs_resp.server.ref.server_id)
-
- def test_server_call(self):
- self._pairs = _generate_channel_server_pairs(1)
- k_success = 23
- k_failed = 29
- for i in range(k_success):
- self._send_successful_unary_unary(0)
- for i in range(k_failed):
- self._send_failed_unary_unary(0)
-
- resp = self._channelz_stub.GetServers(
- channelz_pb2.GetServersRequest(start_server_id=0))
- self.assertEqual(len(resp.server), 1)
- self.assertEqual(resp.server[0].data.calls_started,
- k_success + k_failed)
- self.assertEqual(resp.server[0].data.calls_succeeded, k_success)
- self.assertEqual(resp.server[0].data.calls_failed, k_failed)
-
- def test_many_subchannels_and_sockets(self):
- k_channels = 4
- self._pairs = _generate_channel_server_pairs(k_channels)
- k_success = 3
- k_failed = 5
- for i in range(k_success):
- self._send_successful_unary_unary(0)
- self._send_successful_unary_unary(2)
- for i in range(k_failed):
- self._send_failed_unary_unary(1)
- self._send_failed_unary_unary(2)
-
- gtc_resp = self._channelz_stub.GetTopChannels(
- channelz_pb2.GetTopChannelsRequest(start_channel_id=0))
- self.assertEqual(len(gtc_resp.channel), k_channels)
- for i in range(k_channels):
- # If no call was performed on the channel, there shouldn't be any subchannel
- if gtc_resp.channel[i].data.calls_started == 0:
- self.assertEqual(len(gtc_resp.channel[i].subchannel_ref), 0)
- continue
-
- # Otherwise, the subchannel should exist
- self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0)
- gsc_resp = self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=gtc_resp.channel[i].subchannel_ref[0].
- subchannel_id))
- self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
-
- gs_resp = self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(
- socket_id=gsc_resp.subchannel.socket_ref[0].socket_id))
- self.assertEqual(gsc_resp.subchannel.data.calls_started,
- gs_resp.socket.data.streams_started)
- self.assertEqual(gsc_resp.subchannel.data.calls_started,
- gs_resp.socket.data.streams_succeeded)
- # Calls started == messages sent, only valid for unary calls
- self.assertEqual(gsc_resp.subchannel.data.calls_started,
- gs_resp.socket.data.messages_sent)
- # Only receive responses when the RPC was successful
- self.assertEqual(gsc_resp.subchannel.data.calls_succeeded,
- gs_resp.socket.data.messages_received)
-
- if gs_resp.socket.remote.HasField("tcpip_address"):
- address = gs_resp.socket.remote.tcpip_address.ip_address
- self.assertTrue(
- len(address) == 4 or len(address) == 16, address)
- if gs_resp.socket.local.HasField("tcpip_address"):
- address = gs_resp.socket.local.tcpip_address.ip_address
- self.assertTrue(
- len(address) == 4 or len(address) == 16, address)
-
- def test_streaming_rpc(self):
- self._pairs = _generate_channel_server_pairs(1)
- # In C++, the argument for _send_successful_stream_stream is the message length.
- # Here the argument is still the channel index, to be consistent with the other two helpers.
- self._send_successful_stream_stream(0)
-
- gc_resp = self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0)))
- self.assertEqual(gc_resp.channel.data.calls_started, 1)
- self.assertEqual(gc_resp.channel.data.calls_succeeded, 1)
- self.assertEqual(gc_resp.channel.data.calls_failed, 0)
- # Subchannel exists
- self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
-
- gsc_resp = self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=gc_resp.channel.subchannel_ref[0].subchannel_id))
- self.assertEqual(gsc_resp.subchannel.data.calls_started, 1)
- self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, 1)
- self.assertEqual(gsc_resp.subchannel.data.calls_failed, 0)
- # Socket exists
- self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
-
- gs_resp = self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(
- socket_id=gsc_resp.subchannel.socket_ref[0].socket_id))
- self.assertEqual(gs_resp.socket.data.streams_started, 1)
- self.assertEqual(gs_resp.socket.data.streams_succeeded, 1)
- self.assertEqual(gs_resp.socket.data.streams_failed, 0)
- self.assertEqual(gs_resp.socket.data.messages_sent,
- test_constants.STREAM_LENGTH)
- self.assertEqual(gs_resp.socket.data.messages_received,
- test_constants.STREAM_LENGTH)
-
- def test_server_sockets(self):
- self._pairs = _generate_channel_server_pairs(1)
- self._send_successful_unary_unary(0)
- self._send_failed_unary_unary(0)
-
- gs_resp = self._channelz_stub.GetServers(
- channelz_pb2.GetServersRequest(start_server_id=0))
- self.assertEqual(len(gs_resp.server), 1)
- self.assertEqual(gs_resp.server[0].data.calls_started, 2)
- self.assertEqual(gs_resp.server[0].data.calls_succeeded, 1)
- self.assertEqual(gs_resp.server[0].data.calls_failed, 1)
-
- gss_resp = self._channelz_stub.GetServerSockets(
- channelz_pb2.GetServerSocketsRequest(
- server_id=gs_resp.server[0].ref.server_id, start_socket_id=0))
- # If the RPC call fails, it raises a grpc.RpcError,
- # so the test passes as long as no exception is raised.
-
- def test_server_listen_sockets(self):
- self._pairs = _generate_channel_server_pairs(1)
-
- gss_resp = self._channelz_stub.GetServers(
- channelz_pb2.GetServersRequest(start_server_id=0))
- self.assertEqual(len(gss_resp.server), 1)
- self.assertEqual(len(gss_resp.server[0].listen_socket), 1)
-
- gs_resp = self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(
- socket_id=gss_resp.server[0].listen_socket[0].socket_id))
-
- # If the RPC call fails, it raises a grpc.RpcError,
- # so the test passes as long as no exception is raised.
-
- def test_invalid_query_get_server(self):
- try:
- self._channelz_stub.GetServer(
- channelz_pb2.GetServerRequest(server_id=10000))
- except BaseException as e:
- self.assertIn('StatusCode.NOT_FOUND', str(e))
- else:
- self.fail('Invalid query not detected')
-
- def test_invalid_query_get_channel(self):
- try:
- self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=10000))
- except BaseException as e:
- self.assertIn('StatusCode.NOT_FOUND', str(e))
- else:
- self.fail('Invalid query not detected')
-
- def test_invalid_query_get_subchannel(self):
- try:
- self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(subchannel_id=10000))
- except BaseException as e:
- self.assertIn('StatusCode.NOT_FOUND', str(e))
- else:
- self.fail('Invalid query not detected')
-
- def test_invalid_query_get_socket(self):
- try:
- self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(socket_id=10000))
- except BaseException as e:
- self.assertIn('StatusCode.NOT_FOUND', str(e))
- else:
- self.fail('Invalid query not detected')
-
- def test_invalid_query_get_server_sockets(self):
- try:
- self._channelz_stub.GetServerSockets(
- channelz_pb2.GetServerSocketsRequest(
- server_id=10000,
- start_socket_id=0,
- ))
- except BaseException as e:
- self.assertIn('StatusCode.NOT_FOUND', str(e))
- else:
- self.fail('Invalid query not detected')
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
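Stripped of the test harness, the channelz flow exercised above reduces to three steps: enable the 'grpc.enable_channelz' option on the server, register the servicer, and query it over an ordinary channel. A minimal illustrative sketch:

from concurrent import futures

import grpc
from grpc_channelz.v1 import channelz
from grpc_channelz.v1 import channelz_pb2
from grpc_channelz.v1 import channelz_pb2_grpc

# Server that both collects channelz data and serves the query API.
server = grpc.server(futures.ThreadPoolExecutor(max_workers=3),
                     options=(('grpc.enable_channelz', 1),))
channelz.add_channelz_servicer(server)
port = server.add_insecure_port('[::]:0')
server.start()

# Query the top-level channels over a plain (channelz-disabled) channel.
with grpc.insecure_channel('localhost:%d' % port) as channel:
    stub = channelz_pb2_grpc.ChannelzStub(channel)
    response = stub.GetTopChannels(
        channelz_pb2.GetTopChannelsRequest(start_channel_id=0))
    print('%d top-level channel(s) reported' % len(response.channel))

server.stop(None)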
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/csds/test_csds.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/csds/test_csds.py
deleted file mode 100644
index 3a78b5c8ae..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/csds/test_csds.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# Copyright 2021 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""A simple test to ensure that the Python wrapper can get xDS config."""
-
-from concurrent.futures import ThreadPoolExecutor
-import logging
-import os
-import sys
-import time
-import unittest
-
-from google.protobuf import json_format
-import grpc
-import grpc_csds
-from six.moves import queue
-
-try:
- from envoy.service.status.v3 import csds_pb2
- from envoy.service.status.v3 import csds_pb2_grpc
-except ImportError:
- from src.proto.grpc.testing.xds.v3 import csds_pb2
- from src.proto.grpc.testing.xds.v3 import csds_pb2_grpc
-
-_DUMMY_XDS_ADDRESS = 'xds:///foo.bar'
-_DUMMY_BOOTSTRAP_FILE = """
-{
- \"xds_servers\": [
- {
- \"server_uri\": \"fake:///xds_server\",
- \"channel_creds\": [
- {
- \"type\": \"fake\"
- }
- ],
- \"server_features\": [\"xds_v3\"]
- }
- ],
- \"node\": {
- \"id\": \"python_test_csds\",
- \"cluster\": \"test\",
- \"metadata\": {
- \"foo\": \"bar\"
- },
- \"locality\": {
- \"region\": \"corp\",
- \"zone\": \"svl\",
- \"sub_zone\": \"mp3\"
- }
- }
-}\
-"""
-
-
-@unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
-class TestCsds(unittest.TestCase):
-
- def setUp(self):
- os.environ['GRPC_XDS_BOOTSTRAP_CONFIG'] = _DUMMY_BOOTSTRAP_FILE
- self._server = grpc.server(ThreadPoolExecutor())
- port = self._server.add_insecure_port('localhost:0')
- grpc_csds.add_csds_servicer(self._server)
- self._server.start()
-
- self._channel = grpc.insecure_channel('localhost:%s' % port)
- self._stub = csds_pb2_grpc.ClientStatusDiscoveryServiceStub(
- self._channel)
-
- def tearDown(self):
- self._channel.close()
- self._server.stop(0)
- os.environ.pop('GRPC_XDS_BOOTSTRAP_CONFIG', None)
-
- def get_xds_config_dump(self):
- return self._stub.FetchClientStatus(csds_pb2.ClientStatusRequest())
-
- def test_has_node(self):
- resp = self.get_xds_config_dump()
- self.assertEqual(1, len(resp.config))
- self.assertEqual('python_test_csds', resp.config[0].node.id)
- self.assertEqual('test', resp.config[0].node.cluster)
-
- def test_no_lds_found(self):
- dummy_channel = grpc.insecure_channel(_DUMMY_XDS_ADDRESS)
-
- # Force the XdsClient to initialize and request a resource
- with self.assertRaises(grpc.RpcError) as rpc_error:
- dummy_channel.unary_unary('')(b'', wait_for_ready=False)
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- rpc_error.exception.code())
-
- # The resource request will fail with DOES_NOT_EXIST (after 15s)
- while True:
- resp = self.get_xds_config_dump()
- config = json_format.MessageToDict(resp)
- ok = False
- try:
- for xds_config in config["config"][0].get("xdsConfig", []):
- if "listenerConfig" in xds_config:
- listener = xds_config["listenerConfig"][
- "dynamicListeners"][0]
- if listener['clientStatus'] == 'DOES_NOT_EXIST':
- ok = True
- break
- for generic_xds_config in config["config"][0].get(
- "genericXdsConfigs", []):
- if "Listener" in generic_xds_config["typeUrl"]:
- if generic_xds_config[
- 'clientStatus'] == 'DOES_NOT_EXIST':
- ok = True
- break
- except KeyError as e:
- logging.debug("Invalid config: %s\n%s: %s", config, type(e), e)
- pass
- if ok:
- break
- time.sleep(1)
- dummy_channel.close()
-
-
-@unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
-class TestCsdsStream(TestCsds):
-
- def get_xds_config_dump(self):
- if not hasattr(self, 'request_queue'):
- self.request_queue = queue.Queue()
- self.response_iterator = self._stub.StreamClientStatus(
- iter(self.request_queue.get, None))
- self.request_queue.put(csds_pb2.ClientStatusRequest())
- return next(self.response_iterator)
-
-
-if __name__ == "__main__":
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py
deleted file mode 100644
index 9a26bac010..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2018 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py
deleted file mode 100644
index bf44ae1515..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Copyright 2019 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Client-side fork interop tests as a unit test."""
-
-import subprocess
-import sys
-import tempfile
-import threading
-import unittest
-
-from grpc._cython import cygrpc
-import six
-
-from tests.fork import methods
-
-# A new instance of multiprocessing.Process that forks without exec can and will
-# freeze if the Python process has any other threads running. This includes the
-# additional thread spawned by our _runner.py class. So, in order to test our
-# compatibility with multiprocessing, we first fork+exec a new process to ensure
-# we don't have any conflicting background threads.
-_CLIENT_FORK_SCRIPT_TEMPLATE = """if True:
- import os
- import sys
- from grpc._cython import cygrpc
- from tests.fork import methods
-
- cygrpc._GRPC_ENABLE_FORK_SUPPORT = True
- os.environ['GRPC_POLL_STRATEGY'] = 'epoll1'
- methods.TestCase.%s.run_test({
- 'server_host': 'localhost',
- 'server_port': %d,
- 'use_tls': False
- })
-"""
-_SUBPROCESS_TIMEOUT_S = 30
-
-
-@unittest.skipUnless(
- sys.platform.startswith("linux"),
- "not supported on windows, and fork+exec networking blocked on mac")
-@unittest.skipUnless(six.PY2, "https://github.com/grpc/grpc/issues/18075")
-class ForkInteropTest(unittest.TestCase):
-
- def setUp(self):
- start_server_script = """if True:
- import sys
- import time
-
- import grpc
- from src.proto.grpc.testing import test_pb2_grpc
- from tests.interop import service as interop_service
- from tests.unit import test_common
-
- server = test_common.test_server()
- test_pb2_grpc.add_TestServiceServicer_to_server(
- interop_service.TestService(), server)
- port = server.add_insecure_port('[::]:0')
- server.start()
- print(port)
- sys.stdout.flush()
- while True:
- time.sleep(1)
- """
- streams = tuple(tempfile.TemporaryFile() for _ in range(2))
- self._server_process = subprocess.Popen(
- [sys.executable, '-c', start_server_script],
- stdout=streams[0],
- stderr=streams[1])
- timer = threading.Timer(_SUBPROCESS_TIMEOUT_S,
- self._server_process.kill)
- try:
- timer.start()
- while True:
- streams[0].seek(0)
- s = streams[0].readline()
- if not s:
- continue
- else:
- self._port = int(s)
- break
- except ValueError:
- raise Exception('Failed to get port from server')
- finally:
- timer.cancel()
-
- def testConnectivityWatch(self):
- self._verifyTestCase(methods.TestCase.CONNECTIVITY_WATCH)
-
- def testCloseChannelBeforeFork(self):
- self._verifyTestCase(methods.TestCase.CLOSE_CHANNEL_BEFORE_FORK)
-
- def testAsyncUnarySameChannel(self):
- self._verifyTestCase(methods.TestCase.ASYNC_UNARY_SAME_CHANNEL)
-
- def testAsyncUnaryNewChannel(self):
- self._verifyTestCase(methods.TestCase.ASYNC_UNARY_NEW_CHANNEL)
-
- def testBlockingUnarySameChannel(self):
- self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_SAME_CHANNEL)
-
- def testBlockingUnaryNewChannel(self):
- self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_NEW_CHANNEL)
-
- def testInProgressBidiContinueCall(self):
- self._verifyTestCase(methods.TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL)
-
- def testInProgressBidiSameChannelAsyncCall(self):
- self._verifyTestCase(
- methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL)
-
- def testInProgressBidiSameChannelBlockingCall(self):
- self._verifyTestCase(
- methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL)
-
- def testInProgressBidiNewChannelAsyncCall(self):
- self._verifyTestCase(
- methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL)
-
- def testInProgressBidiNewChannelBlockingCall(self):
- self._verifyTestCase(
- methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL)
-
- def tearDown(self):
- self._server_process.kill()
-
- def _verifyTestCase(self, test_case):
- script = _CLIENT_FORK_SCRIPT_TEMPLATE % (test_case.name, self._port)
- streams = tuple(tempfile.TemporaryFile() for _ in range(2))
- process = subprocess.Popen([sys.executable, '-c', script],
- stdout=streams[0],
- stderr=streams[1])
- timer = threading.Timer(_SUBPROCESS_TIMEOUT_S, process.kill)
- timer.start()
- process.wait()
- timer.cancel()
- outputs = []
- for stream in streams:
- stream.seek(0)
- outputs.append(stream.read())
- self.assertEqual(
- 0, process.returncode,
- 'process failed with exit code %d (stdout: "%s", stderr: "%s")' %
- (process.returncode, outputs[0], outputs[1]))
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py
deleted file mode 100644
index 852e6da4d6..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Copyright 2018 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Python implementation of the GRPC interoperability test client."""
-
-import argparse
-import logging
-import sys
-
-from tests.fork import methods
-
-
-def _args():
-
- def parse_bool(value):
- if value == 'true':
- return True
- if value == 'false':
- return False
- raise argparse.ArgumentTypeError('Only true/false allowed')
-
- parser = argparse.ArgumentParser()
- parser.add_argument('--server_host',
- default="localhost",
- type=str,
- help='the host to which to connect')
- parser.add_argument('--server_port',
- type=int,
- required=True,
- help='the port to which to connect')
- parser.add_argument('--test_case',
- default='large_unary',
- type=str,
- help='the test case to execute')
- parser.add_argument('--use_tls',
- default=False,
- type=parse_bool,
- help='require a secure connection')
- return parser.parse_args()
-
-
-def _test_case_from_arg(test_case_arg):
- for test_case in methods.TestCase:
- if test_case_arg == test_case.value:
- return test_case
- else:
- raise ValueError('No test case "%s"!' % test_case_arg)
-
-
-def test_fork():
- logging.basicConfig(level=logging.INFO)
- args = vars(_args())
- if args['test_case'] == "all":
- for test_case in methods.TestCase:
- test_case.run_test(args)
- else:
- test_case = _test_case_from_arg(args['test_case'])
- test_case.run_test(args)
-
-
-if __name__ == '__main__':
- test_fork()
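A hypothetical way to drive the client above (the port number is illustrative): either through the command-line flags it defines, or programmatically via the TestCase enum from tests.fork.methods.

# Command-line equivalent (hypothetical port):
#   python -m tests.fork.client --server_port 50051 --test_case all
from tests.fork import methods

args = {'server_host': 'localhost', 'server_port': 50051, 'use_tls': False}
for test_case in methods.TestCase:
    test_case.run_test(args)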
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py
deleted file mode 100644
index a991272c6f..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py
+++ /dev/null
@@ -1,451 +0,0 @@
-# Copyright 2018 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Implementations of fork support test methods."""
-
-import enum
-import json
-import logging
-import multiprocessing
-import os
-import threading
-import time
-
-import grpc
-from six.moves import queue
-
-from src.proto.grpc.testing import empty_pb2
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-
-_LOGGER = logging.getLogger(__name__)
-_RPC_TIMEOUT_S = 10
-_CHILD_FINISH_TIMEOUT_S = 60
-
-
-def _channel(args):
- target = '{}:{}'.format(args['server_host'], args['server_port'])
- if args['use_tls']:
- channel_credentials = grpc.ssl_channel_credentials()
- channel = grpc.secure_channel(target, channel_credentials)
- else:
- channel = grpc.insecure_channel(target)
- return channel
-
-
-def _validate_payload_type_and_length(response, expected_type, expected_length):
- if response.payload.type is not expected_type:
- raise ValueError('expected payload type %s, got %s' %
- (expected_type, response.payload.type))
- elif len(response.payload.body) != expected_length:
- raise ValueError('expected payload body size %d, got %d' %
- (expected_length, len(response.payload.body)))
-
-
-def _async_unary(stub):
- size = 314159
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=size,
- payload=messages_pb2.Payload(body=b'\x00' * 271828))
- response_future = stub.UnaryCall.future(request, timeout=_RPC_TIMEOUT_S)
- response = response_future.result()
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size)
-
-
-def _blocking_unary(stub):
- size = 314159
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=size,
- payload=messages_pb2.Payload(body=b'\x00' * 271828))
- response = stub.UnaryCall(request, timeout=_RPC_TIMEOUT_S)
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size)
-
-
-class _Pipe(object):
-
- def __init__(self):
- self._condition = threading.Condition()
- self._values = []
- self._open = True
-
- def __iter__(self):
- return self
-
- def __next__(self):
- return self.next()
-
- def next(self):
- with self._condition:
- while not self._values and self._open:
- self._condition.wait()
- if self._values:
- return self._values.pop(0)
- else:
- raise StopIteration()
-
- def add(self, value):
- with self._condition:
- self._values.append(value)
- self._condition.notify()
-
- def close(self):
- with self._condition:
- self._open = False
- self._condition.notify()
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, traceback):
- self.close()
-
-
-class _ChildProcess(object):
-
- def __init__(self, task, args=None):
- if args is None:
- args = ()
- self._exceptions = multiprocessing.Queue()
-
- def record_exceptions():
- try:
- task(*args)
- except grpc.RpcError as rpc_error:
- self._exceptions.put('RpcError: %s' % rpc_error)
- except Exception as e: # pylint: disable=broad-except
- self._exceptions.put(e)
-
- self._process = multiprocessing.Process(target=record_exceptions)
-
- def start(self):
- self._process.start()
-
- def finish(self):
- self._process.join(timeout=_CHILD_FINISH_TIMEOUT_S)
- if self._process.is_alive():
- raise RuntimeError('Child process did not terminate')
- if self._process.exitcode != 0:
- raise ValueError('Child process failed with exitcode %d' %
- self._process.exitcode)
- try:
- exception = self._exceptions.get(block=False)
- raise ValueError('Child process failed: "%s": "%s"' %
- (repr(exception), exception))
- except queue.Empty:
- pass
-
-
-def _async_unary_same_channel(channel):
-
- def child_target():
- try:
- _async_unary(stub)
- raise Exception(
- 'Child should not be able to re-use channel after fork')
- except ValueError as expected_value_error:
- pass
-
- stub = test_pb2_grpc.TestServiceStub(channel)
- _async_unary(stub)
- child_process = _ChildProcess(child_target)
- child_process.start()
- _async_unary(stub)
- child_process.finish()
-
-
-def _async_unary_new_channel(channel, args):
-
- def child_target():
- with _channel(args) as child_channel:
- child_stub = test_pb2_grpc.TestServiceStub(child_channel)
- _async_unary(child_stub)
- child_channel.close()
-
- stub = test_pb2_grpc.TestServiceStub(channel)
- _async_unary(stub)
- child_process = _ChildProcess(child_target)
- child_process.start()
- _async_unary(stub)
- child_process.finish()
-
-
-def _blocking_unary_same_channel(channel):
-
- def child_target():
- try:
- _blocking_unary(stub)
- raise Exception(
- 'Child should not be able to re-use channel after fork')
- except ValueError as expected_value_error:
- pass
-
- stub = test_pb2_grpc.TestServiceStub(channel)
- _blocking_unary(stub)
- child_process = _ChildProcess(child_target)
- child_process.start()
- child_process.finish()
-
-
-def _blocking_unary_new_channel(channel, args):
-
- def child_target():
- with _channel(args) as child_channel:
- child_stub = test_pb2_grpc.TestServiceStub(child_channel)
- _blocking_unary(child_stub)
-
- stub = test_pb2_grpc.TestServiceStub(channel)
- _blocking_unary(stub)
- child_process = _ChildProcess(child_target)
- child_process.start()
- _blocking_unary(stub)
- child_process.finish()
-
-
-# Verify that the fork channel registry can handle already closed channels
-def _close_channel_before_fork(channel, args):
-
- def child_target():
- new_channel.close()
- with _channel(args) as child_channel:
- child_stub = test_pb2_grpc.TestServiceStub(child_channel)
- _blocking_unary(child_stub)
-
- stub = test_pb2_grpc.TestServiceStub(channel)
- _blocking_unary(stub)
- channel.close()
-
- with _channel(args) as new_channel:
- new_stub = test_pb2_grpc.TestServiceStub(new_channel)
- child_process = _ChildProcess(child_target)
- child_process.start()
- _blocking_unary(new_stub)
- child_process.finish()
-
-
-def _connectivity_watch(channel, args):
-
- parent_states = []
- parent_channel_ready_event = threading.Event()
-
- def child_target():
-
- child_channel_ready_event = threading.Event()
-
- def child_connectivity_callback(state):
- if state is grpc.ChannelConnectivity.READY:
- child_channel_ready_event.set()
-
- with _channel(args) as child_channel:
- child_stub = test_pb2_grpc.TestServiceStub(child_channel)
- child_channel.subscribe(child_connectivity_callback)
- _async_unary(child_stub)
- if not child_channel_ready_event.wait(timeout=_RPC_TIMEOUT_S):
- raise ValueError('Channel did not move to READY')
- if len(parent_states) > 1:
- raise ValueError(
- 'Received connectivity updates on parent callback',
- parent_states)
- child_channel.unsubscribe(child_connectivity_callback)
-
- def parent_connectivity_callback(state):
- parent_states.append(state)
- if state is grpc.ChannelConnectivity.READY:
- parent_channel_ready_event.set()
-
- channel.subscribe(parent_connectivity_callback)
- stub = test_pb2_grpc.TestServiceStub(channel)
- child_process = _ChildProcess(child_target)
- child_process.start()
- _async_unary(stub)
- if not parent_channel_ready_event.wait(timeout=_RPC_TIMEOUT_S):
- raise ValueError('Channel did not move to READY')
- channel.unsubscribe(parent_connectivity_callback)
- child_process.finish()
-
-
-def _ping_pong_with_child_processes_after_first_response(
- channel, args, child_target, run_after_close=True):
- request_response_sizes = (
- 31415,
- 9,
- 2653,
- 58979,
- )
- request_payload_sizes = (
- 27182,
- 8,
- 1828,
- 45904,
- )
- stub = test_pb2_grpc.TestServiceStub(channel)
- pipe = _Pipe()
- parent_bidi_call = stub.FullDuplexCall(pipe)
- child_processes = []
- first_message_received = False
- for response_size, payload_size in zip(request_response_sizes,
- request_payload_sizes):
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(
- size=response_size),),
- payload=messages_pb2.Payload(body=b'\x00' * payload_size))
- pipe.add(request)
- if first_message_received:
- child_process = _ChildProcess(child_target,
- (parent_bidi_call, channel, args))
- child_process.start()
- child_processes.append(child_process)
- response = next(parent_bidi_call)
- first_message_received = True
- child_process = _ChildProcess(child_target,
- (parent_bidi_call, channel, args))
- child_process.start()
- child_processes.append(child_process)
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
- response_size)
- pipe.close()
- if run_after_close:
- child_process = _ChildProcess(child_target,
- (parent_bidi_call, channel, args))
- child_process.start()
- child_processes.append(child_process)
- for child_process in child_processes:
- child_process.finish()
-
-
-def _in_progress_bidi_continue_call(channel):
-
- def child_target(parent_bidi_call, parent_channel, args):
- stub = test_pb2_grpc.TestServiceStub(parent_channel)
- try:
- _async_unary(stub)
- raise Exception(
- 'Child should not be able to re-use channel after fork')
- except ValueError as expected_value_error:
- pass
- inherited_code = parent_bidi_call.code()
- inherited_details = parent_bidi_call.details()
- if inherited_code != grpc.StatusCode.CANCELLED:
- raise ValueError('Expected inherited code CANCELLED, got %s' %
- inherited_code)
- if inherited_details != 'Channel closed due to fork':
- raise ValueError(
- 'Expected inherited details Channel closed due to fork, got %s'
- % inherited_details)
-
- # Don't run child_target after closing the parent call, as the call may have
- # received a status from the server before fork occurs.
- _ping_pong_with_child_processes_after_first_response(channel,
- None,
- child_target,
- run_after_close=False)
-
-
-def _in_progress_bidi_same_channel_async_call(channel):
-
- def child_target(parent_bidi_call, parent_channel, args):
- stub = test_pb2_grpc.TestServiceStub(parent_channel)
- try:
- _async_unary(stub)
- raise Exception(
- 'Child should not be able to re-use channel after fork')
- except ValueError as expected_value_error:
- pass
-
- _ping_pong_with_child_processes_after_first_response(
- channel, None, child_target)
-
-
-def _in_progress_bidi_same_channel_blocking_call(channel):
-
- def child_target(parent_bidi_call, parent_channel, args):
- stub = test_pb2_grpc.TestServiceStub(parent_channel)
- try:
- _blocking_unary(stub)
- raise Exception(
- 'Child should not be able to re-use channel after fork')
- except ValueError as expected_value_error:
- pass
-
- _ping_pong_with_child_processes_after_first_response(
- channel, None, child_target)
-
-
-def _in_progress_bidi_new_channel_async_call(channel, args):
-
- def child_target(parent_bidi_call, parent_channel, args):
- with _channel(args) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
- _async_unary(stub)
-
- _ping_pong_with_child_processes_after_first_response(
- channel, args, child_target)
-
-
-def _in_progress_bidi_new_channel_blocking_call(channel, args):
-
- def child_target(parent_bidi_call, parent_channel, args):
- with _channel(args) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
- _blocking_unary(stub)
-
- _ping_pong_with_child_processes_after_first_response(
- channel, args, child_target)
-
-
-@enum.unique
-class TestCase(enum.Enum):
-
- CONNECTIVITY_WATCH = 'connectivity_watch'
- CLOSE_CHANNEL_BEFORE_FORK = 'close_channel_before_fork'
- ASYNC_UNARY_SAME_CHANNEL = 'async_unary_same_channel'
- ASYNC_UNARY_NEW_CHANNEL = 'async_unary_new_channel'
- BLOCKING_UNARY_SAME_CHANNEL = 'blocking_unary_same_channel'
- BLOCKING_UNARY_NEW_CHANNEL = 'blocking_unary_new_channel'
- IN_PROGRESS_BIDI_CONTINUE_CALL = 'in_progress_bidi_continue_call'
- IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL = 'in_progress_bidi_same_channel_async_call'
- IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL = 'in_progress_bidi_same_channel_blocking_call'
- IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL = 'in_progress_bidi_new_channel_async_call'
- IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL = 'in_progress_bidi_new_channel_blocking_call'
-
- def run_test(self, args):
- _LOGGER.info("Running %s", self)
- channel = _channel(args)
- if self is TestCase.ASYNC_UNARY_SAME_CHANNEL:
- _async_unary_same_channel(channel)
- elif self is TestCase.ASYNC_UNARY_NEW_CHANNEL:
- _async_unary_new_channel(channel, args)
- elif self is TestCase.BLOCKING_UNARY_SAME_CHANNEL:
- _blocking_unary_same_channel(channel)
- elif self is TestCase.BLOCKING_UNARY_NEW_CHANNEL:
- _blocking_unary_new_channel(channel, args)
- elif self is TestCase.CLOSE_CHANNEL_BEFORE_FORK:
- _close_channel_before_fork(channel, args)
- elif self is TestCase.CONNECTIVITY_WATCH:
- _connectivity_watch(channel, args)
- elif self is TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL:
- _in_progress_bidi_continue_call(channel)
- elif self is TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL:
- _in_progress_bidi_same_channel_async_call(channel)
- elif self is TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL:
- _in_progress_bidi_same_channel_blocking_call(channel)
- elif self is TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL:
- _in_progress_bidi_new_channel_async_call(channel, args)
- elif self is TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL:
- _in_progress_bidi_new_channel_blocking_call(channel, args)
- else:
- raise NotImplementedError('Test case "%s" not implemented!' %
- self.name)
- channel.close()
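The pattern the helpers above enforce can be condensed into a short hedged sketch (the target address and the bare os.fork usage are illustrative, not part of the original suite): a channel created before fork must not be reused in the child, which should open its own channel instead.

import os

import grpc

from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc


def fork_safe_unary(target):
    # Parent performs its RPC and closes the channel before forking.
    with grpc.insecure_channel(target) as parent_channel:
        parent_stub = test_pb2_grpc.TestServiceStub(parent_channel)
        parent_stub.UnaryCall(messages_pb2.SimpleRequest(), timeout=10)

    pid = os.fork()
    if pid == 0:
        # Child: never touch the parent's channel; open a fresh one.
        with grpc.insecure_channel(target) as child_channel:
            child_stub = test_pb2_grpc.TestServiceStub(child_channel)
            child_stub.UnaryCall(messages_pb2.SimpleRequest(), timeout=10)
        os._exit(0)
    else:
        os.waitpid(pid, 0)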
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py
deleted file mode 100644
index 5772620b60..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py
deleted file mode 100644
index d3dfa45944..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py
+++ /dev/null
@@ -1,287 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests of grpc_health.v1.health."""
-
-import logging
-import sys
-import threading
-import time
-import unittest
-
-import grpc
-from grpc_health.v1 import health
-from grpc_health.v1 import health_pb2
-from grpc_health.v1 import health_pb2_grpc
-from six.moves import queue
-
-from tests.unit import test_common
-from tests.unit import thread_pool
-from tests.unit.framework.common import test_constants
-
-_SERVING_SERVICE = 'grpc.test.TestServiceServing'
-_UNKNOWN_SERVICE = 'grpc.test.TestServiceUnknown'
-_NOT_SERVING_SERVICE = 'grpc.test.TestServiceNotServing'
-_WATCH_SERVICE = 'grpc.test.WatchService'
-
-
-def _consume_responses(response_iterator, response_queue):
- for response in response_iterator:
- response_queue.put(response)
-
-
-class BaseWatchTests(object):
-
- @unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
- class WatchTests(unittest.TestCase):
-
- def start_server(self, non_blocking=False, thread_pool=None):
- self._thread_pool = thread_pool
- self._servicer = health.HealthServicer(
- experimental_non_blocking=non_blocking,
- experimental_thread_pool=thread_pool)
- self._servicer.set(_SERVING_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
- self._servicer.set(_UNKNOWN_SERVICE,
- health_pb2.HealthCheckResponse.UNKNOWN)
- self._servicer.set(_NOT_SERVING_SERVICE,
- health_pb2.HealthCheckResponse.NOT_SERVING)
- self._server = test_common.test_server()
- port = self._server.add_insecure_port('[::]:0')
- health_pb2_grpc.add_HealthServicer_to_server(
- self._servicer, self._server)
- self._server.start()
-
- self._channel = grpc.insecure_channel('localhost:%d' % port)
- self._stub = health_pb2_grpc.HealthStub(self._channel)
-
- def tearDown(self):
- self._server.stop(None)
- self._channel.close()
-
- def test_watch_empty_service(self):
- request = health_pb2.HealthCheckRequest(service='')
- response_queue = queue.Queue()
- rendezvous = self._stub.Watch(request)
- thread = threading.Thread(target=_consume_responses,
- args=(rendezvous, response_queue))
- thread.start()
-
- response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- response.status)
-
- rendezvous.cancel()
- thread.join()
- self.assertTrue(response_queue.empty())
-
- if self._thread_pool is not None:
- self.assertTrue(self._thread_pool.was_used())
-
- def test_watch_new_service(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- response_queue = queue.Queue()
- rendezvous = self._stub.Watch(request)
- thread = threading.Thread(target=_consume_responses,
- args=(rendezvous, response_queue))
- thread.start()
-
- response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- response.status)
-
- self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
- response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- response.status)
-
- self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.NOT_SERVING)
- response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- response.status)
-
- rendezvous.cancel()
- thread.join()
- self.assertTrue(response_queue.empty())
-
- def test_watch_service_isolation(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- response_queue = queue.Queue()
- rendezvous = self._stub.Watch(request)
- thread = threading.Thread(target=_consume_responses,
- args=(rendezvous, response_queue))
- thread.start()
-
- response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- response.status)
-
- self._servicer.set('some-other-service',
- health_pb2.HealthCheckResponse.SERVING)
- with self.assertRaises(queue.Empty):
- response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
-
- rendezvous.cancel()
- thread.join()
- self.assertTrue(response_queue.empty())
-
- def test_two_watchers(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- response_queue1 = queue.Queue()
- response_queue2 = queue.Queue()
- rendezvous1 = self._stub.Watch(request)
- rendezvous2 = self._stub.Watch(request)
- thread1 = threading.Thread(target=_consume_responses,
- args=(rendezvous1, response_queue1))
- thread2 = threading.Thread(target=_consume_responses,
- args=(rendezvous2, response_queue2))
- thread1.start()
- thread2.start()
-
- response1 = response_queue1.get(
- timeout=test_constants.SHORT_TIMEOUT)
- response2 = response_queue2.get(
- timeout=test_constants.SHORT_TIMEOUT)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- response1.status)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- response2.status)
-
- self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
- response1 = response_queue1.get(
- timeout=test_constants.SHORT_TIMEOUT)
- response2 = response_queue2.get(
- timeout=test_constants.SHORT_TIMEOUT)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- response1.status)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- response2.status)
-
- rendezvous1.cancel()
- rendezvous2.cancel()
- thread1.join()
- thread2.join()
- self.assertTrue(response_queue1.empty())
- self.assertTrue(response_queue2.empty())
-
- @unittest.skip("https://github.com/grpc/grpc/issues/18127")
- def test_cancelled_watch_removed_from_watch_list(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- response_queue = queue.Queue()
- rendezvous = self._stub.Watch(request)
- thread = threading.Thread(target=_consume_responses,
- args=(rendezvous, response_queue))
- thread.start()
-
- response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- response.status)
-
- rendezvous.cancel()
- self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
- thread.join()
-
- # Wait, if necessary, for serving thread to process client cancellation
- timeout = time.time() + test_constants.TIME_ALLOWANCE
- while (time.time() < timeout and
- self._servicer._send_response_callbacks[_WATCH_SERVICE]):
- time.sleep(1)
- self.assertFalse(
- self._servicer._send_response_callbacks[_WATCH_SERVICE],
- 'watch set should be empty')
- self.assertTrue(response_queue.empty())
-
- def test_graceful_shutdown(self):
- request = health_pb2.HealthCheckRequest(service='')
- response_queue = queue.Queue()
- rendezvous = self._stub.Watch(request)
- thread = threading.Thread(target=_consume_responses,
- args=(rendezvous, response_queue))
- thread.start()
-
- response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- response.status)
-
- self._servicer.enter_graceful_shutdown()
- response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT)
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- response.status)
-
- # This should be a no-op.
- self._servicer.set('', health_pb2.HealthCheckResponse.SERVING)
-
- rendezvous.cancel()
- thread.join()
- self.assertTrue(response_queue.empty())
-
-
-@unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
-class HealthServicerTest(BaseWatchTests.WatchTests):
-
- def setUp(self):
- self._thread_pool = thread_pool.RecordingThreadPool(max_workers=None)
- super(HealthServicerTest,
- self).start_server(non_blocking=True,
- thread_pool=self._thread_pool)
-
- def test_check_empty_service(self):
- request = health_pb2.HealthCheckRequest()
- resp = self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
-
- def test_check_serving_service(self):
- request = health_pb2.HealthCheckRequest(service=_SERVING_SERVICE)
- resp = self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
-
- def test_check_unknown_service(self):
- request = health_pb2.HealthCheckRequest(service=_UNKNOWN_SERVICE)
- resp = self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status)
-
- def test_check_not_serving_service(self):
- request = health_pb2.HealthCheckRequest(service=_NOT_SERVING_SERVICE)
- resp = self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- resp.status)
-
- def test_check_not_found_service(self):
- request = health_pb2.HealthCheckRequest(service='not-found')
- with self.assertRaises(grpc.RpcError) as context:
- resp = self._stub.Check(request)
-
- self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code())
-
- def test_health_service_name(self):
- self.assertEqual(health.SERVICE_NAME, 'grpc.health.v1.Health')
-
-
-@unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
-class HealthServicerBackwardsCompatibleWatchTest(BaseWatchTests.WatchTests):
-
- def setUp(self):
- super(HealthServicerBackwardsCompatibleWatchTest,
- self).start_server(non_blocking=False, thread_pool=None)
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
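
For orientation, the pattern the removed health-check tests exercise can be reduced to a few lines: register a grpc_health HealthServicer, mark a service as SERVING, and read the status back over the Health/Check RPC. The sketch below uses a plain grpc.server() in place of the internal test_common.test_server() helper and is an illustration, not a replacement for the deleted suite:

from concurrent import futures

import grpc
from grpc_health.v1 import health
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc

servicer = health.HealthServicer()
servicer.set('grpc.test.TestServiceServing',
             health_pb2.HealthCheckResponse.SERVING)

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
health_pb2_grpc.add_HealthServicer_to_server(servicer, server)
port = server.add_insecure_port('[::]:0')
server.start()

channel = grpc.insecure_channel('localhost:%d' % port)
stub = health_pb2_grpc.HealthStub(channel)
# Check returns the current status once; Watch (used heavily above) would
# instead stream every subsequent status change for the requested service.
response = stub.Check(
    health_pb2.HealthCheckRequest(service='grpc.test.TestServiceServing'))
assert response.status == health_pb2.HealthCheckResponse.SERVING

channel.close()
server.stop(None)
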
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py
deleted file mode 100644
index 138f61995c..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py
+++ /dev/null
@@ -1,159 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Python client used to test negative http2 conditions."""
-
-import argparse
-import time
-
-import grpc
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-
-
-def _validate_payload_type_and_length(response, expected_type, expected_length):
- if response.payload.type is not expected_type:
-        raise ValueError('expected payload type %s, got %s' %
-                         (expected_type, response.payload.type))
- elif len(response.payload.body) != expected_length:
- raise ValueError('expected payload body size %d, got %d' %
- (expected_length, len(response.payload.body)))
-
-
-def _expect_status_code(call, expected_code):
- if call.code() != expected_code:
- raise ValueError('expected code %s, got %s' %
- (expected_code, call.code()))
-
-
-def _expect_status_details(call, expected_details):
- if call.details() != expected_details:
- raise ValueError('expected message %s, got %s' %
- (expected_details, call.details()))
-
-
-def _validate_status_code_and_details(call, expected_code, expected_details):
- _expect_status_code(call, expected_code)
- _expect_status_details(call, expected_details)
-
-
-# common requests
-_REQUEST_SIZE = 314159
-_RESPONSE_SIZE = 271828
-
-_SIMPLE_REQUEST = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=_RESPONSE_SIZE,
- payload=messages_pb2.Payload(body=b'\x00' * _REQUEST_SIZE))
-
-
-def _goaway(stub):
- first_response = stub.UnaryCall(_SIMPLE_REQUEST)
- _validate_payload_type_and_length(first_response, messages_pb2.COMPRESSABLE,
- _RESPONSE_SIZE)
- time.sleep(1)
- second_response = stub.UnaryCall(_SIMPLE_REQUEST)
- _validate_payload_type_and_length(second_response,
- messages_pb2.COMPRESSABLE, _RESPONSE_SIZE)
-
-
-def _rst_after_header(stub):
- resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST)
- _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL,
- "Received RST_STREAM with error code 0")
-
-
-def _rst_during_data(stub):
- resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST)
- _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL,
- "Received RST_STREAM with error code 0")
-
-
-def _rst_after_data(stub):
- resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST)
- _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL,
- "Received RST_STREAM with error code 0")
-
-
-def _ping(stub):
- response = stub.UnaryCall(_SIMPLE_REQUEST)
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
- _RESPONSE_SIZE)
-
-
-def _max_streams(stub):
- # send one req to ensure server sets MAX_STREAMS
- response = stub.UnaryCall(_SIMPLE_REQUEST)
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
- _RESPONSE_SIZE)
-
- # give the streams a workout
- futures = []
- for _ in range(15):
- futures.append(stub.UnaryCall.future(_SIMPLE_REQUEST))
- for future in futures:
- _validate_payload_type_and_length(future.result(),
- messages_pb2.COMPRESSABLE,
- _RESPONSE_SIZE)
-
-
-def _run_test_case(test_case, stub):
- if test_case == 'goaway':
- _goaway(stub)
- elif test_case == 'rst_after_header':
- _rst_after_header(stub)
- elif test_case == 'rst_during_data':
- _rst_during_data(stub)
- elif test_case == 'rst_after_data':
- _rst_after_data(stub)
- elif test_case == 'ping':
- _ping(stub)
- elif test_case == 'max_streams':
- _max_streams(stub)
- else:
- raise ValueError("Invalid test case: %s" % test_case)
-
-
-def _args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--server_host',
- help='the host to which to connect',
- type=str,
- default="127.0.0.1")
- parser.add_argument('--server_port',
- help='the port to which to connect',
- type=int,
- default="8080")
- parser.add_argument('--test_case',
- help='the test case to execute',
- type=str,
- default="goaway")
- return parser.parse_args()
-
-
-def _stub(server_host, server_port):
- target = '{}:{}'.format(server_host, server_port)
- channel = grpc.insecure_channel(target)
- grpc.channel_ready_future(channel).result()
- return test_pb2_grpc.TestServiceStub(channel)
-
-
-def main():
- args = _args()
- stub = _stub(args.server_host, args.server_port)
- _run_test_case(args.test_case, stub)
-
-
-if __name__ == '__main__':
- main()
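
The argparse front end above is only a thin wrapper: the module-private helpers can be driven directly, which is occasionally handy when reproducing a single negative-HTTP/2 case by hand. A hypothetical programmatic invocation (it assumes the matching negative-HTTP/2 test server is already listening on 127.0.0.1:8080):

from tests.http2 import negative_http2_client

stub = negative_http2_client._stub('127.0.0.1', 8080)
negative_http2_client._run_test_case('goaway', stub)
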
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py
deleted file mode 100644
index 5fb4f3c3cf..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py
deleted file mode 100644
index 27e5dcdd75..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Insecure client-server interoperability as a unit test."""
-
-import sys
-import unittest
-
-import grpc
-
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.interop import _intraop_test_case
-from tests.interop import server
-from tests.interop import service
-from tests.unit import test_common
-
-
-@unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
-class InsecureIntraopTest(_intraop_test_case.IntraopTestCase,
- unittest.TestCase):
-
- def setUp(self):
- self.server = test_common.test_server()
- test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(),
- self.server)
- port = self.server.add_insecure_port('[::]:0')
- self.server.start()
- self.stub = test_pb2_grpc.TestServiceStub(
- grpc.insecure_channel('localhost:{}'.format(port)))
-
- def tearDown(self):
- self.server.stop(None)
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py
deleted file mode 100644
index 007db7ab41..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Common code for unit tests of the interoperability test code."""
-
-from tests.interop import methods
-
-
-class IntraopTestCase(object):
- """Unit test methods.
-
- This class must be mixed in with unittest.TestCase and a class that defines
- setUp and tearDown methods that manage a stub attribute.
- """
-
- def testEmptyUnary(self):
- methods.TestCase.EMPTY_UNARY.test_interoperability(self.stub, None)
-
- def testLargeUnary(self):
- methods.TestCase.LARGE_UNARY.test_interoperability(self.stub, None)
-
- def testServerStreaming(self):
- methods.TestCase.SERVER_STREAMING.test_interoperability(self.stub, None)
-
- def testClientStreaming(self):
- methods.TestCase.CLIENT_STREAMING.test_interoperability(self.stub, None)
-
- def testPingPong(self):
- methods.TestCase.PING_PONG.test_interoperability(self.stub, None)
-
- def testCancelAfterBegin(self):
- methods.TestCase.CANCEL_AFTER_BEGIN.test_interoperability(
- self.stub, None)
-
- def testCancelAfterFirstResponse(self):
- methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE.test_interoperability(
- self.stub, None)
-
- def testTimeoutOnSleepingServer(self):
- methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER.test_interoperability(
- self.stub, None)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py
deleted file mode 100644
index 0ec88a2cd9..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Secure client-server interoperability as a unit test."""
-
-import sys
-import unittest
-
-import grpc
-
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.interop import _intraop_test_case
-from tests.interop import resources
-from tests.interop import service
-from tests.unit import test_common
-
-_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
-
-
-@unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
-class SecureIntraopTest(_intraop_test_case.IntraopTestCase, unittest.TestCase):
-
- def setUp(self):
- self.server = test_common.test_server()
- test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(),
- self.server)
- port = self.server.add_secure_port(
- '[::]:0',
- grpc.ssl_server_credentials([(resources.private_key(),
- resources.certificate_chain())]))
- self.server.start()
- self.stub = test_pb2_grpc.TestServiceStub(
- grpc.secure_channel(
- 'localhost:{}'.format(port),
- grpc.ssl_channel_credentials(
- resources.test_root_certificates()), ((
- 'grpc.ssl_target_name_override',
- _SERVER_HOST_OVERRIDE,
- ),)))
-
- def tearDown(self):
- self.server.stop(None)
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py
deleted file mode 100644
index c95e81353d..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Python implementation of the GRPC interoperability test client."""
-
-import argparse
-import os
-
-from google import auth as google_auth
-from google.auth import jwt as google_auth_jwt
-import grpc
-
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.interop import methods
-from tests.interop import resources
-
-
-def parse_interop_client_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('--server_host',
- default="localhost",
- type=str,
- help='the host to which to connect')
- parser.add_argument('--server_port',
- type=int,
- required=True,
- help='the port to which to connect')
- parser.add_argument('--test_case',
- default='large_unary',
- type=str,
- help='the test case to execute')
- parser.add_argument('--use_tls',
- default=False,
- type=resources.parse_bool,
- help='require a secure connection')
- parser.add_argument('--use_alts',
- default=False,
- type=resources.parse_bool,
- help='require an ALTS secure connection')
- parser.add_argument('--use_test_ca',
- default=False,
- type=resources.parse_bool,
- help='replace platform root CAs with ca.pem')
- parser.add_argument('--custom_credentials_type',
- choices=["compute_engine_channel_creds"],
- default=None,
- help='use google default credentials')
- parser.add_argument('--server_host_override',
- type=str,
- help='the server host to which to claim to connect')
- parser.add_argument('--oauth_scope',
- type=str,
- help='scope for OAuth tokens')
- parser.add_argument('--default_service_account',
- type=str,
- help='email address of the default service account')
- parser.add_argument(
- "--grpc_test_use_grpclb_with_child_policy",
- type=str,
- help=(
- "If non-empty, set a static service config on channels created by "
- + "grpc::CreateTestChannel, that configures the grpclb LB policy " +
- "with a child policy being the value of this flag (e.g. round_robin "
- + "or pick_first)."))
- return parser.parse_args()
-
-
-def _create_call_credentials(args):
- if args.test_case == 'oauth2_auth_token':
- google_credentials, unused_project_id = google_auth.default(
- scopes=[args.oauth_scope])
- google_credentials.refresh(google_auth.transport.requests.Request())
- return grpc.access_token_call_credentials(google_credentials.token)
- elif args.test_case == 'compute_engine_creds':
- google_credentials, unused_project_id = google_auth.default(
- scopes=[args.oauth_scope])
- return grpc.metadata_call_credentials(
- google_auth.transport.grpc.AuthMetadataPlugin(
- credentials=google_credentials,
- request=google_auth.transport.requests.Request()))
- elif args.test_case == 'jwt_token_creds':
- google_credentials = google_auth_jwt.OnDemandCredentials.from_service_account_file(
- os.environ[google_auth.environment_vars.CREDENTIALS])
- return grpc.metadata_call_credentials(
- google_auth.transport.grpc.AuthMetadataPlugin(
- credentials=google_credentials, request=None))
- else:
- return None
-
-
-def get_secure_channel_parameters(args):
- call_credentials = _create_call_credentials(args)
-
- channel_opts = ()
- if args.grpc_test_use_grpclb_with_child_policy:
- channel_opts += ((
- "grpc.service_config",
- '{"loadBalancingConfig": [{"grpclb": {"childPolicy": [{"%s": {}}]}}]}'
- % args.grpc_test_use_grpclb_with_child_policy),)
- if args.custom_credentials_type is not None:
- if args.custom_credentials_type == "compute_engine_channel_creds":
- assert call_credentials is None
- google_credentials, unused_project_id = google_auth.default(
- scopes=[args.oauth_scope])
- call_creds = grpc.metadata_call_credentials(
- google_auth.transport.grpc.AuthMetadataPlugin(
- credentials=google_credentials,
- request=google_auth.transport.requests.Request()))
- channel_credentials = grpc.compute_engine_channel_credentials(
- call_creds)
- else:
- raise ValueError("Unknown credentials type '{}'".format(
- args.custom_credentials_type))
- elif args.use_tls:
- if args.use_test_ca:
- root_certificates = resources.test_root_certificates()
- else:
- root_certificates = None # will load default roots.
-
- channel_credentials = grpc.ssl_channel_credentials(root_certificates)
- if call_credentials is not None:
- channel_credentials = grpc.composite_channel_credentials(
- channel_credentials, call_credentials)
-
- if args.server_host_override:
- channel_opts += ((
- 'grpc.ssl_target_name_override',
- args.server_host_override,
- ),)
- elif args.use_alts:
- channel_credentials = grpc.alts_channel_credentials()
-
- return channel_credentials, channel_opts
-
-
-def _create_channel(args):
- target = '{}:{}'.format(args.server_host, args.server_port)
-
- if args.use_tls or args.use_alts or args.custom_credentials_type is not None:
- channel_credentials, options = get_secure_channel_parameters(args)
- return grpc.secure_channel(target, channel_credentials, options)
- else:
- return grpc.insecure_channel(target)
-
-
-def create_stub(channel, args):
- if args.test_case == "unimplemented_service":
- return test_pb2_grpc.UnimplementedServiceStub(channel)
- else:
- return test_pb2_grpc.TestServiceStub(channel)
-
-
-def _test_case_from_arg(test_case_arg):
- for test_case in methods.TestCase:
- if test_case_arg == test_case.value:
- return test_case
- else:
- raise ValueError('No test case "%s"!' % test_case_arg)
-
-
-def test_interoperability():
- args = parse_interop_client_args()
- channel = _create_channel(args)
- stub = create_stub(channel, args)
- test_case = _test_case_from_arg(args.test_case)
- test_case.test_interoperability(stub, args)
-
-
-if __name__ == '__main__':
- test_interoperability()
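
Because parse_interop_client_args() is the only entry point that touches sys.argv, the rest of the client can be exercised without a command line. A hypothetical in-process invocation, with an argparse.Namespace standing in for the parsed flags (the server address is an assumption):

import argparse

from tests.interop import client
from tests.interop import methods

args = argparse.Namespace(server_host='localhost',
                          server_port=8080,
                          test_case='large_unary',
                          use_tls=False,
                          use_alts=False,
                          use_test_ca=False,
                          custom_credentials_type=None,
                          server_host_override=None,
                          oauth_scope=None,
                          default_service_account=None,
                          grpc_test_use_grpclb_with_child_policy=None)
# _create_channel() picks an insecure channel here because neither TLS, ALTS,
# nor custom credentials were requested.
channel = client._create_channel(args)
stub = client.create_stub(channel, args)
methods.TestCase(args.test_case).test_interoperability(stub, args)
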
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py
deleted file mode 100644
index 44a1c38bb9..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py
+++ /dev/null
@@ -1,482 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Implementations of interoperability test methods."""
-
-# NOTE(lidiz) This module only exists in Bazel BUILD file, for more details
-# please refer to comments in the "bazel_namespace_package_hack" module.
-try:
- from tests import bazel_namespace_package_hack
- bazel_namespace_package_hack.sys_path_to_site_dir_hack()
-except ImportError:
- pass
-
-import enum
-import json
-import os
-import threading
-import time
-
-from google import auth as google_auth
-from google.auth import environment_vars as google_auth_environment_vars
-from google.auth.transport import grpc as google_auth_transport_grpc
-from google.auth.transport import requests as google_auth_transport_requests
-import grpc
-
-from src.proto.grpc.testing import empty_pb2
-from src.proto.grpc.testing import messages_pb2
-
-_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
-_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
-
-
-def _expect_status_code(call, expected_code):
- if call.code() != expected_code:
- raise ValueError('expected code %s, got %s' %
- (expected_code, call.code()))
-
-
-def _expect_status_details(call, expected_details):
- if call.details() != expected_details:
- raise ValueError('expected message %s, got %s' %
- (expected_details, call.details()))
-
-
-def _validate_status_code_and_details(call, expected_code, expected_details):
- _expect_status_code(call, expected_code)
- _expect_status_details(call, expected_details)
-
-
-def _validate_payload_type_and_length(response, expected_type, expected_length):
- if response.payload.type is not expected_type:
-        raise ValueError('expected payload type %s, got %s' %
-                         (expected_type, response.payload.type))
- elif len(response.payload.body) != expected_length:
- raise ValueError('expected payload body size %d, got %d' %
- (expected_length, len(response.payload.body)))
-
-
-def _large_unary_common_behavior(stub, fill_username, fill_oauth_scope,
- call_credentials):
- size = 314159
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=size,
- payload=messages_pb2.Payload(body=b'\x00' * 271828),
- fill_username=fill_username,
- fill_oauth_scope=fill_oauth_scope)
- response_future = stub.UnaryCall.future(request,
- credentials=call_credentials)
- response = response_future.result()
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size)
- return response
-
-
-def _empty_unary(stub):
- response = stub.EmptyCall(empty_pb2.Empty())
- if not isinstance(response, empty_pb2.Empty):
- raise TypeError('response is of type "%s", not empty_pb2.Empty!' %
- type(response))
-
-
-def _large_unary(stub):
- _large_unary_common_behavior(stub, False, False, None)
-
-
-def _client_streaming(stub):
- payload_body_sizes = (
- 27182,
- 8,
- 1828,
- 45904,
- )
- payloads = (messages_pb2.Payload(body=b'\x00' * size)
- for size in payload_body_sizes)
- requests = (messages_pb2.StreamingInputCallRequest(payload=payload)
- for payload in payloads)
- response = stub.StreamingInputCall(requests)
- if response.aggregated_payload_size != 74922:
- raise ValueError('incorrect size %d!' %
- response.aggregated_payload_size)
-
-
-def _server_streaming(stub):
- sizes = (
- 31415,
- 9,
- 2653,
- 58979,
- )
-
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(
- messages_pb2.ResponseParameters(size=sizes[0]),
- messages_pb2.ResponseParameters(size=sizes[1]),
- messages_pb2.ResponseParameters(size=sizes[2]),
- messages_pb2.ResponseParameters(size=sizes[3]),
- ))
- response_iterator = stub.StreamingOutputCall(request)
- for index, response in enumerate(response_iterator):
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
- sizes[index])
-
-
-class _Pipe(object):
-
- def __init__(self):
- self._condition = threading.Condition()
- self._values = []
- self._open = True
-
- def __iter__(self):
- return self
-
- def __next__(self):
- return self.next()
-
- def next(self):
- with self._condition:
- while not self._values and self._open:
- self._condition.wait()
- if self._values:
- return self._values.pop(0)
- else:
- raise StopIteration()
-
- def add(self, value):
- with self._condition:
- self._values.append(value)
- self._condition.notify()
-
- def close(self):
- with self._condition:
- self._open = False
- self._condition.notify()
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, traceback):
- self.close()
-
-
-def _ping_pong(stub):
- request_response_sizes = (
- 31415,
- 9,
- 2653,
- 58979,
- )
- request_payload_sizes = (
- 27182,
- 8,
- 1828,
- 45904,
- )
-
- with _Pipe() as pipe:
- response_iterator = stub.FullDuplexCall(pipe)
- for response_size, payload_size in zip(request_response_sizes,
- request_payload_sizes):
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(
- size=response_size),),
- payload=messages_pb2.Payload(body=b'\x00' * payload_size))
- pipe.add(request)
- response = next(response_iterator)
- _validate_payload_type_and_length(response,
- messages_pb2.COMPRESSABLE,
- response_size)
-
-
-def _cancel_after_begin(stub):
- with _Pipe() as pipe:
- response_future = stub.StreamingInputCall.future(pipe)
- response_future.cancel()
- if not response_future.cancelled():
- raise ValueError('expected cancelled method to return True')
- if response_future.code() is not grpc.StatusCode.CANCELLED:
- raise ValueError('expected status code CANCELLED')
-
-
-def _cancel_after_first_response(stub):
- request_response_sizes = (
- 31415,
- 9,
- 2653,
- 58979,
- )
- request_payload_sizes = (
- 27182,
- 8,
- 1828,
- 45904,
- )
- with _Pipe() as pipe:
- response_iterator = stub.FullDuplexCall(pipe)
-
- response_size = request_response_sizes[0]
- payload_size = request_payload_sizes[0]
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(
- size=response_size),),
- payload=messages_pb2.Payload(body=b'\x00' * payload_size))
- pipe.add(request)
- response = next(response_iterator)
- # We test the contents of `response` in the Ping Pong test - don't check
- # them here.
- response_iterator.cancel()
-
- try:
- next(response_iterator)
- except grpc.RpcError as rpc_error:
- if rpc_error.code() is not grpc.StatusCode.CANCELLED:
- raise
- else:
- raise ValueError('expected call to be cancelled')
-
-
-def _timeout_on_sleeping_server(stub):
- request_payload_size = 27182
- with _Pipe() as pipe:
- response_iterator = stub.FullDuplexCall(pipe, timeout=0.001)
-
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- payload=messages_pb2.Payload(body=b'\x00' * request_payload_size))
- pipe.add(request)
- try:
- next(response_iterator)
- except grpc.RpcError as rpc_error:
- if rpc_error.code() is not grpc.StatusCode.DEADLINE_EXCEEDED:
- raise
- else:
- raise ValueError('expected call to exceed deadline')
-
-
-def _empty_stream(stub):
- with _Pipe() as pipe:
- response_iterator = stub.FullDuplexCall(pipe)
- pipe.close()
- try:
- next(response_iterator)
- raise ValueError('expected exactly 0 responses')
- except StopIteration:
- pass
-
-
-def _status_code_and_message(stub):
- details = 'test status message'
- code = 2
- status = grpc.StatusCode.UNKNOWN # code = 2
-
- # Test with a UnaryCall
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=1,
- payload=messages_pb2.Payload(body=b'\x00'),
- response_status=messages_pb2.EchoStatus(code=code, message=details))
- response_future = stub.UnaryCall.future(request)
- _validate_status_code_and_details(response_future, status, details)
-
- # Test with a FullDuplexCall
- with _Pipe() as pipe:
- response_iterator = stub.FullDuplexCall(pipe)
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(size=1),),
- payload=messages_pb2.Payload(body=b'\x00'),
- response_status=messages_pb2.EchoStatus(code=code, message=details))
- pipe.add(request) # sends the initial request.
- try:
- next(response_iterator)
- except grpc.RpcError as rpc_error:
- assert rpc_error.code() == status
- # Dropping out of with block closes the pipe
- _validate_status_code_and_details(response_iterator, status, details)
-
-
-def _unimplemented_method(test_service_stub):
- response_future = (test_service_stub.UnimplementedCall.future(
- empty_pb2.Empty()))
- _expect_status_code(response_future, grpc.StatusCode.UNIMPLEMENTED)
-
-
-def _unimplemented_service(unimplemented_service_stub):
- response_future = (unimplemented_service_stub.UnimplementedCall.future(
- empty_pb2.Empty()))
- _expect_status_code(response_future, grpc.StatusCode.UNIMPLEMENTED)
-
-
-def _custom_metadata(stub):
- initial_metadata_value = "test_initial_metadata_value"
- trailing_metadata_value = b"\x0a\x0b\x0a\x0b\x0a\x0b"
- metadata = ((_INITIAL_METADATA_KEY, initial_metadata_value),
- (_TRAILING_METADATA_KEY, trailing_metadata_value))
-
- def _validate_metadata(response):
- initial_metadata = dict(response.initial_metadata())
- if initial_metadata[_INITIAL_METADATA_KEY] != initial_metadata_value:
- raise ValueError('expected initial metadata %s, got %s' %
- (initial_metadata_value,
- initial_metadata[_INITIAL_METADATA_KEY]))
- trailing_metadata = dict(response.trailing_metadata())
- if trailing_metadata[_TRAILING_METADATA_KEY] != trailing_metadata_value:
- raise ValueError('expected trailing metadata %s, got %s' %
- (trailing_metadata_value,
- trailing_metadata[_TRAILING_METADATA_KEY]))
-
- # Testing with UnaryCall
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=1,
- payload=messages_pb2.Payload(body=b'\x00'))
- response_future = stub.UnaryCall.future(request, metadata=metadata)
- _validate_metadata(response_future)
-
- # Testing with FullDuplexCall
- with _Pipe() as pipe:
- response_iterator = stub.FullDuplexCall(pipe, metadata=metadata)
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(size=1),))
- pipe.add(request) # Sends the request
- next(response_iterator) # Causes server to send trailing metadata
- # Dropping out of the with block closes the pipe
- _validate_metadata(response_iterator)
-
-
-def _compute_engine_creds(stub, args):
- response = _large_unary_common_behavior(stub, True, True, None)
- if args.default_service_account != response.username:
- raise ValueError('expected username %s, got %s' %
- (args.default_service_account, response.username))
-
-
-def _oauth2_auth_token(stub, args):
- json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
- wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
- response = _large_unary_common_behavior(stub, True, True, None)
- if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
- if args.oauth_scope.find(response.oauth_scope) == -1:
- raise ValueError(
- 'expected to find oauth scope "{}" in received "{}"'.format(
- response.oauth_scope, args.oauth_scope))
-
-
-def _jwt_token_creds(stub, args):
- json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
- wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
- response = _large_unary_common_behavior(stub, True, False, None)
- if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
-
-
-def _per_rpc_creds(stub, args):
- json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
- wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
- google_credentials, unused_project_id = google_auth.default(
- scopes=[args.oauth_scope])
- call_credentials = grpc.metadata_call_credentials(
- google_auth_transport_grpc.AuthMetadataPlugin(
- credentials=google_credentials,
- request=google_auth_transport_requests.Request()))
- response = _large_unary_common_behavior(stub, True, False, call_credentials)
- if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
-
-
-def _special_status_message(stub, args):
- details = b'\t\ntest with whitespace\r\nand Unicode BMP \xe2\x98\xba and non-BMP \xf0\x9f\x98\x88\t\n'.decode(
- 'utf-8')
- code = 2
- status = grpc.StatusCode.UNKNOWN # code = 2
-
- # Test with a UnaryCall
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=1,
- payload=messages_pb2.Payload(body=b'\x00'),
- response_status=messages_pb2.EchoStatus(code=code, message=details))
- response_future = stub.UnaryCall.future(request)
- _validate_status_code_and_details(response_future, status, details)
-
-
-@enum.unique
-class TestCase(enum.Enum):
- EMPTY_UNARY = 'empty_unary'
- LARGE_UNARY = 'large_unary'
- SERVER_STREAMING = 'server_streaming'
- CLIENT_STREAMING = 'client_streaming'
- PING_PONG = 'ping_pong'
- CANCEL_AFTER_BEGIN = 'cancel_after_begin'
- CANCEL_AFTER_FIRST_RESPONSE = 'cancel_after_first_response'
- EMPTY_STREAM = 'empty_stream'
- STATUS_CODE_AND_MESSAGE = 'status_code_and_message'
- UNIMPLEMENTED_METHOD = 'unimplemented_method'
- UNIMPLEMENTED_SERVICE = 'unimplemented_service'
- CUSTOM_METADATA = "custom_metadata"
- COMPUTE_ENGINE_CREDS = 'compute_engine_creds'
- OAUTH2_AUTH_TOKEN = 'oauth2_auth_token'
- JWT_TOKEN_CREDS = 'jwt_token_creds'
- PER_RPC_CREDS = 'per_rpc_creds'
- TIMEOUT_ON_SLEEPING_SERVER = 'timeout_on_sleeping_server'
- SPECIAL_STATUS_MESSAGE = 'special_status_message'
-
- def test_interoperability(self, stub, args):
- if self is TestCase.EMPTY_UNARY:
- _empty_unary(stub)
- elif self is TestCase.LARGE_UNARY:
- _large_unary(stub)
- elif self is TestCase.SERVER_STREAMING:
- _server_streaming(stub)
- elif self is TestCase.CLIENT_STREAMING:
- _client_streaming(stub)
- elif self is TestCase.PING_PONG:
- _ping_pong(stub)
- elif self is TestCase.CANCEL_AFTER_BEGIN:
- _cancel_after_begin(stub)
- elif self is TestCase.CANCEL_AFTER_FIRST_RESPONSE:
- _cancel_after_first_response(stub)
- elif self is TestCase.TIMEOUT_ON_SLEEPING_SERVER:
- _timeout_on_sleeping_server(stub)
- elif self is TestCase.EMPTY_STREAM:
- _empty_stream(stub)
- elif self is TestCase.STATUS_CODE_AND_MESSAGE:
- _status_code_and_message(stub)
- elif self is TestCase.UNIMPLEMENTED_METHOD:
- _unimplemented_method(stub)
- elif self is TestCase.UNIMPLEMENTED_SERVICE:
- _unimplemented_service(stub)
- elif self is TestCase.CUSTOM_METADATA:
- _custom_metadata(stub)
- elif self is TestCase.COMPUTE_ENGINE_CREDS:
- _compute_engine_creds(stub, args)
- elif self is TestCase.OAUTH2_AUTH_TOKEN:
- _oauth2_auth_token(stub, args)
- elif self is TestCase.JWT_TOKEN_CREDS:
- _jwt_token_creds(stub, args)
- elif self is TestCase.PER_RPC_CREDS:
- _per_rpc_creds(stub, args)
- elif self is TestCase.SPECIAL_STATUS_MESSAGE:
- _special_status_message(stub, args)
- else:
- raise NotImplementedError('Test case "%s" not implemented!' %
- self.name)
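
One piece of plumbing in the removed module worth calling out is _Pipe: a small thread-safe iterator that the streaming cases hand to the stub as the request iterator, push requests into with add(), and close to end the client half of the call. A self-contained sketch of that pattern (it assumes an interop TestService server is reachable on localhost:8080):

import grpc

from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
from tests.interop import methods

channel = grpc.insecure_channel('localhost:8080')
stub = test_pb2_grpc.TestServiceStub(channel)
with methods._Pipe() as pipe:
    # The stub starts reading from the pipe immediately; add() feeds it one
    # request and next() blocks until the matching response arrives.
    responses = stub.FullDuplexCall(pipe)
    pipe.add(
        messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(messages_pb2.ResponseParameters(size=1),),
            payload=messages_pb2.Payload(body=b'\x00')))
    print(len(next(responses).payload.body))  # 1
# Leaving the with block closes the pipe, which ends the request stream.
channel.close()
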
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py
deleted file mode 100644
index a47228a355..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Constants and functions for data used in interoperability testing."""
-
-import argparse
-import os
-import pkgutil
-
-_ROOT_CERTIFICATES_RESOURCE_PATH = 'credentials/ca.pem'
-_PRIVATE_KEY_RESOURCE_PATH = 'credentials/server1.key'
-_CERTIFICATE_CHAIN_RESOURCE_PATH = 'credentials/server1.pem'
-
-
-def test_root_certificates():
- return pkgutil.get_data(__name__, _ROOT_CERTIFICATES_RESOURCE_PATH)
-
-
-def private_key():
- return pkgutil.get_data(__name__, _PRIVATE_KEY_RESOURCE_PATH)
-
-
-def certificate_chain():
- return pkgutil.get_data(__name__, _CERTIFICATE_CHAIN_RESOURCE_PATH)
-
-
-def parse_bool(value):
- if value == 'true':
- return True
- if value == 'false':
- return False
- raise argparse.ArgumentTypeError('Only true/false allowed')
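
parse_bool above is deliberately strict: it is meant to be passed as an argparse type so that only the literal strings 'true' and 'false' are accepted on the command line. A minimal usage sketch:

import argparse

from tests.interop import resources

parser = argparse.ArgumentParser()
parser.add_argument('--use_tls', default=False, type=resources.parse_bool)
print(parser.parse_args(['--use_tls', 'true']).use_tls)  # True
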
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py
deleted file mode 100644
index 6286733edd..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# Copyright 2015 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Python implementation of the GRPC interoperability test server."""
-
-import argparse
-from concurrent import futures
-import logging
-
-import grpc
-
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.interop import resources
-from tests.interop import service
-from tests.unit import test_common
-
-logging.basicConfig()
-_LOGGER = logging.getLogger(__name__)
-
-
-def parse_interop_server_arguments():
- parser = argparse.ArgumentParser()
- parser.add_argument('--port',
- type=int,
- required=True,
- help='the port on which to serve')
- parser.add_argument('--use_tls',
- default=False,
- type=resources.parse_bool,
- help='require a secure connection')
- parser.add_argument('--use_alts',
- default=False,
- type=resources.parse_bool,
- help='require an ALTS connection')
- return parser.parse_args()
-
-
-def get_server_credentials(use_tls):
- if use_tls:
- private_key = resources.private_key()
- certificate_chain = resources.certificate_chain()
- return grpc.ssl_server_credentials(((private_key, certificate_chain),))
- else:
- return grpc.alts_server_credentials()
-
-
-def serve():
- args = parse_interop_server_arguments()
-
- server = test_common.test_server()
- test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(),
- server)
- if args.use_tls or args.use_alts:
- credentials = get_server_credentials(args.use_tls)
- server.add_secure_port('[::]:{}'.format(args.port), credentials)
- else:
- server.add_insecure_port('[::]:{}'.format(args.port))
-
- server.start()
- _LOGGER.info('Server serving.')
- server.wait_for_termination()
- _LOGGER.info('Server stopped; exiting.')
-
-
-if __name__ == '__main__':
- serve()
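
For local experiments with the client sketches above, the same servicer can be started in-process without the command-line flags; plain grpc.server() stands in here for the internal test_common.test_server() helper, so this is an illustrative sketch rather than the removed server's exact behavior:

from concurrent import futures

import grpc

from src.proto.grpc.testing import test_pb2_grpc
from tests.interop import service

server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(), server)
port = server.add_insecure_port('[::]:0')  # pick any free port
server.start()
print('interop TestService listening on port %d' % port)
server.wait_for_termination()
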
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py
deleted file mode 100644
index 08bb0c45a2..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Python implementation of the TestServicer."""
-
-import time
-
-import grpc
-
-from src.proto.grpc.testing import empty_pb2
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-
-_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
-_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
-_US_IN_A_SECOND = 1000 * 1000
-
-
-def _maybe_echo_metadata(servicer_context):
- """Copies metadata from request to response if it is present."""
- invocation_metadata = dict(servicer_context.invocation_metadata())
- if _INITIAL_METADATA_KEY in invocation_metadata:
- initial_metadatum = (_INITIAL_METADATA_KEY,
- invocation_metadata[_INITIAL_METADATA_KEY])
- servicer_context.send_initial_metadata((initial_metadatum,))
- if _TRAILING_METADATA_KEY in invocation_metadata:
- trailing_metadatum = (_TRAILING_METADATA_KEY,
- invocation_metadata[_TRAILING_METADATA_KEY])
- servicer_context.set_trailing_metadata((trailing_metadatum,))
-
-
-def _maybe_echo_status_and_message(request, servicer_context):
- """Sets the response context code and details if the request asks for them"""
- if request.HasField('response_status'):
- servicer_context.set_code(request.response_status.code)
- servicer_context.set_details(request.response_status.message)
-
-
-class TestService(test_pb2_grpc.TestServiceServicer):
-
- def EmptyCall(self, request, context):
- _maybe_echo_metadata(context)
- return empty_pb2.Empty()
-
- def UnaryCall(self, request, context):
- _maybe_echo_metadata(context)
- _maybe_echo_status_and_message(request, context)
- return messages_pb2.SimpleResponse(
- payload=messages_pb2.Payload(type=messages_pb2.COMPRESSABLE,
- body=b'\x00' * request.response_size))
-
- def StreamingOutputCall(self, request, context):
- _maybe_echo_status_and_message(request, context)
- for response_parameters in request.response_parameters:
- if response_parameters.interval_us != 0:
- time.sleep(response_parameters.interval_us / _US_IN_A_SECOND)
- yield messages_pb2.StreamingOutputCallResponse(
- payload=messages_pb2.Payload(type=request.response_type,
- body=b'\x00' *
- response_parameters.size))
-
- def StreamingInputCall(self, request_iterator, context):
- aggregate_size = 0
- for request in request_iterator:
- if request.payload is not None and request.payload.body:
- aggregate_size += len(request.payload.body)
- return messages_pb2.StreamingInputCallResponse(
- aggregated_payload_size=aggregate_size)
-
- def FullDuplexCall(self, request_iterator, context):
- _maybe_echo_metadata(context)
- for request in request_iterator:
- _maybe_echo_status_and_message(request, context)
- for response_parameters in request.response_parameters:
- if response_parameters.interval_us != 0:
- time.sleep(response_parameters.interval_us /
- _US_IN_A_SECOND)
- yield messages_pb2.StreamingOutputCallResponse(
- payload=messages_pb2.Payload(type=request.payload.type,
- body=b'\x00' *
- response_parameters.size))
-
- # NOTE(nathaniel): Apparently this is the same as the full-duplex call?
- # NOTE(atash): It isn't even called in the interop spec (Oct 22 2015)...
- def HalfDuplexCall(self, request_iterator, context):
- return self.FullDuplexCall(request_iterator, context)
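
The metadata-echo behavior implemented by _maybe_echo_metadata() above is what the custom_metadata interop case checks from the client side: anything sent under the x-grpc-test-echo-initial key comes back as initial metadata, and the -bin key is mirrored into the trailing metadata. A client-side sketch of that round trip (assumes a TestService server on localhost:8080):

import grpc

from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc

channel = grpc.insecure_channel('localhost:8080')
stub = test_pb2_grpc.TestServiceStub(channel)
metadata = (('x-grpc-test-echo-initial', 'echoed-value'),
            ('x-grpc-test-echo-trailing-bin', b'\x0a\x0b'))
call = stub.UnaryCall.future(
    messages_pb2.SimpleRequest(response_size=1,
                               payload=messages_pb2.Payload(body=b'\x00')),
    metadata=metadata)
# The future doubles as a grpc.Call, so both metadata views are available
# once the RPC completes.
print(dict(call.initial_metadata())['x-grpc-test-echo-initial'])
print(dict(call.trailing_metadata())['x-grpc-test-echo-trailing-bin'])
channel.close()
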
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py
deleted file mode 100644
index 5772620b60..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py
deleted file mode 100644
index 81d11c1865..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Defines test client behaviors (UNARY/STREAMING) (SYNC/ASYNC)."""
-
-import abc
-from concurrent import futures
-import threading
-import time
-
-import grpc
-from six.moves import queue
-
-from src.proto.grpc.testing import benchmark_service_pb2_grpc
-from src.proto.grpc.testing import messages_pb2
-from tests.unit import resources
-from tests.unit import test_common
-
-_TIMEOUT = 60 * 60 * 24
-
-
-class GenericStub(object):
-
- def __init__(self, channel):
- self.UnaryCall = channel.unary_unary(
- '/grpc.testing.BenchmarkService/UnaryCall')
- self.StreamingFromServer = channel.unary_stream(
- '/grpc.testing.BenchmarkService/StreamingFromServer')
- self.StreamingCall = channel.stream_stream(
- '/grpc.testing.BenchmarkService/StreamingCall')
-
-
-class BenchmarkClient:
- """Benchmark client interface that exposes a non-blocking send_request()."""
-
- __metaclass__ = abc.ABCMeta
-
- def __init__(self, server, config, hist):
- # Create the stub
- if config.HasField('security_params'):
- creds = grpc.ssl_channel_credentials(
- resources.test_root_certificates())
- channel = test_common.test_secure_channel(
- server, creds, config.security_params.server_host_override)
- else:
- channel = grpc.insecure_channel(server)
-
- # waits for the channel to be ready before we start sending messages
- grpc.channel_ready_future(channel).result()
-
- if config.payload_config.WhichOneof('payload') == 'simple_params':
- self._generic = False
- self._stub = benchmark_service_pb2_grpc.BenchmarkServiceStub(
- channel)
- payload = messages_pb2.Payload(
- body=bytes(b'\0' *
- config.payload_config.simple_params.req_size))
- self._request = messages_pb2.SimpleRequest(
- payload=payload,
- response_size=config.payload_config.simple_params.resp_size)
- else:
- self._generic = True
- self._stub = GenericStub(channel)
- self._request = bytes(b'\0' *
- config.payload_config.bytebuf_params.req_size)
-
- self._hist = hist
- self._response_callbacks = []
-
- def add_response_callback(self, callback):
- """callback will be invoked as callback(client, query_time)"""
- self._response_callbacks.append(callback)
-
- @abc.abstractmethod
- def send_request(self):
- """Non-blocking wrapper for a client's request operation."""
- raise NotImplementedError()
-
- def start(self):
- pass
-
- def stop(self):
- pass
-
- def _handle_response(self, client, query_time):
- self._hist.add(query_time * 1e9) # Report times in nanoseconds
- for callback in self._response_callbacks:
- callback(client, query_time)
-
-
-class UnarySyncBenchmarkClient(BenchmarkClient):
-
- def __init__(self, server, config, hist):
- super(UnarySyncBenchmarkClient, self).__init__(server, config, hist)
- self._pool = futures.ThreadPoolExecutor(
- max_workers=config.outstanding_rpcs_per_channel)
-
- def send_request(self):
- # Send requests in separate threads to support multiple outstanding rpcs
- # (See src/proto/grpc/testing/control.proto)
- self._pool.submit(self._dispatch_request)
-
- def stop(self):
- self._pool.shutdown(wait=True)
- self._stub = None
-
- def _dispatch_request(self):
- start_time = time.time()
- self._stub.UnaryCall(self._request, _TIMEOUT)
- end_time = time.time()
- self._handle_response(self, end_time - start_time)
-
-
-class UnaryAsyncBenchmarkClient(BenchmarkClient):
-
- def send_request(self):
- # Use the Future callback api to support multiple outstanding rpcs
- start_time = time.time()
- response_future = self._stub.UnaryCall.future(self._request, _TIMEOUT)
- response_future.add_done_callback(
- lambda resp: self._response_received(start_time, resp))
-
- def _response_received(self, start_time, resp):
- resp.result()
- end_time = time.time()
- self._handle_response(self, end_time - start_time)
-
- def stop(self):
- self._stub = None
-
-
-class _SyncStream(object):
-
- def __init__(self, stub, generic, request, handle_response):
- self._stub = stub
- self._generic = generic
- self._request = request
- self._handle_response = handle_response
- self._is_streaming = False
- self._request_queue = queue.Queue()
- self._send_time_queue = queue.Queue()
-
- def send_request(self):
- self._send_time_queue.put(time.time())
- self._request_queue.put(self._request)
-
- def start(self):
- self._is_streaming = True
- response_stream = self._stub.StreamingCall(self._request_generator(),
- _TIMEOUT)
- for _ in response_stream:
- self._handle_response(
- self,
- time.time() - self._send_time_queue.get_nowait())
-
- def stop(self):
- self._is_streaming = False
-
- def _request_generator(self):
- while self._is_streaming:
- try:
- request = self._request_queue.get(block=True, timeout=1.0)
- yield request
- except queue.Empty:
- pass
-
-
-class StreamingSyncBenchmarkClient(BenchmarkClient):
-
- def __init__(self, server, config, hist):
- super(StreamingSyncBenchmarkClient, self).__init__(server, config, hist)
- self._pool = futures.ThreadPoolExecutor(
- max_workers=config.outstanding_rpcs_per_channel)
- self._streams = [
- _SyncStream(self._stub, self._generic, self._request,
- self._handle_response)
- for _ in range(config.outstanding_rpcs_per_channel)
- ]
- self._curr_stream = 0
-
- def send_request(self):
-        # Use a round-robin scheduler to determine which stream to send on
- self._streams[self._curr_stream].send_request()
- self._curr_stream = (self._curr_stream + 1) % len(self._streams)
-
- def start(self):
- for stream in self._streams:
- self._pool.submit(stream.start)
-
- def stop(self):
- for stream in self._streams:
- stream.stop()
- self._pool.shutdown(wait=True)
- self._stub = None
-
-
-class ServerStreamingSyncBenchmarkClient(BenchmarkClient):
-
- def __init__(self, server, config, hist):
- super(ServerStreamingSyncBenchmarkClient,
- self).__init__(server, config, hist)
- if config.outstanding_rpcs_per_channel == 1:
- self._pool = None
- else:
- self._pool = futures.ThreadPoolExecutor(
- max_workers=config.outstanding_rpcs_per_channel)
- self._rpcs = []
- self._sender = None
-
- def send_request(self):
- if self._pool is None:
- self._sender = threading.Thread(
- target=self._one_stream_streaming_rpc, daemon=True)
- self._sender.start()
- else:
- self._pool.submit(self._one_stream_streaming_rpc)
-
- def _one_stream_streaming_rpc(self):
- response_stream = self._stub.StreamingFromServer(
- self._request, _TIMEOUT)
- self._rpcs.append(response_stream)
- start_time = time.time()
- for _ in response_stream:
- self._handle_response(self, time.time() - start_time)
- start_time = time.time()
-
- def stop(self):
- for call in self._rpcs:
- call.cancel()
- if self._sender is not None:
- self._sender.join()
- if self._pool is not None:
- self._pool.shutdown(wait=False)
- self._stub = None
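
Of note in the deleted benchmark_client.py above: GenericStub builds raw multi-callables straight from the channel by full method name and supplies no request serializer or response deserializer, so the generic benchmark path exchanges plain bytes rather than protobuf messages. A minimal standalone sketch of that pattern (the target address is hypothetical and no running server is assumed):

    import grpc

    # No serializers are supplied, so requests and responses stay raw bytes,
    # just as in the GenericStub above.
    channel = grpc.insecure_channel('localhost:50051')  # hypothetical target
    unary_call = channel.unary_unary('/grpc.testing.BenchmarkService/UnaryCall')
    # response = unary_call(b'\0' * 16, timeout=10)  # would need a live server
    channel.close()
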
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py
deleted file mode 100644
index 644543086b..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from src.proto.grpc.testing import benchmark_service_pb2_grpc
-from src.proto.grpc.testing import messages_pb2
-
-
-class BenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer):
- """Synchronous Server implementation for the Benchmark service."""
-
- def UnaryCall(self, request, context):
- payload = messages_pb2.Payload(body=b'\0' * request.response_size)
- return messages_pb2.SimpleResponse(payload=payload)
-
- def StreamingCall(self, request_iterator, context):
- for request in request_iterator:
- payload = messages_pb2.Payload(body=b'\0' * request.response_size)
- yield messages_pb2.SimpleResponse(payload=payload)
-
-
-class GenericBenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer
- ):
- """Generic Server implementation for the Benchmark service."""
-
- def __init__(self, resp_size):
- self._response = b'\0' * resp_size
-
- def UnaryCall(self, request, context):
- return self._response
-
- def StreamingCall(self, request_iterator, context):
- for request in request_iterator:
- yield self._response
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py
deleted file mode 100644
index a03174472c..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Defines behavior for WHEN clients send requests.
-
-Each client exposes a non-blocking send_request() method that the
-ClientRunner invokes either periodically or in response to some event.
-"""
-
-import abc
-import threading
-import time
-
-
-class ClientRunner:
- """Abstract interface for sending requests from clients."""
-
- __metaclass__ = abc.ABCMeta
-
- def __init__(self, client):
- self._client = client
-
- @abc.abstractmethod
- def start(self):
- raise NotImplementedError()
-
- @abc.abstractmethod
- def stop(self):
- raise NotImplementedError()
-
-
-class OpenLoopClientRunner(ClientRunner):
-
- def __init__(self, client, interval_generator):
- super(OpenLoopClientRunner, self).__init__(client)
- self._is_running = False
- self._interval_generator = interval_generator
- self._dispatch_thread = threading.Thread(target=self._dispatch_requests,
- args=())
-
- def start(self):
- self._is_running = True
- self._client.start()
- self._dispatch_thread.start()
-
- def stop(self):
- self._is_running = False
- self._client.stop()
- self._dispatch_thread.join()
- self._client = None
-
- def _dispatch_requests(self):
- while self._is_running:
- self._client.send_request()
- time.sleep(next(self._interval_generator))
-
-
-class ClosedLoopClientRunner(ClientRunner):
-
- def __init__(self, client, request_count, no_ping_pong):
- super(ClosedLoopClientRunner, self).__init__(client)
- self._is_running = False
- self._request_count = request_count
-        # For server-streaming RPCs, don't spawn a new RPC after each response.
-        # This yields at most ~17% for single-RPC scenarios.
- if not no_ping_pong:
- # Send a new request on each response for closed loop
- self._client.add_response_callback(self._send_request)
-
- def start(self):
- self._is_running = True
- self._client.start()
- for _ in range(self._request_count):
- self._client.send_request()
-
- def stop(self):
- self._is_running = False
- self._client.stop()
- self._client = None
-
- def _send_request(self, client, unused_response_time):
- if self._is_running:
- client.send_request()
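
The two runner styles in the deleted client_runner.py differ only in what triggers the next request: the closed-loop runner re-sends as soon as a response callback fires, while the open-loop runner paces requests from an interval generator regardless of responses. A self-contained sketch of the open-loop idea with a Poisson inter-arrival generator, similar to the one the worker supplies (function names and the rate are illustrative):

    import random
    import time

    def poisson_intervals(offered_load_rps):
        # Inter-arrival times of a Poisson process with the given rate (RPCs/second).
        while True:
            yield random.expovariate(offered_load_rps)

    def open_loop_dispatch(send_request, intervals, request_count):
        # Open loop: send on a schedule, independently of when responses arrive.
        for _ in range(request_count):
            send_request()
            time.sleep(next(intervals))

    open_loop_dispatch(lambda: print('request sent'), poisson_intervals(50.0), 3)
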
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py
deleted file mode 100644
index 8139a6ee2f..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import math
-import threading
-
-from src.proto.grpc.testing import stats_pb2
-
-
-class Histogram(object):
- """Histogram class used for recording performance testing data.
-
- This class is thread safe.
- """
-
- def __init__(self, resolution, max_possible):
- self._lock = threading.Lock()
- self._resolution = resolution
- self._max_possible = max_possible
- self._sum = 0
- self._sum_of_squares = 0
- self.multiplier = 1.0 + self._resolution
- self._count = 0
- self._min = self._max_possible
- self._max = 0
- self._buckets = [0] * (self._bucket_for(self._max_possible) + 1)
-
- def reset(self):
- with self._lock:
- self._sum = 0
- self._sum_of_squares = 0
- self._count = 0
- self._min = self._max_possible
- self._max = 0
- self._buckets = [0] * (self._bucket_for(self._max_possible) + 1)
-
- def add(self, val):
- with self._lock:
- self._sum += val
- self._sum_of_squares += val * val
- self._count += 1
- self._min = min(self._min, val)
- self._max = max(self._max, val)
- self._buckets[self._bucket_for(val)] += 1
-
- def get_data(self):
- with self._lock:
- data = stats_pb2.HistogramData()
- data.bucket.extend(self._buckets)
- data.min_seen = self._min
- data.max_seen = self._max
- data.sum = self._sum
- data.sum_of_squares = self._sum_of_squares
- data.count = self._count
- return data
-
- def merge(self, another_data):
- with self._lock:
- for i in range(len(self._buckets)):
- self._buckets[i] += another_data.bucket[i]
- self._min = min(self._min, another_data.min_seen)
- self._max = max(self._max, another_data.max_seen)
- self._sum += another_data.sum
- self._sum_of_squares += another_data.sum_of_squares
- self._count += another_data.count
-
- def _bucket_for(self, val):
- val = min(val, self._max_possible)
- return int(math.log(val, self.multiplier))
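
The deleted histogram.py uses geometric buckets: a value lands in bucket floor(log(value) / log(1 + resolution)), so every bucket covers roughly the same relative error rather than the same absolute width. A small worked example with illustrative numbers:

    import math

    resolution = 0.01                 # same role as Histogram's resolution argument
    multiplier = 1.0 + resolution
    latency_ns = 250_000              # a 250 microsecond latency, in nanoseconds

    bucket = int(math.log(latency_ns, multiplier))   # the _bucket_for formula
    lower = multiplier ** bucket
    upper = multiplier ** (bucket + 1)
    print(bucket, lower, upper)       # ~1249, bounds roughly 249.7k and 252.2k ns
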
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py
deleted file mode 100644
index 0708cc06f3..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The entry point for the qps worker."""
-
-import argparse
-import logging
-import time
-
-import grpc
-
-from src.proto.grpc.testing import worker_service_pb2_grpc
-from tests.qps import worker_server
-from tests.unit import test_common
-
-
-def run_worker_server(driver_port, server_port):
- server = test_common.test_server()
- servicer = worker_server.WorkerServer(server_port)
- worker_service_pb2_grpc.add_WorkerServiceServicer_to_server(
- servicer, server)
- server.add_insecure_port('[::]:{}'.format(driver_port))
- server.start()
- servicer.wait_for_quit()
- server.stop(0)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- parser = argparse.ArgumentParser(
- description='gRPC Python performance testing worker')
- parser.add_argument(
- '--driver_port',
- type=int,
- dest='driver_port',
- help='The port for the worker to expose for driver communication')
- parser.add_argument(
- '--server_port',
- type=int,
- default=None,
- dest='server_port',
- help='The port for the server if not specified by server config message'
- )
- args = parser.parse_args()
-
- run_worker_server(args.driver_port, args.server_port)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py
deleted file mode 100644
index 1b5a7593b5..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py
+++ /dev/null
@@ -1,197 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from concurrent import futures
-import multiprocessing
-import random
-import threading
-import time
-
-import grpc
-
-from src.proto.grpc.testing import benchmark_service_pb2_grpc
-from src.proto.grpc.testing import control_pb2
-from src.proto.grpc.testing import stats_pb2
-from src.proto.grpc.testing import worker_service_pb2_grpc
-from tests.qps import benchmark_client
-from tests.qps import benchmark_server
-from tests.qps import client_runner
-from tests.qps import histogram
-from tests.unit import resources
-from tests.unit import test_common
-
-
-class WorkerServer(worker_service_pb2_grpc.WorkerServiceServicer):
- """Python Worker Server implementation."""
-
- def __init__(self, server_port=None):
- self._quit_event = threading.Event()
- self._server_port = server_port
-
- def RunServer(self, request_iterator, context):
- config = next(request_iterator).setup #pylint: disable=stop-iteration-return
- server, port = self._create_server(config)
- cores = multiprocessing.cpu_count()
- server.start()
- start_time = time.time()
- yield self._get_server_status(start_time, start_time, port, cores)
-
- for request in request_iterator:
- end_time = time.time()
- status = self._get_server_status(start_time, end_time, port, cores)
- if request.mark.reset:
- start_time = end_time
- yield status
- server.stop(None)
-
- def _get_server_status(self, start_time, end_time, port, cores):
- end_time = time.time()
- elapsed_time = end_time - start_time
- stats = stats_pb2.ServerStats(time_elapsed=elapsed_time,
- time_user=elapsed_time,
- time_system=elapsed_time)
- return control_pb2.ServerStatus(stats=stats, port=port, cores=cores)
-
- def _create_server(self, config):
- if config.async_server_threads == 0:
- # This is the default concurrent.futures thread pool size, but
- # None doesn't seem to work
- server_threads = multiprocessing.cpu_count() * 5
- else:
- server_threads = config.async_server_threads
- server = test_common.test_server(max_workers=server_threads)
- if config.server_type == control_pb2.ASYNC_SERVER:
- servicer = benchmark_server.BenchmarkServer()
- benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server(
- servicer, server)
- elif config.server_type == control_pb2.ASYNC_GENERIC_SERVER:
- resp_size = config.payload_config.bytebuf_params.resp_size
- servicer = benchmark_server.GenericBenchmarkServer(resp_size)
- method_implementations = {
- 'StreamingCall':
- grpc.stream_stream_rpc_method_handler(servicer.StreamingCall
- ),
- 'UnaryCall':
- grpc.unary_unary_rpc_method_handler(servicer.UnaryCall),
- }
- handler = grpc.method_handlers_generic_handler(
- 'grpc.testing.BenchmarkService', method_implementations)
- server.add_generic_rpc_handlers((handler,))
- else:
- raise Exception('Unsupported server type {}'.format(
- config.server_type))
-
- if self._server_port is not None and config.port == 0:
- server_port = self._server_port
- else:
- server_port = config.port
-
- if config.HasField('security_params'): # Use SSL
- server_creds = grpc.ssl_server_credentials(
- ((resources.private_key(), resources.certificate_chain()),))
- port = server.add_secure_port('[::]:{}'.format(server_port),
- server_creds)
- else:
- port = server.add_insecure_port('[::]:{}'.format(server_port))
-
- return (server, port)
-
- def RunClient(self, request_iterator, context):
- config = next(request_iterator).setup #pylint: disable=stop-iteration-return
- client_runners = []
- qps_data = histogram.Histogram(config.histogram_params.resolution,
- config.histogram_params.max_possible)
- start_time = time.time()
-
- # Create a client for each channel
- for i in range(config.client_channels):
- server = config.server_targets[i % len(config.server_targets)]
- runner = self._create_client_runner(server, config, qps_data)
- client_runners.append(runner)
- runner.start()
-
- end_time = time.time()
- yield self._get_client_status(start_time, end_time, qps_data)
-
- # Respond to stat requests
- for request in request_iterator:
- end_time = time.time()
- status = self._get_client_status(start_time, end_time, qps_data)
- if request.mark.reset:
- qps_data.reset()
- start_time = time.time()
- yield status
-
-        # Clean up the clients
- for runner in client_runners:
- runner.stop()
-
- def _get_client_status(self, start_time, end_time, qps_data):
- latencies = qps_data.get_data()
- end_time = time.time()
- elapsed_time = end_time - start_time
- stats = stats_pb2.ClientStats(latencies=latencies,
- time_elapsed=elapsed_time,
- time_user=elapsed_time,
- time_system=elapsed_time)
- return control_pb2.ClientStatus(stats=stats)
-
- def _create_client_runner(self, server, config, qps_data):
- no_ping_pong = False
- if config.client_type == control_pb2.SYNC_CLIENT:
- if config.rpc_type == control_pb2.UNARY:
- client = benchmark_client.UnarySyncBenchmarkClient(
- server, config, qps_data)
- elif config.rpc_type == control_pb2.STREAMING:
- client = benchmark_client.StreamingSyncBenchmarkClient(
- server, config, qps_data)
- elif config.rpc_type == control_pb2.STREAMING_FROM_SERVER:
- no_ping_pong = True
- client = benchmark_client.ServerStreamingSyncBenchmarkClient(
- server, config, qps_data)
- elif config.client_type == control_pb2.ASYNC_CLIENT:
- if config.rpc_type == control_pb2.UNARY:
- client = benchmark_client.UnaryAsyncBenchmarkClient(
- server, config, qps_data)
- else:
- raise Exception('Async streaming client not supported')
- else:
- raise Exception('Unsupported client type {}'.format(
- config.client_type))
-
- # In multi-channel tests, we split the load across all channels
- load_factor = float(config.client_channels)
- if config.load_params.WhichOneof('load') == 'closed_loop':
- runner = client_runner.ClosedLoopClientRunner(
- client, config.outstanding_rpcs_per_channel, no_ping_pong)
- else: # Open loop Poisson
- alpha = config.load_params.poisson.offered_load / load_factor
-
- def poisson():
- while True:
- yield random.expovariate(alpha)
-
- runner = client_runner.OpenLoopClientRunner(client, poisson())
-
- return runner
-
- def CoreCount(self, request, context):
- return control_pb2.CoreResponse(cores=multiprocessing.cpu_count())
-
- def QuitWorker(self, request, context):
- self._quit_event.set()
- return control_pb2.Void()
-
- def wait_for_quit(self):
- self._quit_event.wait()
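
For the ASYNC_GENERIC_SERVER branch of _create_server above, a condensed standalone sketch of the same registration: the method handlers are built without serializers, so the service exchanges raw bytes, matching GenericBenchmarkServer in the deleted benchmark_server.py. The response size and worker count below are illustrative:

    from concurrent import futures

    import grpc

    def _unary_call(request, context):
        # Bytes in, bytes out: no protobuf (de)serialization on the generic path.
        return b'\0' * 64

    handler = grpc.method_handlers_generic_handler(
        'grpc.testing.BenchmarkService',
        {'UnaryCall': grpc.unary_unary_rpc_method_handler(_unary_call)})
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    server.add_generic_rpc_handlers((handler,))
    port = server.add_insecure_port('[::]:0')   # 0 lets the runtime pick a free port
    server.start()
    server.stop(0)
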
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py
deleted file mode 100644
index 5772620b60..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py
deleted file mode 100644
index 63b73e231c..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py
+++ /dev/null
@@ -1,195 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests of grpc_reflection.v1alpha.reflection."""
-
-import sys
-import unittest
-
-from google.protobuf import descriptor_pb2
-from google.protobuf import descriptor_pool
-import grpc
-from grpc_reflection.v1alpha import reflection
-from grpc_reflection.v1alpha import reflection_pb2
-from grpc_reflection.v1alpha import reflection_pb2_grpc
-
-from src.proto.grpc.testing import empty_pb2
-from src.proto.grpc.testing.proto2 import empty2_extensions_pb2
-from tests.unit import test_common
-
-_EMPTY_PROTO_FILE_NAME = 'src/proto/grpc/testing/empty.proto'
-_EMPTY_PROTO_SYMBOL_NAME = 'grpc.testing.Empty'
-_SERVICE_NAMES = ('Angstrom', 'Bohr', 'Curie', 'Dyson', 'Einstein', 'Feynman',
- 'Galilei')
-_EMPTY_EXTENSIONS_SYMBOL_NAME = 'grpc.testing.proto2.EmptyWithExtensions'
-_EMPTY_EXTENSIONS_NUMBERS = (
- 124,
- 125,
- 126,
- 127,
- 128,
-)
-
-
-def _file_descriptor_to_proto(descriptor):
- proto = descriptor_pb2.FileDescriptorProto()
- descriptor.CopyToProto(proto)
- return proto.SerializeToString()
-
-
-@unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
-class ReflectionServicerTest(unittest.TestCase):
-
- # TODO(https://github.com/grpc/grpc/issues/17844)
-    # Bazel + Python 3 will result in creating two different instances of
-    # DESCRIPTOR for each message, so an equality comparison between the protobuf
-    # returned by the stub and a manually crafted protobuf will always fail.
- def _assert_sequence_of_proto_equal(self, x, y):
- self.assertSequenceEqual(
- tuple(proto.SerializeToString() for proto in x),
- tuple(proto.SerializeToString() for proto in y),
- )
-
- def setUp(self):
- self._server = test_common.test_server()
- reflection.enable_server_reflection(_SERVICE_NAMES, self._server)
- port = self._server.add_insecure_port('[::]:0')
- self._server.start()
-
- self._channel = grpc.insecure_channel('localhost:%d' % port)
- self._stub = reflection_pb2_grpc.ServerReflectionStub(self._channel)
-
- def tearDown(self):
- self._server.stop(None)
- self._channel.close()
-
- def testFileByName(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- file_by_filename=_EMPTY_PROTO_FILE_NAME),
- reflection_pb2.ServerReflectionRequest(
- file_by_filename='i-donut-exist'),
- )
- responses = tuple(self._stub.ServerReflectionInfo(iter(requests)))
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- file_descriptor_response=reflection_pb2.FileDescriptorResponse(
- file_descriptor_proto=(
- _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self._assert_sequence_of_proto_equal(expected_responses, responses)
-
- def testFileBySymbol(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- file_containing_symbol=_EMPTY_PROTO_SYMBOL_NAME),
- reflection_pb2.ServerReflectionRequest(
- file_containing_symbol='i.donut.exist.co.uk.org.net.me.name.foo'
- ),
- )
- responses = tuple(self._stub.ServerReflectionInfo(iter(requests)))
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- file_descriptor_response=reflection_pb2.FileDescriptorResponse(
- file_descriptor_proto=(
- _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self._assert_sequence_of_proto_equal(expected_responses, responses)
-
- def testFileContainingExtension(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- file_containing_extension=reflection_pb2.ExtensionRequest(
- containing_type=_EMPTY_EXTENSIONS_SYMBOL_NAME,
- extension_number=125,
- ),),
- reflection_pb2.ServerReflectionRequest(
- file_containing_extension=reflection_pb2.ExtensionRequest(
- containing_type='i.donut.exist.co.uk.org.net.me.name.foo',
- extension_number=55,
- ),),
- )
- responses = tuple(self._stub.ServerReflectionInfo(iter(requests)))
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- file_descriptor_response=reflection_pb2.FileDescriptorResponse(
- file_descriptor_proto=(_file_descriptor_to_proto(
- empty2_extensions_pb2.DESCRIPTOR),))),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self._assert_sequence_of_proto_equal(expected_responses, responses)
-
- def testExtensionNumbersOfType(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- all_extension_numbers_of_type=_EMPTY_EXTENSIONS_SYMBOL_NAME),
- reflection_pb2.ServerReflectionRequest(
- all_extension_numbers_of_type='i.donut.exist.co.uk.net.name.foo'
- ),
- )
- responses = tuple(self._stub.ServerReflectionInfo(iter(requests)))
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- all_extension_numbers_response=reflection_pb2.
- ExtensionNumberResponse(
- base_type_name=_EMPTY_EXTENSIONS_SYMBOL_NAME,
- extension_number=_EMPTY_EXTENSIONS_NUMBERS)),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self._assert_sequence_of_proto_equal(expected_responses, responses)
-
- def testListServices(self):
- requests = (reflection_pb2.ServerReflectionRequest(list_services='',),)
- responses = tuple(self._stub.ServerReflectionInfo(iter(requests)))
- expected_responses = (reflection_pb2.ServerReflectionResponse(
- valid_host='',
- list_services_response=reflection_pb2.ListServiceResponse(
- service=tuple(
- reflection_pb2.ServiceResponse(name=name)
- for name in _SERVICE_NAMES))),)
- self._assert_sequence_of_proto_equal(expected_responses, responses)
-
- def testReflectionServiceName(self):
- self.assertEqual(reflection.SERVICE_NAME,
- 'grpc.reflection.v1alpha.ServerReflection')
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
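
For reference, the server-side setup exercised by the reflection test above comes down to a single enable_server_reflection call; the service names passed in are whatever the application registers (the non-reflection name below is illustrative):

    from concurrent import futures

    import grpc
    from grpc_reflection.v1alpha import reflection

    SERVICE_NAMES = ('grpc.testing.TestService', reflection.SERVICE_NAME)

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    reflection.enable_server_reflection(SERVICE_NAMES, server)
    port = server.add_insecure_port('[::]:0')
    server.start()
    # Reflection clients (the ServerReflectionStub used in the test above, or a
    # tool such as grpcurl) can now list services and fetch file descriptors.
    server.stop(0)
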
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py
deleted file mode 100644
index 38fdfc9c5c..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2018 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py
deleted file mode 100644
index a79dd555ab..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Copyright 2018 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests of grpc_status.
-
-isort:skip_file
-"""
-
-# NOTE(lidiz) This module only exists in the Bazel BUILD file. For more details,
-# please refer to the comments in the "bazel_namespace_package_hack" module.
-try:
- from tests import bazel_namespace_package_hack
- bazel_namespace_package_hack.sys_path_to_site_dir_hack()
-except ImportError:
- pass
-
-import unittest
-
-import logging
-import traceback
-import sys
-
-import grpc
-from grpc_status import rpc_status
-
-from tests.unit import test_common
-
-from google.protobuf import any_pb2
-from google.rpc import code_pb2, status_pb2, error_details_pb2
-
-_STATUS_OK = '/test/StatusOK'
-_STATUS_NOT_OK = '/test/StatusNotOk'
-_ERROR_DETAILS = '/test/ErrorDetails'
-_INCONSISTENT = '/test/Inconsistent'
-_INVALID_CODE = '/test/InvalidCode'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-
-_GRPC_DETAILS_METADATA_KEY = 'grpc-status-details-bin'
-
-_STATUS_DETAILS = 'This is an error detail'
-_STATUS_DETAILS_ANOTHER = 'This is another error detail'
-
-
-def _ok_unary_unary(request, servicer_context):
- return _RESPONSE
-
-
-def _not_ok_unary_unary(request, servicer_context):
- servicer_context.abort(grpc.StatusCode.INTERNAL, _STATUS_DETAILS)
-
-
-def _error_details_unary_unary(request, servicer_context):
- details = any_pb2.Any()
- details.Pack(
- error_details_pb2.DebugInfo(stack_entries=traceback.format_stack(),
- detail='Intentionally invoked'))
- rich_status = status_pb2.Status(
- code=code_pb2.INTERNAL,
- message=_STATUS_DETAILS,
- details=[details],
- )
- servicer_context.abort_with_status(rpc_status.to_status(rich_status))
-
-
-def _inconsistent_unary_unary(request, servicer_context):
- rich_status = status_pb2.Status(
- code=code_pb2.INTERNAL,
- message=_STATUS_DETAILS,
- )
- servicer_context.set_code(grpc.StatusCode.NOT_FOUND)
- servicer_context.set_details(_STATUS_DETAILS_ANOTHER)
- # User put inconsistent status information in trailing metadata
- servicer_context.set_trailing_metadata(
- ((_GRPC_DETAILS_METADATA_KEY, rich_status.SerializeToString()),))
-
-
-def _invalid_code_unary_unary(request, servicer_context):
- rich_status = status_pb2.Status(
- code=42,
- message='Invalid code',
- )
- servicer_context.abort_with_status(rpc_status.to_status(rich_status))
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _STATUS_OK:
- return grpc.unary_unary_rpc_method_handler(_ok_unary_unary)
- elif handler_call_details.method == _STATUS_NOT_OK:
- return grpc.unary_unary_rpc_method_handler(_not_ok_unary_unary)
- elif handler_call_details.method == _ERROR_DETAILS:
- return grpc.unary_unary_rpc_method_handler(
- _error_details_unary_unary)
- elif handler_call_details.method == _INCONSISTENT:
- return grpc.unary_unary_rpc_method_handler(
- _inconsistent_unary_unary)
- elif handler_call_details.method == _INVALID_CODE:
- return grpc.unary_unary_rpc_method_handler(
- _invalid_code_unary_unary)
- else:
- return None
-
-
-@unittest.skipIf(sys.version_info[0] < 3,
- 'ProtoBuf descriptor has moved on from Python2')
-class StatusTest(unittest.TestCase):
-
- def setUp(self):
- self._server = test_common.test_server()
- self._server.add_generic_rpc_handlers((_GenericHandler(),))
- port = self._server.add_insecure_port('[::]:0')
- self._server.start()
-
- self._channel = grpc.insecure_channel('localhost:%d' % port)
-
- def tearDown(self):
- self._server.stop(None)
- self._channel.close()
-
- def test_status_ok(self):
- _, call = self._channel.unary_unary(_STATUS_OK).with_call(_REQUEST)
-
-        # A successful RPC doesn't have a status
- status = rpc_status.from_call(call)
- self.assertIs(status, None)
-
- def test_status_not_ok(self):
- with self.assertRaises(grpc.RpcError) as exception_context:
- self._channel.unary_unary(_STATUS_NOT_OK).with_call(_REQUEST)
- rpc_error = exception_context.exception
-
- self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
-        # A failed RPC doesn't automatically generate a status
- status = rpc_status.from_call(rpc_error)
- self.assertIs(status, None)
-
- def test_error_details(self):
- with self.assertRaises(grpc.RpcError) as exception_context:
- self._channel.unary_unary(_ERROR_DETAILS).with_call(_REQUEST)
- rpc_error = exception_context.exception
-
- status = rpc_status.from_call(rpc_error)
- self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
- self.assertEqual(status.code, code_pb2.Code.Value('INTERNAL'))
-
- # Check if the underlying proto message is intact
- self.assertEqual(
- status.details[0].Is(error_details_pb2.DebugInfo.DESCRIPTOR), True)
- info = error_details_pb2.DebugInfo()
- status.details[0].Unpack(info)
- self.assertIn('_error_details_unary_unary', info.stack_entries[-1])
-
- def test_code_message_validation(self):
- with self.assertRaises(grpc.RpcError) as exception_context:
- self._channel.unary_unary(_INCONSISTENT).with_call(_REQUEST)
- rpc_error = exception_context.exception
- self.assertEqual(rpc_error.code(), grpc.StatusCode.NOT_FOUND)
-
- # Code/Message validation failed
- self.assertRaises(ValueError, rpc_status.from_call, rpc_error)
-
- def test_invalid_code(self):
- with self.assertRaises(grpc.RpcError) as exception_context:
- self._channel.unary_unary(_INVALID_CODE).with_call(_REQUEST)
- rpc_error = exception_context.exception
- self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN)
-        # Invalid status code exception raised during conversion
- self.assertIn('Invalid status code', rpc_error.details())
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
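
Condensed from the test above, these are the two halves of the grpc_status pattern an application would write itself; the handler and helper names are illustrative:

    from google.rpc import code_pb2, status_pb2
    from grpc_status import rpc_status

    def failing_handler(request, servicer_context):
        # Server side: abort with a rich google.rpc.Status attached to the trailers.
        rich_status = status_pb2.Status(code=code_pb2.INTERNAL,
                                        message='something went wrong')
        servicer_context.abort_with_status(rpc_status.to_status(rich_status))

    def log_status(rpc_error):
        # Client side: recover the google.rpc.Status from the RpcError, if any.
        status = rpc_status.from_call(rpc_error)
        if status is not None:
            print(status.code, status.message, list(status.details))
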
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py
deleted file mode 100644
index 5772620b60..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py
deleted file mode 100644
index 4481e61696..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py
+++ /dev/null
@@ -1,159 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Entry point for running stress tests."""
-
-import argparse
-from concurrent import futures
-import threading
-
-import grpc
-from six.moves import queue
-
-from src.proto.grpc.testing import metrics_pb2_grpc
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.interop import methods
-from tests.interop import resources
-from tests.qps import histogram
-from tests.stress import metrics_server
-from tests.stress import test_runner
-
-
-def _args():
- parser = argparse.ArgumentParser(
- description='gRPC Python stress test client')
- parser.add_argument(
- '--server_addresses',
- help='comma separated list of hostname:port to run servers on',
- default='localhost:8080',
- type=str)
- parser.add_argument(
- '--test_cases',
- help='comma separated list of testcase:weighting of tests to run',
- default='large_unary:100',
- type=str)
- parser.add_argument('--test_duration_secs',
- help='number of seconds to run the stress test',
- default=-1,
- type=int)
- parser.add_argument('--num_channels_per_server',
- help='number of channels per server',
- default=1,
- type=int)
- parser.add_argument('--num_stubs_per_channel',
- help='number of stubs to create per channel',
- default=1,
- type=int)
- parser.add_argument('--metrics_port',
- help='the port to listen for metrics requests on',
- default=8081,
- type=int)
- parser.add_argument(
- '--use_test_ca',
- help='Whether to use our fake CA. Requires --use_tls=true',
- default=False,
- type=bool)
- parser.add_argument('--use_tls',
- help='Whether to use TLS',
- default=False,
- type=bool)
- parser.add_argument('--server_host_override',
- help='the server host to which to claim to connect',
- type=str)
- return parser.parse_args()
-
-
-def _test_case_from_arg(test_case_arg):
- for test_case in methods.TestCase:
- if test_case_arg == test_case.value:
- return test_case
- else:
- raise ValueError('No test case {}!'.format(test_case_arg))
-
-
-def _parse_weighted_test_cases(test_case_args):
- weighted_test_cases = {}
- for test_case_arg in test_case_args.split(','):
- name, weight = test_case_arg.split(':', 1)
- test_case = _test_case_from_arg(name)
- weighted_test_cases[test_case] = int(weight)
- return weighted_test_cases
-
-
-def _get_channel(target, args):
- if args.use_tls:
- if args.use_test_ca:
- root_certificates = resources.test_root_certificates()
- else:
- root_certificates = None # will load default roots.
- channel_credentials = grpc.ssl_channel_credentials(
- root_certificates=root_certificates)
- options = ((
- 'grpc.ssl_target_name_override',
- args.server_host_override,
- ),)
- channel = grpc.secure_channel(target,
- channel_credentials,
- options=options)
- else:
- channel = grpc.insecure_channel(target)
-
-    # Wait for the channel to be ready before we start sending messages
- grpc.channel_ready_future(channel).result()
- return channel
-
-
-def run_test(args):
- test_cases = _parse_weighted_test_cases(args.test_cases)
- test_server_targets = args.server_addresses.split(',')
- # Propagate any client exceptions with a queue
- exception_queue = queue.Queue()
- stop_event = threading.Event()
- hist = histogram.Histogram(1, 1)
- runners = []
-
- server = grpc.server(futures.ThreadPoolExecutor(max_workers=25))
- metrics_pb2_grpc.add_MetricsServiceServicer_to_server(
- metrics_server.MetricsServer(hist), server)
- server.add_insecure_port('[::]:{}'.format(args.metrics_port))
- server.start()
-
- for test_server_target in test_server_targets:
- for _ in range(args.num_channels_per_server):
- channel = _get_channel(test_server_target, args)
- for _ in range(args.num_stubs_per_channel):
- stub = test_pb2_grpc.TestServiceStub(channel)
- runner = test_runner.TestRunner(stub, test_cases, hist,
- exception_queue, stop_event)
- runners.append(runner)
-
- for runner in runners:
- runner.start()
- try:
- timeout_secs = args.test_duration_secs
- if timeout_secs < 0:
- timeout_secs = None
- raise exception_queue.get(block=True, timeout=timeout_secs)
- except queue.Empty:
- # No exceptions thrown, success
- pass
- finally:
- stop_event.set()
- for runner in runners:
- runner.join()
- runner = None
- server.stop(None)
-
-
-if __name__ == '__main__':
- run_test(_args())
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py
deleted file mode 100644
index 33a74b4a38..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""MetricsService for publishing stress test qps data."""
-
-import time
-
-from src.proto.grpc.testing import metrics_pb2
-from src.proto.grpc.testing import metrics_pb2_grpc
-
-GAUGE_NAME = 'python_overall_qps'
-
-
-class MetricsServer(metrics_pb2_grpc.MetricsServiceServicer):
-
- def __init__(self, histogram):
- self._start_time = time.time()
- self._histogram = histogram
-
- def _get_qps(self):
- count = self._histogram.get_data().count
- delta = time.time() - self._start_time
- self._histogram.reset()
- self._start_time = time.time()
- return int(count / delta)
-
- def GetAllGauges(self, request, context):
- qps = self._get_qps()
- return [metrics_pb2.GaugeResponse(name=GAUGE_NAME, long_value=qps)]
-
- def GetGauge(self, request, context):
- if request.name != GAUGE_NAME:
- raise Exception('Gauge {} does not exist'.format(request.name))
- qps = self._get_qps()
- return metrics_pb2.GaugeResponse(name=GAUGE_NAME, long_value=qps)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py
deleted file mode 100644
index 1b6003fc69..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Thread that sends random weighted requests on a TestService stub."""
-
-import random
-import threading
-import time
-import traceback
-
-
-def _weighted_test_case_generator(weighted_cases):
- weight_sum = sum(weighted_cases.itervalues())
-
- while True:
- val = random.uniform(0, weight_sum)
- partial_sum = 0
- for case in weighted_cases:
- partial_sum += weighted_cases[case]
- if val <= partial_sum:
- yield case
- break
-
-
-class TestRunner(threading.Thread):
-
- def __init__(self, stub, test_cases, hist, exception_queue, stop_event):
- super(TestRunner, self).__init__()
- self._exception_queue = exception_queue
- self._stop_event = stop_event
- self._stub = stub
- self._test_cases = _weighted_test_case_generator(test_cases)
- self._histogram = hist
-
- def run(self):
- while not self._stop_event.is_set():
- try:
- test_case = next(self._test_cases)
- start_time = time.time()
- test_case.test_interoperability(self._stub, None)
- end_time = time.time()
- self._histogram.add((end_time - start_time) * 1e9)
- except Exception as e: # pylint: disable=broad-except
- traceback.print_exc()
- self._exception_queue.put(
- Exception(
- "An exception occurred during test {}".format(
- test_case), e))
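
One caveat in _weighted_test_case_generator above: dict.itervalues() exists only on Python 2, so under Python 3 this generator raises AttributeError (dict.values() is the portable spelling). On Python 3 the same weighted draw can also be written with random.choices, as in this small sketch, where the case names are illustrative strings standing in for interop TestCase members:

    import random

    def weighted_test_case_generator(weighted_cases):
        # weighted_cases maps a test case to its integer weight, as in TestRunner.
        cases = list(weighted_cases)
        weights = [weighted_cases[case] for case in cases]
        while True:
            yield random.choices(cases, weights=weights, k=1)[0]

    picker = weighted_test_case_generator({'large_unary': 100, 'empty_stream': 20})
    print([next(picker) for _ in range(5)])
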
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py
deleted file mode 100644
index 21d7e6c608..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright 2019 The gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import contextlib
-import datetime
-import subprocess
-import sys
-import threading
-import time
-
-import grpc
-import grpc.experimental
-
-_PORT = 5741
-_MESSAGE_SIZE = 4
-_RESPONSE_COUNT = 32 * 1024
-
-_SERVER_CODE = """
-import datetime
-import threading
-import grpc
-from concurrent import futures
-from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2
-from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc
-
-class Handler(unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceServicer):
-
- def Benchmark(self, request, context):
- payload = b'\\x00\\x01' * int(request.message_size / 2)
- for _ in range(request.response_count):
- yield unary_stream_benchmark_pb2.BenchmarkResponse(response=payload)
-
-
-server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
-server.add_insecure_port('[::]:%d')
-unary_stream_benchmark_pb2_grpc.add_UnaryStreamBenchmarkServiceServicer_to_server(Handler(), server)
-server.start()
-server.wait_for_termination()
-""" % _PORT
-
-try:
- from src.python.grpcio_tests.tests.stress import \
- unary_stream_benchmark_pb2_grpc
- from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2
-
- _GRPC_CHANNEL_OPTIONS = [
- ('grpc.max_metadata_size', 16 * 1024 * 1024),
- ('grpc.max_receive_message_length', 64 * 1024 * 1024),
- (grpc.experimental.ChannelOptions.SingleThreadedUnaryStream, 1),
- ]
-
- @contextlib.contextmanager
- def _running_server():
- server_process = subprocess.Popen([sys.executable, '-c', _SERVER_CODE],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- try:
- yield
- finally:
- server_process.terminate()
- server_process.wait()
- sys.stdout.write("stdout: {}".format(server_process.stdout.read()))
- sys.stdout.flush()
- sys.stdout.write("stderr: {}".format(server_process.stderr.read()))
- sys.stdout.flush()
-
- def profile(message_size, response_count):
- request = unary_stream_benchmark_pb2.BenchmarkRequest(
- message_size=message_size, response_count=response_count)
- with grpc.insecure_channel('[::]:{}'.format(_PORT),
- options=_GRPC_CHANNEL_OPTIONS) as channel:
- stub = unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceStub(
- channel)
- start = datetime.datetime.now()
- call = stub.Benchmark(request, wait_for_ready=True)
- for message in call:
- pass
- end = datetime.datetime.now()
- return end - start
-
- def main():
- with _running_server():
- for i in range(1000):
- latency = profile(_MESSAGE_SIZE, 1024)
- sys.stdout.write("{}\n".format(latency.total_seconds()))
- sys.stdout.flush()
-
- if __name__ == '__main__':
- main()
-
-except ImportError:
- # NOTE(rbellevi): The test runner should not load this module.
- pass
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py
deleted file mode 100644
index 1e120359cf..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py
deleted file mode 100644
index 3226d1fb02..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""An example gRPC Python-using application's common code elements."""
-
-from tests.testing.proto import requests_pb2
-from tests.testing.proto import services_pb2
-
-SERVICE_NAME = 'tests_of_grpc_testing.FirstService'
-UNARY_UNARY_METHOD_NAME = 'UnUn'
-UNARY_STREAM_METHOD_NAME = 'UnStre'
-STREAM_UNARY_METHOD_NAME = 'StreUn'
-STREAM_STREAM_METHOD_NAME = 'StreStre'
-
-UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=2)
-ERRONEOUS_UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=3)
-UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=5)
-ERRONEOUS_UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=7)
-UNARY_STREAM_REQUEST = requests_pb2.Charm(first_charm_field=11)
-STREAM_UNARY_REQUEST = requests_pb2.Charm(first_charm_field=13)
-STREAM_UNARY_RESPONSE = services_pb2.Strange(first_strange_field=17)
-STREAM_STREAM_REQUEST = requests_pb2.Top(first_top_field=19)
-STREAM_STREAM_RESPONSE = services_pb2.Bottom(first_bottom_field=23)
-TWO_STREAM_STREAM_RESPONSES = (STREAM_STREAM_RESPONSE,) * 2
-ABORT_REQUEST = requests_pb2.Up(first_up_field=42)
-ABORT_SUCCESS_QUERY = requests_pb2.Up(first_up_field=43)
-ABORT_NO_STATUS_RESPONSE = services_pb2.Down(first_down_field=50)
-ABORT_SUCCESS_RESPONSE = services_pb2.Down(first_down_field=51)
-ABORT_FAILURE_RESPONSE = services_pb2.Down(first_down_field=52)
-STREAM_STREAM_MUTATING_REQUEST = requests_pb2.Top(first_top_field=24601)
-STREAM_STREAM_MUTATING_COUNT = 2
-
-INFINITE_REQUEST_STREAM_TIMEOUT = 0.2
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py
deleted file mode 100644
index cac813c04d..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import grpc_testing
-
-from tests.testing.proto import requests_pb2
-from tests.testing.proto import services_pb2
-
-# TODO(https://github.com/grpc/grpc/issues/11657): Eliminate this entirely.
-# TODO(https://github.com/protocolbuffers/protobuf/issues/3452): Eliminate this if/else.
-if services_pb2.DESCRIPTOR.services_by_name.get('FirstService') is None:
- FIRST_SERVICE = 'Fix protobuf issue 3452!'
- FIRST_SERVICE_UNUN = 'Fix protobuf issue 3452!'
- FIRST_SERVICE_UNSTRE = 'Fix protobuf issue 3452!'
- FIRST_SERVICE_STREUN = 'Fix protobuf issue 3452!'
- FIRST_SERVICE_STRESTRE = 'Fix protobuf issue 3452!'
-else:
- FIRST_SERVICE = services_pb2.DESCRIPTOR.services_by_name['FirstService']
- FIRST_SERVICE_UNUN = FIRST_SERVICE.methods_by_name['UnUn']
- FIRST_SERVICE_UNSTRE = FIRST_SERVICE.methods_by_name['UnStre']
- FIRST_SERVICE_STREUN = FIRST_SERVICE.methods_by_name['StreUn']
- FIRST_SERVICE_STRESTRE = FIRST_SERVICE.methods_by_name['StreStre']
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py
deleted file mode 100644
index 548ed30c93..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py
+++ /dev/null
@@ -1,235 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""An example gRPC Python-using client-side application."""
-
-import collections
-import enum
-import threading
-import time
-
-import grpc
-
-from tests.testing import _application_common
-from tests.testing.proto import requests_pb2
-from tests.testing.proto import services_pb2
-from tests.testing.proto import services_pb2_grpc
-from tests.unit.framework.common import test_constants
-
-
-@enum.unique
-class Scenario(enum.Enum):
- UNARY_UNARY = 'unary unary'
- UNARY_STREAM = 'unary stream'
- STREAM_UNARY = 'stream unary'
- STREAM_STREAM = 'stream stream'
- CONCURRENT_STREAM_UNARY = 'concurrent stream unary'
- CONCURRENT_STREAM_STREAM = 'concurrent stream stream'
- CANCEL_UNARY_UNARY = 'cancel unary unary'
- CANCEL_UNARY_STREAM = 'cancel unary stream'
- INFINITE_REQUEST_STREAM = 'infinite request stream'
-
-
-class Outcome(collections.namedtuple('Outcome', ('kind', 'code', 'details'))):
- """Outcome of a client application scenario.
-
- Attributes:
- kind: A Kind value describing the overall kind of scenario execution.
- code: A grpc.StatusCode value. Only valid if kind is Kind.RPC_ERROR.
- details: A status details string. Only valid if kind is Kind.RPC_ERROR.
- """
-
- @enum.unique
- class Kind(enum.Enum):
- SATISFACTORY = 'satisfactory'
- UNSATISFACTORY = 'unsatisfactory'
- RPC_ERROR = 'rpc error'
-
-
-_SATISFACTORY_OUTCOME = Outcome(Outcome.Kind.SATISFACTORY, None, None)
-_UNSATISFACTORY_OUTCOME = Outcome(Outcome.Kind.UNSATISFACTORY, None, None)
-
-
-class _Pipe(object):
-
- def __init__(self):
- self._condition = threading.Condition()
- self._values = []
- self._open = True
-
- def __iter__(self):
- return self
-
- def _next(self):
- with self._condition:
- while True:
- if self._values:
- return self._values.pop(0)
- elif not self._open:
- raise StopIteration()
- else:
- self._condition.wait()
-
- def __next__(self): # (Python 3 Iterator Protocol)
- return self._next()
-
- def next(self): # (Python 2 Iterator Protocol)
- return self._next()
-
- def add(self, value):
- with self._condition:
- self._values.append(value)
- self._condition.notify_all()
-
- def close(self):
- with self._condition:
- self._open = False
- self._condition.notify_all()
-
-
-def _run_unary_unary(stub):
- response = stub.UnUn(_application_common.UNARY_UNARY_REQUEST)
- if _application_common.UNARY_UNARY_RESPONSE == response:
- return _SATISFACTORY_OUTCOME
- else:
- return _UNSATISFACTORY_OUTCOME
-
-
-def _run_unary_stream(stub):
- response_iterator = stub.UnStre(_application_common.UNARY_STREAM_REQUEST)
- try:
- next(response_iterator)
- except StopIteration:
- return _SATISFACTORY_OUTCOME
- else:
- return _UNSATISFACTORY_OUTCOME
-
-
-def _run_stream_unary(stub):
- response, call = stub.StreUn.with_call(
- iter((_application_common.STREAM_UNARY_REQUEST,) * 3))
- if (_application_common.STREAM_UNARY_RESPONSE == response and
- call.code() is grpc.StatusCode.OK):
- return _SATISFACTORY_OUTCOME
- else:
- return _UNSATISFACTORY_OUTCOME
-
-
-def _run_stream_stream(stub):
- request_pipe = _Pipe()
- response_iterator = stub.StreStre(iter(request_pipe))
- request_pipe.add(_application_common.STREAM_STREAM_REQUEST)
- first_responses = next(response_iterator), next(response_iterator)
- request_pipe.add(_application_common.STREAM_STREAM_REQUEST)
- second_responses = next(response_iterator), next(response_iterator)
- request_pipe.close()
- try:
- next(response_iterator)
- except StopIteration:
- unexpected_extra_response = False
- else:
- unexpected_extra_response = True
- if (first_responses == _application_common.TWO_STREAM_STREAM_RESPONSES and
- second_responses == _application_common.TWO_STREAM_STREAM_RESPONSES
- and not unexpected_extra_response):
- return _SATISFACTORY_OUTCOME
- else:
- return _UNSATISFACTORY_OUTCOME
-
-
-def _run_concurrent_stream_unary(stub):
- future_calls = tuple(
- stub.StreUn.future(iter((_application_common.STREAM_UNARY_REQUEST,) *
- 3))
- for _ in range(test_constants.THREAD_CONCURRENCY))
- for future_call in future_calls:
- if future_call.code() is grpc.StatusCode.OK:
- response = future_call.result()
- if _application_common.STREAM_UNARY_RESPONSE != response:
- return _UNSATISFACTORY_OUTCOME
- else:
- return _UNSATISFACTORY_OUTCOME
- else:
- return _SATISFACTORY_OUTCOME
-
-
-def _run_concurrent_stream_stream(stub):
- condition = threading.Condition()
- outcomes = [None] * test_constants.RPC_CONCURRENCY
-
- def run_stream_stream(index):
- outcome = _run_stream_stream(stub)
- with condition:
- outcomes[index] = outcome
- condition.notify()
-
- for index in range(test_constants.RPC_CONCURRENCY):
- thread = threading.Thread(target=run_stream_stream, args=(index,))
- thread.start()
- with condition:
- while True:
- if all(outcomes):
- for outcome in outcomes:
- if outcome.kind is not Outcome.Kind.SATISFACTORY:
- return _UNSATISFACTORY_OUTCOME
- else:
- return _SATISFACTORY_OUTCOME
- else:
- condition.wait()
-
-
-def _run_cancel_unary_unary(stub):
- response_future_call = stub.UnUn.future(
- _application_common.UNARY_UNARY_REQUEST)
- initial_metadata = response_future_call.initial_metadata()
- cancelled = response_future_call.cancel()
- if initial_metadata is not None and cancelled:
- return _SATISFACTORY_OUTCOME
- else:
- return _UNSATISFACTORY_OUTCOME
-
-
-def _run_infinite_request_stream(stub):
-
- def infinite_request_iterator():
- while True:
- yield _application_common.STREAM_UNARY_REQUEST
-
- response_future_call = stub.StreUn.future(
- infinite_request_iterator(),
- timeout=_application_common.INFINITE_REQUEST_STREAM_TIMEOUT)
- if response_future_call.code() is grpc.StatusCode.DEADLINE_EXCEEDED:
- return _SATISFACTORY_OUTCOME
- else:
- return _UNSATISFACTORY_OUTCOME
-
-
-_IMPLEMENTATIONS = {
- Scenario.UNARY_UNARY: _run_unary_unary,
- Scenario.UNARY_STREAM: _run_unary_stream,
- Scenario.STREAM_UNARY: _run_stream_unary,
- Scenario.STREAM_STREAM: _run_stream_stream,
- Scenario.CONCURRENT_STREAM_UNARY: _run_concurrent_stream_unary,
- Scenario.CONCURRENT_STREAM_STREAM: _run_concurrent_stream_stream,
- Scenario.CANCEL_UNARY_UNARY: _run_cancel_unary_unary,
- Scenario.INFINITE_REQUEST_STREAM: _run_infinite_request_stream,
-}
-
-
-def run(scenario, channel):
- stub = services_pb2_grpc.FirstServiceStub(channel)
- try:
- return _IMPLEMENTATIONS[scenario](stub)
- except grpc.RpcError as rpc_error:
- return Outcome(Outcome.Kind.RPC_ERROR, rpc_error.code(),
- rpc_error.details())
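
The _Pipe class above is what lets _run_stream_stream keep its request stream open while responses are still arriving: a thread-safe, closable iterator that one thread feeds and another drains. Below is a minimal, gRPC-free sketch of the same pattern; the ClosablePipe name and the small driver at the end are illustrative and not part of the deleted module.

import threading

class ClosablePipe(object):
    """Thread-safe iterator: producers add(), consumers iterate, close() ends it."""

    def __init__(self):
        self._condition = threading.Condition()
        self._values = []
        self._open = True

    def __iter__(self):
        return self

    def __next__(self):
        with self._condition:
            while True:
                if self._values:
                    return self._values.pop(0)
                if not self._open:
                    raise StopIteration()
                self._condition.wait()

    def add(self, value):
        with self._condition:
            self._values.append(value)
            self._condition.notify_all()

    def close(self):
        with self._condition:
            self._open = False
            self._condition.notify_all()

if __name__ == '__main__':
    pipe = ClosablePipe()
    received = []
    consumer = threading.Thread(target=lambda: received.extend(pipe))
    consumer.start()
    pipe.add('first request')   # requests trickle in over time...
    pipe.add('second request')
    pipe.close()                # ...until the producer signals end-of-stream
    consumer.join()
    assert received == ['first request', 'second request']
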
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py
deleted file mode 100644
index d92a1a9733..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py
+++ /dev/null
@@ -1,308 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from concurrent import futures
-import time
-import unittest
-
-import grpc
-from grpc.framework.foundation import logging_pool
-import grpc_testing
-
-from tests.testing import _application_common
-from tests.testing import _application_testing_common
-from tests.testing import _client_application
-from tests.testing.proto import requests_pb2
-from tests.testing.proto import services_pb2
-from tests.unit.framework.common import test_constants
-
-
-# TODO(https://github.com/protocolbuffers/protobuf/issues/3452): Drop this skip.
-@unittest.skipIf(
- services_pb2.DESCRIPTOR.services_by_name.get('FirstService') is None,
- 'Fix protobuf issue 3452!')
-class ClientTest(unittest.TestCase):
-
- def setUp(self):
- # In this test the client-side application under test executes in
- # a separate thread while we retain use of the test thread to "play
- # server".
- self._client_execution_thread_pool = logging_pool.pool(1)
-
- self._fake_time = grpc_testing.strict_fake_time(time.time())
- self._real_time = grpc_testing.strict_real_time()
- self._fake_time_channel = grpc_testing.channel(
- services_pb2.DESCRIPTOR.services_by_name.values(), self._fake_time)
- self._real_time_channel = grpc_testing.channel(
- services_pb2.DESCRIPTOR.services_by_name.values(), self._real_time)
-
- def tearDown(self):
- self._client_execution_thread_pool.shutdown(wait=True)
-
- def test_successful_unary_unary(self):
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run, _client_application.Scenario.UNARY_UNARY,
- self._real_time_channel)
- invocation_metadata, request, rpc = (
- self._real_time_channel.take_unary_unary(
- _application_testing_common.FIRST_SERVICE_UNUN))
- rpc.send_initial_metadata(())
- rpc.terminate(_application_common.UNARY_UNARY_RESPONSE, (),
- grpc.StatusCode.OK, '')
- application_return_value = application_future.result()
-
- self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request)
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.SATISFACTORY)
-
- def test_successful_unary_stream(self):
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run, _client_application.Scenario.UNARY_STREAM,
- self._fake_time_channel)
- invocation_metadata, request, rpc = (
- self._fake_time_channel.take_unary_stream(
- _application_testing_common.FIRST_SERVICE_UNSTRE))
- rpc.send_initial_metadata(())
- rpc.terminate((), grpc.StatusCode.OK, '')
- application_return_value = application_future.result()
-
- self.assertEqual(_application_common.UNARY_STREAM_REQUEST, request)
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.SATISFACTORY)
-
- def test_successful_stream_unary(self):
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run, _client_application.Scenario.STREAM_UNARY,
- self._real_time_channel)
- invocation_metadata, rpc = self._real_time_channel.take_stream_unary(
- _application_testing_common.FIRST_SERVICE_STREUN)
- rpc.send_initial_metadata(())
- first_request = rpc.take_request()
- second_request = rpc.take_request()
- third_request = rpc.take_request()
- rpc.requests_closed()
- rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (),
- grpc.StatusCode.OK, '')
- application_return_value = application_future.result()
-
- self.assertEqual(_application_common.STREAM_UNARY_REQUEST,
- first_request)
- self.assertEqual(_application_common.STREAM_UNARY_REQUEST,
- second_request)
- self.assertEqual(_application_common.STREAM_UNARY_REQUEST,
- third_request)
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.SATISFACTORY)
-
- def test_successful_stream_stream(self):
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run, _client_application.Scenario.STREAM_STREAM,
- self._fake_time_channel)
- invocation_metadata, rpc = self._fake_time_channel.take_stream_stream(
- _application_testing_common.FIRST_SERVICE_STRESTRE)
- first_request = rpc.take_request()
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- second_request = rpc.take_request()
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.requests_closed()
- rpc.terminate((), grpc.StatusCode.OK, '')
- application_return_value = application_future.result()
-
- self.assertEqual(_application_common.STREAM_STREAM_REQUEST,
- first_request)
- self.assertEqual(_application_common.STREAM_STREAM_REQUEST,
- second_request)
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.SATISFACTORY)
-
- def test_concurrent_stream_stream(self):
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run,
- _client_application.Scenario.CONCURRENT_STREAM_STREAM,
- self._real_time_channel)
- rpcs = []
- for _ in range(test_constants.RPC_CONCURRENCY):
- invocation_metadata, rpc = (
- self._real_time_channel.take_stream_stream(
- _application_testing_common.FIRST_SERVICE_STRESTRE))
- rpcs.append(rpc)
- requests = {}
- for rpc in rpcs:
- requests[rpc] = [rpc.take_request()]
- for rpc in rpcs:
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- for rpc in rpcs:
- requests[rpc].append(rpc.take_request())
- for rpc in rpcs:
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- for rpc in rpcs:
- rpc.requests_closed()
- for rpc in rpcs:
- rpc.terminate((), grpc.StatusCode.OK, '')
- application_return_value = application_future.result()
-
- for requests_of_one_rpc in requests.values():
- for request in requests_of_one_rpc:
- self.assertEqual(_application_common.STREAM_STREAM_REQUEST,
- request)
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.SATISFACTORY)
-
- def test_cancelled_unary_unary(self):
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run,
- _client_application.Scenario.CANCEL_UNARY_UNARY,
- self._fake_time_channel)
- invocation_metadata, request, rpc = (
- self._fake_time_channel.take_unary_unary(
- _application_testing_common.FIRST_SERVICE_UNUN))
- rpc.send_initial_metadata(())
- rpc.cancelled()
- application_return_value = application_future.result()
-
- self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request)
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.SATISFACTORY)
-
- def test_status_stream_unary(self):
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run,
- _client_application.Scenario.CONCURRENT_STREAM_UNARY,
- self._fake_time_channel)
- rpcs = tuple(
- self._fake_time_channel.take_stream_unary(
- _application_testing_common.FIRST_SERVICE_STREUN)[1]
- for _ in range(test_constants.THREAD_CONCURRENCY))
- for rpc in rpcs:
- rpc.take_request()
- rpc.take_request()
- rpc.take_request()
- rpc.requests_closed()
- rpc.send_initial_metadata(((
- 'my_metadata_key',
- 'My Metadata Value!',
- ),))
- for rpc in rpcs[:-1]:
- rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (),
- grpc.StatusCode.OK, '')
- rpcs[-1].terminate(_application_common.STREAM_UNARY_RESPONSE, (),
- grpc.StatusCode.RESOURCE_EXHAUSTED,
- 'nope; not able to handle all those RPCs!')
- application_return_value = application_future.result()
-
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.UNSATISFACTORY)
-
- def test_status_stream_stream(self):
- code = grpc.StatusCode.DEADLINE_EXCEEDED
- details = 'test deadline exceeded!'
-
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run, _client_application.Scenario.STREAM_STREAM,
- self._real_time_channel)
- invocation_metadata, rpc = self._real_time_channel.take_stream_stream(
- _application_testing_common.FIRST_SERVICE_STRESTRE)
- first_request = rpc.take_request()
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- second_request = rpc.take_request()
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.requests_closed()
- rpc.terminate((), code, details)
- application_return_value = application_future.result()
-
- self.assertEqual(_application_common.STREAM_STREAM_REQUEST,
- first_request)
- self.assertEqual(_application_common.STREAM_STREAM_REQUEST,
- second_request)
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.RPC_ERROR)
- self.assertIs(application_return_value.code, code)
- self.assertEqual(application_return_value.details, details)
-
- def test_misbehaving_server_unary_unary(self):
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run, _client_application.Scenario.UNARY_UNARY,
- self._fake_time_channel)
- invocation_metadata, request, rpc = (
- self._fake_time_channel.take_unary_unary(
- _application_testing_common.FIRST_SERVICE_UNUN))
- rpc.send_initial_metadata(())
- rpc.terminate(_application_common.ERRONEOUS_UNARY_UNARY_RESPONSE, (),
- grpc.StatusCode.OK, '')
- application_return_value = application_future.result()
-
- self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request)
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.UNSATISFACTORY)
-
- def test_misbehaving_server_stream_stream(self):
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run, _client_application.Scenario.STREAM_STREAM,
- self._real_time_channel)
- invocation_metadata, rpc = self._real_time_channel.take_stream_stream(
- _application_testing_common.FIRST_SERVICE_STRESTRE)
- first_request = rpc.take_request()
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- second_request = rpc.take_request()
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.send_response(_application_common.STREAM_STREAM_RESPONSE)
- rpc.requests_closed()
- rpc.terminate((), grpc.StatusCode.OK, '')
- application_return_value = application_future.result()
-
- self.assertEqual(_application_common.STREAM_STREAM_REQUEST,
- first_request)
- self.assertEqual(_application_common.STREAM_STREAM_REQUEST,
- second_request)
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.UNSATISFACTORY)
-
- def test_infinite_request_stream_real_time(self):
- application_future = self._client_execution_thread_pool.submit(
- _client_application.run,
- _client_application.Scenario.INFINITE_REQUEST_STREAM,
- self._real_time_channel)
- invocation_metadata, rpc = self._real_time_channel.take_stream_unary(
- _application_testing_common.FIRST_SERVICE_STREUN)
- rpc.send_initial_metadata(())
- first_request = rpc.take_request()
- second_request = rpc.take_request()
- third_request = rpc.take_request()
- self._real_time.sleep_for(
- _application_common.INFINITE_REQUEST_STREAM_TIMEOUT)
- rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (),
- grpc.StatusCode.DEADLINE_EXCEEDED, '')
- application_return_value = application_future.result()
-
- self.assertEqual(_application_common.STREAM_UNARY_REQUEST,
- first_request)
- self.assertEqual(_application_common.STREAM_UNARY_REQUEST,
- second_request)
- self.assertEqual(_application_common.STREAM_UNARY_REQUEST,
- third_request)
- self.assertIs(application_return_value.kind,
- _client_application.Outcome.Kind.SATISFACTORY)
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
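
Every test above follows the same orchestration: the client application under test is submitted to a single-thread pool, the test thread "plays server" against the grpc_testing channel, and the test finally joins the application future and asserts on its Outcome. A gRPC-free sketch of that shape, using only the standard library, with queue.Queue standing in for the test channel and all names illustrative:

from concurrent import futures
import queue

def client_under_test(requests, responses):
    requests.put('request')            # the "RPC" the client issues
    return responses.get(timeout=5)    # blocks until the "server" answers

requests, responses = queue.Queue(), queue.Queue()
with futures.ThreadPoolExecutor(max_workers=1) as pool:
    application_future = pool.submit(client_under_test, requests, responses)
    assert requests.get(timeout=5) == 'request'   # the test thread takes the RPC
    responses.put('response')                     # ...and terminates it
    assert application_future.result(timeout=5) == 'response'
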
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py
deleted file mode 100644
index ffab6f5b93..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""An example gRPC Python-using server-side application."""
-
-import threading
-
-import grpc
-
-# requests_pb2 is a semantic dependency of this module.
-from tests.testing import _application_common
-from tests.testing.proto import requests_pb2 # pylint: disable=unused-import
-from tests.testing.proto import services_pb2
-from tests.testing.proto import services_pb2_grpc
-
-
-class FirstServiceServicer(services_pb2_grpc.FirstServiceServicer):
- """Services RPCs."""
-
- def __init__(self):
- self._abort_lock = threading.RLock()
- self._abort_response = _application_common.ABORT_NO_STATUS_RESPONSE
-
- def UnUn(self, request, context):
- if request == _application_common.UNARY_UNARY_REQUEST:
- return _application_common.UNARY_UNARY_RESPONSE
- elif request == _application_common.ABORT_REQUEST:
- with self._abort_lock:
- try:
- context.abort(grpc.StatusCode.PERMISSION_DENIED,
- "Denying permission to test abort.")
- except Exception as e: # pylint: disable=broad-except
- self._abort_response = _application_common.ABORT_SUCCESS_RESPONSE
- else:
-                    self._abort_response = _application_common.ABORT_FAILURE_RESPONSE
- return None # NOTE: For the linter.
- elif request == _application_common.ABORT_SUCCESS_QUERY:
- with self._abort_lock:
- return self._abort_response
- else:
- context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
- context.set_details('Something is wrong with your request!')
- return services_pb2.Down()
-
- def UnStre(self, request, context):
- if _application_common.UNARY_STREAM_REQUEST != request:
- context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
- context.set_details('Something is wrong with your request!')
- return
- yield services_pb2.Strange() # pylint: disable=unreachable
-
- def StreUn(self, request_iterator, context):
- context.send_initial_metadata(((
- 'server_application_metadata_key',
- 'Hi there!',
- ),))
- for request in request_iterator:
- if request != _application_common.STREAM_UNARY_REQUEST:
- context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
- context.set_details('Something is wrong with your request!')
- return services_pb2.Strange()
- elif not context.is_active():
- return services_pb2.Strange()
- else:
- return _application_common.STREAM_UNARY_RESPONSE
-
- def StreStre(self, request_iterator, context):
- valid_requests = (_application_common.STREAM_STREAM_REQUEST,
- _application_common.STREAM_STREAM_MUTATING_REQUEST)
- for request in request_iterator:
- if request not in valid_requests:
- context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
- context.set_details('Something is wrong with your request!')
- return
- elif not context.is_active():
- return
- elif request == _application_common.STREAM_STREAM_REQUEST:
- yield _application_common.STREAM_STREAM_RESPONSE
- yield _application_common.STREAM_STREAM_RESPONSE
- elif request == _application_common.STREAM_STREAM_MUTATING_REQUEST:
- response = services_pb2.Bottom()
- for i in range(
- _application_common.STREAM_STREAM_MUTATING_COUNT):
- response.first_bottom_field = i
- yield response
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py
deleted file mode 100644
index 617a41b7e5..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-import unittest
-
-import grpc
-import grpc_testing
-
-from tests.testing import _application_common
-from tests.testing import _application_testing_common
-from tests.testing import _server_application
-from tests.testing.proto import services_pb2
-
-
-class FirstServiceServicerTest(unittest.TestCase):
-
- def setUp(self):
- self._real_time = grpc_testing.strict_real_time()
- self._fake_time = grpc_testing.strict_fake_time(time.time())
- servicer = _server_application.FirstServiceServicer()
- descriptors_to_servicers = {
- _application_testing_common.FIRST_SERVICE: servicer
- }
- self._real_time_server = grpc_testing.server_from_dictionary(
- descriptors_to_servicers, self._real_time)
- self._fake_time_server = grpc_testing.server_from_dictionary(
- descriptors_to_servicers, self._fake_time)
-
- def test_successful_unary_unary(self):
- rpc = self._real_time_server.invoke_unary_unary(
- _application_testing_common.FIRST_SERVICE_UNUN, (),
- _application_common.UNARY_UNARY_REQUEST, None)
- initial_metadata = rpc.initial_metadata()
- response, trailing_metadata, code, details = rpc.termination()
-
- self.assertEqual(_application_common.UNARY_UNARY_RESPONSE, response)
- self.assertIs(code, grpc.StatusCode.OK)
-
- def test_successful_unary_stream(self):
- rpc = self._real_time_server.invoke_unary_stream(
- _application_testing_common.FIRST_SERVICE_UNSTRE, (),
- _application_common.UNARY_STREAM_REQUEST, None)
- initial_metadata = rpc.initial_metadata()
- trailing_metadata, code, details = rpc.termination()
-
- self.assertIs(code, grpc.StatusCode.OK)
-
- def test_successful_stream_unary(self):
- rpc = self._real_time_server.invoke_stream_unary(
- _application_testing_common.FIRST_SERVICE_STREUN, (), None)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- rpc.requests_closed()
- initial_metadata = rpc.initial_metadata()
- response, trailing_metadata, code, details = rpc.termination()
-
- self.assertEqual(_application_common.STREAM_UNARY_RESPONSE, response)
- self.assertIs(code, grpc.StatusCode.OK)
-
- def test_successful_stream_stream(self):
- rpc = self._real_time_server.invoke_stream_stream(
- _application_testing_common.FIRST_SERVICE_STRESTRE, (), None)
- rpc.send_request(_application_common.STREAM_STREAM_REQUEST)
- initial_metadata = rpc.initial_metadata()
- responses = [
- rpc.take_response(),
- rpc.take_response(),
- ]
- rpc.send_request(_application_common.STREAM_STREAM_REQUEST)
- rpc.send_request(_application_common.STREAM_STREAM_REQUEST)
- responses.extend([
- rpc.take_response(),
- rpc.take_response(),
- rpc.take_response(),
- rpc.take_response(),
- ])
- rpc.requests_closed()
- trailing_metadata, code, details = rpc.termination()
-
- for response in responses:
- self.assertEqual(_application_common.STREAM_STREAM_RESPONSE,
- response)
- self.assertIs(code, grpc.StatusCode.OK)
-
- def test_mutating_stream_stream(self):
- rpc = self._real_time_server.invoke_stream_stream(
- _application_testing_common.FIRST_SERVICE_STRESTRE, (), None)
- rpc.send_request(_application_common.STREAM_STREAM_MUTATING_REQUEST)
- initial_metadata = rpc.initial_metadata()
- responses = [
- rpc.take_response()
- for _ in range(_application_common.STREAM_STREAM_MUTATING_COUNT)
- ]
- rpc.send_request(_application_common.STREAM_STREAM_MUTATING_REQUEST)
- responses.extend([
- rpc.take_response()
- for _ in range(_application_common.STREAM_STREAM_MUTATING_COUNT)
- ])
- rpc.requests_closed()
- _, _, _ = rpc.termination()
- expected_responses = (
- services_pb2.Bottom(first_bottom_field=0),
- services_pb2.Bottom(first_bottom_field=1),
- services_pb2.Bottom(first_bottom_field=0),
- services_pb2.Bottom(first_bottom_field=1),
- )
- self.assertSequenceEqual(expected_responses, responses)
-
- def test_server_rpc_idempotence(self):
- rpc = self._real_time_server.invoke_unary_unary(
- _application_testing_common.FIRST_SERVICE_UNUN, (),
- _application_common.UNARY_UNARY_REQUEST, None)
- first_initial_metadata = rpc.initial_metadata()
- second_initial_metadata = rpc.initial_metadata()
- third_initial_metadata = rpc.initial_metadata()
- first_termination = rpc.termination()
- second_termination = rpc.termination()
- third_termination = rpc.termination()
-
- for later_initial_metadata in (
- second_initial_metadata,
- third_initial_metadata,
- ):
- self.assertEqual(first_initial_metadata, later_initial_metadata)
- response = first_termination[0]
- terminal_metadata = first_termination[1]
- code = first_termination[2]
- details = first_termination[3]
- for later_termination in (
- second_termination,
- third_termination,
- ):
- self.assertEqual(response, later_termination[0])
- self.assertEqual(terminal_metadata, later_termination[1])
- self.assertIs(code, later_termination[2])
- self.assertEqual(details, later_termination[3])
- self.assertEqual(_application_common.UNARY_UNARY_RESPONSE, response)
- self.assertIs(code, grpc.StatusCode.OK)
-
- def test_misbehaving_client_unary_unary(self):
- rpc = self._real_time_server.invoke_unary_unary(
- _application_testing_common.FIRST_SERVICE_UNUN, (),
- _application_common.ERRONEOUS_UNARY_UNARY_REQUEST, None)
- initial_metadata = rpc.initial_metadata()
- response, trailing_metadata, code, details = rpc.termination()
-
- self.assertIsNot(code, grpc.StatusCode.OK)
-
- def test_infinite_request_stream_real_time(self):
- rpc = self._real_time_server.invoke_stream_unary(
- _application_testing_common.FIRST_SERVICE_STREUN, (),
- _application_common.INFINITE_REQUEST_STREAM_TIMEOUT)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- initial_metadata = rpc.initial_metadata()
- self._real_time.sleep_for(
- _application_common.INFINITE_REQUEST_STREAM_TIMEOUT * 2)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- response, trailing_metadata, code, details = rpc.termination()
-
- self.assertIs(code, grpc.StatusCode.DEADLINE_EXCEEDED)
-
- def test_infinite_request_stream_fake_time(self):
- rpc = self._fake_time_server.invoke_stream_unary(
- _application_testing_common.FIRST_SERVICE_STREUN, (),
- _application_common.INFINITE_REQUEST_STREAM_TIMEOUT)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- initial_metadata = rpc.initial_metadata()
- self._fake_time.sleep_for(
- _application_common.INFINITE_REQUEST_STREAM_TIMEOUT * 2)
- rpc.send_request(_application_common.STREAM_UNARY_REQUEST)
- response, trailing_metadata, code, details = rpc.termination()
-
- self.assertIs(code, grpc.StatusCode.DEADLINE_EXCEEDED)
-
- def test_servicer_context_abort(self):
- rpc = self._real_time_server.invoke_unary_unary(
- _application_testing_common.FIRST_SERVICE_UNUN, (),
- _application_common.ABORT_REQUEST, None)
- _, _, code, _ = rpc.termination()
- self.assertIs(code, grpc.StatusCode.PERMISSION_DENIED)
- rpc = self._real_time_server.invoke_unary_unary(
- _application_testing_common.FIRST_SERVICE_UNUN, (),
- _application_common.ABORT_SUCCESS_QUERY, None)
- response, _, code, _ = rpc.termination()
- self.assertEqual(_application_common.ABORT_SUCCESS_RESPONSE, response)
- self.assertIs(code, grpc.StatusCode.OK)
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py
deleted file mode 100644
index cab665c045..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import random
-import threading
-import time
-import unittest
-
-import grpc_testing
-
-_QUANTUM = 0.3
-_MANY = 10000
-# Tests that run in real time can either wait for the scheduler to
-# eventually run what needs to be run (and risk timing out) or declare
-# that the scheduler didn't schedule work reasonably fast enough. We
-# choose the latter for this test.
-_PATHOLOGICAL_SCHEDULING = 'pathological thread scheduling!'
-
-
-class _TimeNoter(object):
-
- def __init__(self, time):
- self._condition = threading.Condition()
- self._time = time
- self._call_times = []
-
- def __call__(self):
- with self._condition:
- self._call_times.append(self._time.time())
-
- def call_times(self):
- with self._condition:
- return tuple(self._call_times)
-
-
-class TimeTest(object):
-
- def test_sleep_for(self):
- start_time = self._time.time()
- self._time.sleep_for(_QUANTUM)
- end_time = self._time.time()
-
- self.assertLessEqual(start_time + _QUANTUM, end_time)
-
- def test_sleep_until(self):
- start_time = self._time.time()
- self._time.sleep_until(start_time + _QUANTUM)
- end_time = self._time.time()
-
- self.assertLessEqual(start_time + _QUANTUM, end_time)
-
- def test_call_in(self):
- time_noter = _TimeNoter(self._time)
-
- start_time = self._time.time()
- self._time.call_in(time_noter, _QUANTUM)
- self._time.sleep_for(_QUANTUM * 2)
- call_times = time_noter.call_times()
-
- self.assertTrue(call_times, msg=_PATHOLOGICAL_SCHEDULING)
- self.assertLessEqual(start_time + _QUANTUM, call_times[0])
-
- def test_call_at(self):
- time_noter = _TimeNoter(self._time)
-
- start_time = self._time.time()
- self._time.call_at(time_noter, self._time.time() + _QUANTUM)
- self._time.sleep_for(_QUANTUM * 2)
- call_times = time_noter.call_times()
-
- self.assertTrue(call_times, msg=_PATHOLOGICAL_SCHEDULING)
- self.assertLessEqual(start_time + _QUANTUM, call_times[0])
-
- def test_cancel(self):
- time_noter = _TimeNoter(self._time)
-
- future = self._time.call_in(time_noter, _QUANTUM * 2)
- self._time.sleep_for(_QUANTUM)
- cancelled = future.cancel()
- self._time.sleep_for(_QUANTUM * 2)
- call_times = time_noter.call_times()
-
- self.assertFalse(call_times, msg=_PATHOLOGICAL_SCHEDULING)
- self.assertTrue(cancelled)
- self.assertTrue(future.cancelled())
-
- def test_many(self):
- test_events = tuple(threading.Event() for _ in range(_MANY))
- possibly_cancelled_futures = {}
- background_noise_futures = []
-
- for test_event in test_events:
- possibly_cancelled_futures[test_event] = self._time.call_in(
- test_event.set, _QUANTUM * (2 + random.random()))
- for _ in range(_MANY):
- background_noise_futures.append(
- self._time.call_in(threading.Event().set,
- _QUANTUM * 1000 * random.random()))
- self._time.sleep_for(_QUANTUM)
- cancelled = set()
- for test_event, test_future in possibly_cancelled_futures.items():
- if bool(random.randint(0, 1)) and test_future.cancel():
- cancelled.add(test_event)
- self._time.sleep_for(_QUANTUM * 3)
-
- for test_event in test_events:
- (self.assertFalse if test_event in cancelled else self.assertTrue)(
- test_event.is_set())
- for background_noise_future in background_noise_futures:
- background_noise_future.cancel()
-
- def test_same_behavior_used_several_times(self):
- time_noter = _TimeNoter(self._time)
-
- start_time = self._time.time()
- first_future_at_one = self._time.call_in(time_noter, _QUANTUM)
- second_future_at_one = self._time.call_in(time_noter, _QUANTUM)
- first_future_at_three = self._time.call_in(time_noter, _QUANTUM * 3)
- second_future_at_three = self._time.call_in(time_noter, _QUANTUM * 3)
- self._time.sleep_for(_QUANTUM * 2)
- first_future_at_one_cancelled = first_future_at_one.cancel()
- second_future_at_one_cancelled = second_future_at_one.cancel()
- first_future_at_three_cancelled = first_future_at_three.cancel()
- self._time.sleep_for(_QUANTUM * 2)
- second_future_at_three_cancelled = second_future_at_three.cancel()
- first_future_at_three_cancelled_again = first_future_at_three.cancel()
- call_times = time_noter.call_times()
-
- self.assertEqual(3, len(call_times), msg=_PATHOLOGICAL_SCHEDULING)
- self.assertFalse(first_future_at_one_cancelled)
- self.assertFalse(second_future_at_one_cancelled)
- self.assertTrue(first_future_at_three_cancelled)
- self.assertFalse(second_future_at_three_cancelled)
- self.assertTrue(first_future_at_three_cancelled_again)
- self.assertLessEqual(start_time + _QUANTUM, call_times[0])
- self.assertLessEqual(start_time + _QUANTUM, call_times[1])
- self.assertLessEqual(start_time + _QUANTUM * 3, call_times[2])
-
-
-class StrictRealTimeTest(TimeTest, unittest.TestCase):
-
- def setUp(self):
- self._time = grpc_testing.strict_real_time()
-
-
-class StrictFakeTimeTest(TimeTest, unittest.TestCase):
-
- def setUp(self):
- self._time = grpc_testing.strict_fake_time(
- random.randint(0, int(time.time())))
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
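
TimeTest runs the same assertions against both strict_real_time and strict_fake_time; the point of the fake implementation is that sleep_for advances a synthetic clock and fires any callbacks that come due without consuming wall time, which is what keeps tests like test_call_in fast and deterministic. A minimal sketch of that property in isolation, assuming grpc_testing is installed and using only the calls exercised above (time, call_in, sleep_for):

import time

import grpc_testing

fake_time = grpc_testing.strict_fake_time(time.time())
fired_at = []

start = fake_time.time()
fake_time.call_in(lambda: fired_at.append(fake_time.time()), 30.0)
fake_time.sleep_for(60.0)  # advances the fake clock; returns almost immediately

assert fired_at and start + 30.0 <= fired_at[0]
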
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py
deleted file mode 100644
index 1e120359cf..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py
deleted file mode 100644
index 97a1308574..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py
+++ /dev/null
@@ -1,193 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests exposure of SSL auth context"""
-
-import logging
-import pickle
-import unittest
-
-import grpc
-from grpc import _channel
-from grpc.experimental import session_cache
-import six
-
-from tests.unit import resources
-from tests.unit import test_common
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x00\x00\x00'
-
-_UNARY_UNARY = '/test/UnaryUnary'
-
-_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
-_CLIENT_IDS = (
- b'*.test.google.fr',
- b'waterzooi.test.google.be',
- b'*.test.youtube.com',
- b'192.168.1.3',
-)
-_ID = 'id'
-_ID_KEY = 'id_key'
-_AUTH_CTX = 'auth_ctx'
-
-_PRIVATE_KEY = resources.private_key()
-_CERTIFICATE_CHAIN = resources.certificate_chain()
-_TEST_ROOT_CERTIFICATES = resources.test_root_certificates()
-_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),)
-_PROPERTY_OPTIONS = ((
- 'grpc.ssl_target_name_override',
- _SERVER_HOST_OVERRIDE,
-),)
-
-
-def handle_unary_unary(request, servicer_context):
- return pickle.dumps({
- _ID: servicer_context.peer_identities(),
- _ID_KEY: servicer_context.peer_identity_key(),
- _AUTH_CTX: servicer_context.auth_context()
- })
-
-
-class AuthContextTest(unittest.TestCase):
-
- def testInsecure(self):
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = test_common.test_server()
- server.add_generic_rpc_handlers((handler,))
- port = server.add_insecure_port('[::]:0')
- server.start()
-
- with grpc.insecure_channel('localhost:%d' % port) as channel:
- response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- server.stop(None)
-
- auth_data = pickle.loads(response)
- self.assertIsNone(auth_data[_ID])
- self.assertIsNone(auth_data[_ID_KEY])
- self.assertDictEqual({}, auth_data[_AUTH_CTX])
-
- def testSecureNoCert(self):
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = test_common.test_server()
- server.add_generic_rpc_handlers((handler,))
- server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
- port = server.add_secure_port('[::]:0', server_cred)
- server.start()
-
- channel_creds = grpc.ssl_channel_credentials(
- root_certificates=_TEST_ROOT_CERTIFICATES)
- channel = grpc.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=_PROPERTY_OPTIONS)
- response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- channel.close()
- server.stop(None)
-
- auth_data = pickle.loads(response)
- self.assertIsNone(auth_data[_ID])
- self.assertIsNone(auth_data[_ID_KEY])
- self.assertDictEqual(
- {
- 'security_level': [b'TSI_PRIVACY_AND_INTEGRITY'],
- 'transport_security_type': [b'ssl'],
- 'ssl_session_reused': [b'false'],
- }, auth_data[_AUTH_CTX])
-
- def testSecureClientCert(self):
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = test_common.test_server()
- server.add_generic_rpc_handlers((handler,))
- server_cred = grpc.ssl_server_credentials(
- _SERVER_CERTS,
- root_certificates=_TEST_ROOT_CERTIFICATES,
- require_client_auth=True)
- port = server.add_secure_port('[::]:0', server_cred)
- server.start()
-
- channel_creds = grpc.ssl_channel_credentials(
- root_certificates=_TEST_ROOT_CERTIFICATES,
- private_key=_PRIVATE_KEY,
- certificate_chain=_CERTIFICATE_CHAIN)
- channel = grpc.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=_PROPERTY_OPTIONS)
-
- response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- channel.close()
- server.stop(None)
-
- auth_data = pickle.loads(response)
- auth_ctx = auth_data[_AUTH_CTX]
- six.assertCountEqual(self, _CLIENT_IDS, auth_data[_ID])
- self.assertEqual('x509_subject_alternative_name', auth_data[_ID_KEY])
- self.assertSequenceEqual([b'ssl'], auth_ctx['transport_security_type'])
- self.assertSequenceEqual([b'*.test.google.com'],
- auth_ctx['x509_common_name'])
-
- def _do_one_shot_client_rpc(self, channel_creds, channel_options, port,
- expect_ssl_session_reused):
- channel = grpc.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=channel_options)
- response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- auth_data = pickle.loads(response)
- self.assertEqual(expect_ssl_session_reused,
- auth_data[_AUTH_CTX]['ssl_session_reused'])
- channel.close()
-
- def testSessionResumption(self):
- # Set up a secure server
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = test_common.test_server()
- server.add_generic_rpc_handlers((handler,))
- server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
- port = server.add_secure_port('[::]:0', server_cred)
- server.start()
-
- # Create a cache for TLS session tickets
- cache = session_cache.ssl_session_cache_lru(1)
- channel_creds = grpc.ssl_channel_credentials(
- root_certificates=_TEST_ROOT_CERTIFICATES)
- channel_options = _PROPERTY_OPTIONS + (
- ('grpc.ssl_session_cache', cache),)
-
- # Initial connection has no session to resume
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port,
- expect_ssl_session_reused=[b'false'])
-
- # Subsequent connections resume sessions
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port,
- expect_ssl_session_reused=[b'true'])
- server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
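
The tests above expose the server's view of the connection by pickling what ServicerContext offers: peer_identities(), peer_identity_key(), and auth_context(). The sketch below isolates that mechanism over an insecure connection, where all three are empty, exactly as testInsecure asserts; the method name, handler, and port handling are illustrative only.

from concurrent import futures

import grpc

_METHOD = '/demo/WhoAmI'  # illustrative method name

def _whoami(request, context):
    # Over an insecure channel these are None, None and {} respectively.
    summary = '{} {} {}'.format(context.peer_identities(),
                                context.peer_identity_key(),
                                context.auth_context())
    return summary.encode('ascii')

class _Handler(grpc.GenericRpcHandler):

    def service(self, handler_call_details):
        if handler_call_details.method == _METHOD:
            return grpc.unary_unary_rpc_method_handler(_whoami)
        return None

server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
server.add_generic_rpc_handlers((_Handler(),))
port = server.add_insecure_port('[::]:0')
server.start()
with grpc.insecure_channel('localhost:{}'.format(port)) as channel:
    print(channel.unary_unary(_METHOD)(b''))  # b'None None {}'
server.stop(None)
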
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py
deleted file mode 100644
index cd55db58ba..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py
+++ /dev/null
@@ -1,383 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests server and client side compression."""
-
-from concurrent import futures
-import contextlib
-import functools
-import itertools
-import logging
-import os
-import unittest
-
-import grpc
-from grpc import _grpcio_metadata
-
-from tests.unit import _tcp_proxy
-from tests.unit import test_common
-from tests.unit.framework.common import test_constants
-
-_UNARY_UNARY = '/test/UnaryUnary'
-_UNARY_STREAM = '/test/UnaryStream'
-_STREAM_UNARY = '/test/StreamUnary'
-_STREAM_STREAM = '/test/StreamStream'
-
-# Cut down on test time.
-_STREAM_LENGTH = test_constants.STREAM_LENGTH // 16
-
-_HOST = 'localhost'
-
-_REQUEST = b'\x00' * 100
-_COMPRESSION_RATIO_THRESHOLD = 0.05
-_COMPRESSION_METHODS = (
- None,
- # Disabled for test tractability.
- # grpc.Compression.NoCompression,
- # grpc.Compression.Deflate,
- grpc.Compression.Gzip,
-)
-_COMPRESSION_NAMES = {
- None: 'Uncompressed',
- grpc.Compression.NoCompression: 'NoCompression',
- grpc.Compression.Deflate: 'DeflateCompression',
- grpc.Compression.Gzip: 'GzipCompression',
-}
-
-_TEST_OPTIONS = {
- 'client_streaming': (True, False),
- 'server_streaming': (True, False),
- 'channel_compression': _COMPRESSION_METHODS,
- 'multicallable_compression': _COMPRESSION_METHODS,
- 'server_compression': _COMPRESSION_METHODS,
- 'server_call_compression': _COMPRESSION_METHODS,
-}
-
-
-def _make_handle_unary_unary(pre_response_callback):
-
- def _handle_unary(request, servicer_context):
- if pre_response_callback:
- pre_response_callback(request, servicer_context)
- return request
-
- return _handle_unary
-
-
-def _make_handle_unary_stream(pre_response_callback):
-
- def _handle_unary_stream(request, servicer_context):
- if pre_response_callback:
- pre_response_callback(request, servicer_context)
- for _ in range(_STREAM_LENGTH):
- yield request
-
- return _handle_unary_stream
-
-
-def _make_handle_stream_unary(pre_response_callback):
-
- def _handle_stream_unary(request_iterator, servicer_context):
- if pre_response_callback:
- pre_response_callback(request_iterator, servicer_context)
- response = None
- for request in request_iterator:
- if not response:
- response = request
- return response
-
- return _handle_stream_unary
-
-
-def _make_handle_stream_stream(pre_response_callback):
-
- def _handle_stream(request_iterator, servicer_context):
- # TODO(issue:#6891) We should be able to remove this loop,
- # and replace with return; yield
- for request in request_iterator:
- if pre_response_callback:
- pre_response_callback(request, servicer_context)
- yield request
-
- return _handle_stream
-
-
-def set_call_compression(compression_method, request_or_iterator,
- servicer_context):
- del request_or_iterator
- servicer_context.set_compression(compression_method)
-
-
-def disable_next_compression(request, servicer_context):
- del request
- servicer_context.disable_next_message_compression()
-
-
-def disable_first_compression(request, servicer_context):
- if int(request.decode('ascii')) == 0:
- servicer_context.disable_next_message_compression()
-
-
-class _MethodHandler(grpc.RpcMethodHandler):
-
- def __init__(self, request_streaming, response_streaming,
- pre_response_callback):
- self.request_streaming = request_streaming
- self.response_streaming = response_streaming
- self.request_deserializer = None
- self.response_serializer = None
- self.unary_unary = None
- self.unary_stream = None
- self.stream_unary = None
- self.stream_stream = None
-
- if self.request_streaming and self.response_streaming:
- self.stream_stream = _make_handle_stream_stream(
- pre_response_callback)
- elif not self.request_streaming and not self.response_streaming:
- self.unary_unary = _make_handle_unary_unary(pre_response_callback)
- elif not self.request_streaming and self.response_streaming:
- self.unary_stream = _make_handle_unary_stream(pre_response_callback)
- else:
- self.stream_unary = _make_handle_stream_unary(pre_response_callback)
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def __init__(self, pre_response_callback):
- self._pre_response_callback = pre_response_callback
-
- def service(self, handler_call_details):
- if handler_call_details.method == _UNARY_UNARY:
- return _MethodHandler(False, False, self._pre_response_callback)
- elif handler_call_details.method == _UNARY_STREAM:
- return _MethodHandler(False, True, self._pre_response_callback)
- elif handler_call_details.method == _STREAM_UNARY:
- return _MethodHandler(True, False, self._pre_response_callback)
- elif handler_call_details.method == _STREAM_STREAM:
- return _MethodHandler(True, True, self._pre_response_callback)
- else:
- return None
-
-
-@contextlib.contextmanager
-def _instrumented_client_server_pair(channel_kwargs, server_kwargs,
- server_handler):
- server = grpc.server(futures.ThreadPoolExecutor(), **server_kwargs)
- server.add_generic_rpc_handlers((server_handler,))
- server_port = server.add_insecure_port('{}:0'.format(_HOST))
- server.start()
- with _tcp_proxy.TcpProxy(_HOST, _HOST, server_port) as proxy:
- proxy_port = proxy.get_port()
- with grpc.insecure_channel('{}:{}'.format(_HOST, proxy_port),
- **channel_kwargs) as client_channel:
- try:
- yield client_channel, proxy, server
- finally:
- server.stop(None)
-
-
-def _get_byte_counts(channel_kwargs, multicallable_kwargs, client_function,
- server_kwargs, server_handler, message):
- with _instrumented_client_server_pair(channel_kwargs, server_kwargs,
- server_handler) as pipeline:
- client_channel, proxy, server = pipeline
- client_function(client_channel, multicallable_kwargs, message)
- return proxy.get_byte_count()
-
-
-def _get_compression_ratios(client_function, first_channel_kwargs,
- first_multicallable_kwargs, first_server_kwargs,
- first_server_handler, second_channel_kwargs,
- second_multicallable_kwargs, second_server_kwargs,
- second_server_handler, message):
- try:
- # This test requires the byte length of each connection to be deterministic. As
- # it turns out, flow control puts bytes on the wire in a nondeterministic
- # manner. We disable it here in order to measure compression ratios
- # deterministically.
- os.environ['GRPC_EXPERIMENTAL_DISABLE_FLOW_CONTROL'] = 'true'
- first_bytes_sent, first_bytes_received = _get_byte_counts(
- first_channel_kwargs, first_multicallable_kwargs, client_function,
- first_server_kwargs, first_server_handler, message)
- second_bytes_sent, second_bytes_received = _get_byte_counts(
- second_channel_kwargs, second_multicallable_kwargs, client_function,
- second_server_kwargs, second_server_handler, message)
- return ((second_bytes_sent - first_bytes_sent) /
- float(first_bytes_sent),
- (second_bytes_received - first_bytes_received) /
- float(first_bytes_received))
- finally:
- del os.environ['GRPC_EXPERIMENTAL_DISABLE_FLOW_CONTROL']
-
-
-def _unary_unary_client(channel, multicallable_kwargs, message):
- multi_callable = channel.unary_unary(_UNARY_UNARY)
- response = multi_callable(message, **multicallable_kwargs)
- if response != message:
- raise RuntimeError("Request '{}' != Response '{}'".format(
- message, response))
-
-
-def _unary_stream_client(channel, multicallable_kwargs, message):
- multi_callable = channel.unary_stream(_UNARY_STREAM)
- response_iterator = multi_callable(message, **multicallable_kwargs)
- for response in response_iterator:
- if response != message:
- raise RuntimeError("Request '{}' != Response '{}'".format(
- message, response))
-
-
-def _stream_unary_client(channel, multicallable_kwargs, message):
- multi_callable = channel.stream_unary(_STREAM_UNARY)
- requests = (_REQUEST for _ in range(_STREAM_LENGTH))
- response = multi_callable(requests, **multicallable_kwargs)
- if response != message:
- raise RuntimeError("Request '{}' != Response '{}'".format(
- message, response))
-
-
-def _stream_stream_client(channel, multicallable_kwargs, message):
- multi_callable = channel.stream_stream(_STREAM_STREAM)
- request_prefix = str(0).encode('ascii') * 100
- requests = (
- request_prefix + str(i).encode('ascii') for i in range(_STREAM_LENGTH))
- response_iterator = multi_callable(requests, **multicallable_kwargs)
- for i, response in enumerate(response_iterator):
- if int(response.decode('ascii')) != i:
- raise RuntimeError("Request '{}' != Response '{}'".format(
- i, response))
-
-
-@unittest.skipIf(test_common.running_under_gevent(),
- "This test is nondeterministic under gevent.")
-class CompressionTest(unittest.TestCase):
-
- def assertCompressed(self, compression_ratio):
- self.assertLess(
- compression_ratio,
- -1.0 * _COMPRESSION_RATIO_THRESHOLD,
- msg='Actual compression ratio: {}'.format(compression_ratio))
-
- def assertNotCompressed(self, compression_ratio):
- self.assertGreaterEqual(
- compression_ratio,
- -1.0 * _COMPRESSION_RATIO_THRESHOLD,
-            msg='Actual compression ratio: {}'.format(compression_ratio))
-
- def assertConfigurationCompressed(self, client_streaming, server_streaming,
- channel_compression,
- multicallable_compression,
- server_compression,
- server_call_compression):
- client_side_compressed = channel_compression or multicallable_compression
- server_side_compressed = server_compression or server_call_compression
- channel_kwargs = {
- 'compression': channel_compression,
- } if channel_compression else {}
- multicallable_kwargs = {
- 'compression': multicallable_compression,
- } if multicallable_compression else {}
-
- client_function = None
- if not client_streaming and not server_streaming:
- client_function = _unary_unary_client
- elif not client_streaming and server_streaming:
- client_function = _unary_stream_client
- elif client_streaming and not server_streaming:
- client_function = _stream_unary_client
- else:
- client_function = _stream_stream_client
-
- server_kwargs = {
- 'compression': server_compression,
- } if server_compression else {}
- server_handler = _GenericHandler(
- functools.partial(set_call_compression, grpc.Compression.Gzip)
- ) if server_call_compression else _GenericHandler(None)
- sent_ratio, received_ratio = _get_compression_ratios(
- client_function, {}, {}, {}, _GenericHandler(None), channel_kwargs,
- multicallable_kwargs, server_kwargs, server_handler, _REQUEST)
-
- if client_side_compressed:
- self.assertCompressed(sent_ratio)
- else:
- self.assertNotCompressed(sent_ratio)
-
- if server_side_compressed:
- self.assertCompressed(received_ratio)
- else:
- self.assertNotCompressed(received_ratio)
-
- def testDisableNextCompressionStreaming(self):
- server_kwargs = {
- 'compression': grpc.Compression.Deflate,
- }
- _, received_ratio = _get_compression_ratios(
- _stream_stream_client, {}, {}, {}, _GenericHandler(None), {}, {},
- server_kwargs, _GenericHandler(disable_next_compression), _REQUEST)
- self.assertNotCompressed(received_ratio)
-
- def testDisableNextCompressionStreamingResets(self):
- server_kwargs = {
- 'compression': grpc.Compression.Deflate,
- }
- _, received_ratio = _get_compression_ratios(
- _stream_stream_client, {}, {}, {}, _GenericHandler(None), {}, {},
- server_kwargs, _GenericHandler(disable_first_compression), _REQUEST)
- self.assertCompressed(received_ratio)
-
-
-def _get_compression_str(name, value):
- return '{}{}'.format(name, _COMPRESSION_NAMES[value])
-
-
-def _get_compression_test_name(client_streaming, server_streaming,
- channel_compression, multicallable_compression,
- server_compression, server_call_compression):
- client_arity = 'Stream' if client_streaming else 'Unary'
- server_arity = 'Stream' if server_streaming else 'Unary'
- arity = '{}{}'.format(client_arity, server_arity)
- channel_compression_str = _get_compression_str('Channel',
- channel_compression)
- multicallable_compression_str = _get_compression_str(
- 'Multicallable', multicallable_compression)
- server_compression_str = _get_compression_str('Server', server_compression)
- server_call_compression_str = _get_compression_str('ServerCall',
- server_call_compression)
- return 'test{}{}{}{}{}'.format(arity, channel_compression_str,
- multicallable_compression_str,
- server_compression_str,
- server_call_compression_str)
-
-
-def _test_options():
- for test_parameters in itertools.product(*_TEST_OPTIONS.values()):
- yield dict(zip(_TEST_OPTIONS.keys(), test_parameters))
-
-
-for options in _test_options():
-
- def test_compression(**kwargs):
-
- def _test_compression(self):
- self.assertConfigurationCompressed(**kwargs)
-
- return _test_compression
-
- setattr(CompressionTest, _get_compression_test_name(**options),
- test_compression(**options))
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
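
The test matrix above varies compression at four levels: the channel, the multicallable, the server, and the individual server call. The sketch below shows those knobs in isolation, without the TCP proxy or the ratio measurement; the method name and payload are illustrative only.

from concurrent import futures

import grpc

_METHOD = '/demo/Echo'  # illustrative method name

def _echo(request, servicer_context):
    # A handler can also tune compression per call, as set_call_compression
    # does in the test above.
    servicer_context.set_compression(grpc.Compression.Gzip)
    return request

class _Handler(grpc.GenericRpcHandler):

    def service(self, handler_call_details):
        if handler_call_details.method == _METHOD:
            return grpc.unary_unary_rpc_method_handler(_echo)
        return None

server = grpc.server(futures.ThreadPoolExecutor(max_workers=2),
                     compression=grpc.Compression.Deflate)  # server-wide default
server.add_generic_rpc_handlers((_Handler(),))
port = server.add_insecure_port('[::]:0')
server.start()

with grpc.insecure_channel('localhost:{}'.format(port),
                           compression=grpc.Compression.Gzip) as channel:
    echo = channel.unary_unary(_METHOD)
    # The per-call setting overrides the channel default for this one RPC.
    reply = echo(b'\x00' * 100, compression=grpc.Compression.Gzip)
    assert reply == b'\x00' * 100

server.stop(None)
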
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py
deleted file mode 100644
index c20226830b..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py
+++ /dev/null
@@ -1,511 +0,0 @@
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests server certificate rotation.
-
-Here we test various aspects of gRPC Python's, and in some cases gRPC
-Core's (by extension), support for server certificate rotation.
-
-* ServerSSLCertReloadTestWithClientAuth: tests the ability to rotate the
-  server's SSL cert for use in future channels with clients without
-  affecting any existing channel. The server requires client
-  authentication.
-
-* ServerSSLCertReloadTestWithoutClientAuth: like
- ServerSSLCertReloadTestWithClientAuth except that the server does
- not authenticate the client.
-
-* ServerSSLCertReloadTestCertConfigReuse: tests gRPC Python's ability
-  to deal with a user's reuse of ServerCertificateConfiguration instances.
-"""
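-
-# A minimal illustrative sketch (mirroring the setUp below; the key/cert
-# constants are defined later in this module) of how rotation is wired up:
-# the server is created with dynamic SSL credentials whose cert config
-# fetcher is consulted on new handshakes, so returning a fresh configuration
-# rotates the cert for future connections without touching existing channels.
-#
-#     initial_config = grpc.ssl_server_certificate_configuration(
-#         [(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)],
-#         root_certificates=CA_2_PEM)
-#     creds = grpc.dynamic_ssl_server_credentials(
-#         initial_config, fetcher,  # fetcher() returns a config (or raises)
-#         require_client_authentication=True)
-#     port = server.add_secure_port('[::]:0', creds)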
-
-import abc
-import collections
-from concurrent import futures
-import logging
-import os
-import threading
-import unittest
-
-import grpc
-import six
-
-from tests.testing import _application_common
-from tests.testing import _server_application
-from tests.testing.proto import services_pb2_grpc
-from tests.unit import resources
-from tests.unit import test_common
-
-CA_1_PEM = resources.cert_hier_1_root_ca_cert()
-CA_2_PEM = resources.cert_hier_2_root_ca_cert()
-
-CLIENT_KEY_1_PEM = resources.cert_hier_1_client_1_key()
-CLIENT_CERT_CHAIN_1_PEM = (resources.cert_hier_1_client_1_cert() +
- resources.cert_hier_1_intermediate_ca_cert())
-
-CLIENT_KEY_2_PEM = resources.cert_hier_2_client_1_key()
-CLIENT_CERT_CHAIN_2_PEM = (resources.cert_hier_2_client_1_cert() +
- resources.cert_hier_2_intermediate_ca_cert())
-
-SERVER_KEY_1_PEM = resources.cert_hier_1_server_1_key()
-SERVER_CERT_CHAIN_1_PEM = (resources.cert_hier_1_server_1_cert() +
- resources.cert_hier_1_intermediate_ca_cert())
-
-SERVER_KEY_2_PEM = resources.cert_hier_2_server_1_key()
-SERVER_CERT_CHAIN_2_PEM = (resources.cert_hier_2_server_1_cert() +
- resources.cert_hier_2_intermediate_ca_cert())
-
-# For use with the CertConfigFetcher: a simple record of each fetcher
-# invocation, roughly a hand-rolled mock.
-Call = collections.namedtuple('Call', ['did_raise', 'returned_cert_config'])
-
-
-def _create_channel(port, credentials):
- return grpc.secure_channel('localhost:{}'.format(port), credentials)
-
-
-def _create_client_stub(channel, expect_success):
- if expect_success:
- # per Nathaniel: there's some robustness issue if we start
- # using a channel without waiting for it to be actually ready
- grpc.channel_ready_future(channel).result(timeout=10)
- return services_pb2_grpc.FirstServiceStub(channel)
-
-
-class CertConfigFetcher(object):
-
- def __init__(self):
- self._lock = threading.Lock()
- self._calls = []
- self._should_raise = False
- self._cert_config = None
-
- def reset(self):
- with self._lock:
- self._calls = []
- self._should_raise = False
- self._cert_config = None
-
- def configure(self, should_raise, cert_config):
- assert not (should_raise and cert_config), (
- "should not specify both should_raise and a cert_config at the same time"
- )
- with self._lock:
- self._should_raise = should_raise
- self._cert_config = cert_config
-
- def getCalls(self):
- with self._lock:
- return self._calls
-
- def __call__(self):
- with self._lock:
- if self._should_raise:
- self._calls.append(Call(True, None))
- raise ValueError('just for fun, should not affect the test')
- else:
- self._calls.append(Call(False, self._cert_config))
- return self._cert_config
-
-
-class _ServerSSLCertReloadTest(
- six.with_metaclass(abc.ABCMeta, unittest.TestCase)):
-
- def __init__(self, *args, **kwargs):
- super(_ServerSSLCertReloadTest, self).__init__(*args, **kwargs)
- self.server = None
- self.port = None
-
- @abc.abstractmethod
- def require_client_auth(self):
- raise NotImplementedError()
-
- def setUp(self):
- self.server = test_common.test_server()
- services_pb2_grpc.add_FirstServiceServicer_to_server(
- _server_application.FirstServiceServicer(), self.server)
- switch_cert_on_client_num = 10
- initial_cert_config = grpc.ssl_server_certificate_configuration(
- [(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)],
- root_certificates=CA_2_PEM)
- self.cert_config_fetcher = CertConfigFetcher()
- server_credentials = grpc.dynamic_ssl_server_credentials(
- initial_cert_config,
- self.cert_config_fetcher,
- require_client_authentication=self.require_client_auth())
- self.port = self.server.add_secure_port('[::]:0', server_credentials)
- self.server.start()
-
- def tearDown(self):
- if self.server:
- self.server.stop(None)
-
- def _perform_rpc(self, client_stub, expect_success):
-        # we don't care about the actual response of the rpc; only
-        # whether we can perform it or not, and if not, that the status
-        # code is UNAVAILABLE or UNKNOWN (see the comment below)
- request = _application_common.UNARY_UNARY_REQUEST
- if expect_success:
- response = client_stub.UnUn(request)
- self.assertEqual(response, _application_common.UNARY_UNARY_RESPONSE)
- else:
- with self.assertRaises(grpc.RpcError) as exception_context:
- client_stub.UnUn(request)
- # If TLS 1.2 is used, then the client receives an alert message
- # before the handshake is complete, so the status is UNAVAILABLE. If
- # TLS 1.3 is used, then the client receives the alert message after
- # the handshake is complete, so the TSI handshaker returns the
- # TSI_PROTOCOL_FAILURE result. This result does not have a
- # corresponding status code, so this yields an UNKNOWN status.
- self.assertTrue(exception_context.exception.code(
- ) in [grpc.StatusCode.UNAVAILABLE, grpc.StatusCode.UNKNOWN])
-
- def _do_one_shot_client_rpc(self,
- expect_success,
- root_certificates=None,
- private_key=None,
- certificate_chain=None):
- credentials = grpc.ssl_channel_credentials(
- root_certificates=root_certificates,
- private_key=private_key,
- certificate_chain=certificate_chain)
- with _create_channel(self.port, credentials) as client_channel:
- client_stub = _create_client_stub(client_channel, expect_success)
- self._perform_rpc(client_stub, expect_success)
-
- def _test(self):
- # things should work...
- self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- self.assertIsNone(actual_calls[0].returned_cert_config)
-
-        # client should reject server...
-        # fails because the client trusts only ca2 and so rejects the
-        # server's ca1-rooted cert chain
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(False,
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertGreaterEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- for i, call in enumerate(actual_calls):
- self.assertFalse(call.did_raise, 'i= {}'.format(i))
- self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i))
-
-        # should still work: the fetcher raises, so the server keeps
-        # serving its current cert config...
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(True, None)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 1)
- self.assertTrue(actual_calls[0].did_raise)
- self.assertIsNone(actual_calls[0].returned_cert_config)
-
-        # if client auth is required, the server should reject the
-        # client: the client presents key/cert1, but the server trusts
-        # only ca2
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(not self.require_client_auth(),
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_1_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertGreaterEqual(len(actual_calls), 1)
- for i, call in enumerate(actual_calls):
- self.assertFalse(call.did_raise, 'i= {}'.format(i))
- self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i))
-
- # should work again...
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- self.assertIsNone(actual_calls[0].returned_cert_config)
-
- # now create the "persistent" clients
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, None)
- channel_A = _create_channel(
- self.port,
- grpc.ssl_channel_credentials(
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM))
- persistent_client_stub_A = _create_client_stub(channel_A, True)
- self._perform_rpc(persistent_client_stub_A, True)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- self.assertIsNone(actual_calls[0].returned_cert_config)
-
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, None)
- channel_B = _create_channel(
- self.port,
- grpc.ssl_channel_credentials(
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM))
- persistent_client_stub_B = _create_client_stub(channel_B, True)
- self._perform_rpc(persistent_client_stub_B, True)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- self.assertIsNone(actual_calls[0].returned_cert_config)
-
-        # moment of truth!! client should reject server because the
-        # server switched certs...
- cert_config = grpc.ssl_server_certificate_configuration(
- [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)],
- root_certificates=CA_1_PEM)
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, cert_config)
- self._do_one_shot_client_rpc(False,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertGreaterEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- for i, call in enumerate(actual_calls):
- self.assertFalse(call.did_raise, 'i= {}'.format(i))
- self.assertEqual(call.returned_cert_config, cert_config,
- 'i= {}'.format(i))
-
- # now should work again...
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_1_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- self.assertIsNone(actual_calls[0].returned_cert_config)
-
- # client should be rejected by server if with_client_auth
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(not self.require_client_auth(),
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertGreaterEqual(len(actual_calls), 1)
- for i, call in enumerate(actual_calls):
- self.assertFalse(call.did_raise, 'i= {}'.format(i))
- self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i))
-
- # here client should reject server...
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, None)
- self._do_one_shot_client_rpc(False,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertGreaterEqual(len(actual_calls), 1)
- for i, call in enumerate(actual_calls):
- self.assertFalse(call.did_raise, 'i= {}'.format(i))
- self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i))
-
- # persistent clients should continue to work
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, None)
- self._perform_rpc(persistent_client_stub_A, True)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 0)
-
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, None)
- self._perform_rpc(persistent_client_stub_B, True)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 0)
-
- channel_A.close()
- channel_B.close()
-
-
-class ServerSSLCertConfigFetcherParamsChecks(unittest.TestCase):
-
- def test_check_on_initial_config(self):
- with self.assertRaises(TypeError):
- grpc.dynamic_ssl_server_credentials(None, str)
- with self.assertRaises(TypeError):
- grpc.dynamic_ssl_server_credentials(1, str)
-
- def test_check_on_config_fetcher(self):
- cert_config = grpc.ssl_server_certificate_configuration(
- [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)],
- root_certificates=CA_1_PEM)
- with self.assertRaises(TypeError):
- grpc.dynamic_ssl_server_credentials(cert_config, None)
- with self.assertRaises(TypeError):
- grpc.dynamic_ssl_server_credentials(cert_config, 1)
-
-
-class ServerSSLCertReloadTestWithClientAuth(_ServerSSLCertReloadTest):
-
- def require_client_auth(self):
- return True
-
- test = _ServerSSLCertReloadTest._test
-
-
-class ServerSSLCertReloadTestWithoutClientAuth(_ServerSSLCertReloadTest):
-
- def require_client_auth(self):
- return False
-
- test = _ServerSSLCertReloadTest._test
-
-
-class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest):
- """Ensures that `ServerCertificateConfiguration` instances can be reused.
-
-    Because gRPC Core takes ownership of the
-    `grpc_ssl_server_certificate_config` encapsulated by a
-    `ServerCertificateConfiguration`, this test reuses the same
-    `ServerCertificateConfiguration` instances multiple times to make sure
-    gRPC Python maintains the validity of those instances, so that they
-    can be re-used by user applications.
-    """
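-
-    # A rough sketch of the reuse pattern exercised by test_cert_config_reuse
-    # below (names mirror the fixtures created in setUp): the fetcher hands
-    # back the very same configuration object across multiple handshakes.
-    #
-    #     config_a = grpc.ssl_server_certificate_configuration(
-    #         [(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)],
-    #         root_certificates=CA_2_PEM)
-    #     fetcher.configure(False, config_a)  # next handshake serves config_a
-    #     fetcher.configure(False, config_a)  # later handshake reuses config_a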
-
- def require_client_auth(self):
- return True
-
- def setUp(self):
- self.server = test_common.test_server()
- services_pb2_grpc.add_FirstServiceServicer_to_server(
- _server_application.FirstServiceServicer(), self.server)
- self.cert_config_A = grpc.ssl_server_certificate_configuration(
- [(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)],
- root_certificates=CA_2_PEM)
- self.cert_config_B = grpc.ssl_server_certificate_configuration(
- [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)],
- root_certificates=CA_1_PEM)
- self.cert_config_fetcher = CertConfigFetcher()
- server_credentials = grpc.dynamic_ssl_server_credentials(
- self.cert_config_A,
- self.cert_config_fetcher,
- require_client_authentication=True)
- self.port = self.server.add_secure_port('[::]:0', server_credentials)
- self.server.start()
-
- def test_cert_config_reuse(self):
-
- # succeed with A
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, self.cert_config_A)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- self.assertEqual(actual_calls[0].returned_cert_config,
- self.cert_config_A)
-
- # fail with A
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, self.cert_config_A)
- self._do_one_shot_client_rpc(False,
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_1_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertGreaterEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- for i, call in enumerate(actual_calls):
- self.assertFalse(call.did_raise, 'i= {}'.format(i))
- self.assertEqual(call.returned_cert_config, self.cert_config_A,
- 'i= {}'.format(i))
-
- # succeed again with A
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, self.cert_config_A)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- self.assertEqual(actual_calls[0].returned_cert_config,
- self.cert_config_A)
-
- # succeed with B
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, self.cert_config_B)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_1_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- self.assertEqual(actual_calls[0].returned_cert_config,
- self.cert_config_B)
-
- # fail with B
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, self.cert_config_B)
- self._do_one_shot_client_rpc(False,
- root_certificates=CA_1_PEM,
- private_key=CLIENT_KEY_2_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_2_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertGreaterEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- for i, call in enumerate(actual_calls):
- self.assertFalse(call.did_raise, 'i= {}'.format(i))
- self.assertEqual(call.returned_cert_config, self.cert_config_B,
- 'i= {}'.format(i))
-
- # succeed again with B
- self.cert_config_fetcher.reset()
- self.cert_config_fetcher.configure(False, self.cert_config_B)
- self._do_one_shot_client_rpc(True,
- root_certificates=CA_2_PEM,
- private_key=CLIENT_KEY_1_PEM,
- certificate_chain=CLIENT_CERT_CHAIN_1_PEM)
- actual_calls = self.cert_config_fetcher.getCalls()
- self.assertEqual(len(actual_calls), 1)
- self.assertFalse(actual_calls[0].did_raise)
- self.assertEqual(actual_calls[0].returned_cert_config,
- self.cert_config_B)
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py
deleted file mode 100644
index 6091219126..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# Copyright 2018 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests experimental TLS Session Resumption API"""
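-
-# A brief sketch (mirroring testSSLSessionCacheLRU below) of how resumption
-# is enabled on the client: an LRU session cache from the experimental API
-# is passed to the channel via the 'grpc.ssl_session_cache' channel option.
-#
-#     cache = session_cache.ssl_session_cache_lru(1)
-#     options = (('grpc.ssl_session_cache', cache),)
-#     channel = grpc.secure_channel('localhost:{}'.format(port),
-#                                   channel_creds, options=options)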
-
-import logging
-import pickle
-import unittest
-
-import grpc
-from grpc import _channel
-from grpc.experimental import session_cache
-
-from tests.unit import resources
-from tests.unit import test_common
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x00\x00\x00'
-
-_UNARY_UNARY = '/test/UnaryUnary'
-
-_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
-_ID = 'id'
-_ID_KEY = 'id_key'
-_AUTH_CTX = 'auth_ctx'
-
-_PRIVATE_KEY = resources.private_key()
-_CERTIFICATE_CHAIN = resources.certificate_chain()
-_TEST_ROOT_CERTIFICATES = resources.test_root_certificates()
-_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),)
-_PROPERTY_OPTIONS = ((
- 'grpc.ssl_target_name_override',
- _SERVER_HOST_OVERRIDE,
-),)
-
-
-def handle_unary_unary(request, servicer_context):
- return pickle.dumps({
- _ID: servicer_context.peer_identities(),
- _ID_KEY: servicer_context.peer_identity_key(),
- _AUTH_CTX: servicer_context.auth_context()
- })
-
-
-def start_secure_server():
- handler = grpc.method_handlers_generic_handler(
- 'test',
- {'UnaryUnary': grpc.unary_unary_rpc_method_handler(handle_unary_unary)})
- server = test_common.test_server()
- server.add_generic_rpc_handlers((handler,))
- server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
- port = server.add_secure_port('[::]:0', server_cred)
- server.start()
-
- return server, port
-
-
-class SSLSessionCacheTest(unittest.TestCase):
-
- def _do_one_shot_client_rpc(self, channel_creds, channel_options, port,
- expect_ssl_session_reused):
- channel = grpc.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=channel_options)
- response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- auth_data = pickle.loads(response)
- self.assertEqual(expect_ssl_session_reused,
- auth_data[_AUTH_CTX]['ssl_session_reused'])
- channel.close()
-
- def testSSLSessionCacheLRU(self):
- server_1, port_1 = start_secure_server()
-
- cache = session_cache.ssl_session_cache_lru(1)
- channel_creds = grpc.ssl_channel_credentials(
- root_certificates=_TEST_ROOT_CERTIFICATES)
- channel_options = _PROPERTY_OPTIONS + (
- ('grpc.ssl_session_cache', cache),)
-
- # Initial connection has no session to resume
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_1,
- expect_ssl_session_reused=[b'false'])
-
- # Connection to server_1 resumes from initial session
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_1,
- expect_ssl_session_reused=[b'true'])
-
- # Connection to a different server with the same name overwrites the cache entry
- server_2, port_2 = start_secure_server()
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_2,
- expect_ssl_session_reused=[b'false'])
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_2,
- expect_ssl_session_reused=[b'true'])
- server_2.stop(None)
-
- # Connection to server_1 now falls back to full TLS handshake
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_1,
- expect_ssl_session_reused=[b'false'])
-
- # Re-creating server_1 causes old sessions to become invalid
- server_1.stop(None)
- server_1, port_1 = start_secure_server()
-
- # Old sessions should no longer be valid
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_1,
- expect_ssl_session_reused=[b'false'])
-
- # Resumption should work for subsequent connections
- self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port_1,
- expect_ssl_session_reused=[b'true'])
- server_1.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py
deleted file mode 100644
index cc2a2ea0a8..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests the implementations module of the gRPC Python Beta API."""
-
-import datetime
-import unittest
-
-from grpc.beta import implementations
-from oauth2client import client as oauth2client_client
-
-from tests.unit import resources
-
-
-class ChannelCredentialsTest(unittest.TestCase):
-
- def test_runtime_provided_root_certificates(self):
- channel_credentials = implementations.ssl_channel_credentials()
- self.assertIsInstance(channel_credentials,
- implementations.ChannelCredentials)
-
- def test_application_provided_root_certificates(self):
- channel_credentials = implementations.ssl_channel_credentials(
- resources.test_root_certificates())
- self.assertIsInstance(channel_credentials,
- implementations.ChannelCredentials)
-
-
-class CallCredentialsTest(unittest.TestCase):
-
- def test_google_call_credentials(self):
- creds = oauth2client_client.GoogleCredentials(
- 'token', 'client_id', 'secret', 'refresh_token',
- datetime.datetime(2008, 6, 24), 'https://refresh.uri.com/',
- 'user_agent')
- call_creds = implementations.google_call_credentials(creds)
- self.assertIsInstance(call_creds, implementations.CallCredentials)
-
- def test_access_token_call_credentials(self):
- call_creds = implementations.access_token_call_credentials('token')
- self.assertIsInstance(call_creds, implementations.CallCredentials)
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/__init__.py
deleted file mode 100644
index 8ddd310696..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/__init__.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2019 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-from tests import _loader
-from tests import _runner
-
-Loader = _loader.Loader
-Runner = _runner.Runner
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/__init__.py
deleted file mode 100644
index f4b321fc5b..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py
deleted file mode 100644
index e74dec0739..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2019 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-from tests._sanity import _sanity_test
-
-
-class AioSanityTest(_sanity_test.SanityTest):
-
- TEST_PKG_MODULE_NAME = 'tests_aio'
- TEST_PKG_PATH = 'tests_aio'
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_client.py
deleted file mode 100644
index 301dba4bae..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_client.py
+++ /dev/null
@@ -1,187 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Python AsyncIO Benchmark Clients."""
-
-import abc
-import asyncio
-import logging
-import random
-import time
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import benchmark_service_pb2_grpc
-from src.proto.grpc.testing import control_pb2
-from src.proto.grpc.testing import messages_pb2
-from tests.qps import histogram
-from tests.unit import resources
-
-
-class GenericStub(object):
-
- def __init__(self, channel: aio.Channel):
- self.UnaryCall = channel.unary_unary(
- '/grpc.testing.BenchmarkService/UnaryCall')
- self.StreamingFromServer = channel.unary_stream(
- '/grpc.testing.BenchmarkService/StreamingFromServer')
- self.StreamingCall = channel.stream_stream(
- '/grpc.testing.BenchmarkService/StreamingCall')
-
-
-class BenchmarkClient(abc.ABC):
- """Benchmark client interface that exposes a non-blocking send_request()."""
-
- def __init__(self, address: str, config: control_pb2.ClientConfig,
- hist: histogram.Histogram):
- # Disables underlying reuse of subchannels
- unique_option = (('iv', random.random()),)
-
- # Parses the channel argument from config
- channel_args = tuple(
- (arg.name, arg.str_value) if arg.HasField('str_value') else (
- arg.name, int(arg.int_value)) for arg in config.channel_args)
-
- # Creates the channel
- if config.HasField('security_params'):
- channel_credentials = grpc.ssl_channel_credentials(
- resources.test_root_certificates(),)
- server_host_override_option = ((
- 'grpc.ssl_target_name_override',
- config.security_params.server_host_override,
- ),)
- self._channel = aio.secure_channel(
- address, channel_credentials,
- unique_option + channel_args + server_host_override_option)
- else:
- self._channel = aio.insecure_channel(address,
- options=unique_option +
- channel_args)
-
- # Creates the stub
- if config.payload_config.WhichOneof('payload') == 'simple_params':
- self._generic = False
- self._stub = benchmark_service_pb2_grpc.BenchmarkServiceStub(
- self._channel)
- payload = messages_pb2.Payload(
- body=b'\0' * config.payload_config.simple_params.req_size)
- self._request = messages_pb2.SimpleRequest(
- payload=payload,
- response_size=config.payload_config.simple_params.resp_size)
- else:
- self._generic = True
- self._stub = GenericStub(self._channel)
- self._request = b'\0' * config.payload_config.bytebuf_params.req_size
-
- self._hist = hist
- self._response_callbacks = []
- self._concurrency = config.outstanding_rpcs_per_channel
-
- async def run(self) -> None:
- await self._channel.channel_ready()
-
- async def stop(self) -> None:
- await self._channel.close()
-
- def _record_query_time(self, query_time: float) -> None:
- self._hist.add(query_time * 1e9)
-
-
-class UnaryAsyncBenchmarkClient(BenchmarkClient):
-
- def __init__(self, address: str, config: control_pb2.ClientConfig,
- hist: histogram.Histogram):
- super().__init__(address, config, hist)
- self._running = None
- self._stopped = asyncio.Event()
-
- async def _send_request(self):
- start_time = time.monotonic()
- await self._stub.UnaryCall(self._request)
- self._record_query_time(time.monotonic() - start_time)
-
- async def _send_indefinitely(self) -> None:
- while self._running:
- await self._send_request()
-
- async def run(self) -> None:
- await super().run()
- self._running = True
- senders = (self._send_indefinitely() for _ in range(self._concurrency))
- await asyncio.gather(*senders)
- self._stopped.set()
-
- async def stop(self) -> None:
- self._running = False
- await self._stopped.wait()
- await super().stop()
-
-
-class StreamingAsyncBenchmarkClient(BenchmarkClient):
-
- def __init__(self, address: str, config: control_pb2.ClientConfig,
- hist: histogram.Histogram):
- super().__init__(address, config, hist)
- self._running = None
- self._stopped = asyncio.Event()
-
- async def _one_streaming_call(self):
- call = self._stub.StreamingCall()
- while self._running:
- start_time = time.time()
- await call.write(self._request)
- await call.read()
- self._record_query_time(time.time() - start_time)
- await call.done_writing()
-
- async def run(self):
- await super().run()
- self._running = True
- senders = (self._one_streaming_call() for _ in range(self._concurrency))
- await asyncio.gather(*senders)
- self._stopped.set()
-
- async def stop(self):
- self._running = False
- await self._stopped.wait()
- await super().stop()
-
-
-class ServerStreamingAsyncBenchmarkClient(BenchmarkClient):
-
- def __init__(self, address: str, config: control_pb2.ClientConfig,
- hist: histogram.Histogram):
- super().__init__(address, config, hist)
- self._running = None
- self._stopped = asyncio.Event()
-
- async def _one_server_streaming_call(self):
- call = self._stub.StreamingFromServer(self._request)
- while self._running:
- start_time = time.time()
- await call.read()
- self._record_query_time(time.time() - start_time)
-
- async def run(self):
- await super().run()
- self._running = True
- senders = (
- self._one_server_streaming_call() for _ in range(self._concurrency))
- await asyncio.gather(*senders)
- self._stopped.set()
-
- async def stop(self):
- self._running = False
- await self._stopped.wait()
- await super().stop()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_servicer.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_servicer.py
deleted file mode 100644
index b519554a56..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/benchmark_servicer.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Python AsyncIO Benchmark Servicers."""
-
-import asyncio
-import logging
-import unittest
-
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import benchmark_service_pb2_grpc
-from src.proto.grpc.testing import messages_pb2
-
-
-class BenchmarkServicer(benchmark_service_pb2_grpc.BenchmarkServiceServicer):
-
- async def UnaryCall(self, request, unused_context):
- payload = messages_pb2.Payload(body=b'\0' * request.response_size)
- return messages_pb2.SimpleResponse(payload=payload)
-
- async def StreamingFromServer(self, request, unused_context):
- payload = messages_pb2.Payload(body=b'\0' * request.response_size)
- # Sends response at full capacity!
- while True:
- yield messages_pb2.SimpleResponse(payload=payload)
-
- async def StreamingCall(self, request_iterator, unused_context):
- async for request in request_iterator:
- payload = messages_pb2.Payload(body=b'\0' * request.response_size)
- yield messages_pb2.SimpleResponse(payload=payload)
-
-
-class GenericBenchmarkServicer(
- benchmark_service_pb2_grpc.BenchmarkServiceServicer):
- """Generic (no-codec) Server implementation for the Benchmark service."""
-
- def __init__(self, resp_size):
- self._response = '\0' * resp_size
-
- async def UnaryCall(self, unused_request, unused_context):
- return self._response
-
- async def StreamingCall(self, request_iterator, unused_context):
- async for _ in request_iterator:
- yield self._response
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py
deleted file mode 100644
index 561298a626..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/server.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import logging
-import unittest
-
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import benchmark_service_pb2_grpc
-from tests_aio.benchmark import benchmark_servicer
-
-
-async def _start_async_server():
- server = aio.server()
-
- port = server.add_insecure_port('localhost:%s' % 50051)
- servicer = benchmark_servicer.BenchmarkServicer()
- benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server(
- servicer, server)
-
- await server.start()
- logging.info('Benchmark server started at :%d' % port)
- await server.wait_for_termination()
-
-
-def main():
- loop = asyncio.get_event_loop()
- loop.create_task(_start_async_server())
- loop.run_forever()
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- main()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker.py
deleted file mode 100644
index dc16f05087..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import asyncio
-import logging
-
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import worker_service_pb2_grpc
-from tests_aio.benchmark import worker_servicer
-
-
-async def run_worker_server(port: int) -> None:
- server = aio.server()
-
- servicer = worker_servicer.WorkerServicer()
- worker_service_pb2_grpc.add_WorkerServiceServicer_to_server(
- servicer, server)
-
- server.add_insecure_port('[::]:{}'.format(port))
-
- await server.start()
-
- await servicer.wait_for_quit()
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- parser = argparse.ArgumentParser(
- description='gRPC Python performance testing worker')
- parser.add_argument('--driver_port',
- type=int,
- dest='port',
- help='The port the worker should listen on')
- parser.add_argument('--uvloop',
- action='store_true',
- help='Use uvloop or not')
- args = parser.parse_args()
-
- if args.uvloop:
- import uvloop
- asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
- loop = uvloop.new_event_loop()
- asyncio.set_event_loop(loop)
-
- asyncio.get_event_loop().run_until_complete(run_worker_server(args.port))
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker_servicer.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker_servicer.py
deleted file mode 100644
index 684beddb4a..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/benchmark/worker_servicer.py
+++ /dev/null
@@ -1,372 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import collections
-import logging
-import multiprocessing
-import os
-import sys
-import time
-from typing import Tuple
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import benchmark_service_pb2_grpc
-from src.proto.grpc.testing import control_pb2
-from src.proto.grpc.testing import stats_pb2
-from src.proto.grpc.testing import worker_service_pb2_grpc
-from tests.qps import histogram
-from tests.unit import resources
-from tests.unit.framework.common import get_socket
-from tests_aio.benchmark import benchmark_client
-from tests_aio.benchmark import benchmark_servicer
-
-_NUM_CORES = multiprocessing.cpu_count()
-_WORKER_ENTRY_FILE = os.path.join(
- os.path.split(os.path.abspath(__file__))[0], 'worker.py')
-
-_LOGGER = logging.getLogger(__name__)
-
-
-class _SubWorker(
- collections.namedtuple('_SubWorker',
- ['process', 'port', 'channel', 'stub'])):
- """A data class that holds information about a child qps worker."""
-
- def _repr(self):
- return f'<_SubWorker pid={self.process.pid} port={self.port}>'
-
- def __repr__(self):
- return self._repr()
-
- def __str__(self):
- return self._repr()
-
-
-def _get_server_status(start_time: float, end_time: float,
- port: int) -> control_pb2.ServerStatus:
- """Creates ServerStatus proto message."""
- end_time = time.monotonic()
- elapsed_time = end_time - start_time
- # TODO(lidiz) Collect accurate time system to compute QPS/core-second.
- stats = stats_pb2.ServerStats(time_elapsed=elapsed_time,
- time_user=elapsed_time,
- time_system=elapsed_time)
- return control_pb2.ServerStatus(stats=stats, port=port, cores=_NUM_CORES)
-
-
-def _create_server(config: control_pb2.ServerConfig) -> Tuple[aio.Server, int]:
- """Creates a server object according to the ServerConfig."""
- channel_args = tuple(
- (arg.name,
- arg.str_value) if arg.HasField('str_value') else (arg.name,
- int(arg.int_value))
- for arg in config.channel_args)
-
- server = aio.server(options=channel_args + (('grpc.so_reuseport', 1),))
- if config.server_type == control_pb2.ASYNC_SERVER:
- servicer = benchmark_servicer.BenchmarkServicer()
- benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server(
- servicer, server)
- elif config.server_type == control_pb2.ASYNC_GENERIC_SERVER:
- resp_size = config.payload_config.bytebuf_params.resp_size
- servicer = benchmark_servicer.GenericBenchmarkServicer(resp_size)
- method_implementations = {
- 'StreamingCall':
- grpc.stream_stream_rpc_method_handler(servicer.StreamingCall),
- 'UnaryCall':
- grpc.unary_unary_rpc_method_handler(servicer.UnaryCall),
- }
- handler = grpc.method_handlers_generic_handler(
- 'grpc.testing.BenchmarkService', method_implementations)
- server.add_generic_rpc_handlers((handler,))
- else:
- raise NotImplementedError('Unsupported server type {}'.format(
- config.server_type))
-
- if config.HasField('security_params'): # Use SSL
- server_creds = grpc.ssl_server_credentials(
- ((resources.private_key(), resources.certificate_chain()),))
- port = server.add_secure_port('[::]:{}'.format(config.port),
- server_creds)
- else:
- port = server.add_insecure_port('[::]:{}'.format(config.port))
-
- return server, port
-
-
-def _get_client_status(
- start_time: float, end_time: float,
- qps_data: histogram.Histogram) -> control_pb2.ClientStatus:
- """Creates ClientStatus proto message."""
- latencies = qps_data.get_data()
- end_time = time.monotonic()
- elapsed_time = end_time - start_time
- # TODO(lidiz) Collect accurate time system to compute QPS/core-second.
- stats = stats_pb2.ClientStats(latencies=latencies,
- time_elapsed=elapsed_time,
- time_user=elapsed_time,
- time_system=elapsed_time)
- return control_pb2.ClientStatus(stats=stats)
-
-
-def _create_client(
- server: str, config: control_pb2.ClientConfig,
- qps_data: histogram.Histogram) -> benchmark_client.BenchmarkClient:
- """Creates a client object according to the ClientConfig."""
- if config.load_params.WhichOneof('load') != 'closed_loop':
- raise NotImplementedError(
- f'Unsupported load parameter {config.load_params}')
-
- if config.client_type == control_pb2.ASYNC_CLIENT:
- if config.rpc_type == control_pb2.UNARY:
- client_type = benchmark_client.UnaryAsyncBenchmarkClient
- elif config.rpc_type == control_pb2.STREAMING:
- client_type = benchmark_client.StreamingAsyncBenchmarkClient
- elif config.rpc_type == control_pb2.STREAMING_FROM_SERVER:
- client_type = benchmark_client.ServerStreamingAsyncBenchmarkClient
- else:
- raise NotImplementedError(
- f'Unsupported rpc_type [{config.rpc_type}]')
- else:
- raise NotImplementedError(
- f'Unsupported client type {config.client_type}')
-
- return client_type(server, config, qps_data)
-
-
-def _pick_an_unused_port() -> int:
- """Picks an unused TCP port."""
- _, port, sock = get_socket()
- sock.close()
- return port
-
-
-async def _create_sub_worker() -> _SubWorker:
- """Creates a child qps worker as a subprocess."""
- port = _pick_an_unused_port()
-
- _LOGGER.info('Creating sub worker at port [%d]...', port)
- process = await asyncio.create_subprocess_exec(sys.executable,
- _WORKER_ENTRY_FILE,
- '--driver_port', str(port))
- _LOGGER.info('Created sub worker process for port [%d] at pid [%d]', port,
- process.pid)
- channel = aio.insecure_channel(f'localhost:{port}')
- _LOGGER.info('Waiting for sub worker at port [%d]', port)
- await channel.channel_ready()
- stub = worker_service_pb2_grpc.WorkerServiceStub(channel)
- return _SubWorker(
- process=process,
- port=port,
- channel=channel,
- stub=stub,
- )
-
-
-class WorkerServicer(worker_service_pb2_grpc.WorkerServiceServicer):
- """Python Worker Server implementation."""
-
- def __init__(self):
- self._loop = asyncio.get_event_loop()
- self._quit_event = asyncio.Event()
-
- async def _run_single_server(self, config, request_iterator, context):
- server, port = _create_server(config)
- await server.start()
- _LOGGER.info('Server started at port [%d]', port)
-
- start_time = time.monotonic()
- await context.write(_get_server_status(start_time, start_time, port))
-
- async for request in request_iterator:
- end_time = time.monotonic()
- status = _get_server_status(start_time, end_time, port)
- if request.mark.reset:
- start_time = end_time
- await context.write(status)
- await server.stop(None)
-
- async def RunServer(self, request_iterator, context):
- config_request = await context.read()
- config = config_request.setup
- _LOGGER.info('Received ServerConfig: %s', config)
-
- if config.server_processes <= 0:
- _LOGGER.info('Using server_processes == [%d]', _NUM_CORES)
- config.server_processes = _NUM_CORES
-
- if config.port == 0:
- config.port = _pick_an_unused_port()
- _LOGGER.info('Port picked [%d]', config.port)
-
- if config.server_processes == 1:
- # If server_processes == 1, start the server in this process.
- await self._run_single_server(config, request_iterator, context)
- else:
- # If server_processes > 1, offload to other processes.
- sub_workers = await asyncio.gather(
- *[_create_sub_worker() for _ in range(config.server_processes)])
-
- calls = [worker.stub.RunServer() for worker in sub_workers]
-
- config_request.setup.server_processes = 1
-
- for call in calls:
- await call.write(config_request)
- # An empty status indicates the peer is ready
- await call.read()
-
- start_time = time.monotonic()
- await context.write(
- _get_server_status(
- start_time,
- start_time,
- config.port,
- ))
-
- _LOGGER.info('Servers are ready to serve.')
-
- async for request in request_iterator:
- end_time = time.monotonic()
-
- for call in calls:
- await call.write(request)
-                    # Reports from sub workers don't matter
- await call.read()
-
- status = _get_server_status(
- start_time,
- end_time,
- config.port,
- )
- if request.mark.reset:
- start_time = end_time
- await context.write(status)
-
- for call in calls:
- await call.done_writing()
-
- for worker in sub_workers:
- await worker.stub.QuitWorker(control_pb2.Void())
- await worker.channel.close()
- _LOGGER.info('Waiting for [%s] to quit...', worker)
- await worker.process.wait()
-
- async def _run_single_client(self, config, request_iterator, context):
- running_tasks = []
- qps_data = histogram.Histogram(config.histogram_params.resolution,
- config.histogram_params.max_possible)
- start_time = time.monotonic()
-
-        # Create a client for each channel as an asyncio.Task
- for i in range(config.client_channels):
- server = config.server_targets[i % len(config.server_targets)]
- client = _create_client(server, config, qps_data)
- _LOGGER.info('Client created against server [%s]', server)
- running_tasks.append(self._loop.create_task(client.run()))
-
- end_time = time.monotonic()
- await context.write(_get_client_status(start_time, end_time, qps_data))
-
- # Respond to stat requests
- async for request in request_iterator:
- end_time = time.monotonic()
- status = _get_client_status(start_time, end_time, qps_data)
- if request.mark.reset:
- qps_data.reset()
- start_time = time.monotonic()
- await context.write(status)
-
-        # Clean up the clients
- for task in running_tasks:
- task.cancel()
-
- async def RunClient(self, request_iterator, context):
- config_request = await context.read()
- config = config_request.setup
- _LOGGER.info('Received ClientConfig: %s', config)
-
- if config.client_processes <= 0:
- _LOGGER.info('client_processes can\'t be [%d]',
- config.client_processes)
- _LOGGER.info('Using client_processes == [%d]', _NUM_CORES)
- config.client_processes = _NUM_CORES
-
- if config.client_processes == 1:
- # If client_processes == 1, run the benchmark in this process.
- await self._run_single_client(config, request_iterator, context)
- else:
- # If client_processes > 1, offload the work to other processes.
- sub_workers = await asyncio.gather(
- *[_create_sub_worker() for _ in range(config.client_processes)])
-
- calls = [worker.stub.RunClient() for worker in sub_workers]
-
- config_request.setup.client_processes = 1
-
- for call in calls:
- await call.write(config_request)
- # An empty status indicates the peer is ready
- await call.read()
-
- start_time = time.monotonic()
- result = histogram.Histogram(config.histogram_params.resolution,
- config.histogram_params.max_possible)
- end_time = time.monotonic()
- await context.write(_get_client_status(start_time, end_time,
- result))
-
- async for request in request_iterator:
- end_time = time.monotonic()
-
- for call in calls:
- _LOGGER.debug('Fetching status...')
- await call.write(request)
- sub_status = await call.read()
- result.merge(sub_status.stats.latencies)
- _LOGGER.debug('Update from sub worker count=[%d]',
- sub_status.stats.latencies.count)
-
- status = _get_client_status(start_time, end_time, result)
- if request.mark.reset:
- result.reset()
- start_time = time.monotonic()
- _LOGGER.debug('Reporting count=[%d]',
- status.stats.latencies.count)
- await context.write(status)
-
- for call in calls:
- await call.done_writing()
-
- for worker in sub_workers:
- await worker.stub.QuitWorker(control_pb2.Void())
- await worker.channel.close()
- _LOGGER.info('Waiting for sub worker [%s] to quit...', worker)
- await worker.process.wait()
- _LOGGER.info('Sub worker [%s] quit', worker)
-
- @staticmethod
- async def CoreCount(unused_request, unused_context):
- return control_pb2.CoreResponse(cores=_NUM_CORES)
-
- async def QuitWorker(self, unused_request, unused_context):
- _LOGGER.info('QuitWorker command received.')
- self._quit_event.set()
- return control_pb2.Void()
-
- async def wait_for_quit(self):
- await self._quit_event.wait()
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/__init__.py
deleted file mode 100644
index 1517f71d09..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/channelz_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/channelz_servicer_test.py
deleted file mode 100644
index e400dc87dc..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/channelz/channelz_servicer_test.py
+++ /dev/null
@@ -1,473 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests of grpc_channelz.v1.channelz."""
-
-import asyncio
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-from grpc_channelz.v1 import channelz
-from grpc_channelz.v1 import channelz_pb2
-from grpc_channelz.v1 import channelz_pb2_grpc
-
-from tests.unit.framework.common import test_constants
-from tests_aio.unit._test_base import AioTestBase
-
-_SUCCESSFUL_UNARY_UNARY = '/test/SuccessfulUnaryUnary'
-_FAILED_UNARY_UNARY = '/test/FailedUnaryUnary'
-_SUCCESSFUL_STREAM_STREAM = '/test/SuccessfulStreamStream'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-
-_DISABLE_REUSE_PORT = (('grpc.so_reuseport', 0),)
-_ENABLE_CHANNELZ = (('grpc.enable_channelz', 1),)
-_DISABLE_CHANNELZ = (('grpc.enable_channelz', 0),)
-
-_LARGE_UNASSIGNED_ID = 10000
-
-
-async def _successful_unary_unary(request, servicer_context):
- return _RESPONSE
-
-
-async def _failed_unary_unary(request, servicer_context):
- servicer_context.set_code(grpc.StatusCode.INTERNAL)
- servicer_context.set_details("Channelz Test Intended Failure")
-
-
-async def _successful_stream_stream(request_iterator, servicer_context):
- async for _ in request_iterator:
- yield _RESPONSE
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _SUCCESSFUL_UNARY_UNARY:
- return grpc.unary_unary_rpc_method_handler(_successful_unary_unary)
- elif handler_call_details.method == _FAILED_UNARY_UNARY:
- return grpc.unary_unary_rpc_method_handler(_failed_unary_unary)
- elif handler_call_details.method == _SUCCESSFUL_STREAM_STREAM:
- return grpc.stream_stream_rpc_method_handler(
- _successful_stream_stream)
- else:
- return None
-
-
-class _ChannelServerPair:
-
- def __init__(self):
- self.address = ''
- self.server = None
- self.channel = None
- self.server_ref_id = None
- self.channel_ref_id = None
-
- async def start(self):
- # Server will enable channelz service
- self.server = aio.server(options=_DISABLE_REUSE_PORT + _ENABLE_CHANNELZ)
- port = self.server.add_insecure_port('[::]:0')
- self.address = 'localhost:%d' % port
- self.server.add_generic_rpc_handlers((_GenericHandler(),))
- await self.server.start()
-
-        # The channel will also enable the channelz service
- self.channel = aio.insecure_channel(self.address,
- options=_ENABLE_CHANNELZ)
-
- async def bind_channelz(self, channelz_stub):
- resp = await channelz_stub.GetTopChannels(
- channelz_pb2.GetTopChannelsRequest(start_channel_id=0))
- for channel in resp.channel:
- if channel.data.target == self.address:
- self.channel_ref_id = channel.ref.channel_id
-
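-        # Take the last server in the list, assuming it is the most recently started one.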
- resp = await channelz_stub.GetServers(
- channelz_pb2.GetServersRequest(start_server_id=0))
- self.server_ref_id = resp.server[-1].ref.server_id
-
- async def stop(self):
- await self.channel.close()
- await self.server.stop(None)
-
-
-async def _create_channel_server_pairs(n, channelz_stub=None):
- """Create channel-server pairs."""
- pairs = [_ChannelServerPair() for i in range(n)]
- for pair in pairs:
- await pair.start()
- if channelz_stub:
- await pair.bind_channelz(channelz_stub)
- return pairs
-
-
-async def _destroy_channel_server_pairs(pairs):
- for pair in pairs:
- await pair.stop()
-
-
-class ChannelzServicerTest(AioTestBase):
-
- async def setUp(self):
-        # This server is used only to fetch Channelz info.
-        # It should not itself enable Channelz.
- self._server = aio.server(options=_DISABLE_REUSE_PORT +
- _DISABLE_CHANNELZ)
- port = self._server.add_insecure_port('[::]:0')
- channelz.add_channelz_servicer(self._server)
- await self._server.start()
-
- # This channel is used to fetch Channelz info only
- # Channelz should not be enabled
- self._channel = aio.insecure_channel('localhost:%d' % port,
- options=_DISABLE_CHANNELZ)
- self._channelz_stub = channelz_pb2_grpc.ChannelzStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def _get_server_by_ref_id(self, ref_id):
- """Server id may not be consecutive"""
- resp = await self._channelz_stub.GetServers(
- channelz_pb2.GetServersRequest(start_server_id=ref_id))
- self.assertEqual(ref_id, resp.server[0].ref.server_id)
- return resp.server[0]
-
- async def _send_successful_unary_unary(self, pair):
- call = pair.channel.unary_unary(_SUCCESSFUL_UNARY_UNARY)(_REQUEST)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def _send_failed_unary_unary(self, pair):
- try:
- await pair.channel.unary_unary(_FAILED_UNARY_UNARY)(_REQUEST)
- except grpc.RpcError:
- return
- else:
-            self.fail("This call is supposed to fail")
-
- async def _send_successful_stream_stream(self, pair):
- call = pair.channel.stream_stream(_SUCCESSFUL_STREAM_STREAM)(iter(
- [_REQUEST] * test_constants.STREAM_LENGTH))
- cnt = 0
- async for _ in call:
- cnt += 1
- self.assertEqual(cnt, test_constants.STREAM_LENGTH)
-
- async def test_get_top_channels_high_start_id(self):
- pairs = await _create_channel_server_pairs(1)
-
- resp = await self._channelz_stub.GetTopChannels(
- channelz_pb2.GetTopChannelsRequest(
- start_channel_id=_LARGE_UNASSIGNED_ID))
- self.assertEqual(len(resp.channel), 0)
- self.assertEqual(resp.end, True)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_successful_request(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- await self._send_successful_unary_unary(pairs[0])
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
-
- self.assertEqual(resp.channel.data.calls_started, 1)
- self.assertEqual(resp.channel.data.calls_succeeded, 1)
- self.assertEqual(resp.channel.data.calls_failed, 0)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_failed_request(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- await self._send_failed_unary_unary(pairs[0])
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, 1)
- self.assertEqual(resp.channel.data.calls_succeeded, 0)
- self.assertEqual(resp.channel.data.calls_failed, 1)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_many_requests(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- k_success = 7
- k_failed = 9
- for i in range(k_success):
- await self._send_successful_unary_unary(pairs[0])
- for i in range(k_failed):
- await self._send_failed_unary_unary(pairs[0])
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
- self.assertEqual(resp.channel.data.calls_succeeded, k_success)
- self.assertEqual(resp.channel.data.calls_failed, k_failed)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_many_requests_many_channel(self):
- k_channels = 4
- pairs = await _create_channel_server_pairs(k_channels,
- self._channelz_stub)
- k_success = 11
- k_failed = 13
- for i in range(k_success):
- await self._send_successful_unary_unary(pairs[0])
- await self._send_successful_unary_unary(pairs[2])
- for i in range(k_failed):
- await self._send_failed_unary_unary(pairs[1])
- await self._send_failed_unary_unary(pairs[2])
-
- # The first channel saw only successes
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, k_success)
- self.assertEqual(resp.channel.data.calls_succeeded, k_success)
- self.assertEqual(resp.channel.data.calls_failed, 0)
-
- # The second channel saw only failures
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[1].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, k_failed)
- self.assertEqual(resp.channel.data.calls_succeeded, 0)
- self.assertEqual(resp.channel.data.calls_failed, k_failed)
-
- # The third channel saw both successes and failures
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[2].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, k_success + k_failed)
- self.assertEqual(resp.channel.data.calls_succeeded, k_success)
- self.assertEqual(resp.channel.data.calls_failed, k_failed)
-
- # The fourth channel saw nothing
- resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[3].channel_ref_id))
- self.assertEqual(resp.channel.data.calls_started, 0)
- self.assertEqual(resp.channel.data.calls_succeeded, 0)
- self.assertEqual(resp.channel.data.calls_failed, 0)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_many_subchannels(self):
- k_channels = 4
- pairs = await _create_channel_server_pairs(k_channels,
- self._channelz_stub)
- k_success = 17
- k_failed = 19
- for i in range(k_success):
- await self._send_successful_unary_unary(pairs[0])
- await self._send_successful_unary_unary(pairs[2])
- for i in range(k_failed):
- await self._send_failed_unary_unary(pairs[1])
- await self._send_failed_unary_unary(pairs[2])
-
- for i in range(k_channels):
- gc_resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(
- channel_id=pairs[i].channel_ref_id))
-            # If no call was performed on the channel, there shouldn't be any subchannel
- if gc_resp.channel.data.calls_started == 0:
- self.assertEqual(len(gc_resp.channel.subchannel_ref), 0)
- continue
-
- # Otherwise, the subchannel should exist
- self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
- gsc_resp = await self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=gc_resp.channel.subchannel_ref[0].
- subchannel_id))
- self.assertEqual(gc_resp.channel.data.calls_started,
- gsc_resp.subchannel.data.calls_started)
- self.assertEqual(gc_resp.channel.data.calls_succeeded,
- gsc_resp.subchannel.data.calls_succeeded)
- self.assertEqual(gc_resp.channel.data.calls_failed,
- gsc_resp.subchannel.data.calls_failed)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_server_call(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- k_success = 23
- k_failed = 29
- for i in range(k_success):
- await self._send_successful_unary_unary(pairs[0])
- for i in range(k_failed):
- await self._send_failed_unary_unary(pairs[0])
-
- resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
- self.assertEqual(resp.data.calls_started, k_success + k_failed)
- self.assertEqual(resp.data.calls_succeeded, k_success)
- self.assertEqual(resp.data.calls_failed, k_failed)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_many_subchannels_and_sockets(self):
- k_channels = 4
- pairs = await _create_channel_server_pairs(k_channels,
- self._channelz_stub)
- k_success = 3
- k_failed = 5
- for i in range(k_success):
- await self._send_successful_unary_unary(pairs[0])
- await self._send_successful_unary_unary(pairs[2])
- for i in range(k_failed):
- await self._send_failed_unary_unary(pairs[1])
- await self._send_failed_unary_unary(pairs[2])
-
- for i in range(k_channels):
- gc_resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(
- channel_id=pairs[i].channel_ref_id))
-
-            # If no call was performed on the channel, there shouldn't be any subchannel
- if gc_resp.channel.data.calls_started == 0:
- self.assertEqual(len(gc_resp.channel.subchannel_ref), 0)
- continue
-
- # Otherwise, the subchannel should exist
- self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
- gsc_resp = await self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=gc_resp.channel.subchannel_ref[0].
- subchannel_id))
- self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
-
- gs_resp = await self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(
- socket_id=gsc_resp.subchannel.socket_ref[0].socket_id))
- self.assertEqual(gsc_resp.subchannel.data.calls_started,
- gs_resp.socket.data.streams_started)
- self.assertEqual(0, gs_resp.socket.data.streams_failed)
- # Calls started == messages sent, only valid for unary calls
- self.assertEqual(gsc_resp.subchannel.data.calls_started,
- gs_resp.socket.data.messages_sent)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_streaming_rpc(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-        # In C++, the argument to _send_successful_stream_stream is the message length.
-        # Here the argument is still the channel-server pair, to stay consistent with the other two helpers.
- await self._send_successful_stream_stream(pairs[0])
-
- gc_resp = await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=pairs[0].channel_ref_id))
- self.assertEqual(gc_resp.channel.data.calls_started, 1)
- self.assertEqual(gc_resp.channel.data.calls_succeeded, 1)
- self.assertEqual(gc_resp.channel.data.calls_failed, 0)
- # Subchannel exists
- self.assertGreater(len(gc_resp.channel.subchannel_ref), 0)
-
- gsc_resp = await self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=gc_resp.channel.subchannel_ref[0].subchannel_id))
- self.assertEqual(gsc_resp.subchannel.data.calls_started, 1)
- self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, 1)
- self.assertEqual(gsc_resp.subchannel.data.calls_failed, 0)
- # Socket exists
- self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1)
-
- gs_resp = await self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(
- socket_id=gsc_resp.subchannel.socket_ref[0].socket_id))
- self.assertEqual(gs_resp.socket.data.streams_started, 1)
- self.assertEqual(gs_resp.socket.data.streams_succeeded, 1)
- self.assertEqual(gs_resp.socket.data.streams_failed, 0)
- self.assertEqual(gs_resp.socket.data.messages_sent,
- test_constants.STREAM_LENGTH)
- self.assertEqual(gs_resp.socket.data.messages_received,
- test_constants.STREAM_LENGTH)
-
- await _destroy_channel_server_pairs(pairs)
-
- async def test_server_sockets(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- await self._send_successful_unary_unary(pairs[0])
- await self._send_failed_unary_unary(pairs[0])
-
- resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
- self.assertEqual(resp.data.calls_started, 2)
- self.assertEqual(resp.data.calls_succeeded, 1)
- self.assertEqual(resp.data.calls_failed, 1)
-
- gss_resp = await self._channelz_stub.GetServerSockets(
- channelz_pb2.GetServerSocketsRequest(server_id=resp.ref.server_id,
- start_socket_id=0))
-        # If the RPC call fails, it raises a grpc.RpcError,
-        # so if no exception is raised the test is considered to have passed.
- await _destroy_channel_server_pairs(pairs)
-
- async def test_server_listen_sockets(self):
- pairs = await _create_channel_server_pairs(1, self._channelz_stub)
-
- resp = await self._get_server_by_ref_id(pairs[0].server_ref_id)
- self.assertEqual(len(resp.listen_socket), 1)
-
- gs_resp = await self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(
- socket_id=resp.listen_socket[0].socket_id))
-        # If the RPC call fails, it raises a grpc.RpcError,
-        # so if no exception is raised the test is considered to have passed.
- await _destroy_channel_server_pairs(pairs)
-
- async def test_invalid_query_get_server(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channelz_stub.GetServer(
- channelz_pb2.GetServerRequest(server_id=_LARGE_UNASSIGNED_ID))
- self.assertEqual(grpc.StatusCode.NOT_FOUND,
- exception_context.exception.code())
-
- async def test_invalid_query_get_channel(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channelz_stub.GetChannel(
- channelz_pb2.GetChannelRequest(channel_id=_LARGE_UNASSIGNED_ID))
- self.assertEqual(grpc.StatusCode.NOT_FOUND,
- exception_context.exception.code())
-
- async def test_invalid_query_get_subchannel(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channelz_stub.GetSubchannel(
- channelz_pb2.GetSubchannelRequest(
- subchannel_id=_LARGE_UNASSIGNED_ID))
- self.assertEqual(grpc.StatusCode.NOT_FOUND,
- exception_context.exception.code())
-
- async def test_invalid_query_get_socket(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channelz_stub.GetSocket(
- channelz_pb2.GetSocketRequest(socket_id=_LARGE_UNASSIGNED_ID))
- self.assertEqual(grpc.StatusCode.NOT_FOUND,
- exception_context.exception.code())
-
- async def test_invalid_query_get_server_sockets(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channelz_stub.GetServerSockets(
- channelz_pb2.GetServerSocketsRequest(
- server_id=_LARGE_UNASSIGNED_ID,
- start_socket_id=0,
- ))
- self.assertEqual(grpc.StatusCode.NOT_FOUND,
- exception_context.exception.code())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/__init__.py
deleted file mode 100644
index 1517f71d09..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py
deleted file mode 100644
index 7c6776ecd7..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/health_check/health_servicer_test.py
+++ /dev/null
@@ -1,280 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests AsyncIO version of grpcio-health-checking."""
-
-import asyncio
-import logging
-import random
-import time
-import unittest
-
-import grpc
-from grpc.experimental import aio
-from grpc_health.v1 import health
-from grpc_health.v1 import health_pb2
-from grpc_health.v1 import health_pb2_grpc
-
-from tests.unit.framework.common import test_constants
-from tests_aio.unit._test_base import AioTestBase
-
-_SERVING_SERVICE = 'grpc.test.TestServiceServing'
-_UNKNOWN_SERVICE = 'grpc.test.TestServiceUnknown'
-_NOT_SERVING_SERVICE = 'grpc.test.TestServiceNotServing'
-_WATCH_SERVICE = 'grpc.test.WatchService'
-
-_LARGE_NUMBER_OF_STATUS_CHANGES = 1000
-
-
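-# Drain responses from a streaming call into an asyncio queue so tests can await them one at a time.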
-async def _pipe_to_queue(call, queue):
- async for response in call:
- await queue.put(response)
-
-
-class HealthServicerTest(AioTestBase):
-
- async def setUp(self):
- self._servicer = health.aio.HealthServicer()
- await self._servicer.set(_SERVING_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
- await self._servicer.set(_UNKNOWN_SERVICE,
- health_pb2.HealthCheckResponse.UNKNOWN)
- await self._servicer.set(_NOT_SERVING_SERVICE,
- health_pb2.HealthCheckResponse.NOT_SERVING)
- self._server = aio.server()
- port = self._server.add_insecure_port('[::]:0')
- health_pb2_grpc.add_HealthServicer_to_server(self._servicer,
- self._server)
- await self._server.start()
-
- self._channel = aio.insecure_channel('localhost:%d' % port)
- self._stub = health_pb2_grpc.HealthStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_check_empty_service(self):
- request = health_pb2.HealthCheckRequest()
- resp = await self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
-
- async def test_check_serving_service(self):
- request = health_pb2.HealthCheckRequest(service=_SERVING_SERVICE)
- resp = await self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
-
- async def test_check_unknown_service(self):
- request = health_pb2.HealthCheckRequest(service=_UNKNOWN_SERVICE)
- resp = await self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status)
-
- async def test_check_not_serving_service(self):
- request = health_pb2.HealthCheckRequest(service=_NOT_SERVING_SERVICE)
- resp = await self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- resp.status)
-
- async def test_check_not_found_service(self):
- request = health_pb2.HealthCheckRequest(service='not-found')
- with self.assertRaises(aio.AioRpcError) as context:
- await self._stub.Check(request)
-
- self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code())
-
- async def test_health_service_name(self):
- self.assertEqual(health.SERVICE_NAME, 'grpc.health.v1.Health')
-
- async def test_watch_empty_service(self):
- request = health_pb2.HealthCheckRequest(service=health.OVERALL_HEALTH)
-
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- (await queue.get()).status)
-
- call.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- self.assertTrue(queue.empty())
-
- async def test_watch_new_service(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue.get()).status)
-
- await self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- (await queue.get()).status)
-
- await self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.NOT_SERVING)
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- (await queue.get()).status)
-
- call.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- self.assertTrue(queue.empty())
-
- async def test_watch_service_isolation(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue.get()).status)
-
- await self._servicer.set('some-other-service',
- health_pb2.HealthCheckResponse.SERVING)
-        # A change of health status in another service should be isolated;
-        # hence, no additional notification should be observed here.
- with self.assertRaises(asyncio.TimeoutError):
- await asyncio.wait_for(queue.get(), test_constants.SHORT_TIMEOUT)
-
- call.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- self.assertTrue(queue.empty())
-
- async def test_two_watchers(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- queue1 = asyncio.Queue()
- queue2 = asyncio.Queue()
- call1 = self._stub.Watch(request)
- call2 = self._stub.Watch(request)
- task1 = self.loop.create_task(_pipe_to_queue(call1, queue1))
- task2 = self.loop.create_task(_pipe_to_queue(call2, queue2))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue1.get()).status)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue2.get()).status)
-
- await self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- (await queue1.get()).status)
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- (await queue2.get()).status)
-
- call1.cancel()
- call2.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task1
-
- with self.assertRaises(asyncio.CancelledError):
- await task2
-
- self.assertTrue(queue1.empty())
- self.assertTrue(queue2.empty())
-
- async def test_cancelled_watch_removed_from_watch_list(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue.get()).status)
-
- call.cancel()
- await self._servicer.set(_WATCH_SERVICE,
- health_pb2.HealthCheckResponse.SERVING)
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- # Wait for the serving coroutine to process client cancellation.
- timeout = time.monotonic() + test_constants.TIME_ALLOWANCE
- while (time.monotonic() < timeout and self._servicer._server_watchers):
- await asyncio.sleep(1)
- self.assertFalse(self._servicer._server_watchers,
- 'There should not be any watcher left')
- self.assertTrue(queue.empty())
-
- async def test_graceful_shutdown(self):
- request = health_pb2.HealthCheckRequest(service=health.OVERALL_HEALTH)
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVING,
- (await queue.get()).status)
-
- await self._servicer.enter_graceful_shutdown()
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- (await queue.get()).status)
-
- # This should be a no-op.
- await self._servicer.set(health.OVERALL_HEALTH,
- health_pb2.HealthCheckResponse.SERVING)
-
- resp = await self._stub.Check(request)
- self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
- resp.status)
-
- call.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- self.assertTrue(queue.empty())
-
- async def test_no_duplicate_status(self):
- request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE)
- call = self._stub.Watch(request)
- queue = asyncio.Queue()
- task = self.loop.create_task(_pipe_to_queue(call, queue))
-
- self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN,
- (await queue.get()).status)
- last_status = health_pb2.HealthCheckResponse.SERVICE_UNKNOWN
-
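-        # A notification is expected only when the status actually changes;
-        # duplicate updates must not produce extra messages.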
- for _ in range(_LARGE_NUMBER_OF_STATUS_CHANGES):
- if random.randint(0, 1) == 0:
- status = health_pb2.HealthCheckResponse.SERVING
- else:
- status = health_pb2.HealthCheckResponse.NOT_SERVING
-
- await self._servicer.set(_WATCH_SERVICE, status)
- if status != last_status:
- self.assertEqual(status, (await queue.get()).status)
- last_status = status
-
- call.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- self.assertTrue(queue.empty())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/__init__.py
deleted file mode 100644
index b71ddbd314..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/client.py
deleted file mode 100644
index a4c5e12ced..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/client.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import asyncio
-import logging
-import os
-
-import grpc
-from grpc.experimental import aio
-
-from tests.interop import client as interop_client_lib
-from tests_aio.interop import methods
-
-_LOGGER = logging.getLogger(__name__)
-_LOGGER.setLevel(logging.DEBUG)
-
-
-def _create_channel(args):
- target = f'{args.server_host}:{args.server_port}'
-
- if args.use_tls or args.use_alts or args.custom_credentials_type is not None:
- channel_credentials, options = interop_client_lib.get_secure_channel_parameters(
- args)
- return aio.secure_channel(target, channel_credentials, options)
- else:
- return aio.insecure_channel(target)
-
-
-def _test_case_from_arg(test_case_arg):
- for test_case in methods.TestCase:
- if test_case_arg == test_case.value:
- return test_case
- else:
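-        # This for-else branch runs only when no test case matched.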
- raise ValueError('No test case "%s"!' % test_case_arg)
-
-
-async def test_interoperability():
-
- args = interop_client_lib.parse_interop_client_args()
- channel = _create_channel(args)
- stub = interop_client_lib.create_stub(channel, args)
- test_case = _test_case_from_arg(args.test_case)
- await methods.test_interoperability(test_case, stub, args)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- asyncio.get_event_loop().set_debug(True)
- asyncio.get_event_loop().run_until_complete(test_interoperability())
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/local_interop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/local_interop_test.py
deleted file mode 100644
index 0db15be3a9..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/local_interop_test.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Conducts interop tests locally."""
-
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.interop import resources
-from tests_aio.interop import methods
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
-
-
-class InteropTestCaseMixin:
- """Unit test methods.
-
- This class must be mixed in with unittest.TestCase and a class that defines
- setUp and tearDown methods that manage a stub attribute.
- """
- _stub: test_pb2_grpc.TestServiceStub
-
- async def test_empty_unary(self):
- await methods.test_interoperability(methods.TestCase.EMPTY_UNARY,
- self._stub, None)
-
- async def test_large_unary(self):
- await methods.test_interoperability(methods.TestCase.LARGE_UNARY,
- self._stub, None)
-
- async def test_server_streaming(self):
- await methods.test_interoperability(methods.TestCase.SERVER_STREAMING,
- self._stub, None)
-
- async def test_client_streaming(self):
- await methods.test_interoperability(methods.TestCase.CLIENT_STREAMING,
- self._stub, None)
-
- async def test_ping_pong(self):
- await methods.test_interoperability(methods.TestCase.PING_PONG,
- self._stub, None)
-
- async def test_cancel_after_begin(self):
- await methods.test_interoperability(methods.TestCase.CANCEL_AFTER_BEGIN,
- self._stub, None)
-
- async def test_cancel_after_first_response(self):
- await methods.test_interoperability(
- methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE, self._stub, None)
-
- async def test_timeout_on_sleeping_server(self):
- await methods.test_interoperability(
- methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER, self._stub, None)
-
- async def test_empty_stream(self):
- await methods.test_interoperability(methods.TestCase.EMPTY_STREAM,
- self._stub, None)
-
- async def test_status_code_and_message(self):
- await methods.test_interoperability(
- methods.TestCase.STATUS_CODE_AND_MESSAGE, self._stub, None)
-
- async def test_unimplemented_method(self):
- await methods.test_interoperability(
- methods.TestCase.UNIMPLEMENTED_METHOD, self._stub, None)
-
- async def test_unimplemented_service(self):
- await methods.test_interoperability(
- methods.TestCase.UNIMPLEMENTED_SERVICE, self._stub, None)
-
- async def test_custom_metadata(self):
- await methods.test_interoperability(methods.TestCase.CUSTOM_METADATA,
- self._stub, None)
-
- async def test_special_status_message(self):
- await methods.test_interoperability(
- methods.TestCase.SPECIAL_STATUS_MESSAGE, self._stub, None)
-
-
-class InsecureLocalInteropTest(InteropTestCaseMixin, AioTestBase):
-
- async def setUp(self):
- address, self._server = await start_test_server()
- self._channel = aio.insecure_channel(address)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
-
-class SecureLocalInteropTest(InteropTestCaseMixin, AioTestBase):
-
- async def setUp(self):
- server_credentials = grpc.ssl_server_credentials([
- (resources.private_key(), resources.certificate_chain())
- ])
- channel_credentials = grpc.ssl_channel_credentials(
- resources.test_root_certificates())
- channel_options = ((
- 'grpc.ssl_target_name_override',
- _SERVER_HOST_OVERRIDE,
- ),)
-
- address, self._server = await start_test_server(
- secure=True, server_credentials=server_credentials)
- self._channel = aio.secure_channel(address, channel_credentials,
- channel_options)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.INFO)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/methods.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/methods.py
deleted file mode 100644
index 6524a5ed0b..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/methods.py
+++ /dev/null
@@ -1,458 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Implementations of interoperability test methods."""
-
-import argparse
-import asyncio
-import collections
-import datetime
-import enum
-import inspect
-import json
-import os
-import threading
-import time
-from typing import Any, Optional, Union
-
-from google import auth as google_auth
-from google.auth import environment_vars as google_auth_environment_vars
-from google.auth.transport import grpc as google_auth_transport_grpc
-from google.auth.transport import requests as google_auth_transport_requests
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import empty_pb2
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-
-_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
-_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
-
-
-async def _expect_status_code(call: aio.Call,
- expected_code: grpc.StatusCode) -> None:
- code = await call.code()
- if code != expected_code:
- raise ValueError('expected code %s, got %s' %
- (expected_code, await call.code()))
-
-
-async def _expect_status_details(call: aio.Call, expected_details: str) -> None:
- details = await call.details()
- if details != expected_details:
- raise ValueError('expected message %s, got %s' %
- (expected_details, await call.details()))
-
-
-async def _validate_status_code_and_details(call: aio.Call,
- expected_code: grpc.StatusCode,
- expected_details: str) -> None:
- await _expect_status_code(call, expected_code)
- await _expect_status_details(call, expected_details)
-
-
-def _validate_payload_type_and_length(response: Union[
- messages_pb2.SimpleResponse, messages_pb2.StreamingOutputCallResponse],
- expected_type: Any,
- expected_length: int) -> None:
- if response.payload.type is not expected_type:
-        raise ValueError('expected payload type %s, got %s' %
-                         (expected_type, response.payload.type))
- elif len(response.payload.body) != expected_length:
- raise ValueError('expected payload body size %d, got %d' %
- (expected_length, len(response.payload.body)))
-
-
-async def _large_unary_common_behavior(
- stub: test_pb2_grpc.TestServiceStub, fill_username: bool,
- fill_oauth_scope: bool, call_credentials: Optional[grpc.CallCredentials]
-) -> messages_pb2.SimpleResponse:
- size = 314159
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=size,
- payload=messages_pb2.Payload(body=b'\x00' * 271828),
- fill_username=fill_username,
- fill_oauth_scope=fill_oauth_scope)
- response = await stub.UnaryCall(request, credentials=call_credentials)
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size)
- return response
-
-
-async def _empty_unary(stub: test_pb2_grpc.TestServiceStub) -> None:
- response = await stub.EmptyCall(empty_pb2.Empty())
- if not isinstance(response, empty_pb2.Empty):
- raise TypeError('response is of type "%s", not empty_pb2.Empty!' %
- type(response))
-
-
-async def _large_unary(stub: test_pb2_grpc.TestServiceStub) -> None:
- await _large_unary_common_behavior(stub, False, False, None)
-
-
-async def _client_streaming(stub: test_pb2_grpc.TestServiceStub) -> None:
- payload_body_sizes = (
- 27182,
- 8,
- 1828,
- 45904,
- )
-
- async def request_gen():
- for size in payload_body_sizes:
- yield messages_pb2.StreamingInputCallRequest(
- payload=messages_pb2.Payload(body=b'\x00' * size))
-
- response = await stub.StreamingInputCall(request_gen())
- if response.aggregated_payload_size != sum(payload_body_sizes):
- raise ValueError('incorrect size %d!' %
- response.aggregated_payload_size)
-
-
-async def _server_streaming(stub: test_pb2_grpc.TestServiceStub) -> None:
- sizes = (
- 31415,
- 9,
- 2653,
- 58979,
- )
-
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(
- messages_pb2.ResponseParameters(size=sizes[0]),
- messages_pb2.ResponseParameters(size=sizes[1]),
- messages_pb2.ResponseParameters(size=sizes[2]),
- messages_pb2.ResponseParameters(size=sizes[3]),
- ))
- call = stub.StreamingOutputCall(request)
- for size in sizes:
- response = await call.read()
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
- size)
-
-
-async def _ping_pong(stub: test_pb2_grpc.TestServiceStub) -> None:
- request_response_sizes = (
- 31415,
- 9,
- 2653,
- 58979,
- )
- request_payload_sizes = (
- 27182,
- 8,
- 1828,
- 45904,
- )
-
- call = stub.FullDuplexCall()
- for response_size, payload_size in zip(request_response_sizes,
- request_payload_sizes):
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(
- size=response_size),),
- payload=messages_pb2.Payload(body=b'\x00' * payload_size))
-
- await call.write(request)
- response = await call.read()
- _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
- response_size)
- await call.done_writing()
- await _validate_status_code_and_details(call, grpc.StatusCode.OK, '')
-
-
-async def _cancel_after_begin(stub: test_pb2_grpc.TestServiceStub):
- call = stub.StreamingInputCall()
- call.cancel()
- if not call.cancelled():
- raise ValueError('expected cancelled method to return True')
- code = await call.code()
- if code is not grpc.StatusCode.CANCELLED:
- raise ValueError('expected status code CANCELLED')
-
-
-async def _cancel_after_first_response(stub: test_pb2_grpc.TestServiceStub):
- request_response_sizes = (
- 31415,
- 9,
- 2653,
- 58979,
- )
- request_payload_sizes = (
- 27182,
- 8,
- 1828,
- 45904,
- )
-
- call = stub.FullDuplexCall()
-
- response_size = request_response_sizes[0]
- payload_size = request_payload_sizes[0]
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(
- size=response_size),),
- payload=messages_pb2.Payload(body=b'\x00' * payload_size))
-
- await call.write(request)
- await call.read()
-
- call.cancel()
-
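-    # After cancellation, the pending read is expected to fail with asyncio.CancelledError.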
- try:
- await call.read()
- except asyncio.CancelledError:
- assert await call.code() is grpc.StatusCode.CANCELLED
- else:
- raise ValueError('expected call to be cancelled')
-
-
-async def _timeout_on_sleeping_server(stub: test_pb2_grpc.TestServiceStub):
- request_payload_size = 27182
- time_limit = datetime.timedelta(seconds=1)
-
- call = stub.FullDuplexCall(timeout=time_limit.total_seconds())
-
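-    # Ask the server to delay its response to twice the deadline so the call times out.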
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- payload=messages_pb2.Payload(body=b'\x00' * request_payload_size),
- response_parameters=(messages_pb2.ResponseParameters(
- interval_us=int(time_limit.total_seconds() * 2 * 10**6)),))
- await call.write(request)
- await call.done_writing()
- try:
- await call.read()
- except aio.AioRpcError as rpc_error:
- if rpc_error.code() is not grpc.StatusCode.DEADLINE_EXCEEDED:
- raise
- else:
- raise ValueError('expected call to exceed deadline')
-
-
-async def _empty_stream(stub: test_pb2_grpc.TestServiceStub):
- call = stub.FullDuplexCall()
- await call.done_writing()
- assert await call.read() == aio.EOF
-
-
-async def _status_code_and_message(stub: test_pb2_grpc.TestServiceStub):
- details = 'test status message'
- status = grpc.StatusCode.UNKNOWN # code = 2
-
- # Test with a UnaryCall
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=1,
- payload=messages_pb2.Payload(body=b'\x00'),
- response_status=messages_pb2.EchoStatus(code=status.value[0],
- message=details))
- call = stub.UnaryCall(request)
- await _validate_status_code_and_details(call, status, details)
-
- # Test with a FullDuplexCall
- call = stub.FullDuplexCall()
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(size=1),),
- payload=messages_pb2.Payload(body=b'\x00'),
- response_status=messages_pb2.EchoStatus(code=status.value[0],
- message=details))
- await call.write(request) # sends the initial request.
- await call.done_writing()
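-    # The non-OK status surfaces as an AioRpcError on the next read.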
- try:
- await call.read()
- except aio.AioRpcError as rpc_error:
- assert rpc_error.code() == status
- await _validate_status_code_and_details(call, status, details)
-
-
-async def _unimplemented_method(stub: test_pb2_grpc.TestServiceStub):
- call = stub.UnimplementedCall(empty_pb2.Empty())
- await _expect_status_code(call, grpc.StatusCode.UNIMPLEMENTED)
-
-
-async def _unimplemented_service(stub: test_pb2_grpc.UnimplementedServiceStub):
- call = stub.UnimplementedCall(empty_pb2.Empty())
- await _expect_status_code(call, grpc.StatusCode.UNIMPLEMENTED)
-
-
-async def _custom_metadata(stub: test_pb2_grpc.TestServiceStub):
- initial_metadata_value = "test_initial_metadata_value"
- trailing_metadata_value = b"\x0a\x0b\x0a\x0b\x0a\x0b"
- metadata = aio.Metadata(
- (_INITIAL_METADATA_KEY, initial_metadata_value),
- (_TRAILING_METADATA_KEY, trailing_metadata_value),
- )
-
- async def _validate_metadata(call):
- initial_metadata = await call.initial_metadata()
- if initial_metadata[_INITIAL_METADATA_KEY] != initial_metadata_value:
- raise ValueError('expected initial metadata %s, got %s' %
- (initial_metadata_value,
- initial_metadata[_INITIAL_METADATA_KEY]))
-
- trailing_metadata = await call.trailing_metadata()
- if trailing_metadata[_TRAILING_METADATA_KEY] != trailing_metadata_value:
- raise ValueError('expected trailing metadata %s, got %s' %
- (trailing_metadata_value,
- trailing_metadata[_TRAILING_METADATA_KEY]))
-
- # Testing with UnaryCall
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=1,
- payload=messages_pb2.Payload(body=b'\x00'))
- call = stub.UnaryCall(request, metadata=metadata)
- await _validate_metadata(call)
-
- # Testing with FullDuplexCall
- call = stub.FullDuplexCall(metadata=metadata)
- request = messages_pb2.StreamingOutputCallRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_parameters=(messages_pb2.ResponseParameters(size=1),))
- await call.write(request)
- await call.read()
- await call.done_writing()
- await _validate_metadata(call)
-
-
-async def _compute_engine_creds(stub: test_pb2_grpc.TestServiceStub,
- args: argparse.Namespace):
- response = await _large_unary_common_behavior(stub, True, True, None)
- if args.default_service_account != response.username:
- raise ValueError('expected username %s, got %s' %
- (args.default_service_account, response.username))
-
-
-async def _oauth2_auth_token(stub: test_pb2_grpc.TestServiceStub,
- args: argparse.Namespace):
- json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
- wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
- response = await _large_unary_common_behavior(stub, True, True, None)
- if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
- if args.oauth_scope.find(response.oauth_scope) == -1:
- raise ValueError(
- 'expected to find oauth scope "{}" in received "{}"'.format(
- response.oauth_scope, args.oauth_scope))
-
-
-async def _jwt_token_creds(stub: test_pb2_grpc.TestServiceStub):
- json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
- wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
- response = await _large_unary_common_behavior(stub, True, False, None)
- if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
-
-
-async def _per_rpc_creds(stub: test_pb2_grpc.TestServiceStub,
- args: argparse.Namespace):
- json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS]
- wanted_email = json.load(open(json_key_filename, 'r'))['client_email']
- google_credentials, unused_project_id = google_auth.default(
- scopes=[args.oauth_scope])
- call_credentials = grpc.metadata_call_credentials(
- google_auth_transport_grpc.AuthMetadataPlugin(
- credentials=google_credentials,
- request=google_auth_transport_requests.Request()))
- response = await _large_unary_common_behavior(stub, True, False,
- call_credentials)
- if wanted_email != response.username:
- raise ValueError('expected username %s, got %s' %
- (wanted_email, response.username))
-
-
-async def _special_status_message(stub: test_pb2_grpc.TestServiceStub):
- details = b'\t\ntest with whitespace\r\nand Unicode BMP \xe2\x98\xba and non-BMP \xf0\x9f\x98\x88\t\n'.decode(
- 'utf-8')
- status = grpc.StatusCode.UNKNOWN # code = 2
-
- # Test with a UnaryCall
- request = messages_pb2.SimpleRequest(
- response_type=messages_pb2.COMPRESSABLE,
- response_size=1,
- payload=messages_pb2.Payload(body=b'\x00'),
- response_status=messages_pb2.EchoStatus(code=status.value[0],
- message=details))
- call = stub.UnaryCall(request)
- await _validate_status_code_and_details(call, status, details)
-
-
-@enum.unique
-class TestCase(enum.Enum):
- EMPTY_UNARY = 'empty_unary'
- LARGE_UNARY = 'large_unary'
- SERVER_STREAMING = 'server_streaming'
- CLIENT_STREAMING = 'client_streaming'
- PING_PONG = 'ping_pong'
- CANCEL_AFTER_BEGIN = 'cancel_after_begin'
- CANCEL_AFTER_FIRST_RESPONSE = 'cancel_after_first_response'
- TIMEOUT_ON_SLEEPING_SERVER = 'timeout_on_sleeping_server'
- EMPTY_STREAM = 'empty_stream'
- STATUS_CODE_AND_MESSAGE = 'status_code_and_message'
- UNIMPLEMENTED_METHOD = 'unimplemented_method'
- UNIMPLEMENTED_SERVICE = 'unimplemented_service'
-    CUSTOM_METADATA = 'custom_metadata'
- COMPUTE_ENGINE_CREDS = 'compute_engine_creds'
- OAUTH2_AUTH_TOKEN = 'oauth2_auth_token'
- JWT_TOKEN_CREDS = 'jwt_token_creds'
- PER_RPC_CREDS = 'per_rpc_creds'
- SPECIAL_STATUS_MESSAGE = 'special_status_message'
-
-
-_TEST_CASE_IMPLEMENTATION_MAPPING = {
- TestCase.EMPTY_UNARY: _empty_unary,
- TestCase.LARGE_UNARY: _large_unary,
- TestCase.SERVER_STREAMING: _server_streaming,
- TestCase.CLIENT_STREAMING: _client_streaming,
- TestCase.PING_PONG: _ping_pong,
- TestCase.CANCEL_AFTER_BEGIN: _cancel_after_begin,
- TestCase.CANCEL_AFTER_FIRST_RESPONSE: _cancel_after_first_response,
- TestCase.TIMEOUT_ON_SLEEPING_SERVER: _timeout_on_sleeping_server,
- TestCase.EMPTY_STREAM: _empty_stream,
- TestCase.STATUS_CODE_AND_MESSAGE: _status_code_and_message,
- TestCase.UNIMPLEMENTED_METHOD: _unimplemented_method,
- TestCase.UNIMPLEMENTED_SERVICE: _unimplemented_service,
- TestCase.CUSTOM_METADATA: _custom_metadata,
- TestCase.COMPUTE_ENGINE_CREDS: _compute_engine_creds,
- TestCase.OAUTH2_AUTH_TOKEN: _oauth2_auth_token,
- TestCase.JWT_TOKEN_CREDS: _jwt_token_creds,
- TestCase.PER_RPC_CREDS: _per_rpc_creds,
- TestCase.SPECIAL_STATUS_MESSAGE: _special_status_message,
-}
-
-
-async def test_interoperability(
- case: TestCase,
- stub: test_pb2_grpc.TestServiceStub,
- args: Optional[argparse.Namespace] = None) -> None:
- method = _TEST_CASE_IMPLEMENTATION_MAPPING.get(case)
- if method is None:
- raise NotImplementedError(f'Test case "{case}" not implemented!')
- else:
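-        # Dispatch on the implementation's arity: credential-related cases also need the parsed args.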
- num_params = len(inspect.signature(method).parameters)
- if num_params == 1:
- await method(stub)
- elif num_params == 2:
- if args is not None:
- await method(stub, args)
- else:
- raise ValueError(f'Failed to run case [{case}]: args is None')
- else:
- raise ValueError(f'Invalid number of parameters [{num_params}]')
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/server.py
deleted file mode 100644
index e40c831a87..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/interop/server.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The gRPC interoperability test server using AsyncIO stack."""
-
-import argparse
-import asyncio
-import logging
-
-import grpc
-
-from tests.interop import server as interop_server_lib
-from tests_aio.unit import _test_server
-
-logging.basicConfig(level=logging.DEBUG)
-_LOGGER = logging.getLogger(__name__)
-_LOGGER.setLevel(logging.DEBUG)
-
-
-async def serve():
- args = interop_server_lib.parse_interop_server_arguments()
-
- if args.use_tls or args.use_alts:
- credentials = interop_server_lib.get_server_credentials(args.use_tls)
- address, server = await _test_server.start_test_server(
- port=args.port, secure=True, server_credentials=credentials)
- else:
- address, server = await _test_server.start_test_server(
- port=args.port,
- secure=False,
- )
-
- _LOGGER.info('Server serving at %s', address)
- await server.wait_for_termination()
- _LOGGER.info('Server stopped; exiting.')
-
-
-if __name__ == '__main__':
- asyncio.get_event_loop().run_until_complete(serve())
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/__init__.py
deleted file mode 100644
index 5772620b60..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/reflection_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/reflection_servicer_test.py
deleted file mode 100644
index 8e5c9df664..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/reflection/reflection_servicer_test.py
+++ /dev/null
@@ -1,194 +0,0 @@
-# Copyright 2016 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests of grpc_reflection.v1alpha.reflection."""
-
-import logging
-import unittest
-
-from google.protobuf import descriptor_pb2
-import grpc
-from grpc.experimental import aio
-from grpc_reflection.v1alpha import reflection
-from grpc_reflection.v1alpha import reflection_pb2
-from grpc_reflection.v1alpha import reflection_pb2_grpc
-
-from src.proto.grpc.testing import empty_pb2
-from src.proto.grpc.testing.proto2 import empty2_extensions_pb2
-from tests_aio.unit._test_base import AioTestBase
-
-_EMPTY_PROTO_FILE_NAME = 'src/proto/grpc/testing/empty.proto'
-_EMPTY_PROTO_SYMBOL_NAME = 'grpc.testing.Empty'
-_SERVICE_NAMES = ('Angstrom', 'Bohr', 'Curie', 'Dyson', 'Einstein', 'Feynman',
- 'Galilei')
-_EMPTY_EXTENSIONS_SYMBOL_NAME = 'grpc.testing.proto2.EmptyWithExtensions'
-_EMPTY_EXTENSIONS_NUMBERS = (
- 124,
- 125,
- 126,
- 127,
- 128,
-)
-
-
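-# Serialize the descriptor so it can be compared byte-for-byte against the reflection responses.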
-def _file_descriptor_to_proto(descriptor):
- proto = descriptor_pb2.FileDescriptorProto()
- descriptor.CopyToProto(proto)
- return proto.SerializeToString()
-
-
-class ReflectionServicerTest(AioTestBase):
-
- async def setUp(self):
- self._server = aio.server()
- reflection.enable_server_reflection(_SERVICE_NAMES, self._server)
- port = self._server.add_insecure_port('[::]:0')
- await self._server.start()
-
- self._channel = aio.insecure_channel('localhost:%d' % port)
- self._stub = reflection_pb2_grpc.ServerReflectionStub(self._channel)
-
- async def tearDown(self):
- await self._server.stop(None)
- await self._channel.close()
-
- async def test_file_by_name(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- file_by_filename=_EMPTY_PROTO_FILE_NAME),
- reflection_pb2.ServerReflectionRequest(
- file_by_filename='i-donut-exist'),
- )
- responses = []
- async for response in self._stub.ServerReflectionInfo(iter(requests)):
- responses.append(response)
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- file_descriptor_response=reflection_pb2.FileDescriptorResponse(
- file_descriptor_proto=(
- _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self.assertSequenceEqual(expected_responses, responses)
-
- async def test_file_by_symbol(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- file_containing_symbol=_EMPTY_PROTO_SYMBOL_NAME),
- reflection_pb2.ServerReflectionRequest(
- file_containing_symbol='i.donut.exist.co.uk.org.net.me.name.foo'
- ),
- )
- responses = []
- async for response in self._stub.ServerReflectionInfo(iter(requests)):
- responses.append(response)
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- file_descriptor_response=reflection_pb2.FileDescriptorResponse(
- file_descriptor_proto=(
- _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self.assertSequenceEqual(expected_responses, responses)
-
- async def test_file_containing_extension(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- file_containing_extension=reflection_pb2.ExtensionRequest(
- containing_type=_EMPTY_EXTENSIONS_SYMBOL_NAME,
- extension_number=125,
- ),),
- reflection_pb2.ServerReflectionRequest(
- file_containing_extension=reflection_pb2.ExtensionRequest(
- containing_type='i.donut.exist.co.uk.org.net.me.name.foo',
- extension_number=55,
- ),),
- )
- responses = []
- async for response in self._stub.ServerReflectionInfo(iter(requests)):
- responses.append(response)
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- file_descriptor_response=reflection_pb2.FileDescriptorResponse(
- file_descriptor_proto=(_file_descriptor_to_proto(
- empty2_extensions_pb2.DESCRIPTOR),))),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self.assertSequenceEqual(expected_responses, responses)
-
- async def test_extension_numbers_of_type(self):
- requests = (
- reflection_pb2.ServerReflectionRequest(
- all_extension_numbers_of_type=_EMPTY_EXTENSIONS_SYMBOL_NAME),
- reflection_pb2.ServerReflectionRequest(
- all_extension_numbers_of_type='i.donut.exist.co.uk.net.name.foo'
- ),
- )
- responses = []
- async for response in self._stub.ServerReflectionInfo(iter(requests)):
- responses.append(response)
- expected_responses = (
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- all_extension_numbers_response=reflection_pb2.
- ExtensionNumberResponse(
- base_type_name=_EMPTY_EXTENSIONS_SYMBOL_NAME,
- extension_number=_EMPTY_EXTENSIONS_NUMBERS)),
- reflection_pb2.ServerReflectionResponse(
- valid_host='',
- error_response=reflection_pb2.ErrorResponse(
- error_code=grpc.StatusCode.NOT_FOUND.value[0],
- error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),
- )),
- )
- self.assertSequenceEqual(expected_responses, responses)
-
- async def test_list_services(self):
- requests = (reflection_pb2.ServerReflectionRequest(list_services='',),)
- responses = []
- async for response in self._stub.ServerReflectionInfo(iter(requests)):
- responses.append(response)
- expected_responses = (reflection_pb2.ServerReflectionResponse(
- valid_host='',
- list_services_response=reflection_pb2.ListServiceResponse(
- service=tuple(
- reflection_pb2.ServiceResponse(name=name)
- for name in _SERVICE_NAMES))),)
- self.assertSequenceEqual(expected_responses, responses)
-
- def test_reflection_service_name(self):
- self.assertEqual(reflection.SERVICE_NAME,
- 'grpc.reflection.v1alpha.ServerReflection')
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
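
The deleted test above drives reflection.enable_server_reflection against an asyncio server and then queries it through a ServerReflectionStub. As a point of reference, a minimal standalone server-side sketch of the same pattern could look like the following; the service name, port, and function name are placeholders rather than values taken from this tree.

import asyncio

from grpc.experimental import aio
from grpc_reflection.v1alpha import reflection


async def serve():
    server = aio.server()
    # Expose reflection for a placeholder service name plus the
    # reflection service itself, mirroring the setUp() above.
    service_names = ('example.ExampleService', reflection.SERVICE_NAME)
    reflection.enable_server_reflection(service_names, server)
    server.add_insecure_port('[::]:50051')
    await server.start()
    await server.wait_for_termination()


if __name__ == '__main__':
    asyncio.run(serve())
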
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/__init__.py
deleted file mode 100644
index 1517f71d09..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/grpc_status_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/grpc_status_test.py
deleted file mode 100644
index df5b75b9cd..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/status/grpc_status_test.py
+++ /dev/null
@@ -1,177 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests of grpc_status with gRPC AsyncIO stack."""
-
-import logging
-import traceback
-import unittest
-
-from google.protobuf import any_pb2
-from google.rpc import code_pb2
-from google.rpc import error_details_pb2
-from google.rpc import status_pb2
-import grpc
-from grpc.experimental import aio
-from grpc_status import rpc_status
-
-from tests_aio.unit._test_base import AioTestBase
-
-_STATUS_OK = '/test/StatusOK'
-_STATUS_NOT_OK = '/test/StatusNotOk'
-_ERROR_DETAILS = '/test/ErrorDetails'
-_INCONSISTENT = '/test/Inconsistent'
-_INVALID_CODE = '/test/InvalidCode'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-
-_GRPC_DETAILS_METADATA_KEY = 'grpc-status-details-bin'
-
-_STATUS_DETAILS = 'This is an error detail'
-_STATUS_DETAILS_ANOTHER = 'This is another error detail'
-
-
-async def _ok_unary_unary(request, servicer_context):
- return _RESPONSE
-
-
-async def _not_ok_unary_unary(request, servicer_context):
- await servicer_context.abort(grpc.StatusCode.INTERNAL, _STATUS_DETAILS)
-
-
-async def _error_details_unary_unary(request, servicer_context):
- details = any_pb2.Any()
- details.Pack(
- error_details_pb2.DebugInfo(stack_entries=traceback.format_stack(),
- detail='Intentionally invoked'))
- rich_status = status_pb2.Status(
- code=code_pb2.INTERNAL,
- message=_STATUS_DETAILS,
- details=[details],
- )
- await servicer_context.abort_with_status(rpc_status.to_status(rich_status))
-
-
-async def _inconsistent_unary_unary(request, servicer_context):
- rich_status = status_pb2.Status(
- code=code_pb2.INTERNAL,
- message=_STATUS_DETAILS,
- )
- servicer_context.set_code(grpc.StatusCode.NOT_FOUND)
- servicer_context.set_details(_STATUS_DETAILS_ANOTHER)
-    # The user put inconsistent status information in the trailing metadata
- servicer_context.set_trailing_metadata(
- ((_GRPC_DETAILS_METADATA_KEY, rich_status.SerializeToString()),))
-
-
-async def _invalid_code_unary_unary(request, servicer_context):
- rich_status = status_pb2.Status(
- code=42,
- message='Invalid code',
- )
- await servicer_context.abort_with_status(rpc_status.to_status(rich_status))
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _STATUS_OK:
- return grpc.unary_unary_rpc_method_handler(_ok_unary_unary)
- elif handler_call_details.method == _STATUS_NOT_OK:
- return grpc.unary_unary_rpc_method_handler(_not_ok_unary_unary)
- elif handler_call_details.method == _ERROR_DETAILS:
- return grpc.unary_unary_rpc_method_handler(
- _error_details_unary_unary)
- elif handler_call_details.method == _INCONSISTENT:
- return grpc.unary_unary_rpc_method_handler(
- _inconsistent_unary_unary)
- elif handler_call_details.method == _INVALID_CODE:
- return grpc.unary_unary_rpc_method_handler(
- _invalid_code_unary_unary)
- else:
- return None
-
-
-class StatusTest(AioTestBase):
-
- async def setUp(self):
- self._server = aio.server()
- self._server.add_generic_rpc_handlers((_GenericHandler(),))
- port = self._server.add_insecure_port('[::]:0')
- await self._server.start()
-
- self._channel = aio.insecure_channel('localhost:%d' % port)
-
- async def tearDown(self):
- await self._server.stop(None)
- await self._channel.close()
-
- async def test_status_ok(self):
- call = self._channel.unary_unary(_STATUS_OK)(_REQUEST)
-
-        # A successful RPC does not carry a rich status
- status = await rpc_status.aio.from_call(call)
- self.assertIs(status, None)
-
- async def test_status_not_ok(self):
- call = self._channel.unary_unary(_STATUS_NOT_OK)(_REQUEST)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
-
- self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
-        # A failed RPC doesn't automatically generate a rich status
- status = await rpc_status.aio.from_call(call)
- self.assertIs(status, None)
-
- async def test_error_details(self):
- call = self._channel.unary_unary(_ERROR_DETAILS)(_REQUEST)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
-
- status = await rpc_status.aio.from_call(call)
- self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL)
- self.assertEqual(status.code, code_pb2.Code.Value('INTERNAL'))
-
- # Check if the underlying proto message is intact
- self.assertTrue(status.details[0].Is(
- error_details_pb2.DebugInfo.DESCRIPTOR))
- info = error_details_pb2.DebugInfo()
- status.details[0].Unpack(info)
- self.assertIn('_error_details_unary_unary', info.stack_entries[-1])
-
- async def test_code_message_validation(self):
- call = self._channel.unary_unary(_INCONSISTENT)(_REQUEST)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
- self.assertEqual(rpc_error.code(), grpc.StatusCode.NOT_FOUND)
-
- # Code/Message validation failed
- with self.assertRaises(ValueError):
- await rpc_status.aio.from_call(call)
-
- async def test_invalid_code(self):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channel.unary_unary(_INVALID_CODE)(_REQUEST)
- rpc_error = exception_context.exception
- self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN)
-        # Invalid status code exception raised during conversion
- self.assertIn('Invalid status code', rpc_error.details())
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
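
The _error_details_unary_unary handler above shows the server-side half of rich error propagation: pack structured details into a google.rpc.Status and abort the RPC with it. A compact, self-contained handler sketch of that technique follows; the function name and message text are placeholders.

from google.protobuf import any_pb2
from google.rpc import code_pb2
from google.rpc import error_details_pb2
from google.rpc import status_pb2
from grpc_status import rpc_status


async def fail_with_details(request, servicer_context):
    # Pack structured error details into a google.rpc.Status, then abort
    # the RPC with it, as _error_details_unary_unary above does.
    detail = any_pb2.Any()
    detail.Pack(error_details_pb2.DebugInfo(detail='example failure'))
    rich_status = status_pb2.Status(
        code=code_pb2.INTERNAL,
        message='something went wrong',
        details=[detail],
    )
    await servicer_context.abort_with_status(rpc_status.to_status(rich_status))
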
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/__init__.py
deleted file mode 100644
index f4b321fc5b..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_common.py
deleted file mode 100644
index 05947733a0..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_common.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-from typing import AsyncIterable
-
-import grpc
-from grpc.aio._metadata import Metadata
-from grpc.aio._typing import MetadataKey
-from grpc.aio._typing import MetadataValue
-from grpc.aio._typing import MetadatumType
-from grpc.experimental import aio
-
-from tests.unit.framework.common import test_constants
-
-ADHOC_METHOD = '/test/AdHoc'
-
-
-def seen_metadata(expected: Metadata, actual: Metadata):
- return not bool(set(tuple(expected)) - set(tuple(actual)))
-
-
-def seen_metadatum(expected_key: MetadataKey, expected_value: MetadataValue,
- actual: Metadata) -> bool:
- obtained = actual[expected_key]
- return obtained == expected_value
-
-
-async def block_until_certain_state(channel: aio.Channel,
- expected_state: grpc.ChannelConnectivity):
- state = channel.get_state()
- while state != expected_state:
- await channel.wait_for_state_change(state)
- state = channel.get_state()
-
-
-def inject_callbacks(call: aio.Call):
- first_callback_ran = asyncio.Event()
-
- def first_callback(call):
-        # Validate that all responses have been received
-        # and the call is in an end state.
- assert call.done()
- first_callback_ran.set()
-
- second_callback_ran = asyncio.Event()
-
- def second_callback(call):
- # Validate that all responses have been received
-        # and the call is in an end state.
- assert call.done()
- second_callback_ran.set()
-
- call.add_done_callback(first_callback)
- call.add_done_callback(second_callback)
-
- async def validation():
- await asyncio.wait_for(
- asyncio.gather(first_callback_ran.wait(),
- second_callback_ran.wait()),
- test_constants.SHORT_TIMEOUT)
-
- return validation()
-
-
-class CountingRequestIterator:
-
- def __init__(self, request_iterator):
- self.request_cnt = 0
- self._request_iterator = request_iterator
-
- async def _forward_requests(self):
- async for request in self._request_iterator:
- self.request_cnt += 1
- yield request
-
- def __aiter__(self):
- return self._forward_requests()
-
-
-class CountingResponseIterator:
-
- def __init__(self, response_iterator):
- self.response_cnt = 0
- self._response_iterator = response_iterator
-
- async def _forward_responses(self):
- async for response in self._response_iterator:
- self.response_cnt += 1
- yield response
-
- def __aiter__(self):
- return self._forward_responses()
-
-
-class AdhocGenericHandler(grpc.GenericRpcHandler):
- """A generic handler to plugin testing server methods on the fly."""
- _handler: grpc.RpcMethodHandler
-
- def __init__(self):
- self._handler = None
-
- def set_adhoc_handler(self, handler: grpc.RpcMethodHandler):
- self._handler = handler
-
- def service(self, handler_call_details):
- if handler_call_details.method == ADHOC_METHOD:
- return self._handler
- else:
- return None
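
block_until_certain_state above is a small polling loop over the channel connectivity API (get_state plus wait_for_state_change). A self-contained variant that waits for READY could look like the sketch below; the helper name and target address are placeholders.

import asyncio

import grpc
from grpc.experimental import aio


async def wait_until_ready(channel: aio.Channel) -> None:
    # Poll the connectivity state until the channel reports READY,
    # re-arming the wait after every observed state change.
    state = channel.get_state(try_to_connect=True)
    while state != grpc.ChannelConnectivity.READY:
        await channel.wait_for_state_change(state)
        state = channel.get_state()


async def main():
    async with aio.insecure_channel('localhost:50051') as channel:
        await wait_until_ready(channel)


if __name__ == '__main__':
    asyncio.run(main())
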
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_constants.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_constants.py
deleted file mode 100644
index ab7e06f8fc..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_constants.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# If we use an unreachable IP, depending on the network stack, we might not
-# get an RST back fast enough. This used to cause tests to flake on different
-# platforms.
-UNREACHABLE_TARGET = 'foo/bar'
-UNARY_CALL_WITH_SLEEP_VALUE = 0.2
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_metadata_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_metadata_test.py
deleted file mode 100644
index c0594cb06a..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_metadata_test.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# Copyright 2020 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests for the metadata abstraction that's used in the asynchronous driver."""
-import logging
-import unittest
-
-from grpc.experimental.aio import Metadata
-
-
-class TestTypeMetadata(unittest.TestCase):
- """Tests for the metadata type"""
-
- _DEFAULT_DATA = (("key1", "value1"), ("key2", "value2"))
- _MULTI_ENTRY_DATA = (("key1", "value1"), ("key1", "other value 1"),
- ("key2", "value2"))
-
- def test_init_metadata(self):
- test_cases = {
- "emtpy": (),
- "with-single-data": self._DEFAULT_DATA,
- "with-multi-data": self._MULTI_ENTRY_DATA,
- }
- for case, args in test_cases.items():
- with self.subTest(case=case):
- metadata = Metadata(*args)
- self.assertEqual(len(metadata), len(args))
-
- def test_get_item(self):
- metadata = Metadata(("key", "value1"), ("key", "value2"),
- ("key2", "other value"))
- self.assertEqual(metadata["key"], "value1")
- self.assertEqual(metadata["key2"], "other value")
- self.assertEqual(metadata.get("key"), "value1")
- self.assertEqual(metadata.get("key2"), "other value")
-
- with self.assertRaises(KeyError):
- metadata["key not found"]
- self.assertIsNone(metadata.get("key not found"))
-
- def test_add_value(self):
- metadata = Metadata()
- metadata.add("key", "value")
- metadata.add("key", "second value")
- metadata.add("key2", "value2")
-
- self.assertEqual(metadata["key"], "value")
- self.assertEqual(metadata["key2"], "value2")
-
- def test_get_all_items(self):
- metadata = Metadata(*self._MULTI_ENTRY_DATA)
- self.assertEqual(metadata.get_all("key1"), ["value1", "other value 1"])
- self.assertEqual(metadata.get_all("key2"), ["value2"])
- self.assertEqual(metadata.get_all("non existing key"), [])
-
- def test_container(self):
- metadata = Metadata(*self._MULTI_ENTRY_DATA)
- self.assertIn("key1", metadata)
-
- def test_equals(self):
- metadata = Metadata()
- for key, value in self._DEFAULT_DATA:
- metadata.add(key, value)
- metadata2 = Metadata(*self._DEFAULT_DATA)
-
- self.assertEqual(metadata, metadata2)
- self.assertNotEqual(metadata, "foo")
-
- def test_repr(self):
- metadata = Metadata(*self._DEFAULT_DATA)
- expected = "Metadata({0!r})".format(self._DEFAULT_DATA)
- self.assertEqual(repr(metadata), expected)
-
- def test_set(self):
- metadata = Metadata(*self._MULTI_ENTRY_DATA)
- override_value = "override value"
- for _ in range(3):
- metadata["key1"] = override_value
-
- self.assertEqual(metadata["key1"], override_value)
- self.assertEqual(metadata.get_all("key1"),
- [override_value, "other value 1"])
-
- empty_metadata = Metadata()
- for _ in range(3):
- empty_metadata["key"] = override_value
-
- self.assertEqual(empty_metadata["key"], override_value)
- self.assertEqual(empty_metadata.get_all("key"), [override_value])
-
- def test_set_all(self):
- metadata = Metadata(*self._DEFAULT_DATA)
- metadata.set_all("key", ["value1", b"new value 2"])
-
- self.assertEqual(metadata["key"], "value1")
- self.assertEqual(metadata.get_all("key"), ["value1", b"new value 2"])
-
- def test_delete_values(self):
- metadata = Metadata(*self._MULTI_ENTRY_DATA)
- del metadata["key1"]
- self.assertEqual(metadata.get("key1"), "other value 1")
-
- metadata.delete_all("key1")
- self.assertNotIn("key1", metadata)
-
- metadata.delete_all("key2")
- self.assertEqual(len(metadata), 0)
-
- with self.assertRaises(KeyError):
- del metadata["other key"]
-
- def test_metadata_from_tuple(self):
- scenarios = (
- (None, Metadata()),
- (Metadata(), Metadata()),
- (self._DEFAULT_DATA, Metadata(*self._DEFAULT_DATA)),
- (self._MULTI_ENTRY_DATA, Metadata(*self._MULTI_ENTRY_DATA)),
- (Metadata(*self._DEFAULT_DATA), Metadata(*self._DEFAULT_DATA)),
- )
- for source, expected in scenarios:
- with self.subTest(raw_metadata=source, expected=expected):
- self.assertEqual(expected, Metadata.from_tuple(source))
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
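
The assertions above pin down the Metadata semantics: duplicate keys are preserved, indexing returns the first value, and get_all returns every value for a key. Condensed into a short usage snippet, that behaviour looks like this:

from grpc.experimental.aio import Metadata

# Build metadata from key/value pairs; duplicate keys are preserved and
# __getitem__ returns the first value, as the deleted tests assert.
metadata = Metadata(('key1', 'value1'), ('key1', 'other value 1'))
metadata.add('key2', 'value2')

assert metadata['key1'] == 'value1'
assert metadata.get_all('key1') == ['value1', 'other value 1']
assert 'key2' in metadata
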
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py
deleted file mode 100644
index fcd1e90a5a..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_base.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import functools
-import logging
-from typing import Callable
-import unittest
-
-from grpc.experimental import aio
-
-__all__ = ('AioTestBase',)
-
-_COROUTINE_FUNCTION_ALLOWLIST = ['setUp', 'tearDown']
-
-
-def _async_to_sync_decorator(f: Callable, loop: asyncio.AbstractEventLoop):
-
- @functools.wraps(f)
- def wrapper(*args, **kwargs):
- return loop.run_until_complete(f(*args, **kwargs))
-
- return wrapper
-
-
-def _get_default_loop(debug=True):
- try:
- loop = asyncio.get_event_loop()
- except:
- loop = asyncio.new_event_loop()
- asyncio.set_event_loop(loop)
- finally:
- loop.set_debug(debug)
- return loop
-
-
-# NOTE(gnossen) this test class can also be implemented with a metaclass.
-class AioTestBase(unittest.TestCase):
-    # NOTE(lidi) We need to pick one loop for the entire testing phase,
-    # otherwise new loops get created in new threads, which leads to deadlock.
- _TEST_LOOP = _get_default_loop()
-
- @property
- def loop(self):
- return self._TEST_LOOP
-
- def __getattribute__(self, name):
- """Overrides the loading logic to support coroutine functions."""
- attr = super().__getattribute__(name)
-
- # If possible, converts the coroutine into a sync function.
- if name.startswith('test_') or name in _COROUTINE_FUNCTION_ALLOWLIST:
- if asyncio.iscoroutinefunction(attr):
- return _async_to_sync_decorator(attr, self._TEST_LOOP)
- # For other attributes, let them pass.
- return attr
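
AioTestBase above intercepts attribute access so that coroutine setUp/tearDown/test_* methods run on one shared event loop without any decorator. A simplified sketch of the same shared-loop idea using an explicit decorator (a deliberate deviation from the attribute-interception approach; all names below are placeholders) might be:

import asyncio
import functools
import unittest

# One shared loop for the whole test run, mirroring the note above about
# avoiding the creation of new loops in new threads.
_LOOP = asyncio.new_event_loop()


def sync(coro_func):
    """Runs a coroutine test method on the shared loop."""

    @functools.wraps(coro_func)
    def wrapper(*args, **kwargs):
        return _LOOP.run_until_complete(coro_func(*args, **kwargs))

    return wrapper


class ExampleTest(unittest.TestCase):

    @sync
    async def test_sleep_then_assert(self):
        await asyncio.sleep(0)
        self.assertTrue(True)


if __name__ == '__main__':
    unittest.main(verbosity=2)
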
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py
deleted file mode 100644
index 0119fda37c..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/_test_server.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import datetime
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import empty_pb2
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.unit import resources
-from tests_aio.unit import _constants
-
-_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
-_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
-
-
-async def _maybe_echo_metadata(servicer_context):
- """Copies metadata from request to response if it is present."""
- invocation_metadata = dict(servicer_context.invocation_metadata())
- if _INITIAL_METADATA_KEY in invocation_metadata:
- initial_metadatum = (_INITIAL_METADATA_KEY,
- invocation_metadata[_INITIAL_METADATA_KEY])
- await servicer_context.send_initial_metadata((initial_metadatum,))
- if _TRAILING_METADATA_KEY in invocation_metadata:
- trailing_metadatum = (_TRAILING_METADATA_KEY,
- invocation_metadata[_TRAILING_METADATA_KEY])
- servicer_context.set_trailing_metadata((trailing_metadatum,))
-
-
-async def _maybe_echo_status(request: messages_pb2.SimpleRequest,
- servicer_context):
- """Echos the RPC status if demanded by the request."""
- if request.HasField('response_status'):
- await servicer_context.abort(request.response_status.code,
- request.response_status.message)
-
-
-class TestServiceServicer(test_pb2_grpc.TestServiceServicer):
-
- async def UnaryCall(self, request, context):
- await _maybe_echo_metadata(context)
- await _maybe_echo_status(request, context)
- return messages_pb2.SimpleResponse(
- payload=messages_pb2.Payload(type=messages_pb2.COMPRESSABLE,
- body=b'\x00' * request.response_size))
-
- async def EmptyCall(self, request, context):
- return empty_pb2.Empty()
-
- async def StreamingOutputCall(
- self, request: messages_pb2.StreamingOutputCallRequest,
- unused_context):
- for response_parameters in request.response_parameters:
- if response_parameters.interval_us != 0:
- await asyncio.sleep(
- datetime.timedelta(microseconds=response_parameters.
- interval_us).total_seconds())
- if response_parameters.size != 0:
- yield messages_pb2.StreamingOutputCallResponse(
- payload=messages_pb2.Payload(type=request.response_type,
- body=b'\x00' *
- response_parameters.size))
- else:
- yield messages_pb2.StreamingOutputCallResponse()
-
-    # The following methods are extra ones that are registered
-    # programmatically when the server is instantiated. They are not
-    # provided by the proto file.
- async def UnaryCallWithSleep(self, unused_request, unused_context):
- await asyncio.sleep(_constants.UNARY_CALL_WITH_SLEEP_VALUE)
- return messages_pb2.SimpleResponse()
-
- async def StreamingInputCall(self, request_async_iterator, unused_context):
- aggregate_size = 0
- async for request in request_async_iterator:
- if request.payload is not None and request.payload.body:
- aggregate_size += len(request.payload.body)
- return messages_pb2.StreamingInputCallResponse(
- aggregated_payload_size=aggregate_size)
-
- async def FullDuplexCall(self, request_async_iterator, context):
- await _maybe_echo_metadata(context)
- async for request in request_async_iterator:
- await _maybe_echo_status(request, context)
- for response_parameters in request.response_parameters:
- if response_parameters.interval_us != 0:
- await asyncio.sleep(
- datetime.timedelta(microseconds=response_parameters.
- interval_us).total_seconds())
- if response_parameters.size != 0:
- yield messages_pb2.StreamingOutputCallResponse(
- payload=messages_pb2.Payload(type=request.payload.type,
- body=b'\x00' *
- response_parameters.size))
- else:
- yield messages_pb2.StreamingOutputCallResponse()
-
-
-def _create_extra_generic_handler(servicer: TestServiceServicer):
-    # Programmatically add extra methods that are not provided by the
-    # proto file but are used during the tests.
- rpc_method_handlers = {
- 'UnaryCallWithSleep':
- grpc.unary_unary_rpc_method_handler(
- servicer.UnaryCallWithSleep,
- request_deserializer=messages_pb2.SimpleRequest.FromString,
- response_serializer=messages_pb2.SimpleResponse.
- SerializeToString)
- }
- return grpc.method_handlers_generic_handler('grpc.testing.TestService',
- rpc_method_handlers)
-
-
-async def start_test_server(port=0,
- secure=False,
- server_credentials=None,
- interceptors=None):
- server = aio.server(options=(('grpc.so_reuseport', 0),),
- interceptors=interceptors)
- servicer = TestServiceServicer()
- test_pb2_grpc.add_TestServiceServicer_to_server(servicer, server)
-
- server.add_generic_rpc_handlers((_create_extra_generic_handler(servicer),))
-
- if secure:
- if server_credentials is None:
- server_credentials = grpc.ssl_server_credentials([
- (resources.private_key(), resources.certificate_chain())
- ])
- port = server.add_secure_port('[::]:%d' % port, server_credentials)
- else:
- port = server.add_insecure_port('[::]:%d' % port)
-
- await server.start()
-
-    # NOTE(lidizheng) returning the server prevents it from being deallocated
- return 'localhost:%d' % port, server
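
_create_extra_generic_handler above registers a method with no counterpart in the .proto file by going through method_handlers_generic_handler. A minimal sketch of that registration pattern, with placeholder service, method, and function names, could be:

import grpc
from grpc.experimental import aio


async def _ping(request: bytes, unused_context) -> bytes:
    return request


def extra_handler() -> grpc.GenericRpcHandler:
    # Register a method that is not described in any .proto file, in the
    # same way _create_extra_generic_handler above adds UnaryCallWithSleep.
    # 'example.Extra' and 'Ping' are placeholder names.
    return grpc.method_handlers_generic_handler(
        'example.Extra',
        {'Ping': grpc.unary_unary_rpc_method_handler(_ping)})


async def serve() -> None:
    server = aio.server()
    server.add_generic_rpc_handlers((extra_handler(),))
    server.add_insecure_port('[::]:0')
    await server.start()
    await server.wait_for_termination()
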
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/abort_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/abort_test.py
deleted file mode 100644
index 45ef9481a8..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/abort_test.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import gc
-import logging
-import time
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests.unit.framework.common import test_constants
-from tests_aio.unit._test_base import AioTestBase
-
-_UNARY_UNARY_ABORT = '/test/UnaryUnaryAbort'
-_SUPPRESS_ABORT = '/test/SuppressAbort'
-_REPLACE_ABORT = '/test/ReplaceAbort'
-_ABORT_AFTER_REPLY = '/test/AbortAfterReply'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-_NUM_STREAM_RESPONSES = 5
-
-_ABORT_CODE = grpc.StatusCode.RESOURCE_EXHAUSTED
-_ABORT_DETAILS = 'Phony error details'
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- @staticmethod
- async def _unary_unary_abort(unused_request, context):
- await context.abort(_ABORT_CODE, _ABORT_DETAILS)
- raise RuntimeError('This line should not be executed')
-
- @staticmethod
- async def _suppress_abort(unused_request, context):
- try:
- await context.abort(_ABORT_CODE, _ABORT_DETAILS)
- except aio.AbortError as e:
- pass
- return _RESPONSE
-
- @staticmethod
- async def _replace_abort(unused_request, context):
- try:
- await context.abort(_ABORT_CODE, _ABORT_DETAILS)
- except aio.AbortError as e:
- await context.abort(grpc.StatusCode.INVALID_ARGUMENT,
- 'Override abort!')
-
- @staticmethod
- async def _abort_after_reply(unused_request, context):
- yield _RESPONSE
- await context.abort(_ABORT_CODE, _ABORT_DETAILS)
- raise RuntimeError('This line should not be executed')
-
- def service(self, handler_details):
- if handler_details.method == _UNARY_UNARY_ABORT:
- return grpc.unary_unary_rpc_method_handler(self._unary_unary_abort)
- if handler_details.method == _SUPPRESS_ABORT:
- return grpc.unary_unary_rpc_method_handler(self._suppress_abort)
- if handler_details.method == _REPLACE_ABORT:
- return grpc.unary_unary_rpc_method_handler(self._replace_abort)
- if handler_details.method == _ABORT_AFTER_REPLY:
- return grpc.unary_stream_rpc_method_handler(self._abort_after_reply)
-
-
-async def _start_test_server():
- server = aio.server()
- port = server.add_insecure_port('[::]:0')
- server.add_generic_rpc_handlers((_GenericHandler(),))
- await server.start()
- return 'localhost:%d' % port, server
-
-
-class TestAbort(AioTestBase):
-
- async def setUp(self):
- address, self._server = await _start_test_server()
- self._channel = aio.insecure_channel(address)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_unary_unary_abort(self):
- method = self._channel.unary_unary(_UNARY_UNARY_ABORT)
- call = method(_REQUEST)
-
- self.assertEqual(_ABORT_CODE, await call.code())
- self.assertEqual(_ABORT_DETAILS, await call.details())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(_ABORT_CODE, rpc_error.code())
- self.assertEqual(_ABORT_DETAILS, rpc_error.details())
-
- async def test_suppress_abort(self):
- method = self._channel.unary_unary(_SUPPRESS_ABORT)
- call = method(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(_ABORT_CODE, rpc_error.code())
- self.assertEqual(_ABORT_DETAILS, rpc_error.details())
-
- async def test_replace_abort(self):
- method = self._channel.unary_unary(_REPLACE_ABORT)
- call = method(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(_ABORT_CODE, rpc_error.code())
- self.assertEqual(_ABORT_DETAILS, rpc_error.details())
-
- async def test_abort_after_reply(self):
- method = self._channel.unary_stream(_ABORT_AFTER_REPLY)
- call = method(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.read()
- await call.read()
-
- rpc_error = exception_context.exception
- self.assertEqual(_ABORT_CODE, rpc_error.code())
- self.assertEqual(_ABORT_DETAILS, rpc_error.details())
-
- self.assertEqual(_ABORT_CODE, await call.code())
- self.assertEqual(_ABORT_DETAILS, await call.details())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
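
The abort tests above rely on context.abort() raising inside the handler and surfacing on the client as an AioRpcError carrying the same code and details. A compact end-to-end sketch of that flow follows; the method path and handler/function names are placeholders.

import asyncio

import grpc
from grpc.experimental import aio

_METHOD = '/example/Abort'  # placeholder method path


async def _abort_handler(unused_request, context):
    # abort() raises internally, so nothing after it runs.
    await context.abort(grpc.StatusCode.RESOURCE_EXHAUSTED, 'example details')


class _Handler(grpc.GenericRpcHandler):

    def service(self, handler_call_details):
        if handler_call_details.method == _METHOD:
            return grpc.unary_unary_rpc_method_handler(_abort_handler)
        return None


async def main():
    server = aio.server()
    port = server.add_insecure_port('[::]:0')
    server.add_generic_rpc_handlers((_Handler(),))
    await server.start()

    async with aio.insecure_channel('localhost:%d' % port) as channel:
        try:
            await channel.unary_unary(_METHOD)(b'')
        except aio.AioRpcError as rpc_error:
            print(rpc_error.code(), rpc_error.details())
    await server.stop(None)


if __name__ == '__main__':
    asyncio.run(main())
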
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/aio_rpc_error_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/aio_rpc_error_test.py
deleted file mode 100644
index 730871d1be..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/aio_rpc_error_test.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests AioRpcError class."""
-
-import logging
-import unittest
-
-import grpc
-from grpc.aio._call import AioRpcError
-from grpc.experimental import aio
-
-from tests_aio.unit._test_base import AioTestBase
-
-_TEST_INITIAL_METADATA = aio.Metadata(
- ('initial metadata key', 'initial metadata value'))
-_TEST_TRAILING_METADATA = aio.Metadata(
- ('trailing metadata key', 'trailing metadata value'))
-_TEST_DEBUG_ERROR_STRING = '{This is a debug string}'
-
-
-class TestAioRpcError(unittest.TestCase):
-
- def test_attributes(self):
- aio_rpc_error = AioRpcError(grpc.StatusCode.CANCELLED,
- initial_metadata=_TEST_INITIAL_METADATA,
- trailing_metadata=_TEST_TRAILING_METADATA,
- details="details",
- debug_error_string=_TEST_DEBUG_ERROR_STRING)
- self.assertEqual(aio_rpc_error.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(aio_rpc_error.details(), 'details')
- self.assertEqual(aio_rpc_error.initial_metadata(),
- _TEST_INITIAL_METADATA)
- self.assertEqual(aio_rpc_error.trailing_metadata(),
- _TEST_TRAILING_METADATA)
- self.assertEqual(aio_rpc_error.debug_error_string(),
- _TEST_DEBUG_ERROR_STRING)
-
-
-if __name__ == '__main__':
- logging.basicConfig()
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/auth_context_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/auth_context_test.py
deleted file mode 100644
index 698f94b48e..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/auth_context_test.py
+++ /dev/null
@@ -1,194 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Porting auth context tests from sync stack."""
-
-import logging
-import pickle
-import unittest
-
-import grpc
-from grpc.experimental import aio
-from grpc.experimental import session_cache
-import six
-
-from tests.unit import resources
-from tests_aio.unit._test_base import AioTestBase
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x00\x00\x00'
-
-_UNARY_UNARY = '/test/UnaryUnary'
-
-_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
-_CLIENT_IDS = (
- b'*.test.google.fr',
- b'waterzooi.test.google.be',
- b'*.test.youtube.com',
- b'192.168.1.3',
-)
-_ID = 'id'
-_ID_KEY = 'id_key'
-_AUTH_CTX = 'auth_ctx'
-
-_PRIVATE_KEY = resources.private_key()
-_CERTIFICATE_CHAIN = resources.certificate_chain()
-_TEST_ROOT_CERTIFICATES = resources.test_root_certificates()
-_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),)
-_PROPERTY_OPTIONS = ((
- 'grpc.ssl_target_name_override',
- _SERVER_HOST_OVERRIDE,
-),)
-
-
-async def handle_unary_unary(unused_request: bytes,
- servicer_context: aio.ServicerContext):
- return pickle.dumps({
- _ID: servicer_context.peer_identities(),
- _ID_KEY: servicer_context.peer_identity_key(),
- _AUTH_CTX: servicer_context.auth_context()
- })
-
-
-class TestAuthContext(AioTestBase):
-
- async def test_insecure(self):
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = aio.server()
- server.add_generic_rpc_handlers((handler,))
- port = server.add_insecure_port('[::]:0')
- await server.start()
-
- async with aio.insecure_channel('localhost:%d' % port) as channel:
- response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- await server.stop(None)
-
- auth_data = pickle.loads(response)
- self.assertIsNone(auth_data[_ID])
- self.assertIsNone(auth_data[_ID_KEY])
- self.assertDictEqual({}, auth_data[_AUTH_CTX])
-
- async def test_secure_no_cert(self):
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = aio.server()
- server.add_generic_rpc_handlers((handler,))
- server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
- port = server.add_secure_port('[::]:0', server_cred)
- await server.start()
-
- channel_creds = grpc.ssl_channel_credentials(
- root_certificates=_TEST_ROOT_CERTIFICATES)
- channel = aio.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=_PROPERTY_OPTIONS)
- response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- await channel.close()
- await server.stop(None)
-
- auth_data = pickle.loads(response)
- self.assertIsNone(auth_data[_ID])
- self.assertIsNone(auth_data[_ID_KEY])
- self.assertDictEqual(
- {
- 'security_level': [b'TSI_PRIVACY_AND_INTEGRITY'],
- 'transport_security_type': [b'ssl'],
- 'ssl_session_reused': [b'false'],
- }, auth_data[_AUTH_CTX])
-
- async def test_secure_client_cert(self):
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = aio.server()
- server.add_generic_rpc_handlers((handler,))
- server_cred = grpc.ssl_server_credentials(
- _SERVER_CERTS,
- root_certificates=_TEST_ROOT_CERTIFICATES,
- require_client_auth=True)
- port = server.add_secure_port('[::]:0', server_cred)
- await server.start()
-
- channel_creds = grpc.ssl_channel_credentials(
- root_certificates=_TEST_ROOT_CERTIFICATES,
- private_key=_PRIVATE_KEY,
- certificate_chain=_CERTIFICATE_CHAIN)
- channel = aio.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=_PROPERTY_OPTIONS)
-
- response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- await channel.close()
- await server.stop(None)
-
- auth_data = pickle.loads(response)
- auth_ctx = auth_data[_AUTH_CTX]
- self.assertCountEqual(_CLIENT_IDS, auth_data[_ID])
- self.assertEqual('x509_subject_alternative_name', auth_data[_ID_KEY])
- self.assertSequenceEqual([b'ssl'], auth_ctx['transport_security_type'])
- self.assertSequenceEqual([b'*.test.google.com'],
- auth_ctx['x509_common_name'])
-
- async def _do_one_shot_client_rpc(self, channel_creds, channel_options,
- port, expect_ssl_session_reused):
- channel = aio.secure_channel('localhost:{}'.format(port),
- channel_creds,
- options=channel_options)
- response = await channel.unary_unary(_UNARY_UNARY)(_REQUEST)
- auth_data = pickle.loads(response)
- self.assertEqual(expect_ssl_session_reused,
- auth_data[_AUTH_CTX]['ssl_session_reused'])
- await channel.close()
-
- async def test_session_resumption(self):
- # Set up a secure server
- handler = grpc.method_handlers_generic_handler('test', {
- 'UnaryUnary':
- grpc.unary_unary_rpc_method_handler(handle_unary_unary)
- })
- server = aio.server()
- server.add_generic_rpc_handlers((handler,))
- server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
- port = server.add_secure_port('[::]:0', server_cred)
- await server.start()
-
- # Create a cache for TLS session tickets
- cache = session_cache.ssl_session_cache_lru(1)
- channel_creds = grpc.ssl_channel_credentials(
- root_certificates=_TEST_ROOT_CERTIFICATES)
- channel_options = _PROPERTY_OPTIONS + (
- ('grpc.ssl_session_cache', cache),)
-
- # Initial connection has no session to resume
- await self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port,
- expect_ssl_session_reused=[b'false'])
-
- # Subsequent connections resume sessions
- await self._do_one_shot_client_rpc(channel_creds,
- channel_options,
- port,
- expect_ssl_session_reused=[b'true'])
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main()
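
test_session_resumption above reuses TLS sessions by attaching an ssl_session_cache_lru cache through the grpc.ssl_session_cache channel option. A small sketch of wiring that option into a channel factory follows; the factory name is a placeholder and the credentials use default roots rather than the test certificates.

import grpc
from grpc.experimental import aio
from grpc.experimental import session_cache

# One shared TLS session-ticket cache: channels created with it can resume
# sessions on reconnect, as test_session_resumption above exercises.
_SESSION_CACHE = session_cache.ssl_session_cache_lru(1)


def resumable_channel(target: str) -> aio.Channel:
    credentials = grpc.ssl_channel_credentials()  # default roots; placeholder
    return aio.secure_channel(
        target,
        credentials,
        options=(('grpc.ssl_session_cache', _SESSION_CACHE),),
    )
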
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py
deleted file mode 100644
index ab8dc19f48..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/call_test.py
+++ /dev/null
@@ -1,846 +0,0 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior of the Call classes."""
-
-import asyncio
-import datetime
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_SHORT_TIMEOUT_S = datetime.timedelta(seconds=1).total_seconds()
-
-_NUM_STREAM_RESPONSES = 5
-_RESPONSE_PAYLOAD_SIZE = 42
-_REQUEST_PAYLOAD_SIZE = 7
-_LOCAL_CANCEL_DETAILS_EXPECTATION = 'Locally cancelled by application!'
-_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
-_INFINITE_INTERVAL_US = 2**31 - 1
-
-
-class _MulticallableTestMixin():
-
- async def setUp(self):
- address, self._server = await start_test_server()
- self._channel = aio.insecure_channel(address)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
-
-class TestUnaryUnaryCall(_MulticallableTestMixin, AioTestBase):
-
- async def test_call_to_string(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- self.assertTrue(str(call) is not None)
- self.assertTrue(repr(call) is not None)
-
- await call
-
- self.assertTrue(str(call) is not None)
- self.assertTrue(repr(call) is not None)
-
- async def test_call_ok(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- self.assertFalse(call.done())
-
- response = await call
-
- self.assertTrue(call.done())
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
-        # The response is cached at the call-object level, so awaiting
-        # the call again returns the same response object.
- response_retry = await call
- self.assertIs(response, response_retry)
-
- async def test_call_rpc_error(self):
- async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- call = stub.UnaryCall(messages_pb2.SimpleRequest())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
-
- async def test_call_code_awaitable(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_call_details_awaitable(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- self.assertEqual('', await call.details())
-
- async def test_call_initial_metadata_awaitable(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- self.assertEqual(aio.Metadata(), await call.initial_metadata())
-
- async def test_call_trailing_metadata_awaitable(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- self.assertEqual(aio.Metadata(), await call.trailing_metadata())
-
- async def test_call_initial_metadata_cancelable(self):
- coro_started = asyncio.Event()
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- async def coro():
- coro_started.set()
- await call.initial_metadata()
-
- task = self.loop.create_task(coro())
- await coro_started.wait()
- task.cancel()
-
-        # Test that initial metadata can still be fetched even though
-        # a cancellation happened in the previous task.
- self.assertEqual(aio.Metadata(), await call.initial_metadata())
-
- async def test_call_initial_metadata_multiple_waiters(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- async def coro():
- return await call.initial_metadata()
-
- task1 = self.loop.create_task(coro())
- task2 = self.loop.create_task(coro())
-
- await call
- expected = [aio.Metadata() for _ in range(2)]
- self.assertEqual(expected, await asyncio.gather(*[task1, task2]))
-
- async def test_call_code_cancelable(self):
- coro_started = asyncio.Event()
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- async def coro():
- coro_started.set()
- await call.code()
-
- task = self.loop.create_task(coro())
- await coro_started.wait()
- task.cancel()
-
-        # Test that the status code can still be fetched even though
-        # a cancellation happened in the previous task.
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_call_code_multiple_waiters(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- async def coro():
- return await call.code()
-
- task1 = self.loop.create_task(coro())
- task2 = self.loop.create_task(coro())
-
- await call
-
- self.assertEqual([grpc.StatusCode.OK, grpc.StatusCode.OK], await
- asyncio.gather(task1, task2))
-
- async def test_cancel_unary_unary(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- self.assertFalse(call.cancelled())
-
- self.assertTrue(call.cancel())
- self.assertFalse(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
-        # The info in the RpcError should match the info in the Call object.
- self.assertTrue(call.cancelled())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.details(),
- 'Locally cancelled by application!')
-
- async def test_cancel_unary_unary_in_task(self):
- coro_started = asyncio.Event()
- call = self._stub.EmptyCall(messages_pb2.SimpleRequest())
-
- async def another_coro():
- coro_started.set()
- await call
-
- task = self.loop.create_task(another_coro())
- await coro_started.wait()
-
- self.assertFalse(task.done())
- task.cancel()
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- async def test_passing_credentials_fails_over_insecure_channel(self):
- call_credentials = grpc.composite_call_credentials(
- grpc.access_token_call_credentials("abc"),
- grpc.access_token_call_credentials("def"),
- )
- with self.assertRaisesRegex(
- aio.UsageError,
- "Call credentials are only valid on secure channels"):
- self._stub.UnaryCall(messages_pb2.SimpleRequest(),
- credentials=call_credentials)
-
-
-class TestUnaryStreamCall(_MulticallableTestMixin, AioTestBase):
-
- async def test_call_rpc_error(self):
- channel = aio.insecure_channel(UNREACHABLE_TARGET)
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- async for response in call:
- pass
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
- await channel.close()
-
- async def test_cancel_unary_stream(self):
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
- self.assertFalse(call.cancelled())
-
- response = await call.read()
- self.assertIs(type(response), messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertTrue(call.cancel())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
- self.assertEqual(_LOCAL_CANCEL_DETAILS_EXPECTATION, await
- call.details())
- self.assertFalse(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call.read()
- self.assertTrue(call.cancelled())
-
- async def test_multiple_cancel_unary_stream(self):
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
- self.assertFalse(call.cancelled())
-
- response = await call.read()
- self.assertIs(type(response), messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertTrue(call.cancel())
- self.assertFalse(call.cancel())
- self.assertFalse(call.cancel())
- self.assertFalse(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call.read()
-
- async def test_early_cancel_unary_stream(self):
- """Test cancellation before receiving messages."""
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertFalse(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call.read()
-
- self.assertTrue(call.cancelled())
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
- self.assertEqual(_LOCAL_CANCEL_DETAILS_EXPECTATION, await
- call.details())
-
- async def test_late_cancel_unary_stream(self):
- """Test cancellation after received all messages."""
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
-        # After all messages are received, the final status may already have
-        # arrived or still be on its way. It's basically a data race, so our
-        # expectation here is simply not to crash :)
- call.cancel()
- self.assertIn(await call.code(),
- [grpc.StatusCode.OK, grpc.StatusCode.CANCELLED])
-
- async def test_too_many_reads_unary_stream(self):
- """Test calling read after received all messages fails."""
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
- self.assertIs(await call.read(), aio.EOF)
-
-        # After the RPC is finished, further reads simply return EOF.
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertIs(await call.read(), aio.EOF)
-
- async def test_unary_stream_async_generator(self):
- """Sunny day test case for unary_stream."""
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
- self.assertFalse(call.cancelled())
-
- async for response in call:
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_cancel_unary_stream_in_task_using_read(self):
- coro_started = asyncio.Event()
-
-        # Configures the server method to block forever
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_INFINITE_INTERVAL_US,
- ))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- async def another_coro():
- coro_started.set()
- await call.read()
-
- task = self.loop.create_task(another_coro())
- await coro_started.wait()
-
- self.assertFalse(task.done())
- task.cancel()
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- async def test_cancel_unary_stream_in_task_using_async_for(self):
- coro_started = asyncio.Event()
-
-        # Configures the server method to block forever
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_INFINITE_INTERVAL_US,
- ))
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- async def another_coro():
- coro_started.set()
- async for _ in call:
- pass
-
- task = self.loop.create_task(another_coro())
- await coro_started.wait()
-
- self.assertFalse(task.done())
- task.cancel()
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- with self.assertRaises(asyncio.CancelledError):
- await task
-
- async def test_time_remaining(self):
- request = messages_pb2.StreamingOutputCallRequest()
- # First message comes back immediately
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
- # Second message comes back after a unit of wait time
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- call = self._stub.StreamingOutputCall(request,
- timeout=_SHORT_TIMEOUT_S * 2)
-
- response = await call.read()
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- # Should be around the same as the timeout
- remained_time = call.time_remaining()
- self.assertGreater(remained_time, _SHORT_TIMEOUT_S * 3 / 2)
- self.assertLess(remained_time, _SHORT_TIMEOUT_S * 5 / 2)
-
- response = await call.read()
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- # Should be around the timeout minus a unit of wait time
- remained_time = call.time_remaining()
- self.assertGreater(remained_time, _SHORT_TIMEOUT_S / 2)
- self.assertLess(remained_time, _SHORT_TIMEOUT_S * 3 / 2)
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_empty_responses(self):
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters())
-
- # Invokes the actual RPC
- call = self._stub.StreamingOutputCall(request)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(b'', response.SerializeToString())
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
-
-class TestStreamUnaryCall(_MulticallableTestMixin, AioTestBase):
-
- async def test_cancel_stream_unary(self):
- call = self._stub.StreamingInputCall()
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- # Sends out requests
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
-
- await call.done_writing()
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- async def test_early_cancel_stream_unary(self):
- call = self._stub.StreamingInputCall()
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
-
- with self.assertRaises(asyncio.InvalidStateError):
- await call.write(messages_pb2.StreamingInputCallRequest())
-
- # Should be no-op
- await call.done_writing()
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- async def test_write_after_done_writing(self):
- call = self._stub.StreamingInputCall()
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- # Sends out requests
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
-
- # Signals that the client has finished sending requests
- await call.done_writing()
-
- with self.assertRaises(asyncio.InvalidStateError):
- await call.write(messages_pb2.StreamingInputCallRequest())
-
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_error_in_async_generator(self):
- # Server will pause between responses
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- # We expect the request iterator to receive the exception
- request_iterator_received_the_exception = asyncio.Event()
-
- async def request_iterator():
- with self.assertRaises(asyncio.CancelledError):
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
- await asyncio.sleep(_SHORT_TIMEOUT_S)
- request_iterator_received_the_exception.set()
-
- call = self._stub.StreamingInputCall(request_iterator())
-
- # Cancel the RPC after at least one response
- async def cancel_later():
- await asyncio.sleep(_SHORT_TIMEOUT_S * 2)
- call.cancel()
-
- cancel_later_task = self.loop.create_task(cancel_later())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- await request_iterator_received_the_exception.wait()
-
- # No failures in the cancel later task!
- await cancel_later_task
-
- async def test_normal_iterable_requests(self):
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
- requests = [request] * _NUM_STREAM_RESPONSES
-
- # Sends out requests
- call = self._stub.StreamingInputCall(requests)
-
- # RPC should succeed
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_call_rpc_error(self):
- async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # The error should be raised automatically without any traffic.
- call = stub.StreamingInputCall()
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
-
- async def test_timeout(self):
- call = self._stub.StreamingInputCall(timeout=_SHORT_TIMEOUT_S)
-
- # The error should be raised automatically without any traffic.
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, await call.code())
-
-
-# Prepares the requests that are streamed in a ping-pong manner.
-_STREAM_OUTPUT_REQUEST_ONE_RESPONSE = messages_pb2.StreamingOutputCallRequest()
-_STREAM_OUTPUT_REQUEST_ONE_RESPONSE.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-_STREAM_OUTPUT_REQUEST_ONE_EMPTY_RESPONSE = messages_pb2.StreamingOutputCallRequest(
-)
-_STREAM_OUTPUT_REQUEST_ONE_EMPTY_RESPONSE.response_parameters.append(
- messages_pb2.ResponseParameters())
-
-
-class TestStreamStreamCall(_MulticallableTestMixin, AioTestBase):
-
- async def test_cancel(self):
- # Invokes the actual RPC
- call = self._stub.FullDuplexCall()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
- response = await call.read()
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_cancel_with_pending_read(self):
- call = self._stub.FullDuplexCall()
-
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_cancel_with_ongoing_read(self):
- call = self._stub.FullDuplexCall()
- coro_started = asyncio.Event()
-
- async def read_coro():
- coro_started.set()
- await call.read()
-
- read_task = self.loop.create_task(read_coro())
- await coro_started.wait()
- self.assertFalse(read_task.done())
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_early_cancel(self):
- call = self._stub.FullDuplexCall()
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_cancel_after_done_writing(self):
- call = self._stub.FullDuplexCall()
- await call.done_writing()
-
- # Cancels the RPC
- self.assertFalse(call.done())
- self.assertFalse(call.cancelled())
- self.assertTrue(call.cancel())
- self.assertTrue(call.cancelled())
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_late_cancel(self):
- call = self._stub.FullDuplexCall()
- await call.done_writing()
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- # Cancels the RPC
- self.assertTrue(call.done())
- self.assertFalse(call.cancelled())
- self.assertFalse(call.cancel())
- self.assertFalse(call.cancelled())
-
- # Status is still OK
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_async_generator(self):
-
- async def request_generator():
- yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
- yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
-
- call = self._stub.FullDuplexCall(request_generator())
- async for response in call:
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_too_many_reads(self):
-
- async def request_generator():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
-
- call = self._stub.FullDuplexCall(request_generator())
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
- self.assertIs(await call.read(), aio.EOF)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- # After the RPC finished, the read should also produce EOF
- self.assertIs(await call.read(), aio.EOF)
-
- async def test_read_write_after_done_writing(self):
- call = self._stub.FullDuplexCall()
-
- # Writes two requests; the two corresponding responses are left pending
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
- await call.done_writing()
-
- # Further write should fail
- with self.assertRaises(asyncio.InvalidStateError):
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_RESPONSE)
-
- # But read should be unaffected
- response = await call.read()
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
- response = await call.read()
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_error_in_async_generator(self):
- # Server will pause between responses
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE,
- interval_us=_RESPONSE_INTERVAL_US,
- ))
-
- # We expect the request iterator to receive the exception
- request_iterator_received_the_exception = asyncio.Event()
-
- async def request_iterator():
- with self.assertRaises(asyncio.CancelledError):
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
- await asyncio.sleep(_SHORT_TIMEOUT_S)
- request_iterator_received_the_exception.set()
-
- call = self._stub.FullDuplexCall(request_iterator())
-
- # Cancel the RPC after at least one response
- async def cancel_later():
- await asyncio.sleep(_SHORT_TIMEOUT_S * 2)
- call.cancel()
-
- cancel_later_task = self.loop.create_task(cancel_later())
-
- with self.assertRaises(asyncio.CancelledError):
- async for response in call:
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- await request_iterator_received_the_exception.wait()
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
- # No failures in the cancel later task!
- await cancel_later_task
-
- async def test_normal_iterable_requests(self):
- requests = [_STREAM_OUTPUT_REQUEST_ONE_RESPONSE] * _NUM_STREAM_RESPONSES
-
- call = self._stub.FullDuplexCall(iter(requests))
- async for response in call:
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_empty_ping_pong(self):
- call = self._stub.FullDuplexCall()
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(_STREAM_OUTPUT_REQUEST_ONE_EMPTY_RESPONSE)
- response = await call.read()
- self.assertEqual(b'', response.SerializeToString())
- await call.done_writing()
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
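
The deleted call tests above all revolve around the same grpc.aio client patterns: reading a unary-stream call until it yields aio.EOF, half-closing a stream-unary call with done_writing(), and checking the final status via await call.code(). A minimal stand-alone sketch of those patterns, assuming a grpc.testing TestService is reachable at a hypothetical localhost:50051 and that the generated messages_pb2/test_pb2_grpc modules are importable:

import asyncio

import grpc
from grpc.experimental import aio

from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc

_TARGET = 'localhost:50051'  # hypothetical server address


async def main():
    async with aio.insecure_channel(_TARGET) as channel:
        stub = test_pb2_grpc.TestServiceStub(channel)

        # Unary-stream: read responses until the call returns aio.EOF.
        request = messages_pb2.StreamingOutputCallRequest()
        request.response_parameters.append(
            messages_pb2.ResponseParameters(size=42))
        call = stub.StreamingOutputCall(request)
        while True:
            response = await call.read()
            if response is aio.EOF:
                break
            print('received', len(response.payload.body), 'bytes')
        assert await call.code() == grpc.StatusCode.OK

        # Stream-unary: write requests, then half-close the request stream.
        upload = stub.StreamingInputCall()
        payload = messages_pb2.Payload(body=b'\0' * 7)
        await upload.write(
            messages_pb2.StreamingInputCallRequest(payload=payload))
        await upload.done_writing()
        summary = await upload  # awaiting the call yields the single response
        print('uploaded', summary.aggregated_payload_size, 'bytes')


if __name__ == '__main__':
    asyncio.run(main())
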
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_argument_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_argument_test.py
deleted file mode 100644
index 6eb4c3c2d5..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_argument_test.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright 2019 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior around the Core channel arguments."""
-
-import asyncio
-import errno
-import logging
-import platform
-import random
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.unit.framework import common
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_RANDOM_SEED = 42
-
-_ENABLE_REUSE_PORT = 'SO_REUSEPORT enabled'
-_DISABLE_REUSE_PORT = 'SO_REUSEPORT disabled'
-_SOCKET_OPT_SO_REUSEPORT = 'grpc.so_reuseport'
-_OPTIONS = (
- (_ENABLE_REUSE_PORT, ((_SOCKET_OPT_SO_REUSEPORT, 1),)),
- (_DISABLE_REUSE_PORT, ((_SOCKET_OPT_SO_REUSEPORT, 0),)),
-)
-
-_NUM_SERVER_CREATED = 5
-
-_GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH = 'grpc.max_receive_message_length'
-_MAX_MESSAGE_LENGTH = 1024
-
-_ADDRESS_TOKEN_ERRNO = errno.EADDRINUSE, errno.ENOSR
-
-
-class _TestPointerWrapper(object):
-
- def __int__(self):
- return 123456
-
-
-_TEST_CHANNEL_ARGS = (
- ('arg1', b'bytes_val'),
- ('arg2', 'str_val'),
- ('arg3', 1),
- (b'arg4', 'str_val'),
- ('arg6', _TestPointerWrapper()),
-)
-
-_INVALID_TEST_CHANNEL_ARGS = [
- {
- 'foo': 'bar'
- },
- (('key',),),
- 'str',
-]
-
-
-async def test_if_reuse_port_enabled(server: aio.Server):
- port = server.add_insecure_port('localhost:0')
- await server.start()
-
- try:
- with common.bound_socket(
- bind_address='localhost',
- port=port,
- listen=False,
- ) as (unused_host, bound_port):
- assert bound_port == port
- except OSError as e:
- if e.errno in _ADDRESS_TOKEN_ERRNO:
- return False
- else:
- logging.exception(e)
- raise
- else:
- return True
-
-
-class TestChannelArgument(AioTestBase):
-
- async def setUp(self):
- random.seed(_RANDOM_SEED)
-
- @unittest.skipIf(platform.system() == 'Windows',
- 'SO_REUSEPORT only available in Linux-like OS.')
- @unittest.skipIf('aarch64' in platform.machine(),
- 'SO_REUSEPORT needs to be enabled in Core\'s port.h.')
- async def test_server_so_reuse_port_is_set_properly(self):
-
- async def test_body():
- fact, options = random.choice(_OPTIONS)
- server = aio.server(options=options)
- try:
- result = await test_if_reuse_port_enabled(server)
- if fact == _ENABLE_REUSE_PORT and not result:
- self.fail(
- 'Enabled reuse port in options, but not observed in socket'
- )
- elif fact == _DISABLE_REUSE_PORT and result:
- self.fail(
- 'Disabled reuse port in options, but observed in socket'
- )
- finally:
- await server.stop(None)
-
- # Creating a lot of servers concurrently
- await asyncio.gather(*(test_body() for _ in range(_NUM_SERVER_CREATED)))
-
- async def test_client(self):
- # Must not segfault or raise an exception!
- channel = aio.insecure_channel('[::]:0', options=_TEST_CHANNEL_ARGS)
- await channel.close()
-
- async def test_server(self):
- # Must not segfault or raise an exception!
- server = aio.server(options=_TEST_CHANNEL_ARGS)
- await server.stop(None)
-
- async def test_invalid_client_args(self):
- for invalid_arg in _INVALID_TEST_CHANNEL_ARGS:
- self.assertRaises((ValueError, TypeError),
- aio.insecure_channel,
- '[::]:0',
- options=invalid_arg)
-
- async def test_max_message_length_applied(self):
- address, server = await start_test_server()
-
- async with aio.insecure_channel(
- address,
- options=((_GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH,
- _MAX_MESSAGE_LENGTH),)) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- request = messages_pb2.StreamingOutputCallRequest()
- # The first response stays within the limit
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_MAX_MESSAGE_LENGTH // 2,))
- # The second response exceeds the limit and should fail
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_MAX_MESSAGE_LENGTH * 2,))
-
- call = stub.StreamingOutputCall(request)
-
- response = await call.read()
- self.assertEqual(_MAX_MESSAGE_LENGTH // 2,
- len(response.payload.body))
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.read()
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED,
- rpc_error.code())
- self.assertIn(str(_MAX_MESSAGE_LENGTH), rpc_error.details())
-
- self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, await
- call.code())
-
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
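
The channel-argument tests above pass core channel arguments as (key, value) tuples through the options parameter of aio.server() and aio.insecure_channel(). A minimal sketch of that pattern, reusing the option keys exercised by the deleted tests; the port and size limits here are arbitrary illustration values:

import asyncio

from grpc.experimental import aio


async def main():
    # Server side: disable SO_REUSEPORT and cap inbound message size.
    server = aio.server(options=(
        ('grpc.so_reuseport', 0),
        ('grpc.max_receive_message_length', 1024),
    ))
    port = server.add_insecure_port('localhost:0')
    await server.start()

    # Client side: channels accept the same tuple-of-tuples format.
    channel = aio.insecure_channel(
        f'localhost:{port}',
        options=(('grpc.max_receive_message_length', 1024),))
    await channel.close()

    await server.stop(None)


if __name__ == '__main__':
    asyncio.run(main())
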
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_ready_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_ready_test.py
deleted file mode 100644
index 46e4d208cc..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_ready_test.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Testing the channel_ready function."""
-
-import asyncio
-import gc
-import logging
-import socket
-import time
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests.unit.framework.common import get_socket
-from tests.unit.framework.common import test_constants
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-
-class TestChannelReady(AioTestBase):
-
- async def setUp(self):
- address, self._port, self._socket = get_socket(
- listen=False, sock_options=(socket.SO_REUSEADDR,))
- self._channel = aio.insecure_channel(f"{address}:{self._port}")
- self._socket.close()
-
- async def tearDown(self):
- await self._channel.close()
-
- async def test_channel_ready_success(self):
- # Start `channel_ready` as another Task
- channel_ready_task = self.loop.create_task(
- self._channel.channel_ready())
-
- # Wait for TRANSIENT_FAILURE
- await _common.block_until_certain_state(
- self._channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE)
-
- try:
- # Start the server
- _, server = await start_test_server(port=self._port)
-
- # The channel should become ready once the server is up
- await channel_ready_task
- finally:
- await server.stop(None)
-
- async def test_channel_ready_blocked(self):
- with self.assertRaises(asyncio.TimeoutError):
- await asyncio.wait_for(self._channel.channel_ready(),
- test_constants.SHORT_TIMEOUT)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
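
The channel_ready tests above rely on wrapping Channel.channel_ready() in asyncio.wait_for, so that an unreachable backend surfaces as asyncio.TimeoutError instead of waiting forever. A minimal sketch of the same pattern, with a hypothetical target and an arbitrary five-second timeout:

import asyncio

from grpc.experimental import aio


async def main():
    channel = aio.insecure_channel('localhost:50051')  # hypothetical target
    try:
        # Resolves once the channel reaches the READY state.
        await asyncio.wait_for(channel.channel_ready(), timeout=5.0)
        print('channel is ready')
    except asyncio.TimeoutError:
        print('backend was not reachable within 5 seconds')
    finally:
        await channel.close()


if __name__ == '__main__':
    asyncio.run(main())
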
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py
deleted file mode 100644
index 699fe798f8..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/channel_test.py
+++ /dev/null
@@ -1,231 +0,0 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior of the grpc.aio.Channel class."""
-
-import logging
-import os
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.unit.framework.common import test_constants
-from tests_aio.unit._constants import UNARY_CALL_WITH_SLEEP_VALUE
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_UNARY_CALL_METHOD = '/grpc.testing.TestService/UnaryCall'
-_UNARY_CALL_METHOD_WITH_SLEEP = '/grpc.testing.TestService/UnaryCallWithSleep'
-_STREAMING_OUTPUT_CALL_METHOD = '/grpc.testing.TestService/StreamingOutputCall'
-
-_INVOCATION_METADATA = (
- ('x-grpc-test-echo-initial', 'initial-md-value'),
- ('x-grpc-test-echo-trailing-bin', b'\x00\x02'),
-)
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-
-
-class TestChannel(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_async_context(self):
- async with aio.insecure_channel(self._server_target) as channel:
- hi = channel.unary_unary(
- _UNARY_CALL_METHOD,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- await hi(messages_pb2.SimpleRequest())
-
- async def test_unary_unary(self):
- async with aio.insecure_channel(self._server_target) as channel:
- hi = channel.unary_unary(
- _UNARY_CALL_METHOD,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- response = await hi(messages_pb2.SimpleRequest())
-
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
-
- async def test_unary_call_times_out(self):
- async with aio.insecure_channel(self._server_target) as channel:
- hi = channel.unary_unary(
- _UNARY_CALL_METHOD_WITH_SLEEP,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString,
- )
-
- with self.assertRaises(grpc.RpcError) as exception_context:
- await hi(messages_pb2.SimpleRequest(),
- timeout=UNARY_CALL_WITH_SLEEP_VALUE / 2)
-
- _, details = grpc.StatusCode.DEADLINE_EXCEEDED.value # pylint: disable=unused-variable
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED,
- exception_context.exception.code())
- self.assertEqual(details.title(),
- exception_context.exception.details())
- self.assertIsNotNone(exception_context.exception.initial_metadata())
- self.assertIsNotNone(
- exception_context.exception.trailing_metadata())
-
- @unittest.skipIf(os.name == 'nt',
- 'TODO: https://github.com/grpc/grpc/issues/21658')
- async def test_unary_call_does_not_times_out(self):
- async with aio.insecure_channel(self._server_target) as channel:
- hi = channel.unary_unary(
- _UNARY_CALL_METHOD_WITH_SLEEP,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString,
- )
-
- call = hi(messages_pb2.SimpleRequest(),
- timeout=UNARY_CALL_WITH_SLEEP_VALUE * 5)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_unary_stream(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- # Invokes the actual RPC
- call = stub.StreamingOutputCall(request)
-
- # Validates the responses
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- await channel.close()
-
- async def test_stream_unary_using_write(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Invokes the actual RPC
- call = stub.StreamingInputCall()
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- # Sends out requests
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- await call.done_writing()
-
- # Validates the responses
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- await channel.close()
-
- async def test_stream_unary_using_async_gen(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- # Invokes the actual RPC
- call = stub.StreamingInputCall(gen())
-
- # Validates the responses
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- await channel.close()
-
- async def test_stream_stream_using_read_write(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Invokes the actual RPC
- call = stub.FullDuplexCall()
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- await call.done_writing()
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
- await channel.close()
-
- async def test_stream_stream_using_async_gen(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- # Invokes the actual RPC
- call = stub.FullDuplexCall(gen())
-
- async for response in call:
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
- await channel.close()
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
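
Besides generated stubs, the channel tests above also build a multicallable directly from the channel with explicit serializers. A minimal sketch of that pattern, reusing the UnaryCall method path and message types from the deleted test; the server address is hypothetical:

import asyncio

from grpc.experimental import aio

from src.proto.grpc.testing import messages_pb2

_UNARY_CALL_METHOD = '/grpc.testing.TestService/UnaryCall'


async def main():
    async with aio.insecure_channel('localhost:50051') as channel:
        unary_call = channel.unary_unary(
            _UNARY_CALL_METHOD,
            request_serializer=messages_pb2.SimpleRequest.SerializeToString,
            response_deserializer=messages_pb2.SimpleResponse.FromString)
        # The multicallable is awaited like any other aio RPC.
        response = await unary_call(messages_pb2.SimpleRequest())
        print(type(response).__name__)


if __name__ == '__main__':
    asyncio.run(main())
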
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_stream_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_stream_interceptor_test.py
deleted file mode 100644
index 13ad9b075d..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_stream_interceptor_test.py
+++ /dev/null
@@ -1,204 +0,0 @@
-# Copyright 2020 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests_aio.unit._common import CountingRequestIterator
-from tests_aio.unit._common import CountingResponseIterator
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_NUM_STREAM_RESPONSES = 5
-_NUM_STREAM_REQUESTS = 5
-_RESPONSE_PAYLOAD_SIZE = 7
-
-
-class _StreamStreamInterceptorEmpty(aio.StreamStreamClientInterceptor):
-
- async def intercept_stream_stream(self, continuation, client_call_details,
- request_iterator):
- return await continuation(client_call_details, request_iterator)
-
- def assert_in_final_state(self, test: unittest.TestCase):
- pass
-
-
-class _StreamStreamInterceptorWithRequestAndResponseIterator(
- aio.StreamStreamClientInterceptor):
-
- async def intercept_stream_stream(self, continuation, client_call_details,
- request_iterator):
- self.request_iterator = CountingRequestIterator(request_iterator)
- call = await continuation(client_call_details, self.request_iterator)
- self.response_iterator = CountingResponseIterator(call)
- return self.response_iterator
-
- def assert_in_final_state(self, test: unittest.TestCase):
- test.assertEqual(_NUM_STREAM_REQUESTS,
- self.request_iterator.request_cnt)
- test.assertEqual(_NUM_STREAM_RESPONSES,
- self.response_iterator.response_cnt)
-
-
-class TestStreamStreamClientInterceptor(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_intercepts(self):
-
- for interceptor_class in (
- _StreamStreamInterceptorEmpty,
- _StreamStreamInterceptorWithRequestAndResponseIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE))
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.FullDuplexCall(request_iterator())
-
- await call.wait_for_connection()
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIsInstance(
- response, messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_intercepts_using_write_and_read(self):
- for interceptor_class in (
- _StreamStreamInterceptorEmpty,
- _StreamStreamInterceptorWithRequestAndResponseIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE))
-
- call = stub.FullDuplexCall()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- self.assertIsInstance(
- response, messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- await call.done_writing()
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_multiple_interceptors_request_iterator(self):
- for interceptor_class in (
- _StreamStreamInterceptorEmpty,
- _StreamStreamInterceptorWithRequestAndResponseIterator):
-
- with self.subTest(name=interceptor_class):
-
- interceptors = [interceptor_class(), interceptor_class()]
- channel = aio.insecure_channel(self._server_target,
- interceptors=interceptors)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(
- size=_RESPONSE_PAYLOAD_SIZE))
-
- call = stub.FullDuplexCall()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- self.assertIsInstance(
- response, messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- await call.done_writing()
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- for interceptor in interceptors:
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
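
The stream-stream interceptor tests above show that the simplest possible interceptor just awaits the continuation and returns the resulting call unchanged. A minimal sketch of that shape, adding only a log line; the channel target in the usage comment is hypothetical:

import logging

from grpc.experimental import aio


class LoggingStreamStreamInterceptor(aio.StreamStreamClientInterceptor):
    """Pass-through client interceptor that logs the invoked method."""

    async def intercept_stream_stream(self, continuation, client_call_details,
                                      request_iterator):
        logging.info('starting stream-stream call: %s',
                     client_call_details.method)
        # Delegate to the next interceptor (or the channel) unchanged.
        return await continuation(client_call_details, request_iterator)


# Hypothetical usage:
# channel = aio.insecure_channel(
#     'localhost:50051', interceptors=[LoggingStreamStreamInterceptor()])
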
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_unary_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_unary_interceptor_test.py
deleted file mode 100644
index ff99920c7f..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_stream_unary_interceptor_test.py
+++ /dev/null
@@ -1,518 +0,0 @@
-# Copyright 2020 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import asyncio
-import datetime
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.unit.framework.common import test_constants
-from tests_aio.unit._common import CountingRequestIterator
-from tests_aio.unit._common import inject_callbacks
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_SHORT_TIMEOUT_S = 1.0
-
-_NUM_STREAM_REQUESTS = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
-
-
-class _StreamUnaryInterceptorEmpty(aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation, client_call_details,
- request_iterator):
- return await continuation(client_call_details, request_iterator)
-
- def assert_in_final_state(self, test: unittest.TestCase):
- pass
-
-
-class _StreamUnaryInterceptorWithRequestIterator(
- aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation, client_call_details,
- request_iterator):
- self.request_iterator = CountingRequestIterator(request_iterator)
- call = await continuation(client_call_details, self.request_iterator)
- return call
-
- def assert_in_final_state(self, test: unittest.TestCase):
- test.assertEqual(_NUM_STREAM_REQUESTS,
- self.request_iterator.request_cnt)
-
-
-class TestStreamUnaryClientInterceptor(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_intercepts(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- response = await call
-
- self.assertEqual(_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_intercepts_using_write(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- call = stub.StreamingInputCall()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- await call.done_writing()
-
- response = await call
-
- self.assertEqual(_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_add_done_callback_interceptor_task_not_finished(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- validation = inject_callbacks(call)
-
- response = await call
-
- await validation
-
- await channel.close()
-
- async def test_add_done_callback_interceptor_task_finished(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- response = await call
-
- validation = inject_callbacks(call)
-
- await validation
-
- await channel.close()
-
- async def test_multiple_interceptors_request_iterator(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
-
- interceptors = [interceptor_class(), interceptor_class()]
- channel = aio.insecure_channel(self._server_target,
- interceptors=interceptors)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- response = await call
-
- self.assertEqual(_NUM_STREAM_REQUESTS * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- for interceptor in interceptors:
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_intercepts_request_iterator_rpc_error(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- channel = aio.insecure_channel(
- UNREACHABLE_TARGET, interceptors=[interceptor_class()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- # When there is an error, the request iterator is no longer consumed.
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
-
- await channel.close()
-
- async def test_intercepts_request_iterator_rpc_error_using_write(self):
- for interceptor_class in (_StreamUnaryInterceptorEmpty,
- _StreamUnaryInterceptorWithRequestIterator):
-
- with self.subTest(name=interceptor_class):
- channel = aio.insecure_channel(
- UNREACHABLE_TARGET, interceptors=[interceptor_class()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- call = stub.StreamingInputCall()
-
- # When there is an error during the write, an exception is raised.
- with self.assertRaises(asyncio.InvalidStateError):
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
-
- await channel.close()
-
- async def test_cancel_before_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation,
- client_call_details,
- request_iterator):
- interceptor_reached.set()
- await wait_for_ever
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- call = stub.StreamingInputCall()
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- # When there is an error during the write, an exception is raised.
- with self.assertRaises(asyncio.InvalidStateError):
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
- await channel.close()
-
- async def test_cancel_after_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation,
- client_call_details,
- request_iterator):
- call = await continuation(client_call_details, request_iterator)
- interceptor_reached.set()
- await wait_for_ever
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- call = stub.StreamingInputCall()
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- # When there is an error during the write, an exception is raised.
- with self.assertRaises(asyncio.InvalidStateError):
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
- await channel.close()
-
- async def test_cancel_while_writing(self):
- # Test cancellation before any write is made, or after at least one write
- for num_writes_before_cancel in (0, 1):
- with self.subTest(name="Num writes before cancel: {}".format(
- num_writes_before_cancel)):
-
- channel = aio.insecure_channel(
- UNREACHABLE_TARGET,
- interceptors=[_StreamUnaryInterceptorWithRequestIterator()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' *
- _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(
- payload=payload)
-
- call = stub.StreamingInputCall()
-
- with self.assertRaises(asyncio.InvalidStateError):
- for i in range(_NUM_STREAM_REQUESTS):
- if i == num_writes_before_cancel:
- self.assertTrue(call.cancel())
- await call.write(request)
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
-
- await channel.close()
-
- async def test_cancel_by_the_interceptor(self):
-
- class Interceptor(aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation,
- client_call_details,
- request_iterator):
- call = await continuation(client_call_details, request_iterator)
- call.cancel()
- return call
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- call = stub.StreamingInputCall()
-
- with self.assertRaises(asyncio.InvalidStateError):
- for i in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
-
- await channel.close()
-
- async def test_exception_raised_by_interceptor(self):
-
- class InterceptorException(Exception):
- pass
-
- class Interceptor(aio.StreamUnaryClientInterceptor):
-
- async def intercept_stream_unary(self, continuation,
- client_call_details,
- request_iterator):
- raise InterceptorException
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- call = stub.StreamingInputCall()
-
- with self.assertRaises(InterceptorException):
- for i in range(_NUM_STREAM_REQUESTS):
- await call.write(request)
-
- with self.assertRaises(InterceptorException):
- await call
-
- await channel.close()
-
- async def test_intercepts_prohibit_mixing_style(self):
- channel = aio.insecure_channel(
- self._server_target, interceptors=[_StreamUnaryInterceptorEmpty()])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- async def request_iterator():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield request
-
- call = stub.StreamingInputCall(request_iterator())
-
- with self.assertRaises(grpc._cython.cygrpc.UsageError):
- await call.write(request)
-
- with self.assertRaises(grpc._cython.cygrpc.UsageError):
- await call.done_writing()
-
- await channel.close()
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
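
The stream-unary interceptor tests above substitute a counting wrapper for the request iterator before handing it to the continuation. A minimal self-contained sketch of that idea, assuming the caller supplies an async request iterator (as the deleted tests do); the wrapper class here is illustrative, not the tests' CountingRequestIterator helper:

from grpc.experimental import aio


class _CountingRequestIterator:
    """Forwards requests from an async iterator while counting them."""

    def __init__(self, request_iterator):
        self.request_cnt = 0
        self._request_iterator = request_iterator

    async def _forward_requests(self):
        async for request in self._request_iterator:
            self.request_cnt += 1
            yield request

    def __aiter__(self):
        return self._forward_requests()


class CountingStreamUnaryInterceptor(aio.StreamUnaryClientInterceptor):
    """Counts the requests that flow through a stream-unary call."""

    def __init__(self):
        self.request_iterator = None

    async def intercept_stream_unary(self, continuation, client_call_details,
                                     request_iterator):
        # Swap in the counting wrapper; the RPC otherwise proceeds as usual.
        self.request_iterator = _CountingRequestIterator(request_iterator)
        return await continuation(client_call_details, self.request_iterator)
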
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_stream_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_stream_interceptor_test.py
deleted file mode 100644
index f0c0cba8eb..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_stream_interceptor_test.py
+++ /dev/null
@@ -1,396 +0,0 @@
-# Copyright 2020 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import asyncio
-import datetime
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.unit.framework.common import test_constants
-from tests_aio.unit._common import CountingResponseIterator
-from tests_aio.unit._common import inject_callbacks
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_SHORT_TIMEOUT_S = 1.0
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 7
-_RESPONSE_INTERVAL_US = int(_SHORT_TIMEOUT_S * 1000 * 1000)
-
-
-class _UnaryStreamInterceptorEmpty(aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation, client_call_details,
- request):
- return await continuation(client_call_details, request)
-
- def assert_in_final_state(self, test: unittest.TestCase):
- pass
-
-
-class _UnaryStreamInterceptorWithResponseIterator(
- aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation, client_call_details,
- request):
- call = await continuation(client_call_details, request)
- self.response_iterator = CountingResponseIterator(call)
- return self.response_iterator
-
- def assert_in_final_state(self, test: unittest.TestCase):
- test.assertEqual(_NUM_STREAM_RESPONSES,
- self.response_iterator.response_cnt)
-
-
-class TestUnaryStreamClientInterceptor(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_intercepts(self):
- for interceptor_class in (_UnaryStreamInterceptorEmpty,
- _UnaryStreamInterceptorWithResponseIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend([
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
- ] * _NUM_STREAM_RESPONSES)
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- await call.wait_for_connection()
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.debug_error_string(), '')
- self.assertEqual(call.cancel(), False)
- self.assertEqual(call.cancelled(), False)
- self.assertEqual(call.done(), True)
-
- interceptor.assert_in_final_state(self)
-
- await channel.close()
-
- async def test_add_done_callback_interceptor_task_not_finished(self):
- for interceptor_class in (_UnaryStreamInterceptorEmpty,
- _UnaryStreamInterceptorWithResponseIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend([
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
- ] * _NUM_STREAM_RESPONSES)
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- validation = inject_callbacks(call)
-
- async for response in call:
- pass
-
- await validation
-
- await channel.close()
-
- async def test_add_done_callback_interceptor_task_finished(self):
- for interceptor_class in (_UnaryStreamInterceptorEmpty,
- _UnaryStreamInterceptorWithResponseIterator):
-
- with self.subTest(name=interceptor_class):
- interceptor = interceptor_class()
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend([
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
- ] * _NUM_STREAM_RESPONSES)
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
-                # This ensures that the callbacks will be registered
-                # with the intercepted call rather than being saved in
-                # the pending state list.
- await call.wait_for_connection()
-
- validation = inject_callbacks(call)
-
- async for response in call:
- pass
-
- await validation
-
- await channel.close()
-
- async def test_response_iterator_using_read(self):
- interceptor = _UnaryStreamInterceptorWithResponseIterator()
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=[interceptor])
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend(
- [messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)] *
- _NUM_STREAM_RESPONSES)
-
- call = stub.StreamingOutputCall(request)
-
- response_cnt = 0
- for response in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- response_cnt += 1
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
- self.assertEqual(interceptor.response_iterator.response_cnt,
- _NUM_STREAM_RESPONSES)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- await channel.close()
-
- async def test_multiple_interceptors_response_iterator(self):
- for interceptor_class in (_UnaryStreamInterceptorEmpty,
- _UnaryStreamInterceptorWithResponseIterator):
-
- with self.subTest(name=interceptor_class):
-
- interceptors = [interceptor_class(), interceptor_class()]
-
- channel = aio.insecure_channel(self._server_target,
- interceptors=interceptors)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend([
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)
- ] * _NUM_STREAM_RESPONSES)
-
- call = stub.StreamingOutputCall(request)
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE,
- len(response.payload.body))
-
- self.assertEqual(response_cnt, _NUM_STREAM_RESPONSES)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- await channel.close()
-
- async def test_intercepts_response_iterator_rpc_error(self):
- for interceptor_class in (_UnaryStreamInterceptorEmpty,
- _UnaryStreamInterceptorWithResponseIterator):
-
- with self.subTest(name=interceptor_class):
-
- channel = aio.insecure_channel(
- UNREACHABLE_TARGET, interceptors=[interceptor_class()])
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- async for response in call:
- pass
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
- await channel.close()
-
- async def test_cancel_before_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation,
- client_call_details, request):
- interceptor_reached.set()
- await wait_for_ever
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- async for response in call:
- pass
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
- await channel.close()
-
- async def test_cancel_after_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- interceptor_reached.set()
- await wait_for_ever
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- async for response in call:
- pass
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
- await channel.close()
-
- async def test_cancel_consuming_response_iterator(self):
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend(
- [messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE)] *
- _NUM_STREAM_RESPONSES)
-
- channel = aio.insecure_channel(
- self._server_target,
- interceptors=[_UnaryStreamInterceptorWithResponseIterator()])
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- with self.assertRaises(asyncio.CancelledError):
- async for response in call:
- call.cancel()
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- await channel.close()
-
- async def test_cancel_by_the_interceptor(self):
-
- class Interceptor(aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- call.cancel()
- return call
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- with self.assertRaises(asyncio.CancelledError):
- async for response in call:
- pass
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- await channel.close()
-
- async def test_exception_raised_by_interceptor(self):
-
- class InterceptorException(Exception):
- pass
-
- class Interceptor(aio.UnaryStreamClientInterceptor):
-
- async def intercept_unary_stream(self, continuation,
- client_call_details, request):
- raise InterceptorException
-
- channel = aio.insecure_channel(UNREACHABLE_TARGET,
- interceptors=[Interceptor()])
- request = messages_pb2.StreamingOutputCallRequest()
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.StreamingOutputCall(request)
-
- with self.assertRaises(InterceptorException):
- async for response in call:
- pass
-
- await channel.close()
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_unary_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_unary_interceptor_test.py
deleted file mode 100644
index 7367c454fa..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/client_unary_unary_interceptor_test.py
+++ /dev/null
@@ -1,702 +0,0 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import asyncio
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests_aio.unit import _common
-from tests_aio.unit import _constants
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import _INITIAL_METADATA_KEY
-from tests_aio.unit._test_server import _TRAILING_METADATA_KEY
-from tests_aio.unit._test_server import start_test_server
-
-_LOCAL_CANCEL_DETAILS_EXPECTATION = 'Locally cancelled by application!'
-_INITIAL_METADATA_TO_INJECT = aio.Metadata(
- (_INITIAL_METADATA_KEY, 'extra info'),
- (_TRAILING_METADATA_KEY, b'\x13\x37'),
-)
-_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED = 1.0
-
-
-class TestUnaryUnaryClientInterceptor(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- def test_invalid_interceptor(self):
-
- class InvalidInterceptor:
- """Just an invalid Interceptor"""
-
- with self.assertRaises(ValueError):
- aio.insecure_channel("", interceptors=[InvalidInterceptor()])
-
- async def test_executed_right_order(self):
-
- interceptors_executed = []
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
- """Interceptor used for testing if the interceptor is being called"""
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- interceptors_executed.append(self)
- call = await continuation(client_call_details, request)
- return call
-
- interceptors = [Interceptor() for i in range(2)]
-
- async with aio.insecure_channel(self._server_target,
- interceptors=interceptors) as channel:
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
-
- # Check that all interceptors were executed, and were executed
- # in the right order.
- self.assertSequenceEqual(interceptors_executed, interceptors)
-
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
-
- @unittest.expectedFailure
- # TODO(https://github.com/grpc/grpc/issues/20144) Once metadata support is
-    # implemented on the client side, this test must be implemented.
- def test_modify_metadata(self):
- raise NotImplementedError()
-
- @unittest.expectedFailure
- # TODO(https://github.com/grpc/grpc/issues/20532) Once credentials support is
-    # implemented on the client side, this test must be implemented.
- def test_modify_credentials(self):
- raise NotImplementedError()
-
- async def test_status_code_Ok(self):
-
- class StatusCodeOkInterceptor(aio.UnaryUnaryClientInterceptor):
- """Interceptor used for observing status code Ok returned by the RPC"""
-
- def __init__(self):
- self.status_code_Ok_observed = False
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- code = await call.code()
- if code == grpc.StatusCode.OK:
- self.status_code_Ok_observed = True
-
- return call
-
- interceptor = StatusCodeOkInterceptor()
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[interceptor]) as channel:
-
-            # When no error occurs, StatusCode.OK must be observed
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- await multicallable(messages_pb2.SimpleRequest())
-
- self.assertTrue(interceptor.status_code_Ok_observed)
-
- async def test_add_timeout(self):
-
- class TimeoutInterceptor(aio.UnaryUnaryClientInterceptor):
- """Interceptor used for adding a timeout to the RPC"""
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- new_client_call_details = aio.ClientCallDetails(
- method=client_call_details.method,
- timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2,
- metadata=client_call_details.metadata,
- credentials=client_call_details.credentials,
- wait_for_ready=client_call_details.wait_for_ready)
- return await continuation(new_client_call_details, request)
-
- interceptor = TimeoutInterceptor()
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[interceptor]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCallWithSleep',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- call = multicallable(messages_pb2.SimpleRequest())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertEqual(exception_context.exception.code(),
- grpc.StatusCode.DEADLINE_EXCEEDED)
-
- self.assertTrue(call.done())
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, await
- call.code())
-
- async def test_retry(self):
-
- class RetryInterceptor(aio.UnaryUnaryClientInterceptor):
- """Simulates a Retry Interceptor which ends up by making
- two RPC calls."""
-
- def __init__(self):
- self.calls = []
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
-
- new_client_call_details = aio.ClientCallDetails(
- method=client_call_details.method,
- timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2,
- metadata=client_call_details.metadata,
- credentials=client_call_details.credentials,
- wait_for_ready=client_call_details.wait_for_ready)
-
- try:
- call = await continuation(new_client_call_details, request)
- await call
- except grpc.RpcError:
- pass
-
- self.calls.append(call)
-
- new_client_call_details = aio.ClientCallDetails(
- method=client_call_details.method,
- timeout=None,
- metadata=client_call_details.metadata,
- credentials=client_call_details.credentials,
- wait_for_ready=client_call_details.wait_for_ready)
-
- call = await continuation(new_client_call_details, request)
- self.calls.append(call)
- return call
-
- interceptor = RetryInterceptor()
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[interceptor]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCallWithSleep',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- call = multicallable(messages_pb2.SimpleRequest())
-
- await call
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
-            # Check that two calls were made, the first one finishing with
-            # a deadline exceeded error and the second one finishing OK.
- self.assertEqual(len(interceptor.calls), 2)
- self.assertEqual(await interceptor.calls[0].code(),
- grpc.StatusCode.DEADLINE_EXCEEDED)
- self.assertEqual(await interceptor.calls[1].code(),
- grpc.StatusCode.OK)
-
- async def test_rpcresponse(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
- """Raw responses are seen as reegular calls"""
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- response = await call
- return call
-
- class ResponseInterceptor(aio.UnaryUnaryClientInterceptor):
- """Return a raw response"""
- response = messages_pb2.SimpleResponse()
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- return ResponseInterceptor.response
-
- interceptor, interceptor_response = Interceptor(), ResponseInterceptor()
-
- async with aio.insecure_channel(
- self._server_target,
- interceptors=[interceptor, interceptor_response]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
-
- # Check that the response returned is the one returned by the
- # interceptor
- self.assertEqual(id(response), id(ResponseInterceptor.response))
-
- # Check all of the UnaryUnaryCallResponse attributes
- self.assertTrue(call.done())
- self.assertFalse(call.cancel())
- self.assertFalse(call.cancelled())
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
- self.assertEqual(await call.debug_error_string(), None)
-
-
-class TestInterceptedUnaryUnaryCall(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_call_ok(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
-
- self.assertTrue(call.done())
- self.assertFalse(call.cancelled())
- self.assertEqual(type(response), messages_pb2.SimpleResponse)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
-
- async def test_call_ok_awaited(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- await call
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
-
- self.assertTrue(call.done())
- self.assertFalse(call.cancelled())
- self.assertEqual(type(response), messages_pb2.SimpleResponse)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(await call.details(), '')
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
-
- async def test_call_rpc_error(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCallWithSleep',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- call = multicallable(
- messages_pb2.SimpleRequest(),
- timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertTrue(call.done())
- self.assertFalse(call.cancelled())
- self.assertEqual(await call.code(),
- grpc.StatusCode.DEADLINE_EXCEEDED)
- self.assertEqual(await call.details(), 'Deadline Exceeded')
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
-
- async def test_call_rpc_error_awaited(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- await call
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCallWithSleep',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- call = multicallable(
- messages_pb2.SimpleRequest(),
- timeout=_constants.UNARY_CALL_WITH_SLEEP_VALUE / 2)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- self.assertTrue(call.done())
- self.assertFalse(call.cancelled())
- self.assertEqual(await call.code(),
- grpc.StatusCode.DEADLINE_EXCEEDED)
- self.assertEqual(await call.details(), 'Deadline Exceeded')
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(await call.trailing_metadata(), aio.Metadata())
-
- async def test_cancel_before_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- interceptor_reached.set()
- await wait_for_ever
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.details(),
- _LOCAL_CANCEL_DETAILS_EXPECTATION)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
-
- async def test_cancel_after_rpc(self):
-
- interceptor_reached = asyncio.Event()
- wait_for_ever = self.loop.create_future()
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- await call
- interceptor_reached.set()
- await wait_for_ever
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- self.assertFalse(call.cancelled())
- self.assertFalse(call.done())
-
- await interceptor_reached.wait()
- self.assertTrue(call.cancel())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.details(),
- _LOCAL_CANCEL_DETAILS_EXPECTATION)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
-
- async def test_cancel_inside_interceptor_after_rpc_awaiting(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- call.cancel()
- await call
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.details(),
- _LOCAL_CANCEL_DETAILS_EXPECTATION)
- self.assertEqual(await call.initial_metadata(), None)
- self.assertEqual(await call.trailing_metadata(), None)
-
- async def test_cancel_inside_interceptor_after_rpc_not_awaiting(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- call = await continuation(client_call_details, request)
- call.cancel()
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- with self.assertRaises(asyncio.CancelledError):
- await call
-
- self.assertTrue(call.cancelled())
- self.assertTrue(call.done())
- self.assertEqual(await call.code(), grpc.StatusCode.CANCELLED)
- self.assertEqual(await call.details(),
- _LOCAL_CANCEL_DETAILS_EXPECTATION)
- self.assertEqual(await call.initial_metadata(), aio.Metadata())
- self.assertEqual(
- await call.trailing_metadata(), aio.Metadata(),
- "When the raw response is None, empty metadata is returned")
-
- async def test_initial_metadata_modification(self):
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
- new_metadata = aio.Metadata(*client_call_details.metadata,
- *_INITIAL_METADATA_TO_INJECT)
- new_details = aio.ClientCallDetails(
- method=client_call_details.method,
- timeout=client_call_details.timeout,
- metadata=new_metadata,
- credentials=client_call_details.credentials,
- wait_for_ready=client_call_details.wait_for_ready,
- )
- return await continuation(new_details, request)
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
- call = stub.UnaryCall(messages_pb2.SimpleRequest())
-
- # Expected to see the echoed initial metadata
- self.assertTrue(
- _common.seen_metadatum(
- expected_key=_INITIAL_METADATA_KEY,
- expected_value=_INITIAL_METADATA_TO_INJECT[
- _INITIAL_METADATA_KEY],
- actual=await call.initial_metadata(),
- ))
- # Expected to see the echoed trailing metadata
- self.assertTrue(
- _common.seen_metadatum(
- expected_key=_TRAILING_METADATA_KEY,
- expected_value=_INITIAL_METADATA_TO_INJECT[
- _TRAILING_METADATA_KEY],
- actual=await call.trailing_metadata(),
- ))
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_add_done_callback_before_finishes(self):
- called = asyncio.Event()
- interceptor_can_continue = asyncio.Event()
-
- def callback(call):
- called.set()
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
-
- await interceptor_can_continue.wait()
- call = await continuation(client_call_details, request)
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- call.add_done_callback(callback)
- interceptor_can_continue.set()
- await call
-
- try:
- await asyncio.wait_for(
- called.wait(),
- timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED)
- except:
- self.fail("Callback was not called")
-
- async def test_add_done_callback_after_finishes(self):
- called = asyncio.Event()
-
- def callback(call):
- called.set()
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
-
- call = await continuation(client_call_details, request)
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- await call
-
- call.add_done_callback(callback)
-
- try:
- await asyncio.wait_for(
- called.wait(),
- timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED)
- except:
- self.fail("Callback was not called")
-
- async def test_add_done_callback_after_finishes_before_await(self):
- called = asyncio.Event()
-
- def callback(call):
- called.set()
-
- class Interceptor(aio.UnaryUnaryClientInterceptor):
-
- async def intercept_unary_unary(self, continuation,
- client_call_details, request):
-
- call = await continuation(client_call_details, request)
- return call
-
- async with aio.insecure_channel(self._server_target,
- interceptors=[Interceptor()
- ]) as channel:
-
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
-
- call.add_done_callback(callback)
-
- await call
-
- try:
- await asyncio.wait_for(
- called.wait(),
- timeout=_TIMEOUT_CHECK_IF_CALLBACK_WAS_CALLED)
- except:
- self.fail("Callback was not called")
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/close_channel_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/close_channel_test.py
deleted file mode 100644
index 8d481a9a3b..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/close_channel_test.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Copyright 2020 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior of closing a grpc.aio.Channel."""
-
-import asyncio
-import logging
-import unittest
-
-import grpc
-from grpc.aio import _base_call
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_UNARY_CALL_METHOD_WITH_SLEEP = '/grpc.testing.TestService/UnaryCallWithSleep'
-_LONG_TIMEOUT_THAT_SHOULD_NOT_EXPIRE = 60
-
-
-class TestCloseChannel(AioTestBase):
-
- async def setUp(self):
- self._server_target, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- async def test_graceful_close(self):
- channel = aio.insecure_channel(self._server_target)
- UnaryCallWithSleep = channel.unary_unary(
- _UNARY_CALL_METHOD_WITH_SLEEP,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString,
- )
-
- call = UnaryCallWithSleep(messages_pb2.SimpleRequest())
-
- await channel.close(grace=_LONG_TIMEOUT_THAT_SHOULD_NOT_EXPIRE)
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_none_graceful_close(self):
- channel = aio.insecure_channel(self._server_target)
- UnaryCallWithSleep = channel.unary_unary(
- _UNARY_CALL_METHOD_WITH_SLEEP,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString,
- )
-
- call = UnaryCallWithSleep(messages_pb2.SimpleRequest())
-
- await channel.close(None)
-
- self.assertEqual(grpc.StatusCode.CANCELLED, await call.code())
-
- async def test_close_unary_unary(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- calls = [stub.UnaryCall(messages_pb2.SimpleRequest()) for _ in range(2)]
-
- await channel.close()
-
- for call in calls:
- self.assertTrue(call.cancelled())
-
- async def test_close_unary_stream(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- request = messages_pb2.StreamingOutputCallRequest()
- calls = [stub.StreamingOutputCall(request) for _ in range(2)]
-
- await channel.close()
-
- for call in calls:
- self.assertTrue(call.cancelled())
-
- async def test_close_stream_unary(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- calls = [stub.StreamingInputCall() for _ in range(2)]
-
- await channel.close()
-
- for call in calls:
- self.assertTrue(call.cancelled())
-
- async def test_close_stream_stream(self):
- channel = aio.insecure_channel(self._server_target)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- calls = [stub.FullDuplexCall() for _ in range(2)]
-
- await channel.close()
-
- for call in calls:
- self.assertTrue(call.cancelled())
-
- async def test_close_async_context(self):
- async with aio.insecure_channel(self._server_target) as channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
- calls = [
- stub.UnaryCall(messages_pb2.SimpleRequest()) for _ in range(2)
- ]
-
- for call in calls:
- self.assertTrue(call.cancelled())
-
- async def test_channel_isolation(self):
- async with aio.insecure_channel(self._server_target) as channel1:
- async with aio.insecure_channel(self._server_target) as channel2:
- stub1 = test_pb2_grpc.TestServiceStub(channel1)
- stub2 = test_pb2_grpc.TestServiceStub(channel2)
-
- call1 = stub1.UnaryCall(messages_pb2.SimpleRequest())
- call2 = stub2.UnaryCall(messages_pb2.SimpleRequest())
-
- self.assertFalse(call1.cancelled())
- self.assertTrue(call2.cancelled())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compatibility_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compatibility_test.py
deleted file mode 100644
index 4749f39d03..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compatibility_test.py
+++ /dev/null
@@ -1,367 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Testing the compatibility between AsyncIO stack and the old stack."""
-
-import asyncio
-from concurrent.futures import ThreadPoolExecutor
-import logging
-import os
-import random
-import threading
-from typing import Callable, Iterable, Sequence, Tuple
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.unit.framework.common import test_constants
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import TestServiceServicer
-from tests_aio.unit._test_server import start_test_server
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-_REQUEST = b'\x03\x07'
-
-
-def _unique_options() -> Sequence[Tuple[str, float]]:
- return (('iv', random.random()),)
-
-
-@unittest.skipIf(
- os.environ.get('GRPC_ASYNCIO_ENGINE', '').lower() == 'custom_io_manager',
- 'Compatible mode needs POLLER completion queue.')
-class TestCompatibility(AioTestBase):
-
- async def setUp(self):
- self._async_server = aio.server(
- options=(('grpc.so_reuseport', 0),),
- migration_thread_pool=ThreadPoolExecutor())
-
- test_pb2_grpc.add_TestServiceServicer_to_server(TestServiceServicer(),
- self._async_server)
- self._adhoc_handlers = _common.AdhocGenericHandler()
- self._async_server.add_generic_rpc_handlers((self._adhoc_handlers,))
-
- port = self._async_server.add_insecure_port('[::]:0')
- address = 'localhost:%d' % port
- await self._async_server.start()
-
- # Create async stub
- self._async_channel = aio.insecure_channel(address,
- options=_unique_options())
- self._async_stub = test_pb2_grpc.TestServiceStub(self._async_channel)
-
- # Create sync stub
- self._sync_channel = grpc.insecure_channel(address,
- options=_unique_options())
- self._sync_stub = test_pb2_grpc.TestServiceStub(self._sync_channel)
-
- async def tearDown(self):
- self._sync_channel.close()
- await self._async_channel.close()
- await self._async_server.stop(None)
-
- async def _run_in_another_thread(self, func: Callable[[], None]):
- work_done = asyncio.Event()
-
- def thread_work():
- func()
- self.loop.call_soon_threadsafe(work_done.set)
-
- thread = threading.Thread(target=thread_work, daemon=True)
- thread.start()
- await work_done.wait()
- thread.join()
-
- async def test_unary_unary(self):
- # Calling async API in this thread
- await self._async_stub.UnaryCall(messages_pb2.SimpleRequest(),
- timeout=test_constants.LONG_TIMEOUT)
-
- # Calling sync API in a different thread
- def sync_work() -> None:
- response, call = self._sync_stub.UnaryCall.with_call(
- messages_pb2.SimpleRequest(),
- timeout=test_constants.LONG_TIMEOUT)
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
- self.assertEqual(grpc.StatusCode.OK, call.code())
-
- await self._run_in_another_thread(sync_work)
-
- async def test_unary_stream(self):
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- # Calling async API in this thread
- call = self._async_stub.StreamingOutputCall(request)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.read()
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- # Calling sync API in a different thread
- def sync_work() -> None:
- response_iterator = self._sync_stub.StreamingOutputCall(request)
- for response in response_iterator:
- assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
- self.assertEqual(grpc.StatusCode.OK, response_iterator.code())
-
- await self._run_in_another_thread(sync_work)
-
- async def test_stream_unary(self):
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- # Calling async API in this thread
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- response = await self._async_stub.StreamingInputCall(gen())
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- # Calling sync API in a different thread
- def sync_work() -> None:
- response = self._sync_stub.StreamingInputCall(
- iter([request] * _NUM_STREAM_RESPONSES))
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- await self._run_in_another_thread(sync_work)
-
- async def test_stream_stream(self):
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- # Calling async API in this thread
- call = self._async_stub.FullDuplexCall()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
-
- await call.done_writing()
- assert await call.code() == grpc.StatusCode.OK
-
- # Calling sync API in a different thread
- def sync_work() -> None:
- response_iterator = self._sync_stub.FullDuplexCall(iter([request]))
- for response in response_iterator:
- assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
- self.assertEqual(grpc.StatusCode.OK, response_iterator.code())
-
- await self._run_in_another_thread(sync_work)
-
- async def test_server(self):
-
- class GenericHandlers(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- return grpc.unary_unary_rpc_method_handler(lambda x, _: x)
-
-        # It's fine to instantiate a server object in the event loop thread.
-        # The server will spawn its own serving thread.
- server = grpc.server(ThreadPoolExecutor(),
- handlers=(GenericHandlers(),))
- port = server.add_insecure_port('localhost:0')
- server.start()
-
- def sync_work() -> None:
- for _ in range(100):
- with grpc.insecure_channel('localhost:%d' % port) as channel:
- response = channel.unary_unary('/test/test')(b'\x07\x08')
- self.assertEqual(response, b'\x07\x08')
-
- await self._run_in_another_thread(sync_work)
-
- async def test_many_loop(self):
- address, server = await start_test_server()
-
- # Run another loop in another thread
- def sync_work():
-
- async def async_work():
- # Create async stub
- async_channel = aio.insecure_channel(address,
- options=_unique_options())
- async_stub = test_pb2_grpc.TestServiceStub(async_channel)
-
- call = async_stub.UnaryCall(messages_pb2.SimpleRequest())
- response = await call
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- loop = asyncio.new_event_loop()
- loop.run_until_complete(async_work())
-
- await self._run_in_another_thread(sync_work)
- await server.stop(None)
-
- async def test_sync_unary_unary_success(self):
-
- @grpc.unary_unary_rpc_method_handler
- def echo_unary_unary(request: bytes, unused_context):
- return request
-
- self._adhoc_handlers.set_adhoc_handler(echo_unary_unary)
- response = await self._async_channel.unary_unary(_common.ADHOC_METHOD
- )(_REQUEST)
- self.assertEqual(_REQUEST, response)
-
- async def test_sync_unary_unary_metadata(self):
- metadata = (('unique', 'key-42'),)
-
- @grpc.unary_unary_rpc_method_handler
- def metadata_unary_unary(request: bytes, context: grpc.ServicerContext):
- context.send_initial_metadata(metadata)
- return request
-
- self._adhoc_handlers.set_adhoc_handler(metadata_unary_unary)
- call = self._async_channel.unary_unary(_common.ADHOC_METHOD)(_REQUEST)
- self.assertTrue(
- _common.seen_metadata(aio.Metadata(*metadata), await
- call.initial_metadata()))
-
- async def test_sync_unary_unary_abort(self):
-
- @grpc.unary_unary_rpc_method_handler
- def abort_unary_unary(request: bytes, context: grpc.ServicerContext):
- context.abort(grpc.StatusCode.INTERNAL, 'Test')
-
- self._adhoc_handlers.set_adhoc_handler(abort_unary_unary)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._async_channel.unary_unary(_common.ADHOC_METHOD
- )(_REQUEST)
- self.assertEqual(grpc.StatusCode.INTERNAL,
- exception_context.exception.code())
-
- async def test_sync_unary_unary_set_code(self):
-
- @grpc.unary_unary_rpc_method_handler
- def set_code_unary_unary(request: bytes, context: grpc.ServicerContext):
- context.set_code(grpc.StatusCode.INTERNAL)
-
- self._adhoc_handlers.set_adhoc_handler(set_code_unary_unary)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._async_channel.unary_unary(_common.ADHOC_METHOD
- )(_REQUEST)
- self.assertEqual(grpc.StatusCode.INTERNAL,
- exception_context.exception.code())
-
- async def test_sync_unary_stream_success(self):
-
- @grpc.unary_stream_rpc_method_handler
- def echo_unary_stream(request: bytes, unused_context):
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- self._adhoc_handlers.set_adhoc_handler(echo_unary_stream)
- call = self._async_channel.unary_stream(_common.ADHOC_METHOD)(_REQUEST)
- async for response in call:
- self.assertEqual(_REQUEST, response)
-
- async def test_sync_unary_stream_error(self):
-
- @grpc.unary_stream_rpc_method_handler
- def error_unary_stream(request: bytes, unused_context):
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
- raise RuntimeError('Test')
-
- self._adhoc_handlers.set_adhoc_handler(error_unary_stream)
- call = self._async_channel.unary_stream(_common.ADHOC_METHOD)(_REQUEST)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- async for response in call:
- self.assertEqual(_REQUEST, response)
- self.assertEqual(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
- async def test_sync_stream_unary_success(self):
-
- @grpc.stream_unary_rpc_method_handler
- def echo_stream_unary(request_iterator: Iterable[bytes],
- unused_context):
- self.assertEqual(len(list(request_iterator)), _NUM_STREAM_RESPONSES)
- return _REQUEST
-
- self._adhoc_handlers.set_adhoc_handler(echo_stream_unary)
- request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
- response = await self._async_channel.stream_unary(_common.ADHOC_METHOD
- )(request_iterator)
- self.assertEqual(_REQUEST, response)
-
- async def test_sync_stream_unary_error(self):
-
- @grpc.stream_unary_rpc_method_handler
- def echo_stream_unary(request_iterator: Iterable[bytes],
- unused_context):
- self.assertEqual(len(list(request_iterator)), _NUM_STREAM_RESPONSES)
- raise RuntimeError('Test')
-
- self._adhoc_handlers.set_adhoc_handler(echo_stream_unary)
- request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- response = await self._async_channel.stream_unary(
- _common.ADHOC_METHOD)(request_iterator)
- self.assertEqual(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
- async def test_sync_stream_stream_success(self):
-
- @grpc.stream_stream_rpc_method_handler
- def echo_stream_stream(request_iterator: Iterable[bytes],
- unused_context):
- for request in request_iterator:
- yield request
-
- self._adhoc_handlers.set_adhoc_handler(echo_stream_stream)
- request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
- call = self._async_channel.stream_stream(
- _common.ADHOC_METHOD)(request_iterator)
- async for response in call:
- self.assertEqual(_REQUEST, response)
-
- async def test_sync_stream_stream_error(self):
-
- @grpc.stream_stream_rpc_method_handler
- def echo_stream_stream(request_iterator: Iterable[bytes],
- unused_context):
- for request in request_iterator:
- yield request
- raise RuntimeError('test')
-
- self._adhoc_handlers.set_adhoc_handler(echo_stream_stream)
- request_iterator = iter([_REQUEST] * _NUM_STREAM_RESPONSES)
- call = self._async_channel.stream_stream(
- _common.ADHOC_METHOD)(request_iterator)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- async for response in call:
- self.assertEqual(_REQUEST, response)
- self.assertEqual(grpc.StatusCode.UNKNOWN,
- exception_context.exception.code())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compression_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compression_test.py
deleted file mode 100644
index eb28a93f58..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/compression_test.py
+++ /dev/null
@@ -1,196 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior around the compression mechanism."""
-
-import asyncio
-import logging
-import platform
-import random
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-
-_GZIP_CHANNEL_ARGUMENT = ('grpc.default_compression_algorithm', 2)
-_GZIP_DISABLED_CHANNEL_ARGUMENT = ('grpc.compression_enabled_algorithms_bitset',
- 3)
-_DEFLATE_DISABLED_CHANNEL_ARGUMENT = (
- 'grpc.compression_enabled_algorithms_bitset', 5)
-
-_TEST_UNARY_UNARY = '/test/TestUnaryUnary'
-_TEST_SET_COMPRESSION = '/test/TestSetCompression'
-_TEST_DISABLE_COMPRESSION_UNARY = '/test/TestDisableCompressionUnary'
-_TEST_DISABLE_COMPRESSION_STREAM = '/test/TestDisableCompressionStream'
-
-_REQUEST = b'\x01' * 100
-_RESPONSE = b'\x02' * 100
-
-
-async def _test_unary_unary(unused_request, unused_context):
- return _RESPONSE
-
-
-async def _test_set_compression(unused_request_iterator, context):
- assert _REQUEST == await context.read()
- context.set_compression(grpc.Compression.Deflate)
- await context.write(_RESPONSE)
- try:
- context.set_compression(grpc.Compression.Deflate)
- except RuntimeError:
-        # NOTE(lidiz) Testing if the servicer context raises an exception when
-        # the set_compression method is called after the initial metadata has
-        # been sent. Once the initial metadata has been sent, the server side
-        # has no control over which compression algorithm it should use.
- pass
- else:
- raise ValueError(
- 'Expecting exceptions if set_compression is not effective')
-
-
-async def _test_disable_compression_unary(request, context):
- assert _REQUEST == request
- context.set_compression(grpc.Compression.Deflate)
- context.disable_next_message_compression()
- return _RESPONSE
-
-
-async def _test_disable_compression_stream(unused_request_iterator, context):
- assert _REQUEST == await context.read()
- context.set_compression(grpc.Compression.Deflate)
- await context.write(_RESPONSE)
- context.disable_next_message_compression()
- await context.write(_RESPONSE)
- await context.write(_RESPONSE)
-
-
-_ROUTING_TABLE = {
- _TEST_UNARY_UNARY:
- grpc.unary_unary_rpc_method_handler(_test_unary_unary),
- _TEST_SET_COMPRESSION:
- grpc.stream_stream_rpc_method_handler(_test_set_compression),
- _TEST_DISABLE_COMPRESSION_UNARY:
- grpc.unary_unary_rpc_method_handler(_test_disable_compression_unary),
- _TEST_DISABLE_COMPRESSION_STREAM:
- grpc.stream_stream_rpc_method_handler(_test_disable_compression_stream),
-}
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- return _ROUTING_TABLE.get(handler_call_details.method)
-
-
-async def _start_test_server(options=None):
- server = aio.server(options=options)
- port = server.add_insecure_port('[::]:0')
- server.add_generic_rpc_handlers((_GenericHandler(),))
- await server.start()
- return f'localhost:{port}', server
-
-
-class TestCompression(AioTestBase):
-
- async def setUp(self):
- server_options = (_GZIP_DISABLED_CHANNEL_ARGUMENT,)
- self._address, self._server = await _start_test_server(server_options)
- self._channel = aio.insecure_channel(self._address)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
-    async def test_channel_level_compression_banned_compression(self):
- # GZIP is disabled, this call should fail
- async with aio.insecure_channel(
- self._address, compression=grpc.Compression.Gzip) as channel:
- multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
- call = multicallable(_REQUEST)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
-
- async def test_channel_level_compression_allowed_compression(self):
- # Deflate is allowed, this call should succeed
- async with aio.insecure_channel(
- self._address, compression=grpc.Compression.Deflate) as channel:
- multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
- call = multicallable(_REQUEST)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
-    async def test_client_call_level_compression_banned_compression(self):
- multicallable = self._channel.unary_unary(_TEST_UNARY_UNARY)
-
- # GZIP is disabled, this call should fail
- call = multicallable(_REQUEST, compression=grpc.Compression.Gzip)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
-
- async def test_client_call_level_compression_allowed_compression(self):
- multicallable = self._channel.unary_unary(_TEST_UNARY_UNARY)
-
- # Deflate is allowed, this call should succeed
- call = multicallable(_REQUEST, compression=grpc.Compression.Deflate)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_server_call_level_compression(self):
- multicallable = self._channel.stream_stream(_TEST_SET_COMPRESSION)
- call = multicallable()
- await call.write(_REQUEST)
- await call.done_writing()
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_server_disable_compression_unary(self):
- multicallable = self._channel.unary_unary(
- _TEST_DISABLE_COMPRESSION_UNARY)
- call = multicallable(_REQUEST)
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_server_disable_compression_stream(self):
- multicallable = self._channel.stream_stream(
- _TEST_DISABLE_COMPRESSION_STREAM)
- call = multicallable()
- await call.write(_REQUEST)
- await call.done_writing()
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_server_default_compression_algorithm(self):
- server = aio.server(compression=grpc.Compression.Deflate)
- port = server.add_insecure_port('[::]:0')
- server.add_generic_rpc_handlers((_GenericHandler(),))
- await server.start()
-
- async with aio.insecure_channel(f'localhost:{port}') as channel:
- multicallable = channel.unary_unary(_TEST_UNARY_UNARY)
- call = multicallable(_REQUEST)
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/connectivity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/connectivity_test.py
deleted file mode 100644
index 8ed2689cb8..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/connectivity_test.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior of the connectivity state."""
-
-import asyncio
-import logging
-import platform
-import threading
-import time
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests.unit.framework.common import test_constants
-from tests_aio.unit import _common
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-
-class TestConnectivityState(AioTestBase):
-
- async def setUp(self):
- self._server_address, self._server = await start_test_server()
-
- async def tearDown(self):
- await self._server.stop(None)
-
- @unittest.skipIf('aarch64' in platform.machine(),
- 'The transient failure propagation is slower on aarch64')
- async def test_unavailable_backend(self):
- async with aio.insecure_channel(UNREACHABLE_TARGET) as channel:
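-            # get_state(True) asks the channel to start connecting; since the
-            # target is unreachable, the state should reach TRANSIENT_FAILURE.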
- self.assertEqual(grpc.ChannelConnectivity.IDLE,
- channel.get_state(False))
- self.assertEqual(grpc.ChannelConnectivity.IDLE,
- channel.get_state(True))
-
- # Should not time out
- await asyncio.wait_for(
- _common.block_until_certain_state(
- channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE),
- test_constants.SHORT_TIMEOUT)
-
- async def test_normal_backend(self):
- async with aio.insecure_channel(self._server_address) as channel:
- current_state = channel.get_state(True)
- self.assertEqual(grpc.ChannelConnectivity.IDLE, current_state)
-
- # Should not time out
- await asyncio.wait_for(
- _common.block_until_certain_state(
- channel, grpc.ChannelConnectivity.READY),
- test_constants.SHORT_TIMEOUT)
-
- async def test_timeout(self):
- async with aio.insecure_channel(self._server_address) as channel:
- self.assertEqual(grpc.ChannelConnectivity.IDLE,
- channel.get_state(False))
-
-            # The channel never attempts to connect (no RPC is made), so
-            # waiting for the READY state should time out.
- with self.assertRaises(asyncio.TimeoutError):
- await asyncio.wait_for(
- _common.block_until_certain_state(
- channel, grpc.ChannelConnectivity.READY),
- test_constants.SHORT_TIMEOUT)
-
- async def test_shutdown(self):
- channel = aio.insecure_channel(self._server_address)
-
- self.assertEqual(grpc.ChannelConnectivity.IDLE,
- channel.get_state(False))
-
- # Waiting for changes in a separate coroutine
- wait_started = asyncio.Event()
-
- async def a_pending_wait():
- wait_started.set()
- await channel.wait_for_state_change(grpc.ChannelConnectivity.IDLE)
-
- pending_task = self.loop.create_task(a_pending_wait())
- await wait_started.wait()
-
- await channel.close()
-
- self.assertEqual(grpc.ChannelConnectivity.SHUTDOWN,
- channel.get_state(True))
-
- self.assertEqual(grpc.ChannelConnectivity.SHUTDOWN,
- channel.get_state(False))
-
- # Make sure there isn't any exception in the task
- await pending_task
-
-        # It may raise an exception since this is a usage error, but it should
-        # not segfault or abort.
- with self.assertRaises(aio.UsageError):
- await channel.wait_for_state_change(
- grpc.ChannelConnectivity.SHUTDOWN)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/context_peer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/context_peer_test.py
deleted file mode 100644
index 743d6599ef..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/context_peer_test.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Testing the server context ability to access peer info."""
-
-import asyncio
-import logging
-import os
-from typing import Callable, Iterable, Sequence, Tuple
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.unit.framework.common import test_constants
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import TestServiceServicer
-from tests_aio.unit._test_server import start_test_server
-
-_REQUEST = b'\x03\x07'
-_TEST_METHOD = '/test/UnaryUnary'
-
-
-class TestContextPeer(AioTestBase):
-
- async def test_peer(self):
-
- @grpc.unary_unary_rpc_method_handler
- async def check_peer_unary_unary(request: bytes,
- context: aio.ServicerContext):
- self.assertEqual(_REQUEST, request)
- # The peer address could be ipv4 or ipv6
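-            # context.peer() returns strings such as 'ipv4:127.0.0.1:<port>'
-            # or 'ipv6:[::1]:<port>', so 'ip' is present either way.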
- self.assertIn('ip', context.peer())
- return request
-
- # Creates a server
- server = aio.server()
- handlers = grpc.method_handlers_generic_handler(
- 'test', {'UnaryUnary': check_peer_unary_unary})
- server.add_generic_rpc_handlers((handlers,))
- port = server.add_insecure_port('[::]:0')
- await server.start()
-
- # Creates a channel
- async with aio.insecure_channel('localhost:%d' % port) as channel:
- response = await channel.unary_unary(_TEST_METHOD)(_REQUEST)
- self.assertEqual(_REQUEST, response)
-
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/done_callback_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/done_callback_test.py
deleted file mode 100644
index 42a070f2e8..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/done_callback_test.py
+++ /dev/null
@@ -1,276 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Testing the done callbacks mechanism."""
-
-import asyncio
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests_aio.unit._common import inject_callbacks
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-_REQUEST = b'\x01\x02\x03'
-_RESPONSE = b'\x04\x05\x06'
-_TEST_METHOD = '/test/Test'
-_FAKE_METHOD = '/test/Fake'
-
-
-class TestClientSideDoneCallback(AioTestBase):
-
- async def setUp(self):
- address, self._server = await start_test_server()
- self._channel = aio.insecure_channel(address)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_add_after_done(self):
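-        # Callbacks added after the RPC has already completed should still be
-        # invoked; the injected callbacks below are expected to fire.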
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- validation = inject_callbacks(call)
- await validation
-
- async def test_unary_unary(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- validation = inject_callbacks(call)
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- await validation
-
- async def test_unary_stream(self):
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- call = self._stub.StreamingOutputCall(request)
- validation = inject_callbacks(call)
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- await validation
-
- async def test_stream_unary(self):
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- call = self._stub.StreamingInputCall(gen())
- validation = inject_callbacks(call)
-
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- await validation
-
- async def test_stream_stream(self):
- call = self._stub.FullDuplexCall()
- validation = inject_callbacks(call)
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- await call.done_writing()
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
- await validation
-
-
-class TestServerSideDoneCallback(AioTestBase):
-
- async def setUp(self):
- self._server = aio.server()
- port = self._server.add_insecure_port('[::]:0')
- self._channel = aio.insecure_channel('localhost:%d' % port)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def _register_method_handler(self, method_handler):
-        """Registers the method handler and starts the server."""
- generic_handler = grpc.method_handlers_generic_handler(
- 'test',
- dict(Test=method_handler),
- )
- self._server.add_generic_rpc_handlers((generic_handler,))
- await self._server.start()
-
- async def test_unary_unary(self):
- validation_future = self.loop.create_future()
-
- async def test_handler(request: bytes, context: aio.ServicerContext):
- self.assertEqual(_REQUEST, request)
- validation_future.set_result(inject_callbacks(context))
- return _RESPONSE
-
- await self._register_method_handler(
- grpc.unary_unary_rpc_method_handler(test_handler))
- response = await self._channel.unary_unary(_TEST_METHOD)(_REQUEST)
- self.assertEqual(_RESPONSE, response)
-
- validation = await validation_future
- await validation
-
- async def test_unary_stream(self):
- validation_future = self.loop.create_future()
-
- async def test_handler(request: bytes, context: aio.ServicerContext):
- self.assertEqual(_REQUEST, request)
- validation_future.set_result(inject_callbacks(context))
- for _ in range(_NUM_STREAM_RESPONSES):
- yield _RESPONSE
-
- await self._register_method_handler(
- grpc.unary_stream_rpc_method_handler(test_handler))
- call = self._channel.unary_stream(_TEST_METHOD)(_REQUEST)
- async for response in call:
- self.assertEqual(_RESPONSE, response)
-
- validation = await validation_future
- await validation
-
- async def test_stream_unary(self):
- validation_future = self.loop.create_future()
-
- async def test_handler(request_iterator, context: aio.ServicerContext):
- validation_future.set_result(inject_callbacks(context))
-
- async for request in request_iterator:
- self.assertEqual(_REQUEST, request)
- return _RESPONSE
-
- await self._register_method_handler(
- grpc.stream_unary_rpc_method_handler(test_handler))
- call = self._channel.stream_unary(_TEST_METHOD)()
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(_REQUEST)
- await call.done_writing()
- self.assertEqual(_RESPONSE, await call)
-
- validation = await validation_future
- await validation
-
- async def test_stream_stream(self):
- validation_future = self.loop.create_future()
-
- async def test_handler(request_iterator, context: aio.ServicerContext):
- validation_future.set_result(inject_callbacks(context))
-
- async for request in request_iterator:
- self.assertEqual(_REQUEST, request)
- return _RESPONSE
-
- await self._register_method_handler(
- grpc.stream_stream_rpc_method_handler(test_handler))
- call = self._channel.stream_stream(_TEST_METHOD)()
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(_REQUEST)
- await call.done_writing()
- async for response in call:
- self.assertEqual(_RESPONSE, response)
-
- validation = await validation_future
- await validation
-
- async def test_error_in_handler(self):
-        """Errors in the handler still trigger the done callbacks."""
- validation_future = self.loop.create_future()
-
- async def test_handler(request: bytes, context: aio.ServicerContext):
- self.assertEqual(_REQUEST, request)
- validation_future.set_result(inject_callbacks(context))
- raise RuntimeError('A test RuntimeError')
-
- await self._register_method_handler(
- grpc.unary_unary_rpc_method_handler(test_handler))
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channel.unary_unary(_TEST_METHOD)(_REQUEST)
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNKNOWN, rpc_error.code())
-
- validation = await validation_future
- await validation
-
- async def test_error_in_callback(self):
-        """Errors in the callback won't be propagated to the client."""
- validation_future = self.loop.create_future()
-
- async def test_handler(request: bytes, context: aio.ServicerContext):
- self.assertEqual(_REQUEST, request)
-
- def exception_raiser(unused_context):
- raise RuntimeError('A test RuntimeError')
-
- context.add_done_callback(exception_raiser)
- validation_future.set_result(inject_callbacks(context))
- return _RESPONSE
-
- await self._register_method_handler(
- grpc.unary_unary_rpc_method_handler(test_handler))
-
- response = await self._channel.unary_unary(_TEST_METHOD)(_REQUEST)
- self.assertEqual(_RESPONSE, response)
-
-        # Subsequent callbacks won't be invoked if one of the callbacks crashes.
- validation = await validation_future
- with self.assertRaises(asyncio.TimeoutError):
- await validation
-
- # Invoke RPC one more time to ensure the toxic callback won't break the
- # server.
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await self._channel.unary_unary(_FAKE_METHOD)(_REQUEST)
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py
deleted file mode 100644
index b7889b9942..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/init_test.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import logging
-import unittest
-
-
-class TestInit(unittest.TestCase):
-
- def test_grpc(self):
- import grpc # pylint: disable=wrong-import-position
- channel = grpc.aio.insecure_channel('phony')
- self.assertIsInstance(channel, grpc.aio.Channel)
-
- def test_grpc_dot_aio(self):
- import grpc.aio # pylint: disable=wrong-import-position
- channel = grpc.aio.insecure_channel('phony')
- self.assertIsInstance(channel, grpc.aio.Channel)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/metadata_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/metadata_test.py
deleted file mode 100644
index 4043d19e31..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/metadata_test.py
+++ /dev/null
@@ -1,325 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior around the metadata mechanism."""
-
-import asyncio
-import logging
-import platform
-import random
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-
-_TEST_CLIENT_TO_SERVER = '/test/TestClientToServer'
-_TEST_SERVER_TO_CLIENT = '/test/TestServerToClient'
-_TEST_TRAILING_METADATA = '/test/TestTrailingMetadata'
-_TEST_ECHO_INITIAL_METADATA = '/test/TestEchoInitialMetadata'
-_TEST_GENERIC_HANDLER = '/test/TestGenericHandler'
-_TEST_UNARY_STREAM = '/test/TestUnaryStream'
-_TEST_STREAM_UNARY = '/test/TestStreamUnary'
-_TEST_STREAM_STREAM = '/test/TestStreamStream'
-_TEST_INSPECT_CONTEXT = '/test/TestInspectContext'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-
-_INITIAL_METADATA_FROM_CLIENT_TO_SERVER = aio.Metadata(
- ('client-to-server', 'question'),
- ('client-to-server-bin', b'\x07\x07\x07'),
-)
-_INITIAL_METADATA_FROM_SERVER_TO_CLIENT = aio.Metadata(
- ('server-to-client', 'answer'),
- ('server-to-client-bin', b'\x06\x06\x06'),
-)
-_TRAILING_METADATA = aio.Metadata(
- ('a-trailing-metadata', 'stack-trace'),
- ('a-trailing-metadata-bin', b'\x05\x05\x05'),
-)
-_INITIAL_METADATA_FOR_GENERIC_HANDLER = aio.Metadata(
- ('a-must-have-key', 'secret'),)
-
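-# Each case pairs the expected exception type with a metadata value that the
-# client is expected to reject.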
-_INVALID_METADATA_TEST_CASES = (
- (
- TypeError,
- ((42, 42),),
- ),
- (
- TypeError,
- (({}, {}),),
- ),
- (
- TypeError,
- ((None, {}),),
- ),
- (
- TypeError,
- (({}, {}),),
- ),
- (
- TypeError,
- (('normal', object()),),
- ),
-)
-
-_NON_OK_CODE = grpc.StatusCode.NOT_FOUND
-_DETAILS = 'Test details!'
-
-
-class _TestGenericHandlerForMethods(grpc.GenericRpcHandler):
-
- def __init__(self):
- self._routing_table = {
- _TEST_CLIENT_TO_SERVER:
- grpc.unary_unary_rpc_method_handler(self._test_client_to_server
- ),
- _TEST_SERVER_TO_CLIENT:
- grpc.unary_unary_rpc_method_handler(self._test_server_to_client
- ),
- _TEST_TRAILING_METADATA:
- grpc.unary_unary_rpc_method_handler(self._test_trailing_metadata
- ),
- _TEST_UNARY_STREAM:
- grpc.unary_stream_rpc_method_handler(self._test_unary_stream),
- _TEST_STREAM_UNARY:
- grpc.stream_unary_rpc_method_handler(self._test_stream_unary),
- _TEST_STREAM_STREAM:
- grpc.stream_stream_rpc_method_handler(self._test_stream_stream),
- _TEST_INSPECT_CONTEXT:
- grpc.unary_unary_rpc_method_handler(self._test_inspect_context),
- }
-
- @staticmethod
- async def _test_client_to_server(request, context):
- assert _REQUEST == request
- assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
- context.invocation_metadata())
- return _RESPONSE
-
- @staticmethod
- async def _test_server_to_client(request, context):
- assert _REQUEST == request
- await context.send_initial_metadata(
- _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
- return _RESPONSE
-
- @staticmethod
- async def _test_trailing_metadata(request, context):
- assert _REQUEST == request
- context.set_trailing_metadata(_TRAILING_METADATA)
- return _RESPONSE
-
- @staticmethod
- async def _test_unary_stream(request, context):
- assert _REQUEST == request
- assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
- context.invocation_metadata())
- await context.send_initial_metadata(
- _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
- yield _RESPONSE
- context.set_trailing_metadata(_TRAILING_METADATA)
-
- @staticmethod
- async def _test_stream_unary(request_iterator, context):
- assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
- context.invocation_metadata())
- await context.send_initial_metadata(
- _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
-
- async for request in request_iterator:
- assert _REQUEST == request
-
- context.set_trailing_metadata(_TRAILING_METADATA)
- return _RESPONSE
-
- @staticmethod
- async def _test_stream_stream(request_iterator, context):
- assert _common.seen_metadata(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER,
- context.invocation_metadata())
- await context.send_initial_metadata(
- _INITIAL_METADATA_FROM_SERVER_TO_CLIENT)
-
- async for request in request_iterator:
- assert _REQUEST == request
-
- yield _RESPONSE
- context.set_trailing_metadata(_TRAILING_METADATA)
-
- @staticmethod
- async def _test_inspect_context(request, context):
- assert _REQUEST == request
- context.set_code(_NON_OK_CODE)
- context.set_details(_DETAILS)
- context.set_trailing_metadata(_TRAILING_METADATA)
-
- # ensure that we can read back the data we set on the context
- assert context.get_code() == _NON_OK_CODE
- assert context.get_details() == _DETAILS
- assert context.get_trailing_metadata() == _TRAILING_METADATA
- return _RESPONSE
-
- def service(self, handler_call_details):
- return self._routing_table.get(handler_call_details.method)
-
-
-class _TestGenericHandlerItself(grpc.GenericRpcHandler):
-
- @staticmethod
- async def _method(request, unused_context):
- assert _REQUEST == request
- return _RESPONSE
-
- def service(self, handler_call_details):
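-        # Invocation metadata is already visible at routing time, before any
-        # method handler has been selected.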
- assert _common.seen_metadata(_INITIAL_METADATA_FOR_GENERIC_HANDLER,
- handler_call_details.invocation_metadata)
- return grpc.unary_unary_rpc_method_handler(self._method)
-
-
-async def _start_test_server():
- server = aio.server()
- port = server.add_insecure_port('[::]:0')
- server.add_generic_rpc_handlers((
- _TestGenericHandlerForMethods(),
- _TestGenericHandlerItself(),
- ))
- await server.start()
- return 'localhost:%d' % port, server
-
-
-class TestMetadata(AioTestBase):
-
- async def setUp(self):
- address, self._server = await _start_test_server()
- self._client = aio.insecure_channel(address)
-
- async def tearDown(self):
- await self._client.close()
- await self._server.stop(None)
-
- async def test_from_client_to_server(self):
- multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
- call = multicallable(_REQUEST,
- metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_from_server_to_client(self):
- multicallable = self._client.unary_unary(_TEST_SERVER_TO_CLIENT)
- call = multicallable(_REQUEST)
-
- self.assertEqual(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
- call.initial_metadata())
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_trailing_metadata(self):
- multicallable = self._client.unary_unary(_TEST_TRAILING_METADATA)
- call = multicallable(_REQUEST)
- self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_from_client_to_server_with_list(self):
- multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
- call = multicallable(
- _REQUEST, metadata=list(_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)) # pytype: disable=wrong-arg-types
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- @unittest.skipIf(platform.system() == 'Windows',
- 'https://github.com/grpc/grpc/issues/21943')
- async def test_invalid_metadata(self):
- multicallable = self._client.unary_unary(_TEST_CLIENT_TO_SERVER)
- for exception_type, metadata in _INVALID_METADATA_TEST_CASES:
- with self.subTest(metadata=metadata):
- with self.assertRaises(exception_type):
- call = multicallable(_REQUEST, metadata=metadata)
- await call
-
- async def test_generic_handler(self):
- multicallable = self._client.unary_unary(_TEST_GENERIC_HANDLER)
- call = multicallable(_REQUEST,
- metadata=_INITIAL_METADATA_FOR_GENERIC_HANDLER)
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_unary_stream(self):
- multicallable = self._client.unary_stream(_TEST_UNARY_STREAM)
- call = multicallable(_REQUEST,
- metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
-
- self.assertTrue(
- _common.seen_metadata(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
- call.initial_metadata()))
-
- self.assertSequenceEqual([_RESPONSE],
- [request async for request in call])
-
- self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_stream_unary(self):
- multicallable = self._client.stream_unary(_TEST_STREAM_UNARY)
- call = multicallable(metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
- await call.write(_REQUEST)
- await call.done_writing()
-
- self.assertTrue(
- _common.seen_metadata(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
- call.initial_metadata()))
- self.assertEqual(_RESPONSE, await call)
-
- self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_stream_stream(self):
- multicallable = self._client.stream_stream(_TEST_STREAM_STREAM)
- call = multicallable(metadata=_INITIAL_METADATA_FROM_CLIENT_TO_SERVER)
- await call.write(_REQUEST)
- await call.done_writing()
-
- self.assertTrue(
- _common.seen_metadata(_INITIAL_METADATA_FROM_SERVER_TO_CLIENT, await
- call.initial_metadata()))
- self.assertSequenceEqual([_RESPONSE],
- [request async for request in call])
- self.assertEqual(_TRAILING_METADATA, await call.trailing_metadata())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_compatibility_with_tuple(self):
- metadata_obj = aio.Metadata(('key', '42'), ('key-2', 'value'))
- self.assertEqual(metadata_obj, tuple(metadata_obj))
- self.assertEqual(tuple(metadata_obj), metadata_obj)
-
- expected_sum = tuple(metadata_obj) + (('third', '3'),)
- self.assertEqual(expected_sum, metadata_obj + (('third', '3'),))
- self.assertEqual(expected_sum, metadata_obj + aio.Metadata(
- ('third', '3')))
-
- async def test_inspect_context(self):
- multicallable = self._client.unary_unary(_TEST_INSPECT_CONTEXT)
- call = multicallable(_REQUEST)
- with self.assertRaises(grpc.RpcError) as exc_data:
- await call
-
- err = exc_data.exception
- self.assertEqual(_NON_OK_CODE, err.code())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/outside_init_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/outside_init_test.py
deleted file mode 100644
index 79a7518585..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/outside_init_test.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests using the AsyncIO APIs outside of an asyncio event loop."""
-
-import asyncio
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests_aio.unit._test_server import start_test_server
-
-_NUM_OF_LOOPS = 50
-
-
-class TestOutsideInit(unittest.TestCase):
-
- def test_behavior_outside_asyncio(self):
-        # Ensures non-AsyncIO objects can be instantiated
- channel_creds = grpc.ssl_channel_credentials()
-
-        # Ensures the AsyncIO APIs don't raise when used outside of AsyncIO.
-        # NOTE(lidiz) This behavior is relied upon by the GAPIC generator and
-        # required by test frameworks like pytest. In test frameworks, objects
-        # shared across cases need to be created outside of AsyncIO coroutines.
- aio.insecure_channel('')
- aio.secure_channel('', channel_creds)
- aio.server()
- aio.init_grpc_aio()
- aio.shutdown_grpc_aio()
-
- def test_multi_ephemeral_loops(self):
-        # Initializing the AIO module outside the loops is part of the test. We
-        # especially want to ensure that closing the default loop won't cause
-        # deadlocks.
- aio.init_grpc_aio()
-
- async def ping_pong():
- address, server = await start_test_server()
- channel = aio.insecure_channel(address)
- stub = test_pb2_grpc.TestServiceStub(channel)
-
- await stub.UnaryCall(messages_pb2.SimpleRequest())
-
- await channel.close()
- await server.stop(None)
-
- for i in range(_NUM_OF_LOOPS):
- old_loop = asyncio.get_event_loop()
- old_loop.close()
-
- loop = asyncio.new_event_loop()
- loop.set_debug(True)
- asyncio.set_event_loop(loop)
-
- loop.run_until_complete(ping_pong())
-
- aio.shutdown_grpc_aio()
-
-
-if __name__ == "__main__":
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/secure_call_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/secure_call_test.py
deleted file mode 100644
index a5b03f43ae..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/secure_call_test.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright 2020 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests the behaviour of the Call classes under a secure channel."""
-
-import logging
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.unit import resources
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
-_NUM_STREAM_RESPONSES = 5
-_RESPONSE_PAYLOAD_SIZE = 42
-
-
-class _SecureCallMixin:
- """A Mixin to run the call tests over a secure channel."""
-
- async def setUp(self):
- server_credentials = grpc.ssl_server_credentials([
- (resources.private_key(), resources.certificate_chain())
- ])
- channel_credentials = grpc.ssl_channel_credentials(
- resources.test_root_certificates())
-
- self._server_address, self._server = await start_test_server(
- secure=True, server_credentials=server_credentials)
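-        # The test certificates are issued for foo.test.google.fr, so the
-        # target name is overridden for hostname verification to succeed.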
- channel_options = ((
- 'grpc.ssl_target_name_override',
- _SERVER_HOST_OVERRIDE,
- ),)
- self._channel = aio.secure_channel(self._server_address,
- channel_credentials, channel_options)
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
-
-class TestUnaryUnarySecureCall(_SecureCallMixin, AioTestBase):
- """unary_unary Calls made over a secure channel."""
-
- async def test_call_ok_over_secure_channel(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
- response = await call
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_call_with_credentials(self):
- call_credentials = grpc.composite_call_credentials(
- grpc.access_token_call_credentials("abc"),
- grpc.access_token_call_credentials("def"),
- )
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest(),
- credentials=call_credentials)
- response = await call
-
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
-
-
-class TestUnaryStreamSecureCall(_SecureCallMixin, AioTestBase):
- """unary_stream calls over a secure channel"""
-
- async def test_unary_stream_async_generator_secure(self):
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.extend(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,)
- for _ in range(_NUM_STREAM_RESPONSES))
- call_credentials = grpc.composite_call_credentials(
- grpc.access_token_call_credentials("abc"),
- grpc.access_token_call_credentials("def"),
- )
- call = self._stub.StreamingOutputCall(request,
- credentials=call_credentials)
-
- async for response in call:
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(len(response.payload.body), _RESPONSE_PAYLOAD_SIZE)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
-
-# Prepares the request used to stream in a ping-pong manner.
-_STREAM_OUTPUT_REQUEST_ONE_RESPONSE = messages_pb2.StreamingOutputCallRequest()
-_STREAM_OUTPUT_REQUEST_ONE_RESPONSE.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
-
-class TestStreamStreamSecureCall(_SecureCallMixin, AioTestBase):
- _STREAM_ITERATIONS = 2
-
- async def test_async_generator_secure_channel(self):
-
- async def request_generator():
- for _ in range(self._STREAM_ITERATIONS):
- yield _STREAM_OUTPUT_REQUEST_ONE_RESPONSE
-
- call_credentials = grpc.composite_call_credentials(
- grpc.access_token_call_credentials("abc"),
- grpc.access_token_call_credentials("def"),
- )
-
- call = self._stub.FullDuplexCall(request_generator(),
- credentials=call_credentials)
- async for response in call:
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_interceptor_test.py
deleted file mode 100644
index 18f5df09de..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_interceptor_test.py
+++ /dev/null
@@ -1,334 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test the functionality of server interceptors."""
-
-import asyncio
-import functools
-import logging
-from typing import Any, Awaitable, Callable, Tuple
-import unittest
-
-import grpc
-from grpc.experimental import aio
-from grpc.experimental import wrap_server_method_handler
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-
-
-class _LoggingInterceptor(aio.ServerInterceptor):
-
- def __init__(self, tag: str, record: list) -> None:
- self.tag = tag
- self.record = record
-
- async def intercept_service(
- self, continuation: Callable[[grpc.HandlerCallDetails],
- Awaitable[grpc.RpcMethodHandler]],
- handler_call_details: grpc.HandlerCallDetails
- ) -> grpc.RpcMethodHandler:
- self.record.append(self.tag + ':intercept_service')
- return await continuation(handler_call_details)
-
-
-class _GenericInterceptor(aio.ServerInterceptor):
-
- def __init__(
- self, fn: Callable[[
- Callable[[grpc.HandlerCallDetails],
- Awaitable[grpc.RpcMethodHandler]], grpc.HandlerCallDetails
- ], Any]
- ) -> None:
- self._fn = fn
-
- async def intercept_service(
- self, continuation: Callable[[grpc.HandlerCallDetails],
- Awaitable[grpc.RpcMethodHandler]],
- handler_call_details: grpc.HandlerCallDetails
- ) -> grpc.RpcMethodHandler:
- return await self._fn(continuation, handler_call_details)
-
-
-def _filter_server_interceptor(
- condition: Callable,
- interceptor: aio.ServerInterceptor) -> aio.ServerInterceptor:
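-    """Applies `interceptor` only when `condition(handler_call_details)` holds."""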
-
- async def intercept_service(
- continuation: Callable[[grpc.HandlerCallDetails],
- Awaitable[grpc.RpcMethodHandler]],
- handler_call_details: grpc.HandlerCallDetails
- ) -> grpc.RpcMethodHandler:
- if condition(handler_call_details):
- return await interceptor.intercept_service(continuation,
- handler_call_details)
- return await continuation(handler_call_details)
-
- return _GenericInterceptor(intercept_service)
-
-
-class _CacheInterceptor(aio.ServerInterceptor):
-    """An interceptor that caches responses based on the request message."""
-
- def __init__(self, cache_store=None):
- self.cache_store = cache_store or {}
-
- async def intercept_service(
- self, continuation: Callable[[grpc.HandlerCallDetails],
- Awaitable[grpc.RpcMethodHandler]],
- handler_call_details: grpc.HandlerCallDetails
- ) -> grpc.RpcMethodHandler:
- # Get the actual handler
- handler = await continuation(handler_call_details)
-
- # Only intercept unary call RPCs
- if handler and (handler.request_streaming or # pytype: disable=attribute-error
- handler.response_streaming): # pytype: disable=attribute-error
- return handler
-
- def wrapper(behavior: Callable[
- [messages_pb2.SimpleRequest, aio.ServicerContext],
- messages_pb2.SimpleResponse]):
-
- @functools.wraps(behavior)
- async def wrapper(
- request: messages_pb2.SimpleRequest,
- context: aio.ServicerContext
- ) -> messages_pb2.SimpleResponse:
- if request.response_size not in self.cache_store:
- self.cache_store[request.response_size] = await behavior(
- request, context)
- return self.cache_store[request.response_size]
-
- return wrapper
-
- return wrap_server_method_handler(wrapper, handler)
-
-
-async def _create_server_stub_pair(
- *interceptors: aio.ServerInterceptor
-) -> Tuple[aio.Server, test_pb2_grpc.TestServiceStub]:
-    """Creates a server-stub pair with the given interceptors.
-
-    The server object is returned to keep it from being garbage collected.
- """
- server_target, server = await start_test_server(interceptors=interceptors)
- channel = aio.insecure_channel(server_target)
- return server, test_pb2_grpc.TestServiceStub(channel)
-
-
-class TestServerInterceptor(AioTestBase):
-
- async def test_invalid_interceptor(self):
-
- class InvalidInterceptor:
- """Just an invalid Interceptor"""
-
- with self.assertRaises(ValueError):
- server_target, _ = await start_test_server(
- interceptors=(InvalidInterceptor(),))
-
- async def test_executed_right_order(self):
- record = []
- server_target, _ = await start_test_server(interceptors=(
- _LoggingInterceptor('log1', record),
- _LoggingInterceptor('log2', record),
- ))
-
- async with aio.insecure_channel(server_target) as channel:
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
-
- # Check that all interceptors were executed, and were executed
- # in the right order.
- self.assertSequenceEqual([
- 'log1:intercept_service',
- 'log2:intercept_service',
- ], record)
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
-
- async def test_response_ok(self):
- record = []
- server_target, _ = await start_test_server(
- interceptors=(_LoggingInterceptor('log1', record),))
-
- async with aio.insecure_channel(server_target) as channel:
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
- call = multicallable(messages_pb2.SimpleRequest())
- response = await call
- code = await call.code()
-
- self.assertSequenceEqual(['log1:intercept_service'], record)
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
- self.assertEqual(code, grpc.StatusCode.OK)
-
- async def test_apply_different_interceptors_by_metadata(self):
- record = []
- conditional_interceptor = _filter_server_interceptor(
- lambda x: ('secret', '42') in x.invocation_metadata,
- _LoggingInterceptor('log3', record))
- server_target, _ = await start_test_server(interceptors=(
- _LoggingInterceptor('log1', record),
- conditional_interceptor,
- _LoggingInterceptor('log2', record),
- ))
-
- async with aio.insecure_channel(server_target) as channel:
- multicallable = channel.unary_unary(
- '/grpc.testing.TestService/UnaryCall',
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString)
-
- metadata = aio.Metadata(('key', 'value'),)
- call = multicallable(messages_pb2.SimpleRequest(),
- metadata=metadata)
- await call
- self.assertSequenceEqual([
- 'log1:intercept_service',
- 'log2:intercept_service',
- ], record)
-
- record.clear()
- metadata = aio.Metadata(('key', 'value'), ('secret', '42'))
- call = multicallable(messages_pb2.SimpleRequest(),
- metadata=metadata)
- await call
- self.assertSequenceEqual([
- 'log1:intercept_service',
- 'log3:intercept_service',
- 'log2:intercept_service',
- ], record)
-
- async def test_response_caching(self):
-        # Pre-populates the cache with a preset value to simplify testing
- interceptor = _CacheInterceptor({
- 42:
- messages_pb2.SimpleResponse(payload=messages_pb2.Payload(
- body=b'\x42'))
- })
-
- # Constructs a server with the cache interceptor
- server, stub = await _create_server_stub_pair(interceptor)
-
- # Tests if the cache store is used
- response = await stub.UnaryCall(
- messages_pb2.SimpleRequest(response_size=42))
- self.assertEqual(1, len(interceptor.cache_store[42].payload.body))
- self.assertEqual(interceptor.cache_store[42], response)
-
-        # Tests that a fresh response gets cached
- response = await stub.UnaryCall(
- messages_pb2.SimpleRequest(response_size=1337))
- self.assertEqual(1337, len(interceptor.cache_store[1337].payload.body))
- self.assertEqual(interceptor.cache_store[1337], response)
- response = await stub.UnaryCall(
- messages_pb2.SimpleRequest(response_size=1337))
- self.assertEqual(interceptor.cache_store[1337], response)
-
- async def test_interceptor_unary_stream(self):
- record = []
- server, stub = await _create_server_stub_pair(
- _LoggingInterceptor('log_unary_stream', record))
-
- # Prepares the request
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE,))
-
-        # Invokes the actual RPC
- call = stub.StreamingOutputCall(request)
-
- # Ensures the RPC goes fine
- async for response in call:
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- self.assertSequenceEqual([
- 'log_unary_stream:intercept_service',
- ], record)
-
- async def test_interceptor_stream_unary(self):
- record = []
- server, stub = await _create_server_stub_pair(
- _LoggingInterceptor('log_stream_unary', record))
-
- # Invokes the actual RPC
- call = stub.StreamingInputCall()
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- # Sends out requests
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- await call.done_writing()
-
- # Validates the responses
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- self.assertSequenceEqual([
- 'log_stream_unary:intercept_service',
- ], record)
-
- async def test_interceptor_stream_stream(self):
- record = []
- server, stub = await _create_server_stub_pair(
- _LoggingInterceptor('log_stream_stream', record))
-
- # Prepares the request
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- # Invokes the actual RPC
- call = stub.StreamingInputCall(gen())
-
- # Validates the responses
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- self.assertSequenceEqual([
- 'log_stream_stream:intercept_service',
- ], record)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py
deleted file mode 100644
index 2f0f6de110..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_test.py
+++ /dev/null
@@ -1,592 +0,0 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import asyncio
-import logging
-import time
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests.unit import resources
-from tests.unit.framework.common import test_constants
-from tests_aio.unit._test_base import AioTestBase
-
-_SIMPLE_UNARY_UNARY = '/test/SimpleUnaryUnary'
-_BLOCK_FOREVER = '/test/BlockForever'
-_BLOCK_BRIEFLY = '/test/BlockBriefly'
-_UNARY_STREAM_ASYNC_GEN = '/test/UnaryStreamAsyncGen'
-_UNARY_STREAM_READER_WRITER = '/test/UnaryStreamReaderWriter'
-_UNARY_STREAM_EVILLY_MIXED = '/test/UnaryStreamEvillyMixed'
-_STREAM_UNARY_ASYNC_GEN = '/test/StreamUnaryAsyncGen'
-_STREAM_UNARY_READER_WRITER = '/test/StreamUnaryReaderWriter'
-_STREAM_UNARY_EVILLY_MIXED = '/test/StreamUnaryEvillyMixed'
-_STREAM_STREAM_ASYNC_GEN = '/test/StreamStreamAsyncGen'
-_STREAM_STREAM_READER_WRITER = '/test/StreamStreamReaderWriter'
-_STREAM_STREAM_EVILLY_MIXED = '/test/StreamStreamEvillyMixed'
-_UNIMPLEMENTED_METHOD = '/test/UnimplementedMethod'
-_ERROR_IN_STREAM_STREAM = '/test/ErrorInStreamStream'
-_ERROR_IN_STREAM_UNARY = '/test/ErrorInStreamUnary'
-_ERROR_WITHOUT_RAISE_IN_UNARY_UNARY = '/test/ErrorWithoutRaiseInUnaryUnary'
-_ERROR_WITHOUT_RAISE_IN_STREAM_STREAM = '/test/ErrorWithoutRaiseInStreamStream'
-_INVALID_TRAILING_METADATA = '/test/InvalidTrailingMetadata'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-_NUM_STREAM_REQUESTS = 3
-_NUM_STREAM_RESPONSES = 5
-_MAXIMUM_CONCURRENT_RPCS = 5
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def __init__(self):
- self._called = asyncio.get_event_loop().create_future()
- self._routing_table = {
- _SIMPLE_UNARY_UNARY:
- grpc.unary_unary_rpc_method_handler(self._unary_unary),
- _BLOCK_FOREVER:
- grpc.unary_unary_rpc_method_handler(self._block_forever),
- _BLOCK_BRIEFLY:
- grpc.unary_unary_rpc_method_handler(self._block_briefly),
- _UNARY_STREAM_ASYNC_GEN:
- grpc.unary_stream_rpc_method_handler(
- self._unary_stream_async_gen),
- _UNARY_STREAM_READER_WRITER:
- grpc.unary_stream_rpc_method_handler(
- self._unary_stream_reader_writer),
- _UNARY_STREAM_EVILLY_MIXED:
- grpc.unary_stream_rpc_method_handler(
- self._unary_stream_evilly_mixed),
- _STREAM_UNARY_ASYNC_GEN:
- grpc.stream_unary_rpc_method_handler(
- self._stream_unary_async_gen),
- _STREAM_UNARY_READER_WRITER:
- grpc.stream_unary_rpc_method_handler(
- self._stream_unary_reader_writer),
- _STREAM_UNARY_EVILLY_MIXED:
- grpc.stream_unary_rpc_method_handler(
- self._stream_unary_evilly_mixed),
- _STREAM_STREAM_ASYNC_GEN:
- grpc.stream_stream_rpc_method_handler(
- self._stream_stream_async_gen),
- _STREAM_STREAM_READER_WRITER:
- grpc.stream_stream_rpc_method_handler(
- self._stream_stream_reader_writer),
- _STREAM_STREAM_EVILLY_MIXED:
- grpc.stream_stream_rpc_method_handler(
- self._stream_stream_evilly_mixed),
- _ERROR_IN_STREAM_STREAM:
- grpc.stream_stream_rpc_method_handler(
- self._error_in_stream_stream),
- _ERROR_IN_STREAM_UNARY:
- grpc.stream_unary_rpc_method_handler(
- self._value_error_in_stream_unary),
- _ERROR_WITHOUT_RAISE_IN_UNARY_UNARY:
- grpc.unary_unary_rpc_method_handler(
- self._error_without_raise_in_unary_unary),
- _ERROR_WITHOUT_RAISE_IN_STREAM_STREAM:
- grpc.stream_stream_rpc_method_handler(
- self._error_without_raise_in_stream_stream),
- _INVALID_TRAILING_METADATA:
- grpc.unary_unary_rpc_method_handler(
- self._invalid_trailing_metadata),
- }
-
- @staticmethod
- async def _unary_unary(unused_request, unused_context):
- return _RESPONSE
-
- async def _block_forever(self, unused_request, unused_context):
- await asyncio.get_event_loop().create_future()
-
- async def _block_briefly(self, unused_request, unused_context):
- await asyncio.sleep(test_constants.SHORT_TIMEOUT / 2)
- return _RESPONSE
-
- async def _unary_stream_async_gen(self, unused_request, unused_context):
- for _ in range(_NUM_STREAM_RESPONSES):
- yield _RESPONSE
-
- async def _unary_stream_reader_writer(self, unused_request, context):
- for _ in range(_NUM_STREAM_RESPONSES):
- await context.write(_RESPONSE)
-
- async def _unary_stream_evilly_mixed(self, unused_request, context):
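-        # "Evilly mixed" deliberately mixes the async-generator style (yield)
-        # with the reader/writer style (context.write) in a single handler.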
- yield _RESPONSE
- for _ in range(_NUM_STREAM_RESPONSES - 1):
- await context.write(_RESPONSE)
-
- async def _stream_unary_async_gen(self, request_iterator, unused_context):
- request_count = 0
- async for request in request_iterator:
- assert _REQUEST == request
- request_count += 1
- assert _NUM_STREAM_REQUESTS == request_count
- return _RESPONSE
-
- async def _stream_unary_reader_writer(self, unused_request, context):
- for _ in range(_NUM_STREAM_REQUESTS):
- assert _REQUEST == await context.read()
- return _RESPONSE
-
- async def _stream_unary_evilly_mixed(self, request_iterator, context):
- assert _REQUEST == await context.read()
- request_count = 0
- async for request in request_iterator:
- assert _REQUEST == request
- request_count += 1
- assert _NUM_STREAM_REQUESTS - 1 == request_count
- return _RESPONSE
-
- async def _stream_stream_async_gen(self, request_iterator, unused_context):
- request_count = 0
- async for request in request_iterator:
- assert _REQUEST == request
- request_count += 1
- assert _NUM_STREAM_REQUESTS == request_count
-
- for _ in range(_NUM_STREAM_RESPONSES):
- yield _RESPONSE
-
- async def _stream_stream_reader_writer(self, unused_request, context):
- for _ in range(_NUM_STREAM_REQUESTS):
- assert _REQUEST == await context.read()
- for _ in range(_NUM_STREAM_RESPONSES):
- await context.write(_RESPONSE)
-
- async def _stream_stream_evilly_mixed(self, request_iterator, context):
- assert _REQUEST == await context.read()
- request_count = 0
- async for request in request_iterator:
- assert _REQUEST == request
- request_count += 1
- assert _NUM_STREAM_REQUESTS - 1 == request_count
-
- yield _RESPONSE
- for _ in range(_NUM_STREAM_RESPONSES - 1):
- await context.write(_RESPONSE)
-
- async def _error_in_stream_stream(self, request_iterator, unused_context):
- async for request in request_iterator:
- assert _REQUEST == request
- raise RuntimeError('A testing RuntimeError!')
- yield _RESPONSE
-
- async def _value_error_in_stream_unary(self, request_iterator, context):
- request_count = 0
- async for request in request_iterator:
- assert _REQUEST == request
- request_count += 1
- if request_count >= 1:
-                raise ValueError('A testing ValueError!')
-
- async def _error_without_raise_in_unary_unary(self, request, context):
- assert _REQUEST == request
- context.set_code(grpc.StatusCode.INTERNAL)
-
- async def _error_without_raise_in_stream_stream(self, request_iterator,
- context):
- async for request in request_iterator:
- assert _REQUEST == request
- context.set_code(grpc.StatusCode.INTERNAL)
-
- async def _invalid_trailing_metadata(self, request, context):
- assert _REQUEST == request
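-        # Every value below is invalid metadata and must raise TypeError; the
-        # abort at the end also passes malformed trailing metadata on purpose.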
- for invalid_metadata in [
- 42, {}, {
- 'error': 'error'
- }, [{
- 'error': "error"
- }]
- ]:
- try:
- context.set_trailing_metadata(invalid_metadata)
- except TypeError:
- pass
- else:
- raise ValueError(
- f'No TypeError raised for invalid metadata: {invalid_metadata}'
- )
-
- await context.abort(grpc.StatusCode.DATA_LOSS,
- details="invalid abort",
- trailing_metadata=({
- 'error': ('error1', 'error2')
- }))
-
- def service(self, handler_details):
- if not self._called.done():
- self._called.set_result(None)
- return self._routing_table.get(handler_details.method)
-
- async def wait_for_call(self):
- await self._called
-
-
-async def _start_test_server():
- server = aio.server()
- port = server.add_insecure_port('[::]:0')
- generic_handler = _GenericHandler()
- server.add_generic_rpc_handlers((generic_handler,))
- await server.start()
- return 'localhost:%d' % port, server, generic_handler
-
-
-class TestServer(AioTestBase):
-
- async def setUp(self):
- addr, self._server, self._generic_handler = await _start_test_server()
- self._channel = aio.insecure_channel(addr)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_unary_unary(self):
- unary_unary_call = self._channel.unary_unary(_SIMPLE_UNARY_UNARY)
- response = await unary_unary_call(_REQUEST)
- self.assertEqual(response, _RESPONSE)
-
- async def test_unary_stream_async_generator(self):
- unary_stream_call = self._channel.unary_stream(_UNARY_STREAM_ASYNC_GEN)
- call = unary_stream_call(_REQUEST)
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertEqual(_RESPONSE, response)
-
- self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_unary_stream_reader_writer(self):
- unary_stream_call = self._channel.unary_stream(
- _UNARY_STREAM_READER_WRITER)
- call = unary_stream_call(_REQUEST)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertEqual(_RESPONSE, response)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_unary_stream_evilly_mixed(self):
- unary_stream_call = self._channel.unary_stream(
- _UNARY_STREAM_EVILLY_MIXED)
- call = unary_stream_call(_REQUEST)
-
- # Uses reader API
- self.assertEqual(_RESPONSE, await call.read())
-
- # Uses async generator API, mixed!
- with self.assertRaises(aio.UsageError):
- async for response in call:
- self.assertEqual(_RESPONSE, response)
-
- async def test_stream_unary_async_generator(self):
- stream_unary_call = self._channel.stream_unary(_STREAM_UNARY_ASYNC_GEN)
- call = stream_unary_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- response = await call
- self.assertEqual(_RESPONSE, response)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_unary_async_generator_with_request_iter(self):
- stream_unary_call = self._channel.stream_unary(_STREAM_UNARY_ASYNC_GEN)
-
- finished = False
-
- def request_gen():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield _REQUEST
- nonlocal finished
- finished = True
-
- call = stream_unary_call(request_gen())
-
- response = await call
- self.assertEqual(_RESPONSE, response)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
- self.assertEqual(finished, True)
-
- async def test_stream_unary_reader_writer(self):
- stream_unary_call = self._channel.stream_unary(
- _STREAM_UNARY_READER_WRITER)
- call = stream_unary_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- response = await call
- self.assertEqual(_RESPONSE, response)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_unary_evilly_mixed(self):
- stream_unary_call = self._channel.stream_unary(
- _STREAM_UNARY_EVILLY_MIXED)
- call = stream_unary_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- response = await call
- self.assertEqual(_RESPONSE, response)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_stream_async_generator(self):
- stream_stream_call = self._channel.stream_stream(
- _STREAM_STREAM_ASYNC_GEN)
- call = stream_stream_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertEqual(_RESPONSE, response)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_stream_reader_writer(self):
- stream_stream_call = self._channel.stream_stream(
- _STREAM_STREAM_READER_WRITER)
- call = stream_stream_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertEqual(_RESPONSE, response)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_stream_evilly_mixed(self):
- stream_stream_call = self._channel.stream_stream(
- _STREAM_STREAM_EVILLY_MIXED)
- call = stream_stream_call()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- for _ in range(_NUM_STREAM_RESPONSES):
- response = await call.read()
- self.assertEqual(_RESPONSE, response)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_shutdown(self):
- await self._server.stop(None)
-        # Ensures no SIGSEGV is triggered and the test ends within the timeout.
-
- async def test_shutdown_after_call(self):
- await self._channel.unary_unary(_SIMPLE_UNARY_UNARY)(_REQUEST)
-
- await self._server.stop(None)
-
- async def test_graceful_shutdown_success(self):
- call = self._channel.unary_unary(_BLOCK_BRIEFLY)(_REQUEST)
- await self._generic_handler.wait_for_call()
-
- shutdown_start_time = time.time()
- await self._server.stop(test_constants.SHORT_TIMEOUT)
- grace_period_length = time.time() - shutdown_start_time
- self.assertGreater(grace_period_length,
- test_constants.SHORT_TIMEOUT / 3)
-
- # Validates the states.
- self.assertEqual(_RESPONSE, await call)
- self.assertTrue(call.done())
-
- async def test_graceful_shutdown_failed(self):
- call = self._channel.unary_unary(_BLOCK_FOREVER)(_REQUEST)
- await self._generic_handler.wait_for_call()
-
- await self._server.stop(test_constants.SHORT_TIMEOUT)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- async def test_concurrent_graceful_shutdown(self):
- call = self._channel.unary_unary(_BLOCK_BRIEFLY)(_REQUEST)
- await self._generic_handler.wait_for_call()
-
- # Expects the shortest grace period to be effective.
- shutdown_start_time = time.time()
- await asyncio.gather(
- self._server.stop(test_constants.LONG_TIMEOUT),
- self._server.stop(test_constants.SHORT_TIMEOUT),
- self._server.stop(test_constants.LONG_TIMEOUT),
- )
- grace_period_length = time.time() - shutdown_start_time
- self.assertGreater(grace_period_length,
- test_constants.SHORT_TIMEOUT / 3)
-
- self.assertEqual(_RESPONSE, await call)
- self.assertTrue(call.done())
-
- async def test_concurrent_graceful_shutdown_immediate(self):
- call = self._channel.unary_unary(_BLOCK_FOREVER)(_REQUEST)
- await self._generic_handler.wait_for_call()
-
- # Expects no grace period, due to the "server.stop(None)".
- await asyncio.gather(
- self._server.stop(test_constants.LONG_TIMEOUT),
- self._server.stop(None),
- self._server.stop(test_constants.SHORT_TIMEOUT),
- self._server.stop(test_constants.LONG_TIMEOUT),
- )
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- self.assertEqual(grpc.StatusCode.UNAVAILABLE,
- exception_context.exception.code())
-
- async def test_shutdown_before_call(self):
- await self._server.stop(None)
-
-        # The server has already been shut down at this point, so the RPC
-        # should fail with a proper exception rather than hang or crash.
- with self.assertRaises(aio.AioRpcError):
- await self._channel.unary_unary(_SIMPLE_UNARY_UNARY)(_REQUEST)
-
- async def test_unimplemented(self):
- call = self._channel.unary_unary(_UNIMPLEMENTED_METHOD)
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call(_REQUEST)
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, rpc_error.code())
-
- async def test_shutdown_during_stream_stream(self):
- stream_stream_call = self._channel.stream_stream(
- _STREAM_STREAM_ASYNC_GEN)
- call = stream_stream_call()
-
-        # Don't half-close the RPC yet; keep it alive.
- await call.write(_REQUEST)
- await self._server.stop(None)
-
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, await call.code())
- # No segfault
-
- async def test_error_in_stream_stream(self):
- stream_stream_call = self._channel.stream_stream(
- _ERROR_IN_STREAM_STREAM)
- call = stream_stream_call()
-
-        # Don't half-close the RPC yet; keep it alive.
- await call.write(_REQUEST)
-
- # Don't segfault here
- self.assertEqual(grpc.StatusCode.UNKNOWN, await call.code())
-
- async def test_error_without_raise_in_unary_unary(self):
- call = self._channel.unary_unary(_ERROR_WITHOUT_RAISE_IN_UNARY_UNARY)(
- _REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.INTERNAL, rpc_error.code())
-
- async def test_error_without_raise_in_stream_stream(self):
- call = self._channel.stream_stream(
- _ERROR_WITHOUT_RAISE_IN_STREAM_STREAM)()
-
- for _ in range(_NUM_STREAM_REQUESTS):
- await call.write(_REQUEST)
- await call.done_writing()
-
- self.assertEqual(grpc.StatusCode.INTERNAL, await call.code())
-
- async def test_error_in_stream_unary(self):
- stream_unary_call = self._channel.stream_unary(_ERROR_IN_STREAM_UNARY)
-
- async def request_gen():
- for _ in range(_NUM_STREAM_REQUESTS):
- yield _REQUEST
-
- call = stream_unary_call(request_gen())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNKNOWN, rpc_error.code())
-
- async def test_port_binding_exception(self):
- server = aio.server(options=(('grpc.so_reuseport', 0),))
- port = server.add_insecure_port('localhost:0')
- bind_address = "localhost:%d" % port
-
- with self.assertRaises(RuntimeError):
- server.add_insecure_port(bind_address)
-
- server_credentials = grpc.ssl_server_credentials([
- (resources.private_key(), resources.certificate_chain())
- ])
- with self.assertRaises(RuntimeError):
- server.add_secure_port(bind_address, server_credentials)
-
- async def test_maximum_concurrent_rpcs(self):
-        # Build the server with the maximum_concurrent_rpcs argument
- server = aio.server(maximum_concurrent_rpcs=_MAXIMUM_CONCURRENT_RPCS)
- port = server.add_insecure_port('localhost:0')
- bind_address = "localhost:%d" % port
- server.add_generic_rpc_handlers((_GenericHandler(),))
- await server.start()
- # Build the channel
- channel = aio.insecure_channel(bind_address)
-        # Deplete the concurrency quota with 3 times the maximum number of RPCs
- rpcs = []
- for _ in range(3 * _MAXIMUM_CONCURRENT_RPCS):
- rpcs.append(channel.unary_unary(_BLOCK_BRIEFLY)(_REQUEST))
- task = self.loop.create_task(
- asyncio.wait(rpcs, return_when=asyncio.FIRST_EXCEPTION))
-        # Each batch of _MAXIMUM_CONCURRENT_RPCS calls takes roughly
-        # test_constants.SHORT_TIMEOUT / 2 to finish.
- start_time = time.time()
- await task
- elapsed_time = time.time() - start_time
- self.assertGreater(elapsed_time, test_constants.SHORT_TIMEOUT * 3 / 2)
- # Clean-up
- await channel.close()
- await server.stop(0)
-
- async def test_invalid_trailing_metadata(self):
- call = self._channel.unary_unary(_INVALID_TRAILING_METADATA)(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNKNOWN, rpc_error.code())
- self.assertIn('trailing', rpc_error.details())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_time_remaining_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_time_remaining_test.py
deleted file mode 100644
index 340e4cc350..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/server_time_remaining_test.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright 2021 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test the time_remaining() method of async ServicerContext."""
-
-import asyncio
-import datetime
-import logging
-import unittest
-
-import grpc
-from grpc import aio
-
-from tests_aio.unit._common import ADHOC_METHOD
-from tests_aio.unit._common import AdhocGenericHandler
-from tests_aio.unit._test_base import AioTestBase
-
-_REQUEST = b'\x09\x05'
-_REQUEST_TIMEOUT_S = datetime.timedelta(seconds=5).total_seconds()
-
-
-class TestServerTimeRemaining(AioTestBase):
-
- async def setUp(self):
- # Create async server
- self._server = aio.server(options=(('grpc.so_reuseport', 0),))
- self._adhoc_handlers = AdhocGenericHandler()
- self._server.add_generic_rpc_handlers((self._adhoc_handlers,))
- port = self._server.add_insecure_port('[::]:0')
- address = 'localhost:%d' % port
- await self._server.start()
- # Create async channel
- self._channel = aio.insecure_channel(address)
-
- async def tearDown(self):
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_servicer_context_time_remaining(self):
- seen_time_remaining = []
-
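-        # grpc.unary_unary_rpc_method_handler is used as a decorator here, so
-        # log_time_remaining is already a method handler that can be installed
-        # ad hoc for each call below.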
- @grpc.unary_unary_rpc_method_handler
- def log_time_remaining(request: bytes,
- context: grpc.ServicerContext) -> bytes:
- seen_time_remaining.append(context.time_remaining())
- return b""
-
- # Check if the deadline propagates properly
- self._adhoc_handlers.set_adhoc_handler(log_time_remaining)
- await self._channel.unary_unary(ADHOC_METHOD)(
- _REQUEST, timeout=_REQUEST_TIMEOUT_S)
- self.assertGreater(seen_time_remaining[0], _REQUEST_TIMEOUT_S / 2)
-        # Check that when no timeout is set, time_remaining() returns None
- self._adhoc_handlers.set_adhoc_handler(log_time_remaining)
- await self._channel.unary_unary(ADHOC_METHOD)(_REQUEST)
- self.assertIsNone(seen_time_remaining[1])
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/timeout_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/timeout_test.py
deleted file mode 100644
index dab0f5113f..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/timeout_test.py
+++ /dev/null
@@ -1,178 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior of the timeout mechanism on client side."""
-
-import asyncio
-import datetime
-import logging
-import platform
-import random
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-
-_SLEEP_TIME_UNIT_S = datetime.timedelta(seconds=1).total_seconds()
-
-_TEST_SLEEPY_UNARY_UNARY = '/test/Test/SleepyUnaryUnary'
-_TEST_SLEEPY_UNARY_STREAM = '/test/Test/SleepyUnaryStream'
-_TEST_SLEEPY_STREAM_UNARY = '/test/Test/SleepyStreamUnary'
-_TEST_SLEEPY_STREAM_STREAM = '/test/Test/SleepyStreamStream'
-
-_REQUEST = b'\x00\x00\x00'
-_RESPONSE = b'\x01\x01\x01'
-
-
-async def _test_sleepy_unary_unary(unused_request, unused_context):
- await asyncio.sleep(_SLEEP_TIME_UNIT_S)
- return _RESPONSE
-
-
-async def _test_sleepy_unary_stream(unused_request, unused_context):
- yield _RESPONSE
- await asyncio.sleep(_SLEEP_TIME_UNIT_S)
- yield _RESPONSE
-
-
-async def _test_sleepy_stream_unary(unused_request_iterator, context):
- assert _REQUEST == await context.read()
- await asyncio.sleep(_SLEEP_TIME_UNIT_S)
- assert _REQUEST == await context.read()
- return _RESPONSE
-
-
-async def _test_sleepy_stream_stream(unused_request_iterator, context):
- assert _REQUEST == await context.read()
- await asyncio.sleep(_SLEEP_TIME_UNIT_S)
- await context.write(_RESPONSE)
-
-
-_ROUTING_TABLE = {
- _TEST_SLEEPY_UNARY_UNARY:
- grpc.unary_unary_rpc_method_handler(_test_sleepy_unary_unary),
- _TEST_SLEEPY_UNARY_STREAM:
- grpc.unary_stream_rpc_method_handler(_test_sleepy_unary_stream),
- _TEST_SLEEPY_STREAM_UNARY:
- grpc.stream_unary_rpc_method_handler(_test_sleepy_stream_unary),
- _TEST_SLEEPY_STREAM_STREAM:
- grpc.stream_stream_rpc_method_handler(_test_sleepy_stream_stream)
-}
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- return _ROUTING_TABLE.get(handler_call_details.method)
-
-
-async def _start_test_server():
- server = aio.server()
- port = server.add_insecure_port('[::]:0')
- server.add_generic_rpc_handlers((_GenericHandler(),))
- await server.start()
- return f'localhost:{port}', server
-
-
-class TestTimeout(AioTestBase):
-
- async def setUp(self):
- address, self._server = await _start_test_server()
- self._client = aio.insecure_channel(address)
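-        # Kick off a connection attempt (get_state(True)) and wait until the
-        # channel is READY, so the timeout assertions below measure the
-        # handler's sleep time rather than connection setup.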
- self.assertEqual(grpc.ChannelConnectivity.IDLE,
- self._client.get_state(True))
- await _common.block_until_certain_state(self._client,
- grpc.ChannelConnectivity.READY)
-
- async def tearDown(self):
- await self._client.close()
- await self._server.stop(None)
-
- async def test_unary_unary_success_with_timeout(self):
- multicallable = self._client.unary_unary(_TEST_SLEEPY_UNARY_UNARY)
- call = multicallable(_REQUEST, timeout=2 * _SLEEP_TIME_UNIT_S)
- self.assertEqual(_RESPONSE, await call)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_unary_unary_deadline_exceeded(self):
- multicallable = self._client.unary_unary(_TEST_SLEEPY_UNARY_UNARY)
- call = multicallable(_REQUEST, timeout=0.5 * _SLEEP_TIME_UNIT_S)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
-
- async def test_unary_stream_success_with_timeout(self):
- multicallable = self._client.unary_stream(_TEST_SLEEPY_UNARY_STREAM)
- call = multicallable(_REQUEST, timeout=2 * _SLEEP_TIME_UNIT_S)
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_unary_stream_deadline_exceeded(self):
- multicallable = self._client.unary_stream(_TEST_SLEEPY_UNARY_STREAM)
- call = multicallable(_REQUEST, timeout=0.5 * _SLEEP_TIME_UNIT_S)
- self.assertEqual(_RESPONSE, await call.read())
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.read()
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
-
- async def test_stream_unary_success_with_timeout(self):
- multicallable = self._client.stream_unary(_TEST_SLEEPY_STREAM_UNARY)
- call = multicallable(timeout=2 * _SLEEP_TIME_UNIT_S)
- await call.write(_REQUEST)
- await call.write(_REQUEST)
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_stream_unary_deadline_exceeded(self):
- multicallable = self._client.stream_unary(_TEST_SLEEPY_STREAM_UNARY)
- call = multicallable(timeout=0.5 * _SLEEP_TIME_UNIT_S)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.write(_REQUEST)
- await call.write(_REQUEST)
- await call
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
-
- async def test_stream_stream_success_with_timeout(self):
- multicallable = self._client.stream_stream(_TEST_SLEEPY_STREAM_STREAM)
- call = multicallable(timeout=2 * _SLEEP_TIME_UNIT_S)
- await call.write(_REQUEST)
- self.assertEqual(_RESPONSE, await call.read())
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_stream_stream_deadline_exceeded(self):
- multicallable = self._client.stream_stream(_TEST_SLEEPY_STREAM_STREAM)
- call = multicallable(timeout=0.5 * _SLEEP_TIME_UNIT_S)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.write(_REQUEST)
- await call.read()
-
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED, rpc_error.code())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_connection_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_connection_test.py
deleted file mode 100644
index a49a1241c4..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_connection_test.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests behavior of the wait for connection API on client side."""
-
-import asyncio
-import datetime
-import logging
-from typing import Callable, Tuple
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests_aio.unit import _common
-from tests_aio.unit._constants import UNREACHABLE_TARGET
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_REQUEST = b'\x01\x02\x03'
-_TEST_METHOD = '/test/Test'
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-
-
-class TestWaitForConnection(AioTestBase):
- """Tests if wait_for_connection raises connectivity issue."""
-
- async def setUp(self):
- address, self._server = await start_test_server()
- self._channel = aio.insecure_channel(address)
- self._phony_channel = aio.insecure_channel(UNREACHABLE_TARGET)
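-        # The phony channel points at an unreachable target, so
-        # wait_for_connection() on its calls should raise UNAVAILABLE
-        # (see the *_error tests below).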
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
-
- async def tearDown(self):
- await self._phony_channel.close()
- await self._channel.close()
- await self._server.stop(None)
-
- async def test_unary_unary_ok(self):
- call = self._stub.UnaryCall(messages_pb2.SimpleRequest())
-
- # No exception raised and no message swallowed.
- await call.wait_for_connection()
-
- response = await call
- self.assertIsInstance(response, messages_pb2.SimpleResponse)
-
- async def test_unary_stream_ok(self):
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- call = self._stub.StreamingOutputCall(request)
-
- # No exception raised and no message swallowed.
- await call.wait_for_connection()
-
- response_cnt = 0
- async for response in call:
- response_cnt += 1
- self.assertIs(type(response),
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- self.assertEqual(_NUM_STREAM_RESPONSES, response_cnt)
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_unary_ok(self):
- call = self._stub.StreamingInputCall()
-
- # No exception raised and no message swallowed.
- await call.wait_for_connection()
-
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- await call.done_writing()
-
- response = await call
- self.assertIsInstance(response, messages_pb2.StreamingInputCallResponse)
- self.assertEqual(_NUM_STREAM_RESPONSES * _REQUEST_PAYLOAD_SIZE,
- response.aggregated_payload_size)
-
- self.assertEqual(await call.code(), grpc.StatusCode.OK)
-
- async def test_stream_stream_ok(self):
- call = self._stub.FullDuplexCall()
-
- # No exception raised and no message swallowed.
- await call.wait_for_connection()
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- self.assertIsInstance(response,
- messages_pb2.StreamingOutputCallResponse)
- self.assertEqual(_RESPONSE_PAYLOAD_SIZE, len(response.payload.body))
-
- await call.done_writing()
-
- self.assertEqual(grpc.StatusCode.OK, await call.code())
-
- async def test_unary_unary_error(self):
- call = self._phony_channel.unary_unary(_TEST_METHOD)(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.wait_for_connection()
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
-
- async def test_unary_stream_error(self):
- call = self._phony_channel.unary_stream(_TEST_METHOD)(_REQUEST)
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.wait_for_connection()
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
-
- async def test_stream_unary_error(self):
- call = self._phony_channel.stream_unary(_TEST_METHOD)()
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.wait_for_connection()
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
-
- async def test_stream_stream_error(self):
- call = self._phony_channel.stream_stream(_TEST_METHOD)()
-
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await call.wait_for_connection()
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_ready_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_ready_test.py
deleted file mode 100644
index 303c138642..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_aio/unit/wait_for_ready_test.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Testing the done callbacks mechanism."""
-
-import asyncio
-import gc
-import logging
-import platform
-import time
-import unittest
-
-import grpc
-from grpc.experimental import aio
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests.unit.framework.common import get_socket
-from tests.unit.framework.common import test_constants
-from tests_aio.unit import _common
-from tests_aio.unit._test_base import AioTestBase
-from tests_aio.unit._test_server import start_test_server
-
-_NUM_STREAM_RESPONSES = 5
-_REQUEST_PAYLOAD_SIZE = 7
-_RESPONSE_PAYLOAD_SIZE = 42
-
-
-async def _perform_unary_unary(stub, wait_for_ready):
- await stub.UnaryCall(messages_pb2.SimpleRequest(),
- timeout=test_constants.LONG_TIMEOUT,
- wait_for_ready=wait_for_ready)
-
-
-async def _perform_unary_stream(stub, wait_for_ready):
- request = messages_pb2.StreamingOutputCallRequest()
- for _ in range(_NUM_STREAM_RESPONSES):
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- call = stub.StreamingOutputCall(request,
- timeout=test_constants.LONG_TIMEOUT,
- wait_for_ready=wait_for_ready)
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.read()
- assert await call.code() == grpc.StatusCode.OK
-
-
-async def _perform_stream_unary(stub, wait_for_ready):
- payload = messages_pb2.Payload(body=b'\0' * _REQUEST_PAYLOAD_SIZE)
- request = messages_pb2.StreamingInputCallRequest(payload=payload)
-
- async def gen():
- for _ in range(_NUM_STREAM_RESPONSES):
- yield request
-
- await stub.StreamingInputCall(gen(),
- timeout=test_constants.LONG_TIMEOUT,
- wait_for_ready=wait_for_ready)
-
-
-async def _perform_stream_stream(stub, wait_for_ready):
- call = stub.FullDuplexCall(timeout=test_constants.LONG_TIMEOUT,
- wait_for_ready=wait_for_ready)
-
- request = messages_pb2.StreamingOutputCallRequest()
- request.response_parameters.append(
- messages_pb2.ResponseParameters(size=_RESPONSE_PAYLOAD_SIZE))
-
- for _ in range(_NUM_STREAM_RESPONSES):
- await call.write(request)
- response = await call.read()
- assert _RESPONSE_PAYLOAD_SIZE == len(response.payload.body)
-
- await call.done_writing()
- assert await call.code() == grpc.StatusCode.OK
-
-
-_RPC_ACTIONS = (
- _perform_unary_unary,
- _perform_unary_stream,
- _perform_stream_unary,
- _perform_stream_stream,
-)
-
-
-class TestWaitForReady(AioTestBase):
-
- async def setUp(self):
- address, self._port, self._socket = get_socket(listen=False)
- self._channel = aio.insecure_channel(f"{address}:{self._port}")
- self._stub = test_pb2_grpc.TestServiceStub(self._channel)
- self._socket.close()
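-        # The socket was created without listening and is closed right away:
-        # this yields a port that stays unused until start_test_server binds
-        # it later in test_call_wait_for_ready_enabled.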
-
- async def tearDown(self):
- await self._channel.close()
-
- async def _connection_fails_fast(self, wait_for_ready):
- for action in _RPC_ACTIONS:
- with self.subTest(name=action):
- with self.assertRaises(aio.AioRpcError) as exception_context:
- await action(self._stub, wait_for_ready)
- rpc_error = exception_context.exception
- self.assertEqual(grpc.StatusCode.UNAVAILABLE, rpc_error.code())
-
- async def test_call_wait_for_ready_default(self):
- """RPC should fail immediately after connection failed."""
- await self._connection_fails_fast(None)
-
- async def test_call_wait_for_ready_disabled(self):
- """RPC should fail immediately after connection failed."""
- await self._connection_fails_fast(False)
-
- @unittest.skipIf(platform.system() == 'Windows',
- 'https://github.com/grpc/grpc/pull/26729')
- async def test_call_wait_for_ready_enabled(self):
- """RPC will wait until the connection is ready."""
- for action in _RPC_ACTIONS:
- with self.subTest(name=action.__name__):
- # Starts the RPC
- action_task = self.loop.create_task(action(self._stub, True))
-
-                # Wait for TRANSIENT_FAILURE; the RPC should not abort while waiting
- await _common.block_until_certain_state(
- self._channel, grpc.ChannelConnectivity.TRANSIENT_FAILURE)
-
- try:
- # Start the server
- _, server = await start_test_server(port=self._port)
-
- # The RPC should recover itself
- await action_task
- finally:
- if server is not None:
- await server.stop(None)
-
-
-if __name__ == '__main__':
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/__init__.py
deleted file mode 100644
index 712a2e1de2..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2021 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/__init__.py
deleted file mode 100644
index 712a2e1de2..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2021 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/_test_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/_test_server.py
deleted file mode 100644
index 82327aa709..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/_test_server.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2021 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from concurrent import futures
-from typing import Any, Tuple
-
-import gevent
-import grpc
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-
-LONG_UNARY_CALL_WITH_SLEEP_VALUE = 1
-
-
-class TestServiceServicer(test_pb2_grpc.TestServiceServicer):
-
- def UnaryCall(self, request, context):
- return messages_pb2.SimpleResponse()
-
- def UnaryCallWithSleep(self, unused_request, unused_context):
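-        # gevent.sleep() yields to the hub, so this models a long-running but
-        # cooperative RPC that a greenlet can be killed out of.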
- gevent.sleep(LONG_UNARY_CALL_WITH_SLEEP_VALUE)
- return messages_pb2.SimpleResponse()
-
-
-def start_test_server(port: int = 0) -> Tuple[str, Any]:
- server = grpc.server(futures.ThreadPoolExecutor())
- servicer = TestServiceServicer()
-    test_pb2_grpc.add_TestServiceServicer_to_server(servicer, server)
-
- server.add_generic_rpc_handlers((_create_extra_generic_handler(servicer),))
- port = server.add_insecure_port('[::]:%d' % port)
- server.start()
- return 'localhost:%d' % port, server
-
-
-def _create_extra_generic_handler(servicer: TestServiceServicer) -> Any:
-    # Programmatically add extra methods that are not provided by the proto
-    # file but are used during the tests.
- rpc_method_handlers = {
- 'UnaryCallWithSleep':
- grpc.unary_unary_rpc_method_handler(
- servicer.UnaryCallWithSleep,
- request_deserializer=messages_pb2.SimpleRequest.FromString,
- response_serializer=messages_pb2.SimpleResponse.
- SerializeToString)
- }
- return grpc.method_handlers_generic_handler('grpc.testing.TestService',
- rpc_method_handlers)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/close_channel_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/close_channel_test.py
deleted file mode 100644
index ca73fd685d..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_gevent/unit/close_channel_test.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright 2021 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import unittest
-
-import gevent
-from gevent.pool import Group
-import grpc
-
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-from tests_gevent.unit._test_server import start_test_server
-
-_UNARY_CALL_METHOD_WITH_SLEEP = '/grpc.testing.TestService/UnaryCallWithSleep'
-
-
-class CloseChannelTest(unittest.TestCase):
-
- def setUp(self):
- self._server_target, self._server = start_test_server()
- self._channel = grpc.insecure_channel(self._server_target)
- self._unhandled_exception = False
- sys.excepthook = self._global_exception_handler
-
- def tearDown(self):
- self._channel.close()
- self._server.stop(None)
-
- def test_graceful_close(self):
- stub = test_pb2_grpc.TestServiceStub(self._channel)
- _, response = stub.UnaryCall.with_call(messages_pb2.SimpleRequest())
-
- self._channel.close()
-
- self.assertEqual(grpc.StatusCode.OK, response.code())
-
- def test_graceful_close_in_greenlet(self):
- group = Group()
- stub = test_pb2_grpc.TestServiceStub(self._channel)
- greenlet = group.spawn(self._run_client, stub.UnaryCall)
-        # Yield to gevent's hub so that the greenlet can take control.
- gevent.sleep()
- self._channel.close()
- group.killone(greenlet)
- self.assertFalse(self._unhandled_exception, "Unhandled GreenletExit")
- try:
- greenlet.get()
- except Exception as e: # pylint: disable=broad-except
- self.fail(f"Unexpected exception in greenlet: {e}")
-
- def test_ungraceful_close_in_greenlet(self):
- group = Group()
- UnaryCallWithSleep = self._channel.unary_unary(
- _UNARY_CALL_METHOD_WITH_SLEEP,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString,
- )
- greenlet = group.spawn(self._run_client, UnaryCallWithSleep)
-        # Yield to gevent's hub so that the greenlet can take control.
- gevent.sleep()
- group.killone(greenlet)
- self.assertFalse(self._unhandled_exception, "Unhandled GreenletExit")
-
- def test_kill_greenlet_with_generic_exception(self):
- group = Group()
- UnaryCallWithSleep = self._channel.unary_unary(
- _UNARY_CALL_METHOD_WITH_SLEEP,
- request_serializer=messages_pb2.SimpleRequest.SerializeToString,
- response_deserializer=messages_pb2.SimpleResponse.FromString,
- )
- greenlet = group.spawn(self._run_client, UnaryCallWithSleep)
-        # Yield to gevent's hub so that the greenlet can take control.
- gevent.sleep()
- group.killone(greenlet, exception=Exception)
- self.assertFalse(self._unhandled_exception, "Unhandled exception")
- self.assertRaises(Exception, greenlet.get)
-
- def _run_client(self, call):
- try:
- call.with_call(messages_pb2.SimpleRequest())
- except grpc.RpcError as e:
- if e.code() != grpc.StatusCode.CANCELLED:
- raise
-
- def _global_exception_handler(self, exctype, value, tb):
- if exctype == gevent.GreenletExit:
- self._unhandled_exception = True
- return
- sys.__excepthook__(exctype, value, tb)
-
-
-if __name__ == '__main__':
- unittest.main(verbosity=2)
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/__init__.py
deleted file mode 100644
index 6732ae8cbb..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/__init__.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2020 The gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import
-
-from tests import _loader
-from tests import _runner
-
-Loader = _loader.Loader
-Runner = _runner.Runner
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client.py
deleted file mode 100644
index c98bba39a5..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_client.py
+++ /dev/null
@@ -1,501 +0,0 @@
-# Copyright 2020 The gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import collections
-from concurrent import futures
-import datetime
-import logging
-import signal
-import sys
-import threading
-import time
-from typing import DefaultDict, Dict, List, Mapping, Sequence, Set, Tuple
-
-import grpc
-import grpc_admin
-from grpc_channelz.v1 import channelz
-
-from src.proto.grpc.testing import empty_pb2
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-
-logger = logging.getLogger()
-console_handler = logging.StreamHandler()
-formatter = logging.Formatter(fmt='%(asctime)s: %(levelname)-8s %(message)s')
-console_handler.setFormatter(formatter)
-logger.addHandler(console_handler)
-
-_SUPPORTED_METHODS = (
- "UnaryCall",
- "EmptyCall",
-)
-
-_METHOD_CAMEL_TO_CAPS_SNAKE = {
- "UnaryCall": "UNARY_CALL",
- "EmptyCall": "EMPTY_CALL",
-}
-
-_METHOD_STR_TO_ENUM = {
- "UnaryCall": messages_pb2.ClientConfigureRequest.UNARY_CALL,
- "EmptyCall": messages_pb2.ClientConfigureRequest.EMPTY_CALL,
-}
-
-_METHOD_ENUM_TO_STR = {v: k for k, v in _METHOD_STR_TO_ENUM.items()}
-
-PerMethodMetadataType = Mapping[str, Sequence[Tuple[str, str]]]
-
-_CONFIG_CHANGE_TIMEOUT = datetime.timedelta(milliseconds=500)
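-# How long an idle channel (qps == 0) waits on its condition variable before
-# re-checking the configuration in _run_single_channel.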
-
-
-class _StatsWatcher:
- _start: int
- _end: int
- _rpcs_needed: int
- _rpcs_by_peer: DefaultDict[str, int]
- _rpcs_by_method: DefaultDict[str, DefaultDict[str, int]]
- _no_remote_peer: int
- _lock: threading.Lock
- _condition: threading.Condition
-
- def __init__(self, start: int, end: int):
- self._start = start
- self._end = end
- self._rpcs_needed = end - start
- self._rpcs_by_peer = collections.defaultdict(int)
- self._rpcs_by_method = collections.defaultdict(
- lambda: collections.defaultdict(int))
- self._condition = threading.Condition()
- self._no_remote_peer = 0
-
- def on_rpc_complete(self, request_id: int, peer: str, method: str) -> None:
- """Records statistics for a single RPC."""
- if self._start <= request_id < self._end:
- with self._condition:
- if not peer:
- self._no_remote_peer += 1
- else:
- self._rpcs_by_peer[peer] += 1
- self._rpcs_by_method[method][peer] += 1
- self._rpcs_needed -= 1
- self._condition.notify()
-
- def await_rpc_stats_response(
- self, timeout_sec: int) -> messages_pb2.LoadBalancerStatsResponse:
- """Blocks until a full response has been collected."""
- with self._condition:
- self._condition.wait_for(lambda: not self._rpcs_needed,
- timeout=float(timeout_sec))
- response = messages_pb2.LoadBalancerStatsResponse()
- for peer, count in self._rpcs_by_peer.items():
- response.rpcs_by_peer[peer] = count
- for method, count_by_peer in self._rpcs_by_method.items():
- for peer, count in count_by_peer.items():
- response.rpcs_by_method[method].rpcs_by_peer[peer] = count
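-            # RPCs that never reported a peer plus those still outstanding at
-            # the timeout are both counted as failures.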
- response.num_failures = self._no_remote_peer + self._rpcs_needed
- return response
-
-
-_global_lock = threading.Lock()
-_stop_event = threading.Event()
-_global_rpc_id: int = 0
-_watchers: Set[_StatsWatcher] = set()
-_global_server = None
-_global_rpcs_started: Mapping[str, int] = collections.defaultdict(int)
-_global_rpcs_succeeded: Mapping[str, int] = collections.defaultdict(int)
-_global_rpcs_failed: Mapping[str, int] = collections.defaultdict(int)
-
-# Mapping[method, Mapping[status_code, count]]
-_global_rpc_statuses: Mapping[str, Mapping[int, int]] = collections.defaultdict(
- lambda: collections.defaultdict(int))
-
-
-def _handle_sigint(sig, frame) -> None:
- logger.warning("Received SIGINT")
- _stop_event.set()
- _global_server.stop(None)
-
-
-class _LoadBalancerStatsServicer(test_pb2_grpc.LoadBalancerStatsServiceServicer
- ):
-
- def __init__(self):
-        super().__init__()
-
- def GetClientStats(
- self, request: messages_pb2.LoadBalancerStatsRequest,
- context: grpc.ServicerContext
- ) -> messages_pb2.LoadBalancerStatsResponse:
- logger.info("Received stats request.")
- start = None
- end = None
- watcher = None
- with _global_lock:
- start = _global_rpc_id + 1
- end = start + request.num_rpcs
- watcher = _StatsWatcher(start, end)
- _watchers.add(watcher)
- response = watcher.await_rpc_stats_response(request.timeout_sec)
- with _global_lock:
- _watchers.remove(watcher)
- logger.info("Returning stats response: %s", response)
- return response
-
- def GetClientAccumulatedStats(
- self, request: messages_pb2.LoadBalancerAccumulatedStatsRequest,
- context: grpc.ServicerContext
- ) -> messages_pb2.LoadBalancerAccumulatedStatsResponse:
- logger.info("Received cumulative stats request.")
- response = messages_pb2.LoadBalancerAccumulatedStatsResponse()
- with _global_lock:
- for method in _SUPPORTED_METHODS:
- caps_method = _METHOD_CAMEL_TO_CAPS_SNAKE[method]
- response.num_rpcs_started_by_method[
- caps_method] = _global_rpcs_started[method]
- response.num_rpcs_succeeded_by_method[
- caps_method] = _global_rpcs_succeeded[method]
- response.num_rpcs_failed_by_method[
- caps_method] = _global_rpcs_failed[method]
- response.stats_per_method[
- caps_method].rpcs_started = _global_rpcs_started[method]
- for code, count in _global_rpc_statuses[method].items():
- response.stats_per_method[caps_method].result[code] = count
- logger.info("Returning cumulative stats response.")
- return response
-
-
-def _start_rpc(method: str, metadata: Sequence[Tuple[str, str]],
- request_id: int, stub: test_pb2_grpc.TestServiceStub,
- timeout: float, futures: Mapping[int, Tuple[grpc.Future,
- str]]) -> None:
- logger.debug(f"Sending {method} request to backend: {request_id}")
- if method == "UnaryCall":
- future = stub.UnaryCall.future(messages_pb2.SimpleRequest(),
- metadata=metadata,
- timeout=timeout)
- elif method == "EmptyCall":
- future = stub.EmptyCall.future(empty_pb2.Empty(),
- metadata=metadata,
- timeout=timeout)
- else:
- raise ValueError(f"Unrecognized method '{method}'.")
- futures[request_id] = (future, method)
-
-
-def _on_rpc_done(rpc_id: int, future: grpc.Future, method: str,
- print_response: bool) -> None:
- exception = future.exception()
- hostname = ""
- _global_rpc_statuses[method][future.code().value[0]] += 1
- if exception is not None:
- with _global_lock:
- _global_rpcs_failed[method] += 1
- if exception.code() == grpc.StatusCode.DEADLINE_EXCEEDED:
- logger.error(f"RPC {rpc_id} timed out")
- else:
- logger.error(exception)
- else:
- response = future.result()
- hostname = None
- for metadatum in future.initial_metadata():
- if metadatum[0] == "hostname":
- hostname = metadatum[1]
- break
- else:
- hostname = response.hostname
- if future.code() == grpc.StatusCode.OK:
- with _global_lock:
- _global_rpcs_succeeded[method] += 1
- else:
- with _global_lock:
- _global_rpcs_failed[method] += 1
- if print_response:
- if future.code() == grpc.StatusCode.OK:
- logger.debug("Successful response.")
- else:
- logger.debug(f"RPC failed: {call}")
- with _global_lock:
- for watcher in _watchers:
- watcher.on_rpc_complete(rpc_id, hostname, method)
-
-
-def _remove_completed_rpcs(futures: Mapping[int, grpc.Future],
- print_response: bool) -> None:
- logger.debug("Removing completed RPCs")
- done = []
- for future_id, (future, method) in futures.items():
- if future.done():
-            _on_rpc_done(future_id, future, method, print_response)
- done.append(future_id)
- for rpc_id in done:
- del futures[rpc_id]
-
-
-def _cancel_all_rpcs(futures: Mapping[int, Tuple[grpc.Future, str]]) -> None:
- logger.info("Cancelling all remaining RPCs")
- for future, _ in futures.values():
- future.cancel()
-
-
-class _ChannelConfiguration:
- """Configuration for a single client channel.
-
- Instances of this class are meant to be dealt with as PODs. That is,
-    data members should be accessed directly. This class is not thread-safe.
- When accessing any of its members, the lock member should be held.
- """
-
- def __init__(self, method: str, metadata: Sequence[Tuple[str, str]],
- qps: int, server: str, rpc_timeout_sec: int,
- print_response: bool, secure_mode: bool):
- # condition is signalled when a change is made to the config.
- self.condition = threading.Condition()
-
- self.method = method
- self.metadata = metadata
- self.qps = qps
- self.server = server
- self.rpc_timeout_sec = rpc_timeout_sec
- self.print_response = print_response
- self.secure_mode = secure_mode
-
-
-def _run_single_channel(config: _ChannelConfiguration) -> None:
- global _global_rpc_id # pylint: disable=global-statement
- with config.condition:
- server = config.server
- channel = None
- if config.secure_mode:
- fallback_creds = grpc.experimental.insecure_channel_credentials()
- channel_creds = grpc.xds_channel_credentials(fallback_creds)
- channel = grpc.secure_channel(server, channel_creds)
- else:
- channel = grpc.insecure_channel(server)
- with channel:
- stub = test_pb2_grpc.TestServiceStub(channel)
- futures: Dict[int, Tuple[grpc.Future, str]] = {}
- while not _stop_event.is_set():
- with config.condition:
- if config.qps == 0:
- config.condition.wait(
- timeout=_CONFIG_CHANGE_TIMEOUT.total_seconds())
- continue
- else:
- duration_per_query = 1.0 / float(config.qps)
- request_id = None
- with _global_lock:
- request_id = _global_rpc_id
- _global_rpc_id += 1
- _global_rpcs_started[config.method] += 1
- start = time.time()
- end = start + duration_per_query
- with config.condition:
- _start_rpc(config.method, config.metadata, request_id, stub,
- float(config.rpc_timeout_sec), futures)
- with config.condition:
- _remove_completed_rpcs(futures, config.print_response)
- logger.debug(f"Currently {len(futures)} in-flight RPCs")
- now = time.time()
- while now < end:
- time.sleep(end - now)
- now = time.time()
- _cancel_all_rpcs(futures)
-
-
-class _XdsUpdateClientConfigureServicer(
- test_pb2_grpc.XdsUpdateClientConfigureServiceServicer):
-
- def __init__(self, per_method_configs: Mapping[str, _ChannelConfiguration],
- qps: int):
-        super().__init__()
- self._per_method_configs = per_method_configs
- self._qps = qps
-
- def Configure(
- self, request: messages_pb2.ClientConfigureRequest,
- context: grpc.ServicerContext
- ) -> messages_pb2.ClientConfigureResponse:
- logger.info("Received Configure RPC: %s", request)
- method_strs = [_METHOD_ENUM_TO_STR[t] for t in request.types]
- for method in _SUPPORTED_METHODS:
- method_enum = _METHOD_STR_TO_ENUM[method]
- channel_config = self._per_method_configs[method]
- if method in method_strs:
- qps = self._qps
- metadata = ((md.key, md.value)
- for md in request.metadata
- if md.type == method_enum)
-                # For backward compatibility, do not change the timeout when we
-                # receive the default timeout value (0).
- if request.timeout_sec == 0:
- timeout_sec = channel_config.rpc_timeout_sec
- else:
- timeout_sec = request.timeout_sec
- else:
- qps = 0
- metadata = ()
- # Leave timeout unchanged for backward compatibility.
- timeout_sec = channel_config.rpc_timeout_sec
- with channel_config.condition:
- channel_config.qps = qps
- channel_config.metadata = list(metadata)
- channel_config.rpc_timeout_sec = timeout_sec
- channel_config.condition.notify_all()
- return messages_pb2.ClientConfigureResponse()
-
-
-class _MethodHandle:
- """An object grouping together threads driving RPCs for a method."""
-
- _channel_threads: List[threading.Thread]
-
- def __init__(self, num_channels: int,
- channel_config: _ChannelConfiguration):
- """Creates and starts a group of threads running the indicated method."""
- self._channel_threads = []
- for i in range(num_channels):
- thread = threading.Thread(target=_run_single_channel,
- args=(channel_config,))
- thread.start()
- self._channel_threads.append(thread)
-
- def stop(self) -> None:
- """Joins all threads referenced by the handle."""
- for channel_thread in self._channel_threads:
- channel_thread.join()
-
-
-def _run(args: argparse.Namespace, methods: Sequence[str],
- per_method_metadata: PerMethodMetadataType) -> None:
- logger.info("Starting python xDS Interop Client.")
- global _global_server # pylint: disable=global-statement
- method_handles = []
- channel_configs = {}
- for method in _SUPPORTED_METHODS:
- if method in methods:
- qps = args.qps
- else:
- qps = 0
- channel_config = _ChannelConfiguration(
- method, per_method_metadata.get(method, []), qps, args.server,
- args.rpc_timeout_sec, args.print_response, args.secure_mode)
- channel_configs[method] = channel_config
- method_handles.append(_MethodHandle(args.num_channels, channel_config))
- _global_server = grpc.server(futures.ThreadPoolExecutor())
- _global_server.add_insecure_port(f"0.0.0.0:{args.stats_port}")
- test_pb2_grpc.add_LoadBalancerStatsServiceServicer_to_server(
- _LoadBalancerStatsServicer(), _global_server)
- test_pb2_grpc.add_XdsUpdateClientConfigureServiceServicer_to_server(
- _XdsUpdateClientConfigureServicer(channel_configs, args.qps),
- _global_server)
- channelz.add_channelz_servicer(_global_server)
- grpc_admin.add_admin_servicers(_global_server)
- _global_server.start()
- _global_server.wait_for_termination()
- for method_handle in method_handles:
- method_handle.stop()
-
-
-def parse_metadata_arg(metadata_arg: str) -> PerMethodMetadataType:
-    metadata = metadata_arg.split(",") if metadata_arg else []
- per_method_metadata = collections.defaultdict(list)
- for metadatum in metadata:
- elems = metadatum.split(":")
- if len(elems) != 3:
- raise ValueError(
- f"'{metadatum}' was not in the form 'METHOD:KEY:VALUE'")
- if elems[0] not in _SUPPORTED_METHODS:
- raise ValueError(f"Unrecognized method '{elems[0]}'")
- per_method_metadata[elems[0]].append((elems[1], elems[2]))
- return per_method_metadata
-
-
-def parse_rpc_arg(rpc_arg: str) -> Sequence[str]:
- methods = rpc_arg.split(",")
- if set(methods) - set(_SUPPORTED_METHODS):
- raise ValueError("--rpc supported methods: {}".format(
- ", ".join(_SUPPORTED_METHODS)))
- return methods
-
-
-def bool_arg(arg: str) -> bool:
- if arg.lower() in ("true", "yes", "y"):
- return True
- elif arg.lower() in ("false", "no", "n"):
- return False
- else:
- raise argparse.ArgumentTypeError(f"Could not parse '{arg}' as a bool.")
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(
- description='Run Python XDS interop client.')
- parser.add_argument(
- "--num_channels",
- default=1,
- type=int,
- help="The number of channels from which to send requests.")
- parser.add_argument("--print_response",
- default="False",
- type=bool_arg,
- help="Write RPC response to STDOUT.")
- parser.add_argument(
- "--qps",
- default=1,
- type=int,
- help="The number of queries to send from each channel per second.")
- parser.add_argument("--rpc_timeout_sec",
- default=30,
- type=int,
- help="The per-RPC timeout in seconds.")
- parser.add_argument("--server",
- default="localhost:50051",
- help="The address of the server.")
- parser.add_argument(
- "--stats_port",
- default=50052,
- type=int,
- help="The port on which to expose the peer distribution stats service.")
- parser.add_argument(
- "--secure_mode",
- default="False",
- type=bool_arg,
- help="If specified, uses xDS credentials to connect to the server.")
- parser.add_argument('--verbose',
- help='verbose log output',
- default=False,
- action='store_true')
- parser.add_argument("--log_file",
- default=None,
- type=str,
- help="A file to log to.")
- rpc_help = "A comma-delimited list of RPC methods to run. Must be one of "
- rpc_help += ", ".join(_SUPPORTED_METHODS)
- rpc_help += "."
- parser.add_argument("--rpc", default="UnaryCall", type=str, help=rpc_help)
- metadata_help = (
- "A comma-delimited list of 3-tuples of the form " +
- "METHOD:KEY:VALUE, e.g. " +
- "EmptyCall:key1:value1,UnaryCall:key2:value2,EmptyCall:k3:v3")
- parser.add_argument("--metadata", default="", type=str, help=metadata_help)
- args = parser.parse_args()
- signal.signal(signal.SIGINT, _handle_sigint)
- if args.verbose:
- logger.setLevel(logging.DEBUG)
- if args.log_file:
- file_handler = logging.FileHandler(args.log_file, mode='a')
- file_handler.setFormatter(formatter)
- logger.addHandler(file_handler)
- _run(args, parse_rpc_arg(args.rpc), parse_metadata_arg(args.metadata))
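
For reference, a minimal, self-contained sketch of the per-method metadata parsing performed by the removed client above. The function name and the example flag value are illustrative only; they mirror the behavior of the deleted parse_metadata_arg but are not part of the deleted file.

import collections
from typing import DefaultDict, List, Tuple

def parse_metadata(metadata_arg: str) -> DefaultDict[str, List[Tuple[str, str]]]:
    # "EmptyCall:k1:v1,UnaryCall:k2:v2" -> {"EmptyCall": [("k1", "v1")], ...}
    per_method = collections.defaultdict(list)
    for metadatum in filter(None, metadata_arg.split(",")):
        method, key, value = metadatum.split(":")  # raises ValueError if malformed
        per_method[method].append((key, value))
    return per_method

print(parse_metadata("EmptyCall:k1:v1,UnaryCall:k2:v2"))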
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_server.py
deleted file mode 100644
index 2c44b42e2e..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/interop/xds_interop_server.py
+++ /dev/null
@@ -1,177 +0,0 @@
-# Copyright 2021 The gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import collections
-from concurrent import futures
-import logging
-import signal
-import socket
-import sys
-import threading
-import time
-from typing import DefaultDict, Dict, List, Mapping, Sequence, Set, Tuple
-
-import grpc
-from grpc_channelz.v1 import channelz
-from grpc_channelz.v1 import channelz_pb2
-from grpc_health.v1 import health as grpc_health
-from grpc_health.v1 import health_pb2
-from grpc_health.v1 import health_pb2_grpc
-from grpc_reflection.v1alpha import reflection
-
-from src.proto.grpc.testing import empty_pb2
-from src.proto.grpc.testing import messages_pb2
-from src.proto.grpc.testing import test_pb2
-from src.proto.grpc.testing import test_pb2_grpc
-
-# NOTE: This interop server is not fully compatible with all xDS interop tests.
-# It currently only implements enough functionality to pass the xDS security
-# tests.
-
-_LISTEN_HOST = "0.0.0.0"
-
-_THREAD_POOL_SIZE = 256
-
-logger = logging.getLogger()
-console_handler = logging.StreamHandler()
-formatter = logging.Formatter(fmt='%(asctime)s: %(levelname)-8s %(message)s')
-console_handler.setFormatter(formatter)
-logger.addHandler(console_handler)
-
-
-class TestService(test_pb2_grpc.TestServiceServicer):
-
- def __init__(self, server_id, hostname):
- self._server_id = server_id
- self._hostname = hostname
-
- def EmptyCall(self, _: empty_pb2.Empty,
- context: grpc.ServicerContext) -> empty_pb2.Empty:
- return empty_pb2.Empty()
-
- def UnaryCall(self, request: messages_pb2.SimpleRequest,
- context: grpc.ServicerContext) -> messages_pb2.SimpleResponse:
- response = messages_pb2.SimpleResponse()
- response.server_id = self._server_id
- response.hostname = self._hostname
- return response
-
-
-def _configure_maintenance_server(server: grpc.Server,
- maintenance_port: int) -> None:
- channelz.add_channelz_servicer(server)
- listen_address = f"{_LISTEN_HOST}:{maintenance_port}"
- server.add_insecure_port(listen_address)
- health_servicer = grpc_health.HealthServicer(
- experimental_non_blocking=True,
- experimental_thread_pool=futures.ThreadPoolExecutor(
- max_workers=_THREAD_POOL_SIZE))
-
- health_pb2_grpc.add_HealthServicer_to_server(health_servicer, server)
- SERVICE_NAMES = (
- test_pb2.DESCRIPTOR.services_by_name["TestService"].full_name,
- health_pb2.DESCRIPTOR.services_by_name["Health"].full_name,
- channelz_pb2.DESCRIPTOR.services_by_name["Channelz"].full_name,
- reflection.SERVICE_NAME,
- )
- for service in SERVICE_NAMES:
- health_servicer.set(service, health_pb2.HealthCheckResponse.SERVING)
- reflection.enable_server_reflection(SERVICE_NAMES, server)
-
-
-def _configure_test_server(server: grpc.Server, port: int, secure_mode: bool,
- server_id: str) -> None:
- test_pb2_grpc.add_TestServiceServicer_to_server(
- TestService(server_id, socket.gethostname()), server)
- listen_address = f"{_LISTEN_HOST}:{port}"
- if not secure_mode:
- server.add_insecure_port(listen_address)
- else:
- logger.info("Running with xDS Server credentials")
- server_fallback_creds = grpc.insecure_server_credentials()
- server_creds = grpc.xds_server_credentials(server_fallback_creds)
- server.add_secure_port(listen_address, server_creds)
-
-
-def _run(port: int, maintenance_port: int, secure_mode: bool,
- server_id: str) -> None:
- if port == maintenance_port:
- server = grpc.server(
- futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE))
- _configure_test_server(server, port, secure_mode, server_id)
- _configure_maintenance_server(server, maintenance_port)
- server.start()
- logger.info("Test server listening on port %d", port)
- logger.info("Maintenance server listening on port %d", maintenance_port)
- server.wait_for_termination()
- else:
- test_server = grpc.server(
- futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE),
- xds=secure_mode)
- _configure_test_server(test_server, port, secure_mode, server_id)
- test_server.start()
- logger.info("Test server listening on port %d", port)
- maintenance_server = grpc.server(
- futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE))
- _configure_maintenance_server(maintenance_server, maintenance_port)
- maintenance_server.start()
- logger.info("Maintenance server listening on port %d", maintenance_port)
- test_server.wait_for_termination()
- maintenance_server.wait_for_termination()
-
-
-def bool_arg(arg: str) -> bool:
- if arg.lower() in ("true", "yes", "y"):
- return True
- elif arg.lower() in ("false", "no", "n"):
- return False
- else:
- raise argparse.ArgumentTypeError(f"Could not parse '{arg}' as a bool.")
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(
- description="Run Python xDS interop server.")
- parser.add_argument("--port",
- type=int,
- default=8080,
- help="Port for test server.")
- parser.add_argument("--maintenance_port",
- type=int,
- default=8080,
- help="Port for services other than the test server.")
- parser.add_argument(
- "--secure_mode",
- type=bool_arg,
- default="False",
- help="If true, uses xDS to retrieve server credentials.")
- parser.add_argument("--server_id",
- type=str,
- default="python_server",
- help="The server ID to return in responses.")
- parser.add_argument('--verbose',
- help='verbose log output',
- default=False,
- action='store_true')
- args = parser.parse_args()
- if args.verbose:
- logger.setLevel(logging.DEBUG)
- else:
- logger.setLevel(logging.INFO)
- if args.secure_mode and args.port == args.maintenance_port:
- raise ValueError(
- "--port and --maintenance_port must not be the same when --secure_mode is set."
- )
- _run(args.port, args.maintenance_port, args.secure_mode, args.server_id)
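
For context, a minimal sketch of the xDS server-credentials wiring that the removed _configure_test_server performed when --secure_mode was true. The port and worker count are illustrative assumptions, not values from the deleted file.

from concurrent import futures

import grpc

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4), xds=True)
fallback_creds = grpc.insecure_server_credentials()   # used if no xDS config is received
server_creds = grpc.xds_server_credentials(fallback_creds)
server.add_secure_port("0.0.0.0:8080", server_creds)  # illustrative port
server.start()
server.wait_for_termination()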
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/__init__.py
deleted file mode 100644
index f4b321fc5b..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2019 The gRPC Authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_leak_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_leak_test.py
deleted file mode 100644
index f8b8382e31..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_leak_test.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright 2020 The gRPC Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""A smoke test for memory leaks on short-lived channels without close.
-
-This test doesn't guarantee that all resources are cleaned up if `Channel.close`
-is not explicitly invoked. The recommended way to use a Channel object is within
-a `with` clause, letting the context manager close the channel automatically.
-"""
-
-from concurrent.futures import ThreadPoolExecutor
-import logging
-import os
-import resource
-import sys
-import unittest
-
-import grpc
-
-_TEST_METHOD = '/test/Test'
-_REQUEST = b'\x23\x33'
-_LARGE_NUM_OF_ITERATIONS = 5000
-
-# If MAX_RSS inflates by more than this size, the test fails.
-_FAIL_THRESHOLD = 25 * 1024 * 1024 # 25 MiB
-
-
-def _get_max_rss():
- return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
-
-
-def _pretty_print_bytes(x):
- if x > 1024 * 1024 * 1024:
- return "%.2f GiB" % (x / 1024.0 / 1024 / 1024)
- elif x > 1024 * 1024:
- return "%.2f MiB" % (x / 1024.0 / 1024)
- elif x > 1024:
- return "%.2f KiB" % (x / 1024.0)
- else:
- return "%d B" % x
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _TEST_METHOD:
- return grpc.unary_unary_rpc_method_handler(lambda x, _: x)
-
-
-def _start_a_test_server():
- server = grpc.server(ThreadPoolExecutor(max_workers=1),
- options=(('grpc.so_reuseport', 0),))
- server.add_generic_rpc_handlers((_GenericHandler(),))
- port = server.add_insecure_port('localhost:0')
- server.start()
- return 'localhost:%d' % port, server
-
-
-def _perform_an_rpc(address):
- channel = grpc.insecure_channel(address)
- multicallable = channel.unary_unary(_TEST_METHOD)
- response = multicallable(_REQUEST)
- assert _REQUEST == response
-
-
-class TestLeak(unittest.TestCase):
-
- def test_leak_with_single_shot_rpcs(self):
- address, server = _start_a_test_server()
-
- # Records memory usage before the experiment.
- before = _get_max_rss()
-
- # Amplifies the leak.
- for n in range(_LARGE_NUM_OF_ITERATIONS):
- _perform_an_rpc(address)
-
- # Fails the test if a memory leak is detected.
- diff = _get_max_rss() - before
- if diff > _FAIL_THRESHOLD:
- self.fail("Max RSS inflated {} > {}".format(
- _pretty_print_bytes(diff),
- _pretty_print_bytes(_FAIL_THRESHOLD)))
-
-
-if __name__ == "__main__":
- logging.basicConfig(level=logging.DEBUG)
- unittest.main(verbosity=2)
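
To illustrate the close pattern the docstring above recommends (as opposed to the deliberately leaky single-shot channels the test exercises), a minimal sketch; the address is illustrative and assumes a server such as the one started by _start_a_test_server is listening.

import grpc

with grpc.insecure_channel("localhost:50051") as channel:  # illustrative address
    multicallable = channel.unary_unary("/test/Test")
    response = multicallable(b"\x23\x33")
# The context manager closes the channel deterministically on exit.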
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_simple_stubs_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_simple_stubs_test.py
deleted file mode 100644
index f0365e15e7..0000000000
--- a/contrib/libs/grpc/src/python/grpcio_tests/tests_py3_only/unit/_simple_stubs_test.py
+++ /dev/null
@@ -1,416 +0,0 @@
-# Copyright 2020 The gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Tests for Simple Stubs."""
-
-# TODO(https://github.com/grpc/grpc/issues/21965): Run under setuptools.
-
-import os
-
-_MAXIMUM_CHANNELS = 10
-
-_DEFAULT_TIMEOUT = 1.0
-
-os.environ["GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS"] = "2"
-os.environ["GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM"] = str(_MAXIMUM_CHANNELS)
-os.environ["GRPC_PYTHON_DEFAULT_TIMEOUT_SECONDS"] = str(_DEFAULT_TIMEOUT)
-
-import contextlib
-import datetime
-import inspect
-import logging
-import sys
-import threading
-import time
-from typing import Callable, Optional
-import unittest
-
-import grpc
-import grpc.experimental
-
-from tests.unit import resources
-from tests.unit import test_common
-from tests.unit.framework.common import get_socket
-
-_REQUEST = b"0000"
-
-_CACHE_EPOCHS = 8
-_CACHE_TRIALS = 6
-
-_SERVER_RESPONSE_COUNT = 10
-_CLIENT_REQUEST_COUNT = _SERVER_RESPONSE_COUNT
-
-_STRESS_EPOCHS = _MAXIMUM_CHANNELS * 10
-
-_UNARY_UNARY = "/test/UnaryUnary"
-_UNARY_STREAM = "/test/UnaryStream"
-_STREAM_UNARY = "/test/StreamUnary"
-_STREAM_STREAM = "/test/StreamStream"
-_BLACK_HOLE = "/test/BlackHole"
-
-
-@contextlib.contextmanager
-def _env(key: str, value: str):
- os.environ[key] = value
- yield
- del os.environ[key]
-
-
-def _unary_unary_handler(request, context):
- return request
-
-
-def _unary_stream_handler(request, context):
- for _ in range(_SERVER_RESPONSE_COUNT):
- yield request
-
-
-def _stream_unary_handler(request_iterator, context):
- request = None
- for single_request in request_iterator:
- request = single_request
- return request
-
-
-def _stream_stream_handler(request_iterator, context):
- for request in request_iterator:
- yield request
-
-
-def _black_hole_handler(request, context):
- event = threading.Event()
-
- def _on_done():
- event.set()
-
- context.add_callback(_on_done)
- while not event.is_set():
- time.sleep(0.1)
-
-
-class _GenericHandler(grpc.GenericRpcHandler):
-
- def service(self, handler_call_details):
- if handler_call_details.method == _UNARY_UNARY:
- return grpc.unary_unary_rpc_method_handler(_unary_unary_handler)
- elif handler_call_details.method == _UNARY_STREAM:
- return grpc.unary_stream_rpc_method_handler(_unary_stream_handler)
- elif handler_call_details.method == _STREAM_UNARY:
- return grpc.stream_unary_rpc_method_handler(_stream_unary_handler)
- elif handler_call_details.method == _STREAM_STREAM:
- return grpc.stream_stream_rpc_method_handler(_stream_stream_handler)
- elif handler_call_details.method == _BLACK_HOLE:
- return grpc.unary_unary_rpc_method_handler(_black_hole_handler)
- else:
- raise NotImplementedError()
-
-
-def _time_invocation(to_time: Callable[[], None]) -> datetime.timedelta:
- start = datetime.datetime.now()
- to_time()
- return datetime.datetime.now() - start
-
-
-@contextlib.contextmanager
-def _server(credentials: Optional[grpc.ServerCredentials]):
- try:
- server = test_common.test_server()
- target = '[::]:0'
- if credentials is None:
- port = server.add_insecure_port(target)
- else:
- port = server.add_secure_port(target, credentials)
- server.add_generic_rpc_handlers((_GenericHandler(),))
- server.start()
- yield port
- finally:
- server.stop(None)
-
-
-class SimpleStubsTest(unittest.TestCase):
-
- def assert_cached(self, to_check: Callable[[str], None]) -> None:
- """Asserts that a function caches intermediate data/state.
-
- To be specific, given a function whose caching behavior is
- deterministic in the value of a supplied string, this function asserts
- that, on average, subsequent invocations of the function for a specific
- string are faster than first invocations with that same string.
-
- Args:
- to_check: A function returning nothing, that caches values based on
- an arbitrary supplied string.
- """
- initial_runs = []
- cached_runs = []
- for epoch in range(_CACHE_EPOCHS):
- runs = []
- text = str(epoch)
- for trial in range(_CACHE_TRIALS):
- runs.append(_time_invocation(lambda: to_check(text)))
- initial_runs.append(runs[0])
- cached_runs.extend(runs[1:])
- average_cold = sum((run for run in initial_runs),
- datetime.timedelta()) / len(initial_runs)
- average_warm = sum((run for run in cached_runs),
- datetime.timedelta()) / len(cached_runs)
- self.assertLess(average_warm, average_cold)
-
- def assert_eventually(self,
- predicate: Callable[[], bool],
- *,
- timeout: Optional[datetime.timedelta] = None,
- message: Optional[Callable[[], str]] = None) -> None:
- message = message or (lambda: "Proposition did not evaluate to true")
- timeout = timeout or datetime.timedelta(seconds=10)
- end = datetime.datetime.now() + timeout
- while datetime.datetime.now() < end:
- if predicate():
- break
- time.sleep(0.5)
- else:
- self.fail(message() + " after " + str(timeout))
-
- def test_unary_unary_insecure(self):
- with _server(None) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.unary_unary(
- _REQUEST,
- target,
- _UNARY_UNARY,
- channel_credentials=grpc.experimental.
- insecure_channel_credentials(),
- timeout=None)
- self.assertEqual(_REQUEST, response)
-
- def test_unary_unary_secure(self):
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.unary_unary(
- _REQUEST,
- target,
- _UNARY_UNARY,
- channel_credentials=grpc.local_channel_credentials(),
- timeout=None)
- self.assertEqual(_REQUEST, response)
-
- def test_channels_cached(self):
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- test_name = inspect.stack()[0][3]
- args = (_REQUEST, target, _UNARY_UNARY)
- kwargs = {"channel_credentials": grpc.local_channel_credentials()}
-
- def _invoke(seed: str):
- run_kwargs = dict(kwargs)
- run_kwargs["options"] = ((test_name + seed, ""),)
- grpc.experimental.unary_unary(*args, **run_kwargs)
-
- self.assert_cached(_invoke)
-
- def test_channels_evicted(self):
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.unary_unary(
- _REQUEST,
- target,
- _UNARY_UNARY,
- channel_credentials=grpc.local_channel_credentials())
- self.assert_eventually(
- lambda: grpc._simple_stubs.ChannelCache.get(
- )._test_only_channel_count() == 0,
- message=lambda:
- f"{grpc._simple_stubs.ChannelCache.get()._test_only_channel_count()} remain"
- )
-
- def test_total_channels_enforced(self):
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- for i in range(_STRESS_EPOCHS):
- # Ensure we get a new channel each time.
- options = (("foo", str(i)),)
- # Send messages at full blast.
- grpc.experimental.unary_unary(
- _REQUEST,
- target,
- _UNARY_UNARY,
- options=options,
- channel_credentials=grpc.local_channel_credentials())
- self.assert_eventually(
- lambda: grpc._simple_stubs.ChannelCache.get(
- )._test_only_channel_count() <= _MAXIMUM_CHANNELS + 1,
- message=lambda:
- f"{grpc._simple_stubs.ChannelCache.get()._test_only_channel_count()} channels remain"
- )
-
- def test_unary_stream(self):
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- for response in grpc.experimental.unary_stream(
- _REQUEST,
- target,
- _UNARY_STREAM,
- channel_credentials=grpc.local_channel_credentials()):
- self.assertEqual(_REQUEST, response)
-
- def test_stream_unary(self):
-
- def request_iter():
- for _ in range(_CLIENT_REQUEST_COUNT):
- yield _REQUEST
-
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.stream_unary(
- request_iter(),
- target,
- _STREAM_UNARY,
- channel_credentials=grpc.local_channel_credentials())
- self.assertEqual(_REQUEST, response)
-
- def test_stream_stream(self):
-
- def request_iter():
- for _ in range(_CLIENT_REQUEST_COUNT):
- yield _REQUEST
-
- with _server(grpc.local_server_credentials()) as port:
- target = f'localhost:{port}'
- for response in grpc.experimental.stream_stream(
- request_iter(),
- target,
- _STREAM_STREAM,
- channel_credentials=grpc.local_channel_credentials()):
- self.assertEqual(_REQUEST, response)
-
- def test_default_ssl(self):
- _private_key = resources.private_key()
- _certificate_chain = resources.certificate_chain()
- _server_certs = ((_private_key, _certificate_chain),)
- _server_host_override = 'foo.test.google.fr'
- _test_root_certificates = resources.test_root_certificates()
- _property_options = ((
- 'grpc.ssl_target_name_override',
- _server_host_override,
- ),)
- cert_dir = os.path.join(os.path.dirname(resources.__file__),
- "credentials")
- cert_file = os.path.join(cert_dir, "ca.pem")
- with _env("GRPC_DEFAULT_SSL_ROOTS_FILE_PATH", cert_file):
- server_creds = grpc.ssl_server_credentials(_server_certs)
- with _server(server_creds) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.unary_unary(
- _REQUEST, target, _UNARY_UNARY, options=_property_options)
-
- def test_insecure_sugar(self):
- with _server(None) as port:
- target = f'localhost:{port}'
- response = grpc.experimental.unary_unary(_REQUEST,
- target,
- _UNARY_UNARY,
- insecure=True)
- self.assertEqual(_REQUEST, response)
-
- def test_insecure_sugar_mutually_exclusive(self):
- with _server(None) as port:
- target = f'localhost:{port}'
- with self.assertRaises(ValueError):
- response = grpc.experimental.unary_unary(
- _REQUEST,
- target,
- _UNARY_UNARY,
- insecure=True,
- channel_credentials=grpc.local_channel_credentials())
-
- def test_default_wait_for_ready(self):
- addr, port, sock = get_socket()
- sock.close()
- target = f'{addr}:{port}'
- channel = grpc._simple_stubs.ChannelCache.get().get_channel(
- target, (), None, True, None)
- rpc_finished_event = threading.Event()
- rpc_failed_event = threading.Event()
- server = None
-
- def _on_connectivity_changed(connectivity):
- nonlocal server
- if connectivity is grpc.ChannelConnectivity.TRANSIENT_FAILURE:
- self.assertFalse(rpc_finished_event.is_set())
- self.assertFalse(rpc_failed_event.is_set())
- server = test_common.test_server()
- server.add_insecure_port(target)
- server.add_generic_rpc_handlers((_GenericHandler(),))
- server.start()
- channel.unsubscribe(_on_connectivity_changed)
- elif connectivity in (grpc.ChannelConnectivity.IDLE,
- grpc.ChannelConnectivity.CONNECTING):
- pass
- else:
- self.fail("Encountered unknown state.")
-
- channel.subscribe(_on_connectivity_changed)
-
- def _send_rpc():
- try:
- response = grpc.experimental.unary_unary(_REQUEST,
- target,
- _UNARY_UNARY,
- timeout=None,
- insecure=True)
- rpc_finished_event.set()
- except Exception as e:
- rpc_failed_event.set()
-
- t = threading.Thread(target=_send_rpc)
- t.start()
- t.join()
- self.assertFalse(rpc_failed_event.is_set())
- self.assertTrue(rpc_finished_event.is_set())
- if server is not None:
- server.stop(None)
-
- def assert_times_out(self, invocation_args):
- with _server(None) as port:
- target = f'localhost:{port}'
- with self.assertRaises(grpc.RpcError) as cm:
- response = grpc.experimental.unary_unary(_REQUEST,
- target,
- _BLACK_HOLE,
- insecure=True,
- **invocation_args)
- self.assertEqual(grpc.StatusCode.DEADLINE_EXCEEDED,
- cm.exception.code())
-
- def test_default_timeout(self):
- not_present = object()
- wait_for_ready_values = [True, not_present]
- timeout_values = [0.5, not_present]
- cases = []
- for wait_for_ready in wait_for_ready_values:
- for timeout in timeout_values:
- case = {}
- if timeout is not not_present:
- case["timeout"] = timeout
- if wait_for_ready is not not_present:
- case["wait_for_ready"] = wait_for_ready
- cases.append(case)
-
- for case in cases:
- with self.subTest(**case):
- self.assert_times_out(case)
-
-
-if __name__ == "__main__":
- logging.basicConfig(level=logging.INFO)
- unittest.main(verbosity=2)
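
For reference, a minimal sketch of the grpc.experimental simple-stubs call pattern exercised by the removed test: a one-shot RPC with no explicit channel management. The target, method, and timeout values are illustrative assumptions and presume a compatible server is listening.

import grpc
import grpc.experimental

response = grpc.experimental.unary_unary(
    b"0000",             # request payload
    "localhost:50051",   # target (illustrative)
    "/test/UnaryUnary",  # method handled by the generic handler above
    insecure=True,       # plaintext channel, as in test_insecure_sugar
    timeout=1.0,
)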
diff --git a/contrib/libs/re2/re2/testing/.yandex_meta/licenses.list.txt b/contrib/libs/re2/re2/testing/.yandex_meta/licenses.list.txt
deleted file mode 100644
index 0dec3af329..0000000000
--- a/contrib/libs/re2/re2/testing/.yandex_meta/licenses.list.txt
+++ /dev/null
@@ -1,69 +0,0 @@
-====================BSD-3-Clause====================
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-
-====================COPYRIGHT====================
-// Copyright 2002-2009 The RE2 Authors. All Rights Reserved.
-
-
-====================COPYRIGHT====================
-// Copyright 2005 The RE2 Authors. All Rights Reserved.
-
-
-====================COPYRIGHT====================
-// Copyright 2006 The RE2 Authors. All Rights Reserved.
-
-
-====================COPYRIGHT====================
-// Copyright 2006-2007 The RE2 Authors. All Rights Reserved.
-
-
-====================COPYRIGHT====================
-// Copyright 2006-2008 The RE2 Authors. All Rights Reserved.
-
-
-====================COPYRIGHT====================
-// Copyright 2007 The RE2 Authors. All Rights Reserved.
-
-
-====================COPYRIGHT====================
-// Copyright 2008 The RE2 Authors. All Rights Reserved.
-
-
-====================COPYRIGHT====================
-// Copyright 2009 The RE2 Authors. All Rights Reserved.
-
-
-====================COPYRIGHT====================
-// Copyright 2010 The RE2 Authors. All Rights Reserved.
-
-
-====================File: LICENSE====================
-// Copyright (c) 2009 The RE2 Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.