author | Devtools Arcadia <[email protected]> | 2022-02-07 18:08:42 +0300
---|---|---
committer | Devtools Arcadia <[email protected]> | 2022-02-07 18:08:42 +0300
commit | 1110808a9d39d4b808aef724c861a2e1a38d2a69 (patch) |
tree | e26c9fed0de5d9873cce7e00bc214573dc2195b7 | /contrib/libs/grpc/src/python/grpcio_tests/tests
intermediate changes
ref:cde9a383711a11544ce7e107a78147fb96cc4029
Diffstat (limited to 'contrib/libs/grpc/src/python/grpcio_tests/tests')
126 files changed, 17142 insertions, 0 deletions
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py new file mode 100644 index 00000000000..d2466fd0228 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from tests import _loader +from tests import _runner + +Loader = _loader.Loader +Runner = _runner.Runner diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py new file mode 100644 index 00000000000..80c107aa8e4 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_loader.py @@ -0,0 +1,106 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import importlib +import pkgutil +import re +import unittest + +import coverage + +TEST_MODULE_REGEX = r'^.*_test$' + + +class Loader(object): + """Test loader for setuptools test suite support. + + Attributes: + suite (unittest.TestSuite): All tests collected by the loader. + loader (unittest.TestLoader): Standard Python unittest loader to be ran per + module discovered. + module_matcher (re.RegexObject): A regular expression object to match + against module names and determine whether or not the discovered module + contributes to the test suite. + """ + + def __init__(self): + self.suite = unittest.TestSuite() + self.loader = unittest.TestLoader() + self.module_matcher = re.compile(TEST_MODULE_REGEX) + + def loadTestsFromNames(self, names, module=None): + """Function mirroring TestLoader::loadTestsFromNames, as expected by + setuptools.setup argument `test_loader`.""" + # ensure that we capture decorators and definitions (else our coverage + # measure unnecessarily suffers) + coverage_context = coverage.Coverage(data_suffix=True) + coverage_context.start() + imported_modules = tuple( + importlib.import_module(name) for name in names) + for imported_module in imported_modules: + self.visit_module(imported_module) + for imported_module in imported_modules: + try: + package_paths = imported_module.__path__ + except AttributeError: + continue + self.walk_packages(package_paths) + coverage_context.stop() + coverage_context.save() + return self.suite + + def walk_packages(self, package_paths): + """Walks over the packages, dispatching `visit_module` calls. 
+ + Args: + package_paths (list): A list of paths over which to walk through modules + along. + """ + for importer, module_name, is_package in ( + pkgutil.walk_packages(package_paths)): + module = importer.find_module(module_name).load_module(module_name) + self.visit_module(module) + + def visit_module(self, module): + """Visits the module, adding discovered tests to the test suite. + + Args: + module (module): Module to match against self.module_matcher; if matched + it has its tests loaded via self.loader into self.suite. + """ + if self.module_matcher.match(module.__name__): + module_suite = self.loader.loadTestsFromModule(module) + self.suite.addTest(module_suite) + + +def iterate_suite_cases(suite): + """Generator over all unittest.TestCases in a unittest.TestSuite. + + Args: + suite (unittest.TestSuite): Suite to iterate over in the generator. + + Returns: + generator: A generator over all unittest.TestCases in `suite`. + """ + for item in suite: + if isinstance(item, unittest.TestSuite): + for child_item in iterate_suite_cases(item): + yield child_item + elif isinstance(item, unittest.TestCase): + yield item + else: + raise ValueError('unexpected suite item of type {}'.format( + type(item))) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py new file mode 100644 index 00000000000..389d5f4f96a --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_result.py @@ -0,0 +1,439 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import collections +import itertools +import traceback +import unittest +from xml.etree import ElementTree + +import coverage +from six import moves + +from tests import _loader + + +class CaseResult( + collections.namedtuple('CaseResult', [ + 'id', 'name', 'kind', 'stdout', 'stderr', 'skip_reason', 'traceback' + ])): + """A serializable result of a single test case. + + Attributes: + id (object): Any serializable object used to denote the identity of this + test case. + name (str or None): A human-readable name of the test case. + kind (CaseResult.Kind): The kind of test result. + stdout (object or None): Output on stdout, or None if nothing was captured. + stderr (object or None): Output on stderr, or None if nothing was captured. + skip_reason (object or None): The reason the test was skipped. Must be + something if self.kind is CaseResult.Kind.SKIP, else None. + traceback (object or None): The traceback of the test. Must be something if + self.kind is CaseResult.Kind.{ERROR, FAILURE, EXPECTED_FAILURE}, else + None. 
+ """ + + class Kind(object): + UNTESTED = 'untested' + RUNNING = 'running' + ERROR = 'error' + FAILURE = 'failure' + SUCCESS = 'success' + SKIP = 'skip' + EXPECTED_FAILURE = 'expected failure' + UNEXPECTED_SUCCESS = 'unexpected success' + + def __new__(cls, + id=None, + name=None, + kind=None, + stdout=None, + stderr=None, + skip_reason=None, + traceback=None): + """Helper keyword constructor for the namedtuple. + + See this class' attributes for information on the arguments.""" + assert id is not None + assert name is None or isinstance(name, str) + if kind is CaseResult.Kind.UNTESTED: + pass + elif kind is CaseResult.Kind.RUNNING: + pass + elif kind is CaseResult.Kind.ERROR: + assert traceback is not None + elif kind is CaseResult.Kind.FAILURE: + assert traceback is not None + elif kind is CaseResult.Kind.SUCCESS: + pass + elif kind is CaseResult.Kind.SKIP: + assert skip_reason is not None + elif kind is CaseResult.Kind.EXPECTED_FAILURE: + assert traceback is not None + elif kind is CaseResult.Kind.UNEXPECTED_SUCCESS: + pass + else: + assert False + return super(cls, CaseResult).__new__(cls, id, name, kind, stdout, + stderr, skip_reason, traceback) + + def updated(self, + name=None, + kind=None, + stdout=None, + stderr=None, + skip_reason=None, + traceback=None): + """Get a new validated CaseResult with the fields updated. + + See this class' attributes for information on the arguments.""" + name = self.name if name is None else name + kind = self.kind if kind is None else kind + stdout = self.stdout if stdout is None else stdout + stderr = self.stderr if stderr is None else stderr + skip_reason = self.skip_reason if skip_reason is None else skip_reason + traceback = self.traceback if traceback is None else traceback + return CaseResult(id=self.id, + name=name, + kind=kind, + stdout=stdout, + stderr=stderr, + skip_reason=skip_reason, + traceback=traceback) + + +class AugmentedResult(unittest.TestResult): + """unittest.Result that keeps track of additional information. + + Uses CaseResult objects to store test-case results, providing additional + information beyond that of the standard Python unittest library, such as + standard output. + + Attributes: + id_map (callable): A unary callable mapping unittest.TestCase objects to + unique identifiers. + cases (dict): A dictionary mapping from the identifiers returned by id_map + to CaseResult objects corresponding to those IDs. + """ + + def __init__(self, id_map): + """Initialize the object with an identifier mapping. 
+ + Arguments: + id_map (callable): Corresponds to the attribute `id_map`.""" + super(AugmentedResult, self).__init__() + self.id_map = id_map + self.cases = None + + def startTestRun(self): + """See unittest.TestResult.startTestRun.""" + super(AugmentedResult, self).startTestRun() + self.cases = dict() + + def startTest(self, test): + """See unittest.TestResult.startTest.""" + super(AugmentedResult, self).startTest(test) + case_id = self.id_map(test) + self.cases[case_id] = CaseResult(id=case_id, + name=test.id(), + kind=CaseResult.Kind.RUNNING) + + def addError(self, test, err): + """See unittest.TestResult.addError.""" + super(AugmentedResult, self).addError(test, err) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.ERROR, traceback=err) + + def addFailure(self, test, err): + """See unittest.TestResult.addFailure.""" + super(AugmentedResult, self).addFailure(test, err) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.FAILURE, traceback=err) + + def addSuccess(self, test): + """See unittest.TestResult.addSuccess.""" + super(AugmentedResult, self).addSuccess(test) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.SUCCESS) + + def addSkip(self, test, reason): + """See unittest.TestResult.addSkip.""" + super(AugmentedResult, self).addSkip(test, reason) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.SKIP, skip_reason=reason) + + def addExpectedFailure(self, test, err): + """See unittest.TestResult.addExpectedFailure.""" + super(AugmentedResult, self).addExpectedFailure(test, err) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.EXPECTED_FAILURE, traceback=err) + + def addUnexpectedSuccess(self, test): + """See unittest.TestResult.addUnexpectedSuccess.""" + super(AugmentedResult, self).addUnexpectedSuccess(test) + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + kind=CaseResult.Kind.UNEXPECTED_SUCCESS) + + def set_output(self, test, stdout, stderr): + """Set the output attributes for the CaseResult corresponding to a test. + + Args: + test (unittest.TestCase): The TestCase to set the outputs of. + stdout (str): Output from stdout to assign to self.id_map(test). + stderr (str): Output from stderr to assign to self.id_map(test). + """ + case_id = self.id_map(test) + self.cases[case_id] = self.cases[case_id].updated( + stdout=stdout.decode(), stderr=stderr.decode()) + + def augmented_results(self, filter): + """Convenience method to retrieve filtered case results. + + Args: + filter (callable): A unary predicate to filter over CaseResult objects. + """ + return (self.cases[case_id] + for case_id in self.cases + if filter(self.cases[case_id])) + + +class CoverageResult(AugmentedResult): + """Extension to AugmentedResult adding coverage.py support per test.\ + + Attributes: + coverage_context (coverage.Coverage): coverage.py management object. + """ + + def __init__(self, id_map): + """See AugmentedResult.__init__.""" + super(CoverageResult, self).__init__(id_map=id_map) + self.coverage_context = None + + def startTest(self, test): + """See unittest.TestResult.startTest. 
+ + Additionally initializes and begins code coverage tracking.""" + super(CoverageResult, self).startTest(test) + self.coverage_context = coverage.Coverage(data_suffix=True) + self.coverage_context.start() + + def stopTest(self, test): + """See unittest.TestResult.stopTest. + + Additionally stops and deinitializes code coverage tracking.""" + super(CoverageResult, self).stopTest(test) + self.coverage_context.stop() + self.coverage_context.save() + self.coverage_context = None + + +class _Colors(object): + """Namespaced constants for terminal color magic numbers.""" + HEADER = '\033[95m' + INFO = '\033[94m' + OK = '\033[92m' + WARN = '\033[93m' + FAIL = '\033[91m' + BOLD = '\033[1m' + UNDERLINE = '\033[4m' + END = '\033[0m' + + +class TerminalResult(CoverageResult): + """Extension to CoverageResult adding basic terminal reporting.""" + + def __init__(self, out, id_map): + """Initialize the result object. + + Args: + out (file-like): Output file to which terminal-colored live results will + be written. + id_map (callable): See AugmentedResult.__init__. + """ + super(TerminalResult, self).__init__(id_map=id_map) + self.out = out + + def startTestRun(self): + """See unittest.TestResult.startTestRun.""" + super(TerminalResult, self).startTestRun() + self.out.write(_Colors.HEADER + 'Testing gRPC Python...\n' + + _Colors.END) + + def stopTestRun(self): + """See unittest.TestResult.stopTestRun.""" + super(TerminalResult, self).stopTestRun() + self.out.write(summary(self)) + self.out.flush() + + def addError(self, test, err): + """See unittest.TestResult.addError.""" + super(TerminalResult, self).addError(test, err) + self.out.write(_Colors.FAIL + 'ERROR {}\n'.format(test.id()) + + _Colors.END) + self.out.flush() + + def addFailure(self, test, err): + """See unittest.TestResult.addFailure.""" + super(TerminalResult, self).addFailure(test, err) + self.out.write(_Colors.FAIL + 'FAILURE {}\n'.format(test.id()) + + _Colors.END) + self.out.flush() + + def addSuccess(self, test): + """See unittest.TestResult.addSuccess.""" + super(TerminalResult, self).addSuccess(test) + self.out.write(_Colors.OK + 'SUCCESS {}\n'.format(test.id()) + + _Colors.END) + self.out.flush() + + def addSkip(self, test, reason): + """See unittest.TestResult.addSkip.""" + super(TerminalResult, self).addSkip(test, reason) + self.out.write(_Colors.INFO + 'SKIP {}\n'.format(test.id()) + + _Colors.END) + self.out.flush() + + def addExpectedFailure(self, test, err): + """See unittest.TestResult.addExpectedFailure.""" + super(TerminalResult, self).addExpectedFailure(test, err) + self.out.write(_Colors.INFO + 'FAILURE_OK {}\n'.format(test.id()) + + _Colors.END) + self.out.flush() + + def addUnexpectedSuccess(self, test): + """See unittest.TestResult.addUnexpectedSuccess.""" + super(TerminalResult, self).addUnexpectedSuccess(test) + self.out.write(_Colors.INFO + 'UNEXPECTED_OK {}\n'.format(test.id()) + + _Colors.END) + self.out.flush() + + +def _traceback_string(type, value, trace): + """Generate a descriptive string of a Python exception traceback. + + Args: + type (class): The type of the exception. + value (Exception): The value of the exception. + trace (traceback): Traceback of the exception. + + Returns: + str: Formatted exception descriptive string. + """ + buffer = moves.cStringIO() + traceback.print_exception(type, value, trace, file=buffer) + return buffer.getvalue() + + +def summary(result): + """A summary string of a result object. + + Args: + result (AugmentedResult): The result object to get the summary of. 
+ + Returns: + str: The summary string. + """ + assert isinstance(result, AugmentedResult) + untested = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.UNTESTED)) + running = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.RUNNING)) + failures = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.FAILURE)) + errors = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.ERROR)) + successes = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.SUCCESS)) + skips = list( + result.augmented_results( + lambda case_result: case_result.kind is CaseResult.Kind.SKIP)) + expected_failures = list( + result.augmented_results(lambda case_result: case_result.kind is + CaseResult.Kind.EXPECTED_FAILURE)) + unexpected_successes = list( + result.augmented_results(lambda case_result: case_result.kind is + CaseResult.Kind.UNEXPECTED_SUCCESS)) + running_names = [case.name for case in running] + finished_count = (len(failures) + len(errors) + len(successes) + + len(expected_failures) + len(unexpected_successes)) + statistics = ('{finished} tests finished:\n' + '\t{successful} successful\n' + '\t{unsuccessful} unsuccessful\n' + '\t{skipped} skipped\n' + '\t{expected_fail} expected failures\n' + '\t{unexpected_successful} unexpected successes\n' + 'Interrupted Tests:\n' + '\t{interrupted}\n'.format( + finished=finished_count, + successful=len(successes), + unsuccessful=(len(failures) + len(errors)), + skipped=len(skips), + expected_fail=len(expected_failures), + unexpected_successful=len(unexpected_successes), + interrupted=str(running_names))) + tracebacks = '\n\n'.join([ + (_Colors.FAIL + '{test_name}' + _Colors.END + '\n' + _Colors.BOLD + + 'traceback:' + _Colors.END + '\n' + '{traceback}\n' + _Colors.BOLD + + 'stdout:' + _Colors.END + '\n' + '{stdout}\n' + _Colors.BOLD + + 'stderr:' + _Colors.END + '\n' + '{stderr}\n').format( + test_name=result.name, + traceback=_traceback_string(*result.traceback), + stdout=result.stdout, + stderr=result.stderr) + for result in itertools.chain(failures, errors) + ]) + notes = 'Unexpected successes: {}\n'.format( + [result.name for result in unexpected_successes]) + return statistics + '\nErrors/Failures: \n' + tracebacks + '\n' + notes + + +def jenkins_junit_xml(result): + """An XML tree object that when written is recognizable by Jenkins. + + Args: + result (AugmentedResult): The result object to get the junit xml output of. + + Returns: + ElementTree.ElementTree: The XML tree. 
+ """ + assert isinstance(result, AugmentedResult) + root = ElementTree.Element('testsuites') + suite = ElementTree.SubElement(root, 'testsuite', { + 'name': 'Python gRPC tests', + }) + for case in result.cases.values(): + if case.kind is CaseResult.Kind.SUCCESS: + ElementTree.SubElement(suite, 'testcase', { + 'name': case.name, + }) + elif case.kind in (CaseResult.Kind.ERROR, CaseResult.Kind.FAILURE): + case_xml = ElementTree.SubElement(suite, 'testcase', { + 'name': case.name, + }) + error_xml = ElementTree.SubElement(case_xml, 'error', {}) + error_xml.text = ''.format(case.stderr, case.traceback) + return ElementTree.ElementTree(element=root) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py new file mode 100644 index 00000000000..39da0399b02 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_runner.py @@ -0,0 +1,239 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import collections +import os +import select +import signal +import sys +import tempfile +import threading +import time +import unittest +import uuid + +import six +from six import moves + +from tests import _loader +from tests import _result + + +class CaptureFile(object): + """A context-managed file to redirect output to a byte array. + + Use by invoking `start` (`__enter__`) and at some point invoking `stop` + (`__exit__`). At any point after the initial call to `start` call `output` to + get the current redirected output. Note that we don't currently use file + locking, so calling `output` between calls to `start` and `stop` may muddle + the result (you should only be doing this during a Python-handled interrupt as + a last ditch effort to provide output to the user). + + Attributes: + _redirected_fd (int): File descriptor of file to redirect writes from. + _saved_fd (int): A copy of the original value of the redirected file + descriptor. + _into_file (TemporaryFile or None): File to which writes are redirected. + Only non-None when self is started. + """ + + def __init__(self, fd): + self._redirected_fd = fd + self._saved_fd = os.dup(self._redirected_fd) + self._into_file = None + + def output(self): + """Get all output from the redirected-to file if it exists.""" + if self._into_file: + self._into_file.seek(0) + return bytes(self._into_file.read()) + else: + return bytes() + + def start(self): + """Start redirection of writes to the file descriptor.""" + self._into_file = tempfile.TemporaryFile() + os.dup2(self._into_file.fileno(), self._redirected_fd) + + def stop(self): + """Stop redirection of writes to the file descriptor.""" + # n.b. this dup2 call auto-closes self._redirected_fd + os.dup2(self._saved_fd, self._redirected_fd) + + def write_bypass(self, value): + """Bypass the redirection and write directly to the original file. + + Arguments: + value (str): What to write to the original file. 
+ """ + if six.PY3 and not isinstance(value, six.binary_type): + value = bytes(value, 'ascii') + if self._saved_fd is None: + os.write(self._redirect_fd, value) + else: + os.write(self._saved_fd, value) + + def __enter__(self): + self.start() + return self + + def __exit__(self, type, value, traceback): + self.stop() + + def close(self): + """Close any resources used by self not closed by stop().""" + os.close(self._saved_fd) + + +class AugmentedCase(collections.namedtuple('AugmentedCase', ['case', 'id'])): + """A test case with a guaranteed unique externally specified identifier. + + Attributes: + case (unittest.TestCase): TestCase we're decorating with an additional + identifier. + id (object): Any identifier that may be considered 'unique' for testing + purposes. + """ + + def __new__(cls, case, id=None): + if id is None: + id = uuid.uuid4() + return super(cls, AugmentedCase).__new__(cls, case, id) + + +# NOTE(lidiz) This complex wrapper is not triggering setUpClass nor +# tearDownClass. Do not use those methods, or fix this wrapper! +class Runner(object): + + def __init__(self, dedicated_threads=False): + """Constructs the Runner object. + + Args: + dedicated_threads: A bool indicates whether to spawn each unit test + in separate thread or not. + """ + self._skipped_tests = [] + self._dedicated_threads = dedicated_threads + + def skip_tests(self, tests): + self._skipped_tests = tests + + def run(self, suite): + """See setuptools' test_runner setup argument for information.""" + # only run test cases with id starting with given prefix + testcase_filter = os.getenv('GRPC_PYTHON_TESTRUNNER_FILTER') + filtered_cases = [] + for case in _loader.iterate_suite_cases(suite): + if not testcase_filter or case.id().startswith(testcase_filter): + filtered_cases.append(case) + + # Ensure that every test case has no collision with any other test case in + # the augmented results. + augmented_cases = [ + AugmentedCase(case, uuid.uuid4()) for case in filtered_cases + ] + case_id_by_case = dict((augmented_case.case, augmented_case.id) + for augmented_case in augmented_cases) + result_out = moves.cStringIO() + result = _result.TerminalResult( + result_out, id_map=lambda case: case_id_by_case[case]) + stdout_pipe = CaptureFile(sys.stdout.fileno()) + stderr_pipe = CaptureFile(sys.stderr.fileno()) + kill_flag = [False] + + def sigint_handler(signal_number, frame): + if signal_number == signal.SIGINT: + kill_flag[0] = True # Python 2.7 not having 'local'... 
:-( + signal.signal(signal_number, signal.SIG_DFL) + + def fault_handler(signal_number, frame): + stdout_pipe.write_bypass( + 'Received fault signal {}\nstdout:\n{}\n\nstderr:{}\n'.format( + signal_number, stdout_pipe.output(), stderr_pipe.output())) + os._exit(1) + + def check_kill_self(): + if kill_flag[0]: + stdout_pipe.write_bypass('Stopping tests short...') + result.stopTestRun() + stdout_pipe.write_bypass(result_out.getvalue()) + stdout_pipe.write_bypass('\ninterrupted stdout:\n{}\n'.format( + stdout_pipe.output().decode())) + stderr_pipe.write_bypass('\ninterrupted stderr:\n{}\n'.format( + stderr_pipe.output().decode())) + os._exit(1) + + def try_set_handler(name, handler): + try: + signal.signal(getattr(signal, name), handler) + except AttributeError: + pass + + try_set_handler('SIGINT', sigint_handler) + try_set_handler('SIGBUS', fault_handler) + try_set_handler('SIGABRT', fault_handler) + try_set_handler('SIGFPE', fault_handler) + try_set_handler('SIGILL', fault_handler) + # Sometimes output will lag after a test has successfully finished; we + # ignore such writes to our pipes. + try_set_handler('SIGPIPE', signal.SIG_IGN) + + # Run the tests + result.startTestRun() + for augmented_case in augmented_cases: + for skipped_test in self._skipped_tests: + if skipped_test in augmented_case.case.id(): + break + else: + sys.stdout.write('Running {}\n'.format( + augmented_case.case.id())) + sys.stdout.flush() + if self._dedicated_threads: + # (Deprecated) Spawns dedicated thread for each test case. + case_thread = threading.Thread( + target=augmented_case.case.run, args=(result,)) + try: + with stdout_pipe, stderr_pipe: + case_thread.start() + # If the thread is exited unexpected, stop testing. + while case_thread.is_alive(): + check_kill_self() + time.sleep(0) + case_thread.join() + except: # pylint: disable=try-except-raise + # re-raise the exception after forcing the with-block to end + raise + # Records the result of the test case run. + result.set_output(augmented_case.case, stdout_pipe.output(), + stderr_pipe.output()) + sys.stdout.write(result_out.getvalue()) + sys.stdout.flush() + result_out.truncate(0) + check_kill_self() + else: + # Donates current thread to test case execution. + augmented_case.case.run(result) + result.stopTestRun() + stdout_pipe.close() + stderr_pipe.close() + + # Report results + sys.stdout.write(result_out.getvalue()) + sys.stdout.flush() + signal.signal(signal.SIGINT, signal.SIG_DFL) + with open('report.xml', 'wb') as report_xml_file: + _result.jenkins_junit_xml(result).write(report_xml_file) + return result diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py new file mode 100644 index 00000000000..5772620b602 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
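The runner above captures each test case's stdout and stderr at the file-descriptor level (os.dup/os.dup2 in CaptureFile) rather than by swapping sys.stdout, so writes that go straight to the underlying descriptors (for example from C extensions) are captured as well. A minimal standalone sketch of that dup/dup2 pattern, with illustrative names only (not part of the diff):

    import os
    import sys
    import tempfile

    def capture_stdout_fd():
        """Redirect fd 1 into a temp file, write through it, then restore it."""
        fd = sys.stdout.fileno()
        saved_fd = os.dup(fd)                 # keep a copy of the real stdout
        into_file = tempfile.TemporaryFile()
        os.dup2(into_file.fileno(), fd)       # writes to fd 1 now land in the file
        try:
            os.write(fd, b'captured at the fd level\n')
        finally:
            os.dup2(saved_fd, fd)             # restore the original stdout
            os.close(saved_fd)
        into_file.seek(0)
        return into_file.read()

    if __name__ == '__main__':
        print(capture_stdout_fd())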
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py new file mode 100644 index 00000000000..3aa92f37fb1 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/_sanity/_sanity_test.py @@ -0,0 +1,48 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import pkgutil +import unittest + +import six + +import tests + + +class SanityTest(unittest.TestCase): + + maxDiff = 32768 + + TEST_PKG_MODULE_NAME = 'tests' + TEST_PKG_PATH = 'tests' + + def testTestsJsonUpToDate(self): + """Autodiscovers all test suites and checks that tests.json is up to date""" + loader = tests.Loader() + loader.loadTestsFromNames([self.TEST_PKG_MODULE_NAME]) + test_suite_names = sorted({ + test_case_class.id().rsplit('.', 1)[0] for test_case_class in + tests._loader.iterate_suite_cases(loader.suite) + }) + + tests_json_string = pkgutil.get_data(self.TEST_PKG_PATH, 'tests.json') + tests_json = json.loads( + tests_json_string.decode() if six.PY3 else tests_json_string) + + self.assertSequenceEqual(tests_json, test_suite_names) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py new file mode 100644 index 00000000000..994a8e1e800 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/bazel_namespace_package_hack.py @@ -0,0 +1,40 @@ +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import site +import sys + +_GRPC_BAZEL_RUNTIME_ENV = "GRPC_BAZEL_RUNTIME" + + +# TODO(https://github.com/bazelbuild/bazel/issues/6844) Bazel failed to +# interpret namespace packages correctly. This monkey patch will force the +# Python process to parse the .pth file in the sys.path to resolve namespace +# package in the right place. +# Analysis in depth: https://github.com/bazelbuild/rules_python/issues/55 +def sys_path_to_site_dir_hack(): + """Add valid sys.path item to site directory to parse the .pth files.""" + # Only run within our Bazel environment + if not os.environ.get(_GRPC_BAZEL_RUNTIME_ENV): + return + items = [] + for item in sys.path: + if os.path.exists(item): + # The only difference between sys.path and site-directory is + # whether the .pth file will be parsed or not. 
A site-directory + # will always exist in sys.path, but not another way around. + items.append(item) + for item in items: + site.addsitedir(item) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py new file mode 100644 index 00000000000..38fdfc9c5cf --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py new file mode 100644 index 00000000000..784307ae005 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/channelz/_channelz_servicer_test.py @@ -0,0 +1,469 @@ +# Copyright 2018 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests of grpc_channelz.v1.channelz.""" + +import unittest + +from concurrent import futures + +import grpc + +from grpc_channelz.v1 import channelz +from grpc_channelz.v1 import channelz_pb2 +from grpc_channelz.v1 import channelz_pb2_grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_SUCCESSFUL_UNARY_UNARY = '/test/SuccessfulUnaryUnary' +_FAILED_UNARY_UNARY = '/test/FailedUnaryUnary' +_SUCCESSFUL_STREAM_STREAM = '/test/SuccessfulStreamStream' + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x01\x01\x01' + +_DISABLE_REUSE_PORT = (('grpc.so_reuseport', 0),) +_ENABLE_CHANNELZ = (('grpc.enable_channelz', 1),) +_DISABLE_CHANNELZ = (('grpc.enable_channelz', 0),) + + +def _successful_unary_unary(request, servicer_context): + return _RESPONSE + + +def _failed_unary_unary(request, servicer_context): + servicer_context.set_code(grpc.StatusCode.INTERNAL) + servicer_context.set_details("Channelz Test Intended Failure") + + +def _successful_stream_stream(request_iterator, servicer_context): + for _ in request_iterator: + yield _RESPONSE + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _SUCCESSFUL_UNARY_UNARY: + return grpc.unary_unary_rpc_method_handler(_successful_unary_unary) + elif handler_call_details.method == _FAILED_UNARY_UNARY: + return grpc.unary_unary_rpc_method_handler(_failed_unary_unary) + elif handler_call_details.method == _SUCCESSFUL_STREAM_STREAM: + return grpc.stream_stream_rpc_method_handler( + _successful_stream_stream) + else: + return None + + +class _ChannelServerPair(object): + + def __init__(self): + # Server will enable channelz service + self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=3), + options=_DISABLE_REUSE_PORT + + _ENABLE_CHANNELZ) + port = self.server.add_insecure_port('[::]:0') + self.server.add_generic_rpc_handlers((_GenericHandler(),)) + self.server.start() + + # Channel will enable channelz service... 
+ self.channel = grpc.insecure_channel('localhost:%d' % port, + _ENABLE_CHANNELZ) + + +def _generate_channel_server_pairs(n): + return [_ChannelServerPair() for i in range(n)] + + +def _close_channel_server_pairs(pairs): + for pair in pairs: + pair.server.stop(None) + pair.channel.close() + + +class ChannelzServicerTest(unittest.TestCase): + + def _send_successful_unary_unary(self, idx): + _, r = self._pairs[idx].channel.unary_unary( + _SUCCESSFUL_UNARY_UNARY).with_call(_REQUEST) + self.assertEqual(r.code(), grpc.StatusCode.OK) + + def _send_failed_unary_unary(self, idx): + try: + self._pairs[idx].channel.unary_unary(_FAILED_UNARY_UNARY).with_call( + _REQUEST) + except grpc.RpcError: + return + else: + self.fail("This call supposed to fail") + + def _send_successful_stream_stream(self, idx): + response_iterator = self._pairs[idx].channel.stream_stream( + _SUCCESSFUL_STREAM_STREAM).__call__( + iter([_REQUEST] * test_constants.STREAM_LENGTH)) + cnt = 0 + for _ in response_iterator: + cnt += 1 + self.assertEqual(cnt, test_constants.STREAM_LENGTH) + + def _get_channel_id(self, idx): + """Channel id may not be consecutive""" + resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) + self.assertGreater(len(resp.channel), idx) + return resp.channel[idx].ref.channel_id + + def setUp(self): + self._pairs = [] + # This server is for Channelz info fetching only + # It self should not enable Channelz + self._server = grpc.server(futures.ThreadPoolExecutor(max_workers=3), + options=_DISABLE_REUSE_PORT + + _DISABLE_CHANNELZ) + port = self._server.add_insecure_port('[::]:0') + channelz.add_channelz_servicer(self._server) + self._server.start() + + # This channel is used to fetch Channelz info only + # Channelz should not be enabled + self._channel = grpc.insecure_channel('localhost:%d' % port, + _DISABLE_CHANNELZ) + self._channelz_stub = channelz_pb2_grpc.ChannelzStub(self._channel) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + _close_channel_server_pairs(self._pairs) + + def test_get_top_channels_basic(self): + self._pairs = _generate_channel_server_pairs(1) + resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) + self.assertEqual(len(resp.channel), 1) + self.assertEqual(resp.end, True) + + def test_get_top_channels_high_start_id(self): + self._pairs = _generate_channel_server_pairs(1) + resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=10000)) + self.assertEqual(len(resp.channel), 0) + self.assertEqual(resp.end, True) + + def test_successful_request(self): + self._pairs = _generate_channel_server_pairs(1) + self._send_successful_unary_unary(0) + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) + self.assertEqual(resp.channel.data.calls_started, 1) + self.assertEqual(resp.channel.data.calls_succeeded, 1) + self.assertEqual(resp.channel.data.calls_failed, 0) + + def test_failed_request(self): + self._pairs = _generate_channel_server_pairs(1) + self._send_failed_unary_unary(0) + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) + self.assertEqual(resp.channel.data.calls_started, 1) + self.assertEqual(resp.channel.data.calls_succeeded, 0) + self.assertEqual(resp.channel.data.calls_failed, 1) + + def test_many_requests(self): + self._pairs = _generate_channel_server_pairs(1) + k_success = 7 + k_failed = 9 + for i in 
range(k_success): + self._send_successful_unary_unary(0) + for i in range(k_failed): + self._send_failed_unary_unary(0) + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) + self.assertEqual(resp.channel.data.calls_started, k_success + k_failed) + self.assertEqual(resp.channel.data.calls_succeeded, k_success) + self.assertEqual(resp.channel.data.calls_failed, k_failed) + + def test_many_channel(self): + k_channels = 4 + self._pairs = _generate_channel_server_pairs(k_channels) + resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) + self.assertEqual(len(resp.channel), k_channels) + + def test_many_requests_many_channel(self): + k_channels = 4 + self._pairs = _generate_channel_server_pairs(k_channels) + k_success = 11 + k_failed = 13 + for i in range(k_success): + self._send_successful_unary_unary(0) + self._send_successful_unary_unary(2) + for i in range(k_failed): + self._send_failed_unary_unary(1) + self._send_failed_unary_unary(2) + + # The first channel saw only successes + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) + self.assertEqual(resp.channel.data.calls_started, k_success) + self.assertEqual(resp.channel.data.calls_succeeded, k_success) + self.assertEqual(resp.channel.data.calls_failed, 0) + + # The second channel saw only failures + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(1))) + self.assertEqual(resp.channel.data.calls_started, k_failed) + self.assertEqual(resp.channel.data.calls_succeeded, 0) + self.assertEqual(resp.channel.data.calls_failed, k_failed) + + # The third channel saw both successes and failures + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(2))) + self.assertEqual(resp.channel.data.calls_started, k_success + k_failed) + self.assertEqual(resp.channel.data.calls_succeeded, k_success) + self.assertEqual(resp.channel.data.calls_failed, k_failed) + + # The fourth channel saw nothing + resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(3))) + self.assertEqual(resp.channel.data.calls_started, 0) + self.assertEqual(resp.channel.data.calls_succeeded, 0) + self.assertEqual(resp.channel.data.calls_failed, 0) + + def test_many_subchannels(self): + k_channels = 4 + self._pairs = _generate_channel_server_pairs(k_channels) + k_success = 17 + k_failed = 19 + for i in range(k_success): + self._send_successful_unary_unary(0) + self._send_successful_unary_unary(2) + for i in range(k_failed): + self._send_failed_unary_unary(1) + self._send_failed_unary_unary(2) + + gtc_resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) + self.assertEqual(len(gtc_resp.channel), k_channels) + for i in range(k_channels): + # If no call performed in the channel, there shouldn't be any subchannel + if gtc_resp.channel[i].data.calls_started == 0: + self.assertEqual(len(gtc_resp.channel[i].subchannel_ref), 0) + continue + + # Otherwise, the subchannel should exist + self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0) + gsc_resp = self._channelz_stub.GetSubchannel( + channelz_pb2.GetSubchannelRequest( + subchannel_id=gtc_resp.channel[i].subchannel_ref[0]. 
+ subchannel_id)) + self.assertEqual(gtc_resp.channel[i].data.calls_started, + gsc_resp.subchannel.data.calls_started) + self.assertEqual(gtc_resp.channel[i].data.calls_succeeded, + gsc_resp.subchannel.data.calls_succeeded) + self.assertEqual(gtc_resp.channel[i].data.calls_failed, + gsc_resp.subchannel.data.calls_failed) + + def test_server_basic(self): + self._pairs = _generate_channel_server_pairs(1) + resp = self._channelz_stub.GetServers( + channelz_pb2.GetServersRequest(start_server_id=0)) + self.assertEqual(len(resp.server), 1) + + def test_get_one_server(self): + self._pairs = _generate_channel_server_pairs(1) + gss_resp = self._channelz_stub.GetServers( + channelz_pb2.GetServersRequest(start_server_id=0)) + self.assertEqual(len(gss_resp.server), 1) + gs_resp = self._channelz_stub.GetServer( + channelz_pb2.GetServerRequest( + server_id=gss_resp.server[0].ref.server_id)) + self.assertEqual(gss_resp.server[0].ref.server_id, + gs_resp.server.ref.server_id) + + def test_server_call(self): + self._pairs = _generate_channel_server_pairs(1) + k_success = 23 + k_failed = 29 + for i in range(k_success): + self._send_successful_unary_unary(0) + for i in range(k_failed): + self._send_failed_unary_unary(0) + + resp = self._channelz_stub.GetServers( + channelz_pb2.GetServersRequest(start_server_id=0)) + self.assertEqual(len(resp.server), 1) + self.assertEqual(resp.server[0].data.calls_started, + k_success + k_failed) + self.assertEqual(resp.server[0].data.calls_succeeded, k_success) + self.assertEqual(resp.server[0].data.calls_failed, k_failed) + + def test_many_subchannels_and_sockets(self): + k_channels = 4 + self._pairs = _generate_channel_server_pairs(k_channels) + k_success = 3 + k_failed = 5 + for i in range(k_success): + self._send_successful_unary_unary(0) + self._send_successful_unary_unary(2) + for i in range(k_failed): + self._send_failed_unary_unary(1) + self._send_failed_unary_unary(2) + + gtc_resp = self._channelz_stub.GetTopChannels( + channelz_pb2.GetTopChannelsRequest(start_channel_id=0)) + self.assertEqual(len(gtc_resp.channel), k_channels) + for i in range(k_channels): + # If no call performed in the channel, there shouldn't be any subchannel + if gtc_resp.channel[i].data.calls_started == 0: + self.assertEqual(len(gtc_resp.channel[i].subchannel_ref), 0) + continue + + # Otherwise, the subchannel should exist + self.assertGreater(len(gtc_resp.channel[i].subchannel_ref), 0) + gsc_resp = self._channelz_stub.GetSubchannel( + channelz_pb2.GetSubchannelRequest( + subchannel_id=gtc_resp.channel[i].subchannel_ref[0]. + subchannel_id)) + self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1) + + gs_resp = self._channelz_stub.GetSocket( + channelz_pb2.GetSocketRequest( + socket_id=gsc_resp.subchannel.socket_ref[0].socket_id)) + self.assertEqual(gsc_resp.subchannel.data.calls_started, + gs_resp.socket.data.streams_started) + self.assertEqual(gsc_resp.subchannel.data.calls_started, + gs_resp.socket.data.streams_succeeded) + # Calls started == messages sent, only valid for unary calls + self.assertEqual(gsc_resp.subchannel.data.calls_started, + gs_resp.socket.data.messages_sent) + # Only receive responses when the RPC was successful + self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, + gs_resp.socket.data.messages_received) + + def test_streaming_rpc(self): + self._pairs = _generate_channel_server_pairs(1) + # In C++, the argument for _send_successful_stream_stream is message length. + # Here the argument is still channel idx, to be consistent with the other two. 
+ self._send_successful_stream_stream(0) + + gc_resp = self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=self._get_channel_id(0))) + self.assertEqual(gc_resp.channel.data.calls_started, 1) + self.assertEqual(gc_resp.channel.data.calls_succeeded, 1) + self.assertEqual(gc_resp.channel.data.calls_failed, 0) + # Subchannel exists + self.assertGreater(len(gc_resp.channel.subchannel_ref), 0) + + gsc_resp = self._channelz_stub.GetSubchannel( + channelz_pb2.GetSubchannelRequest( + subchannel_id=gc_resp.channel.subchannel_ref[0].subchannel_id)) + self.assertEqual(gsc_resp.subchannel.data.calls_started, 1) + self.assertEqual(gsc_resp.subchannel.data.calls_succeeded, 1) + self.assertEqual(gsc_resp.subchannel.data.calls_failed, 0) + # Socket exists + self.assertEqual(len(gsc_resp.subchannel.socket_ref), 1) + + gs_resp = self._channelz_stub.GetSocket( + channelz_pb2.GetSocketRequest( + socket_id=gsc_resp.subchannel.socket_ref[0].socket_id)) + self.assertEqual(gs_resp.socket.data.streams_started, 1) + self.assertEqual(gs_resp.socket.data.streams_succeeded, 1) + self.assertEqual(gs_resp.socket.data.streams_failed, 0) + self.assertEqual(gs_resp.socket.data.messages_sent, + test_constants.STREAM_LENGTH) + self.assertEqual(gs_resp.socket.data.messages_received, + test_constants.STREAM_LENGTH) + + def test_server_sockets(self): + self._pairs = _generate_channel_server_pairs(1) + self._send_successful_unary_unary(0) + self._send_failed_unary_unary(0) + + gs_resp = self._channelz_stub.GetServers( + channelz_pb2.GetServersRequest(start_server_id=0)) + self.assertEqual(len(gs_resp.server), 1) + self.assertEqual(gs_resp.server[0].data.calls_started, 2) + self.assertEqual(gs_resp.server[0].data.calls_succeeded, 1) + self.assertEqual(gs_resp.server[0].data.calls_failed, 1) + + gss_resp = self._channelz_stub.GetServerSockets( + channelz_pb2.GetServerSocketsRequest( + server_id=gs_resp.server[0].ref.server_id, start_socket_id=0)) + # If the RPC call failed, it will raise a grpc.RpcError + # So, if there is no exception raised, considered pass + + def test_server_listen_sockets(self): + self._pairs = _generate_channel_server_pairs(1) + + gss_resp = self._channelz_stub.GetServers( + channelz_pb2.GetServersRequest(start_server_id=0)) + self.assertEqual(len(gss_resp.server), 1) + self.assertEqual(len(gss_resp.server[0].listen_socket), 1) + + gs_resp = self._channelz_stub.GetSocket( + channelz_pb2.GetSocketRequest( + socket_id=gss_resp.server[0].listen_socket[0].socket_id)) + # If the RPC call failed, it will raise a grpc.RpcError + # So, if there is no exception raised, considered pass + + def test_invalid_query_get_server(self): + try: + self._channelz_stub.GetServer( + channelz_pb2.GetServerRequest(server_id=10000)) + except BaseException as e: + self.assertIn('StatusCode.NOT_FOUND', str(e)) + else: + self.fail('Invalid query not detected') + + def test_invalid_query_get_channel(self): + try: + self._channelz_stub.GetChannel( + channelz_pb2.GetChannelRequest(channel_id=10000)) + except BaseException as e: + self.assertIn('StatusCode.NOT_FOUND', str(e)) + else: + self.fail('Invalid query not detected') + + def test_invalid_query_get_subchannel(self): + try: + self._channelz_stub.GetSubchannel( + channelz_pb2.GetSubchannelRequest(subchannel_id=10000)) + except BaseException as e: + self.assertIn('StatusCode.NOT_FOUND', str(e)) + else: + self.fail('Invalid query not detected') + + def test_invalid_query_get_socket(self): + try: + self._channelz_stub.GetSocket( + 
channelz_pb2.GetSocketRequest(socket_id=10000)) + except BaseException as e: + self.assertIn('StatusCode.NOT_FOUND', str(e)) + else: + self.fail('Invalid query not detected') + + def test_invalid_query_get_server_sockets(self): + try: + self._channelz_stub.GetServerSockets( + channelz_pb2.GetServerSocketsRequest( + server_id=10000, + start_socket_id=0, + )) + except BaseException as e: + self.assertIn('StatusCode.NOT_FOUND', str(e)) + else: + self.fail('Invalid query not detected') + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py new file mode 100644 index 00000000000..9a26bac0101 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py new file mode 100644 index 00000000000..e2eff257fa1 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/_fork_interop_test.py @@ -0,0 +1,151 @@ +# Copyright 2019 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Client-side fork interop tests as a unit test.""" + +import six +import subprocess +import sys +import threading +import unittest +from grpc._cython import cygrpc +from tests.fork import methods + +# New instance of multiprocessing.Process using fork without exec can and will +# hang if the Python process has any other threads running. This includes the +# additional thread spawned by our _runner.py class. So in order to test our +# compatibility with multiprocessing, we first fork+exec a new process to ensure +# we don't have any conflicting background threads. 
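A minimal sketch of the fork+exec pattern the comment above describes (the function name and script body are placeholders, not part of the diff): launching a fresh interpreter via subprocess means none of the parent's background threads exist in the child when the test body later calls fork() through multiprocessing.

    import subprocess
    import sys

    def run_in_fresh_interpreter(script):
        # fork+exec: the child is a brand-new Python process, so only its main
        # thread is running when the script itself starts forking.
        proc = subprocess.Popen([sys.executable, '-c', script],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        out, err = proc.communicate()
        return proc.returncode, out, err

    if __name__ == '__main__':
        code, out, _ = run_in_fresh_interpreter("print('hello from a clean child')")
        print(code, out.decode())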
+_CLIENT_FORK_SCRIPT_TEMPLATE = """if True: + import os + import sys + from grpc._cython import cygrpc + from tests.fork import methods + + cygrpc._GRPC_ENABLE_FORK_SUPPORT = True + os.environ['GRPC_POLL_STRATEGY'] = 'epoll1' + methods.TestCase.%s.run_test({ + 'server_host': 'localhost', + 'server_port': %d, + 'use_tls': False + }) +""" +_SUBPROCESS_TIMEOUT_S = 30 + + + sys.platform.startswith("linux"), + "not supported on windows, and fork+exec networking blocked on mac") [email protected](six.PY2, "https://github.com/grpc/grpc/issues/18075") +class ForkInteropTest(unittest.TestCase): + + def setUp(self): + start_server_script = """if True: + import sys + import time + + import grpc + from src.proto.grpc.testing import test_pb2_grpc + from tests.interop import service as interop_service + from tests.unit import test_common + + server = test_common.test_server() + test_pb2_grpc.add_TestServiceServicer_to_server( + interop_service.TestService(), server) + port = server.add_insecure_port('[::]:0') + server.start() + print(port) + sys.stdout.flush() + while True: + time.sleep(1) + """ + self._server_process = subprocess.Popen( + [sys.executable, '-c', start_server_script], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + timer = threading.Timer(_SUBPROCESS_TIMEOUT_S, + self._server_process.kill) + try: + timer.start() + self._port = int(self._server_process.stdout.readline()) + except ValueError: + raise Exception('Failed to get port from server') + finally: + timer.cancel() + + def testConnectivityWatch(self): + self._verifyTestCase(methods.TestCase.CONNECTIVITY_WATCH) + + def testCloseChannelBeforeFork(self): + self._verifyTestCase(methods.TestCase.CLOSE_CHANNEL_BEFORE_FORK) + + def testAsyncUnarySameChannel(self): + self._verifyTestCase(methods.TestCase.ASYNC_UNARY_SAME_CHANNEL) + + def testAsyncUnaryNewChannel(self): + self._verifyTestCase(methods.TestCase.ASYNC_UNARY_NEW_CHANNEL) + + def testBlockingUnarySameChannel(self): + self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_SAME_CHANNEL) + + def testBlockingUnaryNewChannel(self): + self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_NEW_CHANNEL) + + def testInProgressBidiContinueCall(self): + self._verifyTestCase(methods.TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL) + + def testInProgressBidiSameChannelAsyncCall(self): + self._verifyTestCase( + methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL) + + def testInProgressBidiSameChannelBlockingCall(self): + self._verifyTestCase( + methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL) + + def testInProgressBidiNewChannelAsyncCall(self): + self._verifyTestCase( + methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL) + + def testInProgressBidiNewChannelBlockingCall(self): + self._verifyTestCase( + methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL) + + def tearDown(self): + self._server_process.kill() + + def _verifyTestCase(self, test_case): + script = _CLIENT_FORK_SCRIPT_TEMPLATE % (test_case.name, self._port) + process = subprocess.Popen([sys.executable, '-c', script], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + timer = threading.Timer(_SUBPROCESS_TIMEOUT_S, process.kill) + try: + timer.start() + try: + out, err = process.communicate(timeout=_SUBPROCESS_TIMEOUT_S) + except TypeError: + # The timeout parameter was added in Python 3.3. 
+ out, err = process.communicate() + except subprocess.TimeoutExpired: + process.kill() + raise RuntimeError('Process failed to terminate') + finally: + timer.cancel() + self.assertEqual( + 0, process.returncode, + 'process failed with exit code %d (stdout: %s, stderr: %s)' % + (process.returncode, out, err)) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py new file mode 100644 index 00000000000..852e6da4d69 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/client.py @@ -0,0 +1,72 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The Python implementation of the GRPC interoperability test client.""" + +import argparse +import logging +import sys + +from tests.fork import methods + + +def _args(): + + def parse_bool(value): + if value == 'true': + return True + if value == 'false': + return False + raise argparse.ArgumentTypeError('Only true/false allowed') + + parser = argparse.ArgumentParser() + parser.add_argument('--server_host', + default="localhost", + type=str, + help='the host to which to connect') + parser.add_argument('--server_port', + type=int, + required=True, + help='the port to which to connect') + parser.add_argument('--test_case', + default='large_unary', + type=str, + help='the test case to execute') + parser.add_argument('--use_tls', + default=False, + type=parse_bool, + help='require a secure connection') + return parser.parse_args() + + +def _test_case_from_arg(test_case_arg): + for test_case in methods.TestCase: + if test_case_arg == test_case.value: + return test_case + else: + raise ValueError('No test case "%s"!' % test_case_arg) + + +def test_fork(): + logging.basicConfig(level=logging.INFO) + args = vars(_args()) + if args['test_case'] == "all": + for test_case in methods.TestCase: + test_case.run_test(args) + else: + test_case = _test_case_from_arg(args['test_case']) + test_case.run_test(args) + + +if __name__ == '__main__': + test_fork() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py new file mode 100644 index 00000000000..2123c699161 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/fork/methods.py @@ -0,0 +1,451 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Implementations of fork support test methods.""" + +import enum +import json +import logging +import multiprocessing +import os +import threading +import time + +import grpc + +from six.moves import queue + +from src.proto.grpc.testing import empty_pb2 +from src.proto.grpc.testing import messages_pb2 +from src.proto.grpc.testing import test_pb2_grpc + +_LOGGER = logging.getLogger(__name__) +_RPC_TIMEOUT_S = 10 +_CHILD_FINISH_TIMEOUT_S = 60 + + +def _channel(args): + target = '{}:{}'.format(args['server_host'], args['server_port']) + if args['use_tls']: + channel_credentials = grpc.ssl_channel_credentials() + channel = grpc.secure_channel(target, channel_credentials) + else: + channel = grpc.insecure_channel(target) + return channel + + +def _validate_payload_type_and_length(response, expected_type, expected_length): + if response.payload.type is not expected_type: + raise ValueError('expected payload type %s, got %s' % + (expected_type, type(response.payload.type))) + elif len(response.payload.body) != expected_length: + raise ValueError('expected payload body size %d, got %d' % + (expected_length, len(response.payload.body))) + + +def _async_unary(stub): + size = 314159 + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=size, + payload=messages_pb2.Payload(body=b'\x00' * 271828)) + response_future = stub.UnaryCall.future(request, timeout=_RPC_TIMEOUT_S) + response = response_future.result() + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size) + + +def _blocking_unary(stub): + size = 314159 + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=size, + payload=messages_pb2.Payload(body=b'\x00' * 271828)) + response = stub.UnaryCall(request, timeout=_RPC_TIMEOUT_S) + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size) + + +class _Pipe(object): + + def __init__(self): + self._condition = threading.Condition() + self._values = [] + self._open = True + + def __iter__(self): + return self + + def __next__(self): + return self.next() + + def next(self): + with self._condition: + while not self._values and self._open: + self._condition.wait() + if self._values: + return self._values.pop(0) + else: + raise StopIteration() + + def add(self, value): + with self._condition: + self._values.append(value) + self._condition.notify() + + def close(self): + with self._condition: + self._open = False + self._condition.notify() + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + +class _ChildProcess(object): + + def __init__(self, task, args=None): + if args is None: + args = () + self._exceptions = multiprocessing.Queue() + + def record_exceptions(): + try: + task(*args) + except grpc.RpcError as rpc_error: + self._exceptions.put('RpcError: %s' % rpc_error) + except Exception as e: # pylint: disable=broad-except + self._exceptions.put(e) + + self._process = multiprocessing.Process(target=record_exceptions) + + def start(self): + self._process.start() + + def finish(self): + self._process.join(timeout=_CHILD_FINISH_TIMEOUT_S) + if self._process.is_alive(): + raise RuntimeError('Child process did not terminate') + if self._process.exitcode != 0: + raise ValueError('Child process failed with exitcode %d' % + self._process.exitcode) + try: + exception = self._exceptions.get(block=False) + raise ValueError('Child process failed: %s' % exception) + except queue.Empty: + pass + + +def 
_async_unary_same_channel(channel): + + def child_target(): + try: + _async_unary(stub) + raise Exception( + 'Child should not be able to re-use channel after fork') + except ValueError as expected_value_error: + pass + + stub = test_pb2_grpc.TestServiceStub(channel) + _async_unary(stub) + child_process = _ChildProcess(child_target) + child_process.start() + _async_unary(stub) + child_process.finish() + + +def _async_unary_new_channel(channel, args): + + def child_target(): + with _channel(args) as child_channel: + child_stub = test_pb2_grpc.TestServiceStub(child_channel) + _async_unary(child_stub) + child_channel.close() + + stub = test_pb2_grpc.TestServiceStub(channel) + _async_unary(stub) + child_process = _ChildProcess(child_target) + child_process.start() + _async_unary(stub) + child_process.finish() + + +def _blocking_unary_same_channel(channel): + + def child_target(): + try: + _blocking_unary(stub) + raise Exception( + 'Child should not be able to re-use channel after fork') + except ValueError as expected_value_error: + pass + + stub = test_pb2_grpc.TestServiceStub(channel) + _blocking_unary(stub) + child_process = _ChildProcess(child_target) + child_process.start() + child_process.finish() + + +def _blocking_unary_new_channel(channel, args): + + def child_target(): + with _channel(args) as child_channel: + child_stub = test_pb2_grpc.TestServiceStub(child_channel) + _blocking_unary(child_stub) + + stub = test_pb2_grpc.TestServiceStub(channel) + _blocking_unary(stub) + child_process = _ChildProcess(child_target) + child_process.start() + _blocking_unary(stub) + child_process.finish() + + +# Verify that the fork channel registry can handle already closed channels +def _close_channel_before_fork(channel, args): + + def child_target(): + new_channel.close() + with _channel(args) as child_channel: + child_stub = test_pb2_grpc.TestServiceStub(child_channel) + _blocking_unary(child_stub) + + stub = test_pb2_grpc.TestServiceStub(channel) + _blocking_unary(stub) + channel.close() + + with _channel(args) as new_channel: + new_stub = test_pb2_grpc.TestServiceStub(new_channel) + child_process = _ChildProcess(child_target) + child_process.start() + _blocking_unary(new_stub) + child_process.finish() + + +def _connectivity_watch(channel, args): + + parent_states = [] + parent_channel_ready_event = threading.Event() + + def child_target(): + + child_channel_ready_event = threading.Event() + + def child_connectivity_callback(state): + if state is grpc.ChannelConnectivity.READY: + child_channel_ready_event.set() + + with _channel(args) as child_channel: + child_stub = test_pb2_grpc.TestServiceStub(child_channel) + child_channel.subscribe(child_connectivity_callback) + _async_unary(child_stub) + if not child_channel_ready_event.wait(timeout=_RPC_TIMEOUT_S): + raise ValueError('Channel did not move to READY') + if len(parent_states) > 1: + raise ValueError( + 'Received connectivity updates on parent callback', + parent_states) + child_channel.unsubscribe(child_connectivity_callback) + + def parent_connectivity_callback(state): + parent_states.append(state) + if state is grpc.ChannelConnectivity.READY: + parent_channel_ready_event.set() + + channel.subscribe(parent_connectivity_callback) + stub = test_pb2_grpc.TestServiceStub(channel) + child_process = _ChildProcess(child_target) + child_process.start() + _async_unary(stub) + if not parent_channel_ready_event.wait(timeout=_RPC_TIMEOUT_S): + raise ValueError('Channel did not move to READY') + channel.unsubscribe(parent_connectivity_callback) + 
child_process.finish() + + +def _ping_pong_with_child_processes_after_first_response( + channel, args, child_target, run_after_close=True): + request_response_sizes = ( + 31415, + 9, + 2653, + 58979, + ) + request_payload_sizes = ( + 27182, + 8, + 1828, + 45904, + ) + stub = test_pb2_grpc.TestServiceStub(channel) + pipe = _Pipe() + parent_bidi_call = stub.FullDuplexCall(pipe) + child_processes = [] + first_message_received = False + for response_size, payload_size in zip(request_response_sizes, + request_payload_sizes): + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + response_parameters=(messages_pb2.ResponseParameters( + size=response_size),), + payload=messages_pb2.Payload(body=b'\x00' * payload_size)) + pipe.add(request) + if first_message_received: + child_process = _ChildProcess(child_target, + (parent_bidi_call, channel, args)) + child_process.start() + child_processes.append(child_process) + response = next(parent_bidi_call) + first_message_received = True + child_process = _ChildProcess(child_target, + (parent_bidi_call, channel, args)) + child_process.start() + child_processes.append(child_process) + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, + response_size) + pipe.close() + if run_after_close: + child_process = _ChildProcess(child_target, + (parent_bidi_call, channel, args)) + child_process.start() + child_processes.append(child_process) + for child_process in child_processes: + child_process.finish() + + +def _in_progress_bidi_continue_call(channel): + + def child_target(parent_bidi_call, parent_channel, args): + stub = test_pb2_grpc.TestServiceStub(parent_channel) + try: + _async_unary(stub) + raise Exception( + 'Child should not be able to re-use channel after fork') + except ValueError as expected_value_error: + pass + inherited_code = parent_bidi_call.code() + inherited_details = parent_bidi_call.details() + if inherited_code != grpc.StatusCode.CANCELLED: + raise ValueError('Expected inherited code CANCELLED, got %s' % + inherited_code) + if inherited_details != 'Channel closed due to fork': + raise ValueError( + 'Expected inherited details Channel closed due to fork, got %s' + % inherited_details) + + # Don't run child_target after closing the parent call, as the call may have + # received a status from the server before fork occurs. 
+ _ping_pong_with_child_processes_after_first_response(channel, + None, + child_target, + run_after_close=False) + + +def _in_progress_bidi_same_channel_async_call(channel): + + def child_target(parent_bidi_call, parent_channel, args): + stub = test_pb2_grpc.TestServiceStub(parent_channel) + try: + _async_unary(stub) + raise Exception( + 'Child should not be able to re-use channel after fork') + except ValueError as expected_value_error: + pass + + _ping_pong_with_child_processes_after_first_response( + channel, None, child_target) + + +def _in_progress_bidi_same_channel_blocking_call(channel): + + def child_target(parent_bidi_call, parent_channel, args): + stub = test_pb2_grpc.TestServiceStub(parent_channel) + try: + _blocking_unary(stub) + raise Exception( + 'Child should not be able to re-use channel after fork') + except ValueError as expected_value_error: + pass + + _ping_pong_with_child_processes_after_first_response( + channel, None, child_target) + + +def _in_progress_bidi_new_channel_async_call(channel, args): + + def child_target(parent_bidi_call, parent_channel, args): + with _channel(args) as channel: + stub = test_pb2_grpc.TestServiceStub(channel) + _async_unary(stub) + + _ping_pong_with_child_processes_after_first_response( + channel, args, child_target) + + +def _in_progress_bidi_new_channel_blocking_call(channel, args): + + def child_target(parent_bidi_call, parent_channel, args): + with _channel(args) as channel: + stub = test_pb2_grpc.TestServiceStub(channel) + _blocking_unary(stub) + + _ping_pong_with_child_processes_after_first_response( + channel, args, child_target) + + +class TestCase(enum.Enum): + + CONNECTIVITY_WATCH = 'connectivity_watch' + CLOSE_CHANNEL_BEFORE_FORK = 'close_channel_before_fork' + ASYNC_UNARY_SAME_CHANNEL = 'async_unary_same_channel' + ASYNC_UNARY_NEW_CHANNEL = 'async_unary_new_channel' + BLOCKING_UNARY_SAME_CHANNEL = 'blocking_unary_same_channel' + BLOCKING_UNARY_NEW_CHANNEL = 'blocking_unary_new_channel' + IN_PROGRESS_BIDI_CONTINUE_CALL = 'in_progress_bidi_continue_call' + IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL = 'in_progress_bidi_same_channel_async_call' + IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL = 'in_progress_bidi_same_channel_blocking_call' + IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL = 'in_progress_bidi_new_channel_async_call' + IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL = 'in_progress_bidi_new_channel_blocking_call' + + def run_test(self, args): + _LOGGER.info("Running %s", self) + channel = _channel(args) + if self is TestCase.ASYNC_UNARY_SAME_CHANNEL: + _async_unary_same_channel(channel) + elif self is TestCase.ASYNC_UNARY_NEW_CHANNEL: + _async_unary_new_channel(channel, args) + elif self is TestCase.BLOCKING_UNARY_SAME_CHANNEL: + _blocking_unary_same_channel(channel) + elif self is TestCase.BLOCKING_UNARY_NEW_CHANNEL: + _blocking_unary_new_channel(channel, args) + elif self is TestCase.CLOSE_CHANNEL_BEFORE_FORK: + _close_channel_before_fork(channel, args) + elif self is TestCase.CONNECTIVITY_WATCH: + _connectivity_watch(channel, args) + elif self is TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL: + _in_progress_bidi_continue_call(channel) + elif self is TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL: + _in_progress_bidi_same_channel_async_call(channel) + elif self is TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL: + _in_progress_bidi_same_channel_blocking_call(channel) + elif self is TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL: + _in_progress_bidi_new_channel_async_call(channel, args) + elif self is 
TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL: + _in_progress_bidi_new_channel_blocking_call(channel, args) + else: + raise NotImplementedError('Test case "%s" not implemented!' % + self.name) + channel.close() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py new file mode 100644 index 00000000000..5772620b602 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py new file mode 100644 index 00000000000..01345aaca08 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/health_check/_health_servicer_test.py @@ -0,0 +1,282 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests of grpc_health.v1.health.""" + +import logging +import threading +import time +import unittest + +import grpc + +from grpc_health.v1 import health +from grpc_health.v1 import health_pb2 +from grpc_health.v1 import health_pb2_grpc + +from tests.unit import test_common +from tests.unit import thread_pool +from tests.unit.framework.common import test_constants + +from six.moves import queue + +_SERVING_SERVICE = 'grpc.test.TestServiceServing' +_UNKNOWN_SERVICE = 'grpc.test.TestServiceUnknown' +_NOT_SERVING_SERVICE = 'grpc.test.TestServiceNotServing' +_WATCH_SERVICE = 'grpc.test.WatchService' + + +def _consume_responses(response_iterator, response_queue): + for response in response_iterator: + response_queue.put(response) + + +class BaseWatchTests(object): + + class WatchTests(unittest.TestCase): + + def start_server(self, non_blocking=False, thread_pool=None): + self._thread_pool = thread_pool + self._servicer = health.HealthServicer( + experimental_non_blocking=non_blocking, + experimental_thread_pool=thread_pool) + self._servicer.set(_SERVING_SERVICE, + health_pb2.HealthCheckResponse.SERVING) + self._servicer.set(_UNKNOWN_SERVICE, + health_pb2.HealthCheckResponse.UNKNOWN) + self._servicer.set(_NOT_SERVING_SERVICE, + health_pb2.HealthCheckResponse.NOT_SERVING) + self._server = test_common.test_server() + port = self._server.add_insecure_port('[::]:0') + health_pb2_grpc.add_HealthServicer_to_server( + self._servicer, self._server) + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + self._stub = health_pb2_grpc.HealthStub(self._channel) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + + def test_watch_empty_service(self): + request = health_pb2.HealthCheckRequest(service='') + response_queue = queue.Queue() + rendezvous = self._stub.Watch(request) + thread = threading.Thread(target=_consume_responses, + args=(rendezvous, response_queue)) + thread.start() + + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, + response.status) + + rendezvous.cancel() + thread.join() + self.assertTrue(response_queue.empty()) + + if self._thread_pool is not None: + self.assertTrue(self._thread_pool.was_used()) + + def test_watch_new_service(self): + request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) + response_queue = queue.Queue() + rendezvous = self._stub.Watch(request) + thread = threading.Thread(target=_consume_responses, + args=(rendezvous, response_queue)) + thread.start() + + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, + response.status) + + self._servicer.set(_WATCH_SERVICE, + health_pb2.HealthCheckResponse.SERVING) + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, + response.status) + + self._servicer.set(_WATCH_SERVICE, + health_pb2.HealthCheckResponse.NOT_SERVING) + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, + response.status) + + rendezvous.cancel() + thread.join() + self.assertTrue(response_queue.empty()) + + def test_watch_service_isolation(self): + request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) + response_queue = queue.Queue() + rendezvous = self._stub.Watch(request) + thread = threading.Thread(target=_consume_responses, + args=(rendezvous, response_queue)) + 
thread.start() + + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, + response.status) + + self._servicer.set('some-other-service', + health_pb2.HealthCheckResponse.SERVING) + with self.assertRaises(queue.Empty): + response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + + rendezvous.cancel() + thread.join() + self.assertTrue(response_queue.empty()) + + def test_two_watchers(self): + request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) + response_queue1 = queue.Queue() + response_queue2 = queue.Queue() + rendezvous1 = self._stub.Watch(request) + rendezvous2 = self._stub.Watch(request) + thread1 = threading.Thread(target=_consume_responses, + args=(rendezvous1, response_queue1)) + thread2 = threading.Thread(target=_consume_responses, + args=(rendezvous2, response_queue2)) + thread1.start() + thread2.start() + + response1 = response_queue1.get( + timeout=test_constants.SHORT_TIMEOUT) + response2 = response_queue2.get( + timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, + response1.status) + self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, + response2.status) + + self._servicer.set(_WATCH_SERVICE, + health_pb2.HealthCheckResponse.SERVING) + response1 = response_queue1.get( + timeout=test_constants.SHORT_TIMEOUT) + response2 = response_queue2.get( + timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, + response1.status) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, + response2.status) + + rendezvous1.cancel() + rendezvous2.cancel() + thread1.join() + thread2.join() + self.assertTrue(response_queue1.empty()) + self.assertTrue(response_queue2.empty()) + + @unittest.skip("https://github.com/grpc/grpc/issues/18127") + def test_cancelled_watch_removed_from_watch_list(self): + request = health_pb2.HealthCheckRequest(service=_WATCH_SERVICE) + response_queue = queue.Queue() + rendezvous = self._stub.Watch(request) + thread = threading.Thread(target=_consume_responses, + args=(rendezvous, response_queue)) + thread.start() + + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVICE_UNKNOWN, + response.status) + + rendezvous.cancel() + self._servicer.set(_WATCH_SERVICE, + health_pb2.HealthCheckResponse.SERVING) + thread.join() + + # Wait, if necessary, for serving thread to process client cancellation + timeout = time.time() + test_constants.TIME_ALLOWANCE + while (time.time() < timeout and + self._servicer._send_response_callbacks[_WATCH_SERVICE]): + time.sleep(1) + self.assertFalse( + self._servicer._send_response_callbacks[_WATCH_SERVICE], + 'watch set should be empty') + self.assertTrue(response_queue.empty()) + + def test_graceful_shutdown(self): + request = health_pb2.HealthCheckRequest(service='') + response_queue = queue.Queue() + rendezvous = self._stub.Watch(request) + thread = threading.Thread(target=_consume_responses, + args=(rendezvous, response_queue)) + thread.start() + + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, + response.status) + + self._servicer.enter_graceful_shutdown() + response = response_queue.get(timeout=test_constants.SHORT_TIMEOUT) + self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, + response.status) + + # This should be a no-op. 
+ self._servicer.set('', health_pb2.HealthCheckResponse.SERVING) + + rendezvous.cancel() + thread.join() + self.assertTrue(response_queue.empty()) + + +class HealthServicerTest(BaseWatchTests.WatchTests): + + def setUp(self): + self._thread_pool = thread_pool.RecordingThreadPool(max_workers=None) + super(HealthServicerTest, + self).start_server(non_blocking=True, + thread_pool=self._thread_pool) + + def test_check_empty_service(self): + request = health_pb2.HealthCheckRequest() + resp = self._stub.Check(request) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status) + + def test_check_serving_service(self): + request = health_pb2.HealthCheckRequest(service=_SERVING_SERVICE) + resp = self._stub.Check(request) + self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status) + + def test_check_unknown_service(self): + request = health_pb2.HealthCheckRequest(service=_UNKNOWN_SERVICE) + resp = self._stub.Check(request) + self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status) + + def test_check_not_serving_service(self): + request = health_pb2.HealthCheckRequest(service=_NOT_SERVING_SERVICE) + resp = self._stub.Check(request) + self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, + resp.status) + + def test_check_not_found_service(self): + request = health_pb2.HealthCheckRequest(service='not-found') + with self.assertRaises(grpc.RpcError) as context: + resp = self._stub.Check(request) + + self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code()) + + def test_health_service_name(self): + self.assertEqual(health.SERVICE_NAME, 'grpc.health.v1.Health') + + +class HealthServicerBackwardsCompatibleWatchTest(BaseWatchTests.WatchTests): + + def setUp(self): + super(HealthServicerBackwardsCompatibleWatchTest, + self).start_server(non_blocking=False, thread_pool=None) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py new file mode 100644 index 00000000000..0753872b5e4 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/http2/negative_http2_client.py @@ -0,0 +1,158 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The Python client used to test negative http2 conditions.""" + +import argparse + +import grpc +import time +from src.proto.grpc.testing import test_pb2_grpc +from src.proto.grpc.testing import messages_pb2 + + +def _validate_payload_type_and_length(response, expected_type, expected_length): + if response.payload.type is not expected_type: + raise ValueError('expected payload type %s, got %s' % + (expected_type, type(response.payload.type))) + elif len(response.payload.body) != expected_length: + raise ValueError('expected payload body size %d, got %d' % + (expected_length, len(response.payload.body))) + + +def _expect_status_code(call, expected_code): + if call.code() != expected_code: + raise ValueError('expected code %s, got %s' % + (expected_code, call.code())) + + +def _expect_status_details(call, expected_details): + if call.details() != expected_details: + raise ValueError('expected message %s, got %s' % + (expected_details, call.details())) + + +def _validate_status_code_and_details(call, expected_code, expected_details): + _expect_status_code(call, expected_code) + _expect_status_details(call, expected_details) + + +# common requests +_REQUEST_SIZE = 314159 +_RESPONSE_SIZE = 271828 + +_SIMPLE_REQUEST = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=_RESPONSE_SIZE, + payload=messages_pb2.Payload(body=b'\x00' * _REQUEST_SIZE)) + + +def _goaway(stub): + first_response = stub.UnaryCall(_SIMPLE_REQUEST) + _validate_payload_type_and_length(first_response, messages_pb2.COMPRESSABLE, + _RESPONSE_SIZE) + time.sleep(1) + second_response = stub.UnaryCall(_SIMPLE_REQUEST) + _validate_payload_type_and_length(second_response, + messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) + + +def _rst_after_header(stub): + resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) + _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, + "Received RST_STREAM with error code 0") + + +def _rst_during_data(stub): + resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) + _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, + "Received RST_STREAM with error code 0") + + +def _rst_after_data(stub): + resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) + _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, + "Received RST_STREAM with error code 0") + + +def _ping(stub): + response = stub.UnaryCall(_SIMPLE_REQUEST) + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, + _RESPONSE_SIZE) + + +def _max_streams(stub): + # send one req to ensure server sets MAX_STREAMS + response = stub.UnaryCall(_SIMPLE_REQUEST) + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, + _RESPONSE_SIZE) + + # give the streams a workout + futures = [] + for _ in range(15): + futures.append(stub.UnaryCall.future(_SIMPLE_REQUEST)) + for future in futures: + _validate_payload_type_and_length(future.result(), + messages_pb2.COMPRESSABLE, + _RESPONSE_SIZE) + + +def _run_test_case(test_case, stub): + if test_case == 'goaway': + _goaway(stub) + elif test_case == 'rst_after_header': + _rst_after_header(stub) + elif test_case == 'rst_during_data': + _rst_during_data(stub) + elif test_case == 'rst_after_data': + _rst_after_data(stub) + elif test_case == 'ping': + _ping(stub) + elif test_case == 'max_streams': + _max_streams(stub) + else: + raise ValueError("Invalid test case: %s" % test_case) + + +def _args(): + parser = argparse.ArgumentParser() + parser.add_argument('--server_host', + help='the host to which to connect', 
+ type=str, + default="127.0.0.1") + parser.add_argument('--server_port', + help='the port to which to connect', + type=int, + default="8080") + parser.add_argument('--test_case', + help='the test case to execute', + type=str, + default="goaway") + return parser.parse_args() + + +def _stub(server_host, server_port): + target = '{}:{}'.format(server_host, server_port) + channel = grpc.insecure_channel(target) + grpc.channel_ready_future(channel).result() + return test_pb2_grpc.TestServiceStub(channel) + + +def main(): + args = _args() + stub = _stub(args.server_host, args.server_port) + _run_test_case(args.test_case, stub) + + +if __name__ == '__main__': + main() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py new file mode 100644 index 00000000000..5fb4f3c3cfd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py new file mode 100644 index 00000000000..fecf31767a7 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py @@ -0,0 +1,44 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Insecure client-server interoperability as a unit test.""" + +import unittest + +import grpc +from src.proto.grpc.testing import test_pb2_grpc + +from tests.interop import _intraop_test_case +from tests.interop import service +from tests.interop import server +from tests.unit import test_common + + +class InsecureIntraopTest(_intraop_test_case.IntraopTestCase, + unittest.TestCase): + + def setUp(self): + self.server = test_common.test_server() + test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(), + self.server) + port = self.server.add_insecure_port('[::]:0') + self.server.start() + self.stub = test_pb2_grpc.TestServiceStub( + grpc.insecure_channel('localhost:{}'.format(port))) + + def tearDown(self): + self.server.stop(None) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py new file mode 100644 index 00000000000..007db7ab41b --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_intraop_test_case.py @@ -0,0 +1,51 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Common code for unit tests of the interoperability test code.""" + +from tests.interop import methods + + +class IntraopTestCase(object): + """Unit test methods. + + This class must be mixed in with unittest.TestCase and a class that defines + setUp and tearDown methods that manage a stub attribute. + """ + + def testEmptyUnary(self): + methods.TestCase.EMPTY_UNARY.test_interoperability(self.stub, None) + + def testLargeUnary(self): + methods.TestCase.LARGE_UNARY.test_interoperability(self.stub, None) + + def testServerStreaming(self): + methods.TestCase.SERVER_STREAMING.test_interoperability(self.stub, None) + + def testClientStreaming(self): + methods.TestCase.CLIENT_STREAMING.test_interoperability(self.stub, None) + + def testPingPong(self): + methods.TestCase.PING_PONG.test_interoperability(self.stub, None) + + def testCancelAfterBegin(self): + methods.TestCase.CANCEL_AFTER_BEGIN.test_interoperability( + self.stub, None) + + def testCancelAfterFirstResponse(self): + methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE.test_interoperability( + self.stub, None) + + def testTimeoutOnSleepingServer(self): + methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER.test_interoperability( + self.stub, None) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py new file mode 100644 index 00000000000..bf1f1b118b3 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/_secure_intraop_test.py @@ -0,0 +1,54 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Secure client-server interoperability as a unit test.""" + +import unittest + +import grpc +from src.proto.grpc.testing import test_pb2_grpc + +from tests.interop import _intraop_test_case +from tests.interop import service +from tests.interop import resources +from tests.unit import test_common + +_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' + + +class SecureIntraopTest(_intraop_test_case.IntraopTestCase, unittest.TestCase): + + def setUp(self): + self.server = test_common.test_server() + test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(), + self.server) + port = self.server.add_secure_port( + '[::]:0', + grpc.ssl_server_credentials([(resources.private_key(), + resources.certificate_chain())])) + self.server.start() + self.stub = test_pb2_grpc.TestServiceStub( + grpc.secure_channel( + 'localhost:{}'.format(port), + grpc.ssl_channel_credentials( + resources.test_root_certificates()), (( + 'grpc.ssl_target_name_override', + _SERVER_HOST_OVERRIDE, + ),))) + + def tearDown(self): + self.server.stop(None) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py new file mode 100644 index 00000000000..4d35f7ca32a --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/client.py @@ -0,0 +1,180 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The Python implementation of the GRPC interoperability test client.""" + +import argparse +import os + +from google import auth as google_auth +from google.auth import jwt as google_auth_jwt +import grpc +from src.proto.grpc.testing import test_pb2_grpc + +from tests.interop import methods +from tests.interop import resources + + +def parse_interop_client_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--server_host', + default="localhost", + type=str, + help='the host to which to connect') + parser.add_argument('--server_port', + type=int, + required=True, + help='the port to which to connect') + parser.add_argument('--test_case', + default='large_unary', + type=str, + help='the test case to execute') + parser.add_argument('--use_tls', + default=False, + type=resources.parse_bool, + help='require a secure connection') + parser.add_argument('--use_alts', + default=False, + type=resources.parse_bool, + help='require an ALTS secure connection') + parser.add_argument('--use_test_ca', + default=False, + type=resources.parse_bool, + help='replace platform root CAs with ca.pem') + parser.add_argument('--custom_credentials_type', + choices=["compute_engine_channel_creds"], + default=None, + help='use google default credentials') + parser.add_argument('--server_host_override', + type=str, + help='the server host to which to claim to connect') + parser.add_argument('--oauth_scope', + type=str, + help='scope for OAuth tokens') + parser.add_argument('--default_service_account', + type=str, + help='email address of the default service account') + parser.add_argument( + "--grpc_test_use_grpclb_with_child_policy", + type=str, + help=( + "If non-empty, set a static service config on channels created by " + + "grpc::CreateTestChannel, that configures the grpclb LB policy " + + "with a child policy being the value of this flag (e.g. 
round_robin " + + "or pick_first).")) + return parser.parse_args() + + +def _create_call_credentials(args): + if args.test_case == 'oauth2_auth_token': + google_credentials, unused_project_id = google_auth.default( + scopes=[args.oauth_scope]) + google_credentials.refresh(google_auth.transport.requests.Request()) + return grpc.access_token_call_credentials(google_credentials.token) + elif args.test_case == 'compute_engine_creds': + google_credentials, unused_project_id = google_auth.default( + scopes=[args.oauth_scope]) + return grpc.metadata_call_credentials( + google_auth.transport.grpc.AuthMetadataPlugin( + credentials=google_credentials, + request=google_auth.transport.requests.Request())) + elif args.test_case == 'jwt_token_creds': + google_credentials = google_auth_jwt.OnDemandCredentials.from_service_account_file( + os.environ[google_auth.environment_vars.CREDENTIALS]) + return grpc.metadata_call_credentials( + google_auth.transport.grpc.AuthMetadataPlugin( + credentials=google_credentials, request=None)) + else: + return None + + +def get_secure_channel_parameters(args): + call_credentials = _create_call_credentials(args) + + channel_opts = () + if args.grpc_test_use_grpclb_with_child_policy: + channel_opts += (( + "grpc.service_config", + '{"loadBalancingConfig": [{"grpclb": {"childPolicy": [{"%s": {}}]}}]}' + % args.grpc_test_use_grpclb_with_child_policy),) + if args.custom_credentials_type is not None: + if args.custom_credentials_type == "compute_engine_channel_creds": + assert call_credentials is None + google_credentials, unused_project_id = google_auth.default( + scopes=[args.oauth_scope]) + call_creds = grpc.metadata_call_credentials( + google_auth.transport.grpc.AuthMetadataPlugin( + credentials=google_credentials, + request=google_auth.transport.requests.Request())) + channel_credentials = grpc.compute_engine_channel_credentials( + call_creds) + else: + raise ValueError("Unknown credentials type '{}'".format( + args.custom_credentials_type)) + elif args.use_tls: + if args.use_test_ca: + root_certificates = resources.test_root_certificates() + else: + root_certificates = None # will load default roots. + + channel_credentials = grpc.ssl_channel_credentials(root_certificates) + if call_credentials is not None: + channel_credentials = grpc.composite_channel_credentials( + channel_credentials, call_credentials) + + if args.server_host_override: + channel_opts += (( + 'grpc.ssl_target_name_override', + args.server_host_override, + ),) + elif args.use_alts: + channel_credentials = grpc.alts_channel_credentials() + + return channel_credentials, channel_opts + + +def _create_channel(args): + target = '{}:{}'.format(args.server_host, args.server_port) + + if args.use_tls or args.use_alts or args.custom_credentials_type is not None: + channel_credentials, options = get_secure_channel_parameters(args) + return grpc.secure_channel(target, channel_credentials, options) + else: + return grpc.insecure_channel(target) + + +def create_stub(channel, args): + if args.test_case == "unimplemented_service": + return test_pb2_grpc.UnimplementedServiceStub(channel) + else: + return test_pb2_grpc.TestServiceStub(channel) + + +def _test_case_from_arg(test_case_arg): + for test_case in methods.TestCase: + if test_case_arg == test_case.value: + return test_case + else: + raise ValueError('No test case "%s"!' 
% test_case_arg) + + +def test_interoperability(): + args = parse_interop_client_args() + channel = _create_channel(args) + stub = create_stub(channel, args) + test_case = _test_case_from_arg(args.test_case) + test_case.test_interoperability(stub, args) + + +if __name__ == '__main__': + test_interoperability() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py new file mode 100644 index 00000000000..44a1c38bb93 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/methods.py @@ -0,0 +1,482 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Implementations of interoperability test methods.""" + +# NOTE(lidiz) This module only exists in Bazel BUILD file, for more details +# please refer to comments in the "bazel_namespace_package_hack" module. +try: + from tests import bazel_namespace_package_hack + bazel_namespace_package_hack.sys_path_to_site_dir_hack() +except ImportError: + pass + +import enum +import json +import os +import threading +import time + +from google import auth as google_auth +from google.auth import environment_vars as google_auth_environment_vars +from google.auth.transport import grpc as google_auth_transport_grpc +from google.auth.transport import requests as google_auth_transport_requests +import grpc + +from src.proto.grpc.testing import empty_pb2 +from src.proto.grpc.testing import messages_pb2 + +_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial" +_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin" + + +def _expect_status_code(call, expected_code): + if call.code() != expected_code: + raise ValueError('expected code %s, got %s' % + (expected_code, call.code())) + + +def _expect_status_details(call, expected_details): + if call.details() != expected_details: + raise ValueError('expected message %s, got %s' % + (expected_details, call.details())) + + +def _validate_status_code_and_details(call, expected_code, expected_details): + _expect_status_code(call, expected_code) + _expect_status_details(call, expected_details) + + +def _validate_payload_type_and_length(response, expected_type, expected_length): + if response.payload.type is not expected_type: + raise ValueError('expected payload type %s, got %s' % + (expected_type, type(response.payload.type))) + elif len(response.payload.body) != expected_length: + raise ValueError('expected payload body size %d, got %d' % + (expected_length, len(response.payload.body))) + + +def _large_unary_common_behavior(stub, fill_username, fill_oauth_scope, + call_credentials): + size = 314159 + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=size, + payload=messages_pb2.Payload(body=b'\x00' * 271828), + fill_username=fill_username, + fill_oauth_scope=fill_oauth_scope) + response_future = stub.UnaryCall.future(request, + credentials=call_credentials) + response = response_future.result() + _validate_payload_type_and_length(response, 
messages_pb2.COMPRESSABLE, size) + return response + + +def _empty_unary(stub): + response = stub.EmptyCall(empty_pb2.Empty()) + if not isinstance(response, empty_pb2.Empty): + raise TypeError('response is of type "%s", not empty_pb2.Empty!' % + type(response)) + + +def _large_unary(stub): + _large_unary_common_behavior(stub, False, False, None) + + +def _client_streaming(stub): + payload_body_sizes = ( + 27182, + 8, + 1828, + 45904, + ) + payloads = (messages_pb2.Payload(body=b'\x00' * size) + for size in payload_body_sizes) + requests = (messages_pb2.StreamingInputCallRequest(payload=payload) + for payload in payloads) + response = stub.StreamingInputCall(requests) + if response.aggregated_payload_size != 74922: + raise ValueError('incorrect size %d!' % + response.aggregated_payload_size) + + +def _server_streaming(stub): + sizes = ( + 31415, + 9, + 2653, + 58979, + ) + + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + response_parameters=( + messages_pb2.ResponseParameters(size=sizes[0]), + messages_pb2.ResponseParameters(size=sizes[1]), + messages_pb2.ResponseParameters(size=sizes[2]), + messages_pb2.ResponseParameters(size=sizes[3]), + )) + response_iterator = stub.StreamingOutputCall(request) + for index, response in enumerate(response_iterator): + _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, + sizes[index]) + + +class _Pipe(object): + + def __init__(self): + self._condition = threading.Condition() + self._values = [] + self._open = True + + def __iter__(self): + return self + + def __next__(self): + return self.next() + + def next(self): + with self._condition: + while not self._values and self._open: + self._condition.wait() + if self._values: + return self._values.pop(0) + else: + raise StopIteration() + + def add(self, value): + with self._condition: + self._values.append(value) + self._condition.notify() + + def close(self): + with self._condition: + self._open = False + self._condition.notify() + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + +def _ping_pong(stub): + request_response_sizes = ( + 31415, + 9, + 2653, + 58979, + ) + request_payload_sizes = ( + 27182, + 8, + 1828, + 45904, + ) + + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe) + for response_size, payload_size in zip(request_response_sizes, + request_payload_sizes): + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + response_parameters=(messages_pb2.ResponseParameters( + size=response_size),), + payload=messages_pb2.Payload(body=b'\x00' * payload_size)) + pipe.add(request) + response = next(response_iterator) + _validate_payload_type_and_length(response, + messages_pb2.COMPRESSABLE, + response_size) + + +def _cancel_after_begin(stub): + with _Pipe() as pipe: + response_future = stub.StreamingInputCall.future(pipe) + response_future.cancel() + if not response_future.cancelled(): + raise ValueError('expected cancelled method to return True') + if response_future.code() is not grpc.StatusCode.CANCELLED: + raise ValueError('expected status code CANCELLED') + + +def _cancel_after_first_response(stub): + request_response_sizes = ( + 31415, + 9, + 2653, + 58979, + ) + request_payload_sizes = ( + 27182, + 8, + 1828, + 45904, + ) + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe) + + response_size = request_response_sizes[0] + payload_size = request_payload_sizes[0] + request = 
messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + response_parameters=(messages_pb2.ResponseParameters( + size=response_size),), + payload=messages_pb2.Payload(body=b'\x00' * payload_size)) + pipe.add(request) + response = next(response_iterator) + # We test the contents of `response` in the Ping Pong test - don't check + # them here. + response_iterator.cancel() + + try: + next(response_iterator) + except grpc.RpcError as rpc_error: + if rpc_error.code() is not grpc.StatusCode.CANCELLED: + raise + else: + raise ValueError('expected call to be cancelled') + + +def _timeout_on_sleeping_server(stub): + request_payload_size = 27182 + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe, timeout=0.001) + + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + payload=messages_pb2.Payload(body=b'\x00' * request_payload_size)) + pipe.add(request) + try: + next(response_iterator) + except grpc.RpcError as rpc_error: + if rpc_error.code() is not grpc.StatusCode.DEADLINE_EXCEEDED: + raise + else: + raise ValueError('expected call to exceed deadline') + + +def _empty_stream(stub): + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe) + pipe.close() + try: + next(response_iterator) + raise ValueError('expected exactly 0 responses') + except StopIteration: + pass + + +def _status_code_and_message(stub): + details = 'test status message' + code = 2 + status = grpc.StatusCode.UNKNOWN # code = 2 + + # Test with a UnaryCall + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=1, + payload=messages_pb2.Payload(body=b'\x00'), + response_status=messages_pb2.EchoStatus(code=code, message=details)) + response_future = stub.UnaryCall.future(request) + _validate_status_code_and_details(response_future, status, details) + + # Test with a FullDuplexCall + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe) + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + response_parameters=(messages_pb2.ResponseParameters(size=1),), + payload=messages_pb2.Payload(body=b'\x00'), + response_status=messages_pb2.EchoStatus(code=code, message=details)) + pipe.add(request) # sends the initial request. 
+ try: + next(response_iterator) + except grpc.RpcError as rpc_error: + assert rpc_error.code() == status + # Dropping out of with block closes the pipe + _validate_status_code_and_details(response_iterator, status, details) + + +def _unimplemented_method(test_service_stub): + response_future = (test_service_stub.UnimplementedCall.future( + empty_pb2.Empty())) + _expect_status_code(response_future, grpc.StatusCode.UNIMPLEMENTED) + + +def _unimplemented_service(unimplemented_service_stub): + response_future = (unimplemented_service_stub.UnimplementedCall.future( + empty_pb2.Empty())) + _expect_status_code(response_future, grpc.StatusCode.UNIMPLEMENTED) + + +def _custom_metadata(stub): + initial_metadata_value = "test_initial_metadata_value" + trailing_metadata_value = b"\x0a\x0b\x0a\x0b\x0a\x0b" + metadata = ((_INITIAL_METADATA_KEY, initial_metadata_value), + (_TRAILING_METADATA_KEY, trailing_metadata_value)) + + def _validate_metadata(response): + initial_metadata = dict(response.initial_metadata()) + if initial_metadata[_INITIAL_METADATA_KEY] != initial_metadata_value: + raise ValueError('expected initial metadata %s, got %s' % + (initial_metadata_value, + initial_metadata[_INITIAL_METADATA_KEY])) + trailing_metadata = dict(response.trailing_metadata()) + if trailing_metadata[_TRAILING_METADATA_KEY] != trailing_metadata_value: + raise ValueError('expected trailing metadata %s, got %s' % + (trailing_metadata_value, + trailing_metadata[_TRAILING_METADATA_KEY])) + + # Testing with UnaryCall + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=1, + payload=messages_pb2.Payload(body=b'\x00')) + response_future = stub.UnaryCall.future(request, metadata=metadata) + _validate_metadata(response_future) + + # Testing with FullDuplexCall + with _Pipe() as pipe: + response_iterator = stub.FullDuplexCall(pipe, metadata=metadata) + request = messages_pb2.StreamingOutputCallRequest( + response_type=messages_pb2.COMPRESSABLE, + response_parameters=(messages_pb2.ResponseParameters(size=1),)) + pipe.add(request) # Sends the request + next(response_iterator) # Causes server to send trailing metadata + # Dropping out of the with block closes the pipe + _validate_metadata(response_iterator) + + +def _compute_engine_creds(stub, args): + response = _large_unary_common_behavior(stub, True, True, None) + if args.default_service_account != response.username: + raise ValueError('expected username %s, got %s' % + (args.default_service_account, response.username)) + + +def _oauth2_auth_token(stub, args): + json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS] + wanted_email = json.load(open(json_key_filename, 'r'))['client_email'] + response = _large_unary_common_behavior(stub, True, True, None) + if wanted_email != response.username: + raise ValueError('expected username %s, got %s' % + (wanted_email, response.username)) + if args.oauth_scope.find(response.oauth_scope) == -1: + raise ValueError( + 'expected to find oauth scope "{}" in received "{}"'.format( + response.oauth_scope, args.oauth_scope)) + + +def _jwt_token_creds(stub, args): + json_key_filename = os.environ[google_auth_environment_vars.CREDENTIALS] + wanted_email = json.load(open(json_key_filename, 'r'))['client_email'] + response = _large_unary_common_behavior(stub, True, False, None) + if wanted_email != response.username: + raise ValueError('expected username %s, got %s' % + (wanted_email, response.username)) + + +def _per_rpc_creds(stub, args): + json_key_filename = 
os.environ[google_auth_environment_vars.CREDENTIALS] + wanted_email = json.load(open(json_key_filename, 'r'))['client_email'] + google_credentials, unused_project_id = google_auth.default( + scopes=[args.oauth_scope]) + call_credentials = grpc.metadata_call_credentials( + google_auth_transport_grpc.AuthMetadataPlugin( + credentials=google_credentials, + request=google_auth_transport_requests.Request())) + response = _large_unary_common_behavior(stub, True, False, call_credentials) + if wanted_email != response.username: + raise ValueError('expected username %s, got %s' % + (wanted_email, response.username)) + + +def _special_status_message(stub, args): + details = b'\t\ntest with whitespace\r\nand Unicode BMP \xe2\x98\xba and non-BMP \xf0\x9f\x98\x88\t\n'.decode( + 'utf-8') + code = 2 + status = grpc.StatusCode.UNKNOWN # code = 2 + + # Test with a UnaryCall + request = messages_pb2.SimpleRequest( + response_type=messages_pb2.COMPRESSABLE, + response_size=1, + payload=messages_pb2.Payload(body=b'\x00'), + response_status=messages_pb2.EchoStatus(code=code, message=details)) + response_future = stub.UnaryCall.future(request) + _validate_status_code_and_details(response_future, status, details) + + +class TestCase(enum.Enum): + EMPTY_UNARY = 'empty_unary' + LARGE_UNARY = 'large_unary' + SERVER_STREAMING = 'server_streaming' + CLIENT_STREAMING = 'client_streaming' + PING_PONG = 'ping_pong' + CANCEL_AFTER_BEGIN = 'cancel_after_begin' + CANCEL_AFTER_FIRST_RESPONSE = 'cancel_after_first_response' + EMPTY_STREAM = 'empty_stream' + STATUS_CODE_AND_MESSAGE = 'status_code_and_message' + UNIMPLEMENTED_METHOD = 'unimplemented_method' + UNIMPLEMENTED_SERVICE = 'unimplemented_service' + CUSTOM_METADATA = "custom_metadata" + COMPUTE_ENGINE_CREDS = 'compute_engine_creds' + OAUTH2_AUTH_TOKEN = 'oauth2_auth_token' + JWT_TOKEN_CREDS = 'jwt_token_creds' + PER_RPC_CREDS = 'per_rpc_creds' + TIMEOUT_ON_SLEEPING_SERVER = 'timeout_on_sleeping_server' + SPECIAL_STATUS_MESSAGE = 'special_status_message' + + def test_interoperability(self, stub, args): + if self is TestCase.EMPTY_UNARY: + _empty_unary(stub) + elif self is TestCase.LARGE_UNARY: + _large_unary(stub) + elif self is TestCase.SERVER_STREAMING: + _server_streaming(stub) + elif self is TestCase.CLIENT_STREAMING: + _client_streaming(stub) + elif self is TestCase.PING_PONG: + _ping_pong(stub) + elif self is TestCase.CANCEL_AFTER_BEGIN: + _cancel_after_begin(stub) + elif self is TestCase.CANCEL_AFTER_FIRST_RESPONSE: + _cancel_after_first_response(stub) + elif self is TestCase.TIMEOUT_ON_SLEEPING_SERVER: + _timeout_on_sleeping_server(stub) + elif self is TestCase.EMPTY_STREAM: + _empty_stream(stub) + elif self is TestCase.STATUS_CODE_AND_MESSAGE: + _status_code_and_message(stub) + elif self is TestCase.UNIMPLEMENTED_METHOD: + _unimplemented_method(stub) + elif self is TestCase.UNIMPLEMENTED_SERVICE: + _unimplemented_service(stub) + elif self is TestCase.CUSTOM_METADATA: + _custom_metadata(stub) + elif self is TestCase.COMPUTE_ENGINE_CREDS: + _compute_engine_creds(stub, args) + elif self is TestCase.OAUTH2_AUTH_TOKEN: + _oauth2_auth_token(stub, args) + elif self is TestCase.JWT_TOKEN_CREDS: + _jwt_token_creds(stub, args) + elif self is TestCase.PER_RPC_CREDS: + _per_rpc_creds(stub, args) + elif self is TestCase.SPECIAL_STATUS_MESSAGE: + _special_status_message(stub, args) + else: + raise NotImplementedError('Test case "%s" not implemented!' 
% + self.name) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py new file mode 100644 index 00000000000..a55919a60ae --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/resources.py @@ -0,0 +1,42 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Constants and functions for data used in interoperability testing.""" + +import argparse +import pkgutil +import os + +_ROOT_CERTIFICATES_RESOURCE_PATH = 'credentials/ca.pem' +_PRIVATE_KEY_RESOURCE_PATH = 'credentials/server1.key' +_CERTIFICATE_CHAIN_RESOURCE_PATH = 'credentials/server1.pem' + + +def test_root_certificates(): + return pkgutil.get_data(__name__, _ROOT_CERTIFICATES_RESOURCE_PATH) + + +def private_key(): + return pkgutil.get_data(__name__, _PRIVATE_KEY_RESOURCE_PATH) + + +def certificate_chain(): + return pkgutil.get_data(__name__, _CERTIFICATE_CHAIN_RESOURCE_PATH) + + +def parse_bool(value): + if value == 'true': + return True + if value == 'false': + return False + raise argparse.ArgumentTypeError('Only true/false allowed') diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py new file mode 100644 index 00000000000..c85adb0b0bb --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/server.py @@ -0,0 +1,76 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
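The interop server module that follows wires resources.parse_bool into argparse so that --use_tls and --use_alts accept the literal strings 'true' and 'false'. A minimal sketch of that wiring, assuming the grpcio_tests package layout above is importable; the flag shown is the one the server actually registers, the parsed values are illustrative:

import argparse

from tests.interop import resources

parser = argparse.ArgumentParser()
parser.add_argument('--use_tls',
                    default=False,
                    type=resources.parse_bool,
                    help='require a secure connection')
args = parser.parse_args(['--use_tls=true'])
assert args.use_tls is True
assert parser.parse_args(['--use_tls=false']).use_tls is False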
+"""The Python implementation of the GRPC interoperability test server.""" + +import argparse +from concurrent import futures +import logging + +import grpc +from src.proto.grpc.testing import test_pb2_grpc + +from tests.interop import service +from tests.interop import resources +from tests.unit import test_common + +logging.basicConfig() +_LOGGER = logging.getLogger(__name__) + + +def parse_interop_server_arguments(): + parser = argparse.ArgumentParser() + parser.add_argument('--port', + type=int, + required=True, + help='the port on which to serve') + parser.add_argument('--use_tls', + default=False, + type=resources.parse_bool, + help='require a secure connection') + parser.add_argument('--use_alts', + default=False, + type=resources.parse_bool, + help='require an ALTS connection') + return parser.parse_args() + + +def get_server_credentials(use_tls): + if use_tls: + private_key = resources.private_key() + certificate_chain = resources.certificate_chain() + return grpc.ssl_server_credentials(((private_key, certificate_chain),)) + else: + return grpc.alts_server_credentials() + + +def serve(): + args = parse_interop_server_arguments() + + server = test_common.test_server() + test_pb2_grpc.add_TestServiceServicer_to_server(service.TestService(), + server) + if args.use_tls or args.use_alts: + credentials = get_server_credentials(args.use_tls) + server.add_secure_port('[::]:{}'.format(args.port), credentials) + else: + server.add_insecure_port('[::]:{}'.format(args.port)) + + server.start() + _LOGGER.info('Server serving.') + server.wait_for_termination() + _LOGGER.info('Server stopped; exiting.') + + +if __name__ == '__main__': + serve() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py new file mode 100644 index 00000000000..08bb0c45a24 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/interop/service.py @@ -0,0 +1,96 @@ +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The Python implementation of the TestServicer.""" + +import time + +import grpc + +from src.proto.grpc.testing import empty_pb2 +from src.proto.grpc.testing import messages_pb2 +from src.proto.grpc.testing import test_pb2_grpc + +_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial" +_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin" +_US_IN_A_SECOND = 1000 * 1000 + + +def _maybe_echo_metadata(servicer_context): + """Copies metadata from request to response if it is present.""" + invocation_metadata = dict(servicer_context.invocation_metadata()) + if _INITIAL_METADATA_KEY in invocation_metadata: + initial_metadatum = (_INITIAL_METADATA_KEY, + invocation_metadata[_INITIAL_METADATA_KEY]) + servicer_context.send_initial_metadata((initial_metadatum,)) + if _TRAILING_METADATA_KEY in invocation_metadata: + trailing_metadatum = (_TRAILING_METADATA_KEY, + invocation_metadata[_TRAILING_METADATA_KEY]) + servicer_context.set_trailing_metadata((trailing_metadatum,)) + + +def _maybe_echo_status_and_message(request, servicer_context): + """Sets the response context code and details if the request asks for them""" + if request.HasField('response_status'): + servicer_context.set_code(request.response_status.code) + servicer_context.set_details(request.response_status.message) + + +class TestService(test_pb2_grpc.TestServiceServicer): + + def EmptyCall(self, request, context): + _maybe_echo_metadata(context) + return empty_pb2.Empty() + + def UnaryCall(self, request, context): + _maybe_echo_metadata(context) + _maybe_echo_status_and_message(request, context) + return messages_pb2.SimpleResponse( + payload=messages_pb2.Payload(type=messages_pb2.COMPRESSABLE, + body=b'\x00' * request.response_size)) + + def StreamingOutputCall(self, request, context): + _maybe_echo_status_and_message(request, context) + for response_parameters in request.response_parameters: + if response_parameters.interval_us != 0: + time.sleep(response_parameters.interval_us / _US_IN_A_SECOND) + yield messages_pb2.StreamingOutputCallResponse( + payload=messages_pb2.Payload(type=request.response_type, + body=b'\x00' * + response_parameters.size)) + + def StreamingInputCall(self, request_iterator, context): + aggregate_size = 0 + for request in request_iterator: + if request.payload is not None and request.payload.body: + aggregate_size += len(request.payload.body) + return messages_pb2.StreamingInputCallResponse( + aggregated_payload_size=aggregate_size) + + def FullDuplexCall(self, request_iterator, context): + _maybe_echo_metadata(context) + for request in request_iterator: + _maybe_echo_status_and_message(request, context) + for response_parameters in request.response_parameters: + if response_parameters.interval_us != 0: + time.sleep(response_parameters.interval_us / + _US_IN_A_SECOND) + yield messages_pb2.StreamingOutputCallResponse( + payload=messages_pb2.Payload(type=request.payload.type, + body=b'\x00' * + response_parameters.size)) + + # NOTE(nathaniel): Apparently this is the same as the full-duplex call? + # NOTE(atash): It isn't even called in the interop spec (Oct 22 2015)... + def HalfDuplexCall(self, request_iterator, context): + return self.FullDuplexCall(request_iterator, context) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py new file mode 100644 index 00000000000..5772620b602 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2016 gRPC authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py new file mode 100644 index 00000000000..17835e7c0db --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_client.py @@ -0,0 +1,202 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Defines test client behaviors (UNARY/STREAMING) (SYNC/ASYNC).""" + +import abc +import threading +import time + +from concurrent import futures +from six.moves import queue + +import grpc +from src.proto.grpc.testing import messages_pb2 +from src.proto.grpc.testing import benchmark_service_pb2_grpc +from tests.unit import resources +from tests.unit import test_common + +_TIMEOUT = 60 * 60 * 24 + + +class GenericStub(object): + + def __init__(self, channel): + self.UnaryCall = channel.unary_unary( + '/grpc.testing.BenchmarkService/UnaryCall') + self.StreamingCall = channel.stream_stream( + '/grpc.testing.BenchmarkService/StreamingCall') + + +class BenchmarkClient: + """Benchmark client interface that exposes a non-blocking send_request().""" + + __metaclass__ = abc.ABCMeta + + def __init__(self, server, config, hist): + # Create the stub + if config.HasField('security_params'): + creds = grpc.ssl_channel_credentials( + resources.test_root_certificates()) + channel = test_common.test_secure_channel( + server, creds, config.security_params.server_host_override) + else: + channel = grpc.insecure_channel(server) + + # waits for the channel to be ready before we start sending messages + grpc.channel_ready_future(channel).result() + + if config.payload_config.WhichOneof('payload') == 'simple_params': + self._generic = False + self._stub = benchmark_service_pb2_grpc.BenchmarkServiceStub( + channel) + payload = messages_pb2.Payload( + body=bytes(b'\0' * + config.payload_config.simple_params.req_size)) + self._request = messages_pb2.SimpleRequest( + payload=payload, + response_size=config.payload_config.simple_params.resp_size) + else: + self._generic = True + self._stub = GenericStub(channel) + self._request = bytes(b'\0' * + config.payload_config.bytebuf_params.req_size) + + self._hist = hist + self._response_callbacks = [] + + def add_response_callback(self, callback): + """callback will be invoked as callback(client, query_time)""" + self._response_callbacks.append(callback) + + @abc.abstractmethod + def send_request(self): + """Non-blocking wrapper for a 
client's request operation.""" + raise NotImplementedError() + + def start(self): + pass + + def stop(self): + pass + + def _handle_response(self, client, query_time): + self._hist.add(query_time * 1e9) # Report times in nanoseconds + for callback in self._response_callbacks: + callback(client, query_time) + + +class UnarySyncBenchmarkClient(BenchmarkClient): + + def __init__(self, server, config, hist): + super(UnarySyncBenchmarkClient, self).__init__(server, config, hist) + self._pool = futures.ThreadPoolExecutor( + max_workers=config.outstanding_rpcs_per_channel) + + def send_request(self): + # Send requests in separate threads to support multiple outstanding rpcs + # (See src/proto/grpc/testing/control.proto) + self._pool.submit(self._dispatch_request) + + def stop(self): + self._pool.shutdown(wait=True) + self._stub = None + + def _dispatch_request(self): + start_time = time.time() + self._stub.UnaryCall(self._request, _TIMEOUT) + end_time = time.time() + self._handle_response(self, end_time - start_time) + + +class UnaryAsyncBenchmarkClient(BenchmarkClient): + + def send_request(self): + # Use the Future callback api to support multiple outstanding rpcs + start_time = time.time() + response_future = self._stub.UnaryCall.future(self._request, _TIMEOUT) + response_future.add_done_callback( + lambda resp: self._response_received(start_time, resp)) + + def _response_received(self, start_time, resp): + resp.result() + end_time = time.time() + self._handle_response(self, end_time - start_time) + + def stop(self): + self._stub = None + + +class _SyncStream(object): + + def __init__(self, stub, generic, request, handle_response): + self._stub = stub + self._generic = generic + self._request = request + self._handle_response = handle_response + self._is_streaming = False + self._request_queue = queue.Queue() + self._send_time_queue = queue.Queue() + + def send_request(self): + self._send_time_queue.put(time.time()) + self._request_queue.put(self._request) + + def start(self): + self._is_streaming = True + response_stream = self._stub.StreamingCall(self._request_generator(), + _TIMEOUT) + for _ in response_stream: + self._handle_response( + self, + time.time() - self._send_time_queue.get_nowait()) + + def stop(self): + self._is_streaming = False + + def _request_generator(self): + while self._is_streaming: + try: + request = self._request_queue.get(block=True, timeout=1.0) + yield request + except queue.Empty: + pass + + +class StreamingSyncBenchmarkClient(BenchmarkClient): + + def __init__(self, server, config, hist): + super(StreamingSyncBenchmarkClient, self).__init__(server, config, hist) + self._pool = futures.ThreadPoolExecutor( + max_workers=config.outstanding_rpcs_per_channel) + self._streams = [ + _SyncStream(self._stub, self._generic, self._request, + self._handle_response) + for _ in range(config.outstanding_rpcs_per_channel) + ] + self._curr_stream = 0 + + def send_request(self): + # Use a round_robin scheduler to determine what stream to send on + self._streams[self._curr_stream].send_request() + self._curr_stream = (self._curr_stream + 1) % len(self._streams) + + def start(self): + for stream in self._streams: + self._pool.submit(stream.start) + + def stop(self): + for stream in self._streams: + stream.stop() + self._pool.shutdown(wait=True) + self._stub = None diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py new file mode 100644 index 00000000000..75280bd7719 --- 
/dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/benchmark_server.py @@ -0,0 +1,44 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from src.proto.grpc.testing import messages_pb2 +from src.proto.grpc.testing import benchmark_service_pb2_grpc + + +class BenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer): + """Synchronous Server implementation for the Benchmark service.""" + + def UnaryCall(self, request, context): + payload = messages_pb2.Payload(body=b'\0' * request.response_size) + return messages_pb2.SimpleResponse(payload=payload) + + def StreamingCall(self, request_iterator, context): + for request in request_iterator: + payload = messages_pb2.Payload(body=b'\0' * request.response_size) + yield messages_pb2.SimpleResponse(payload=payload) + + +class GenericBenchmarkServer(benchmark_service_pb2_grpc.BenchmarkServiceServicer + ): + """Generic Server implementation for the Benchmark service.""" + + def __init__(self, resp_size): + self._response = b'\0' * resp_size + + def UnaryCall(self, request, context): + return self._response + + def StreamingCall(self, request_iterator, context): + for request in request_iterator: + yield self._response diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py new file mode 100644 index 00000000000..c5d299f6463 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/client_runner.py @@ -0,0 +1,90 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Defines behavior for WHEN clients send requests. + +Each client exposes a non-blocking send_request() method that the +ClientRunner invokes either periodically or in response to some event. 
+""" + +import abc +import threading +import time + + +class ClientRunner: + """Abstract interface for sending requests from clients.""" + + __metaclass__ = abc.ABCMeta + + def __init__(self, client): + self._client = client + + @abc.abstractmethod + def start(self): + raise NotImplementedError() + + @abc.abstractmethod + def stop(self): + raise NotImplementedError() + + +class OpenLoopClientRunner(ClientRunner): + + def __init__(self, client, interval_generator): + super(OpenLoopClientRunner, self).__init__(client) + self._is_running = False + self._interval_generator = interval_generator + self._dispatch_thread = threading.Thread(target=self._dispatch_requests, + args=()) + + def start(self): + self._is_running = True + self._client.start() + self._dispatch_thread.start() + + def stop(self): + self._is_running = False + self._client.stop() + self._dispatch_thread.join() + self._client = None + + def _dispatch_requests(self): + while self._is_running: + self._client.send_request() + time.sleep(next(self._interval_generator)) + + +class ClosedLoopClientRunner(ClientRunner): + + def __init__(self, client, request_count): + super(ClosedLoopClientRunner, self).__init__(client) + self._is_running = False + self._request_count = request_count + # Send a new request on each response for closed loop + self._client.add_response_callback(self._send_request) + + def start(self): + self._is_running = True + self._client.start() + for _ in range(self._request_count): + self._client.send_request() + + def stop(self): + self._is_running = False + self._client.stop() + self._client = None + + def _send_request(self, client, response_time): + if self._is_running: + client.send_request() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py new file mode 100644 index 00000000000..8139a6ee2fb --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/histogram.py @@ -0,0 +1,80 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import math +import threading + +from src.proto.grpc.testing import stats_pb2 + + +class Histogram(object): + """Histogram class used for recording performance testing data. + + This class is thread safe. 
+ """ + + def __init__(self, resolution, max_possible): + self._lock = threading.Lock() + self._resolution = resolution + self._max_possible = max_possible + self._sum = 0 + self._sum_of_squares = 0 + self.multiplier = 1.0 + self._resolution + self._count = 0 + self._min = self._max_possible + self._max = 0 + self._buckets = [0] * (self._bucket_for(self._max_possible) + 1) + + def reset(self): + with self._lock: + self._sum = 0 + self._sum_of_squares = 0 + self._count = 0 + self._min = self._max_possible + self._max = 0 + self._buckets = [0] * (self._bucket_for(self._max_possible) + 1) + + def add(self, val): + with self._lock: + self._sum += val + self._sum_of_squares += val * val + self._count += 1 + self._min = min(self._min, val) + self._max = max(self._max, val) + self._buckets[self._bucket_for(val)] += 1 + + def get_data(self): + with self._lock: + data = stats_pb2.HistogramData() + data.bucket.extend(self._buckets) + data.min_seen = self._min + data.max_seen = self._max + data.sum = self._sum + data.sum_of_squares = self._sum_of_squares + data.count = self._count + return data + + def merge(self, another_data): + with self._lock: + for i in range(len(self._buckets)): + self._buckets[i] += another_data.bucket[i] + self._min = min(self._min, another_data.min_seen) + self._max = max(self._max, another_data.max_seen) + self._sum += another_data.sum + self._sum_of_squares += another_data.sum_of_squares + self._count += another_data.count + + def _bucket_for(self, val): + val = min(val, self._max_possible) + return int(math.log(val, self.multiplier)) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py new file mode 100644 index 00000000000..a7e692821ac --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/qps_worker.py @@ -0,0 +1,46 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
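A minimal usage sketch of the Histogram class defined above; the resolution, maximum and latency values are illustrative, and the nanosecond units follow the benchmark client's _handle_response:

from tests.qps import histogram

hist = histogram.Histogram(resolution=0.01, max_possible=60e9)
for latency_ns in (1.2e6, 3.4e6, 9.9e6):
    hist.add(latency_ns)
data = hist.get_data()  # a stats_pb2.HistogramData snapshot
assert data.count == 3
assert data.min_seen == 1.2e6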
+"""The entry point for the qps worker.""" + +import argparse +import time + +import grpc +from src.proto.grpc.testing import worker_service_pb2_grpc + +from tests.qps import worker_server +from tests.unit import test_common + + +def run_worker_server(port): + server = test_common.test_server() + servicer = worker_server.WorkerServer() + worker_service_pb2_grpc.add_WorkerServiceServicer_to_server( + servicer, server) + server.add_insecure_port('[::]:{}'.format(port)) + server.start() + servicer.wait_for_quit() + server.stop(0) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description='gRPC Python performance testing worker') + parser.add_argument('--driver_port', + type=int, + dest='port', + help='The port the worker should listen on') + args = parser.parse_args() + + run_worker_server(args.port) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py new file mode 100644 index 00000000000..65b081e5d1c --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/qps/worker_server.py @@ -0,0 +1,186 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import multiprocessing +import random +import threading +import time + +from concurrent import futures +import grpc +from src.proto.grpc.testing import control_pb2 +from src.proto.grpc.testing import benchmark_service_pb2_grpc +from src.proto.grpc.testing import worker_service_pb2_grpc +from src.proto.grpc.testing import stats_pb2 + +from tests.qps import benchmark_client +from tests.qps import benchmark_server +from tests.qps import client_runner +from tests.qps import histogram +from tests.unit import resources +from tests.unit import test_common + + +class WorkerServer(worker_service_pb2_grpc.WorkerServiceServicer): + """Python Worker Server implementation.""" + + def __init__(self): + self._quit_event = threading.Event() + + def RunServer(self, request_iterator, context): + config = next(request_iterator).setup #pylint: disable=stop-iteration-return + server, port = self._create_server(config) + cores = multiprocessing.cpu_count() + server.start() + start_time = time.time() + yield self._get_server_status(start_time, start_time, port, cores) + + for request in request_iterator: + end_time = time.time() + status = self._get_server_status(start_time, end_time, port, cores) + if request.mark.reset: + start_time = end_time + yield status + server.stop(None) + + def _get_server_status(self, start_time, end_time, port, cores): + end_time = time.time() + elapsed_time = end_time - start_time + stats = stats_pb2.ServerStats(time_elapsed=elapsed_time, + time_user=elapsed_time, + time_system=elapsed_time) + return control_pb2.ServerStatus(stats=stats, port=port, cores=cores) + + def _create_server(self, config): + if config.async_server_threads == 0: + # This is the default concurrent.futures thread pool size, but + # None doesn't seem to work + server_threads = multiprocessing.cpu_count() * 5 + 
else: + server_threads = config.async_server_threads + server = test_common.test_server(max_workers=server_threads) + if config.server_type == control_pb2.ASYNC_SERVER: + servicer = benchmark_server.BenchmarkServer() + benchmark_service_pb2_grpc.add_BenchmarkServiceServicer_to_server( + servicer, server) + elif config.server_type == control_pb2.ASYNC_GENERIC_SERVER: + resp_size = config.payload_config.bytebuf_params.resp_size + servicer = benchmark_server.GenericBenchmarkServer(resp_size) + method_implementations = { + 'StreamingCall': + grpc.stream_stream_rpc_method_handler(servicer.StreamingCall + ), + 'UnaryCall': + grpc.unary_unary_rpc_method_handler(servicer.UnaryCall), + } + handler = grpc.method_handlers_generic_handler( + 'grpc.testing.BenchmarkService', method_implementations) + server.add_generic_rpc_handlers((handler,)) + else: + raise Exception('Unsupported server type {}'.format( + config.server_type)) + + if config.HasField('security_params'): # Use SSL + server_creds = grpc.ssl_server_credentials( + ((resources.private_key(), resources.certificate_chain()),)) + port = server.add_secure_port('[::]:{}'.format(config.port), + server_creds) + else: + port = server.add_insecure_port('[::]:{}'.format(config.port)) + + return (server, port) + + def RunClient(self, request_iterator, context): + config = next(request_iterator).setup #pylint: disable=stop-iteration-return + client_runners = [] + qps_data = histogram.Histogram(config.histogram_params.resolution, + config.histogram_params.max_possible) + start_time = time.time() + + # Create a client for each channel + for i in range(config.client_channels): + server = config.server_targets[i % len(config.server_targets)] + runner = self._create_client_runner(server, config, qps_data) + client_runners.append(runner) + runner.start() + + end_time = time.time() + yield self._get_client_status(start_time, end_time, qps_data) + + # Respond to stat requests + for request in request_iterator: + end_time = time.time() + status = self._get_client_status(start_time, end_time, qps_data) + if request.mark.reset: + qps_data.reset() + start_time = time.time() + yield status + + # Cleanup the clients + for runner in client_runners: + runner.stop() + + def _get_client_status(self, start_time, end_time, qps_data): + latencies = qps_data.get_data() + end_time = time.time() + elapsed_time = end_time - start_time + stats = stats_pb2.ClientStats(latencies=latencies, + time_elapsed=elapsed_time, + time_user=elapsed_time, + time_system=elapsed_time) + return control_pb2.ClientStatus(stats=stats) + + def _create_client_runner(self, server, config, qps_data): + if config.client_type == control_pb2.SYNC_CLIENT: + if config.rpc_type == control_pb2.UNARY: + client = benchmark_client.UnarySyncBenchmarkClient( + server, config, qps_data) + elif config.rpc_type == control_pb2.STREAMING: + client = benchmark_client.StreamingSyncBenchmarkClient( + server, config, qps_data) + elif config.client_type == control_pb2.ASYNC_CLIENT: + if config.rpc_type == control_pb2.UNARY: + client = benchmark_client.UnaryAsyncBenchmarkClient( + server, config, qps_data) + else: + raise Exception('Async streaming client not supported') + else: + raise Exception('Unsupported client type {}'.format( + config.client_type)) + + # In multi-channel tests, we split the load across all channels + load_factor = float(config.client_channels) + if config.load_params.WhichOneof('load') == 'closed_loop': + runner = client_runner.ClosedLoopClientRunner( + client, config.outstanding_rpcs_per_channel) + 
else: # Open loop Poisson + alpha = config.load_params.poisson.offered_load / load_factor + + def poisson(): + while True: + yield random.expovariate(alpha) + + runner = client_runner.OpenLoopClientRunner(client, poisson()) + + return runner + + def CoreCount(self, request, context): + return control_pb2.CoreResponse(cores=multiprocessing.cpu_count()) + + def QuitWorker(self, request, context): + self._quit_event.set() + return control_pb2.Void() + + def wait_for_quit(self): + self._quit_event.wait() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py new file mode 100644 index 00000000000..5772620b602 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py new file mode 100644 index 00000000000..169e55022da --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/reflection/_reflection_servicer_test.py @@ -0,0 +1,195 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
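The open-loop client runner created in the worker server above sleeps for exponentially distributed intervals whose rate is the offered load divided by the number of client channels. A small sketch of that arithmetic with illustrative numbers:

import random

offered_load = 100.0  # total RPCs per second requested by the driver
client_channels = 4   # the load is split evenly across channels
alpha = offered_load / client_channels

def poisson():
    while True:
        yield random.expovariate(alpha)

gen = poisson()
intervals = [next(gen) for _ in range(3)]  # mean spacing of about 1/25 s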
+"""Tests of grpc_reflection.v1alpha.reflection.""" + +import unittest + +import grpc + +from grpc_reflection.v1alpha import reflection +from grpc_reflection.v1alpha import reflection_pb2 +from grpc_reflection.v1alpha import reflection_pb2_grpc + +from google.protobuf import descriptor_pool +from google.protobuf import descriptor_pb2 + +from src.proto.grpc.testing import empty_pb2 +from src.proto.grpc.testing.proto2 import empty2_extensions_pb2 + +from tests.unit import test_common + +_EMPTY_PROTO_FILE_NAME = 'src/proto/grpc/testing/empty.proto' +_EMPTY_PROTO_SYMBOL_NAME = 'grpc.testing.Empty' +_SERVICE_NAMES = ('Angstrom', 'Bohr', 'Curie', 'Dyson', 'Einstein', 'Feynman', + 'Galilei') +_EMPTY_EXTENSIONS_SYMBOL_NAME = 'grpc.testing.proto2.EmptyWithExtensions' +_EMPTY_EXTENSIONS_NUMBERS = ( + 124, + 125, + 126, + 127, + 128, +) + + +def _file_descriptor_to_proto(descriptor): + proto = descriptor_pb2.FileDescriptorProto() + descriptor.CopyToProto(proto) + return proto.SerializeToString() + + +class ReflectionServicerTest(unittest.TestCase): + + # TODO(https://github.com/grpc/grpc/issues/17844) + # Bazel + Python 3 will result in creating two different instance of + # DESCRIPTOR for each message. So, the equal comparison between protobuf + # returned by stub and manually crafted protobuf will always fail. + def _assert_sequence_of_proto_equal(self, x, y): + self.assertSequenceEqual( + tuple(proto.SerializeToString() for proto in x), + tuple(proto.SerializeToString() for proto in y), + ) + + def setUp(self): + self._server = test_common.test_server() + reflection.enable_server_reflection(_SERVICE_NAMES, self._server) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + self._stub = reflection_pb2_grpc.ServerReflectionStub(self._channel) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + + def testFileByName(self): + requests = ( + reflection_pb2.ServerReflectionRequest( + file_by_filename=_EMPTY_PROTO_FILE_NAME), + reflection_pb2.ServerReflectionRequest( + file_by_filename='i-donut-exist'), + ) + responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) + expected_responses = ( + reflection_pb2.ServerReflectionResponse( + valid_host='', + file_descriptor_response=reflection_pb2.FileDescriptorResponse( + file_descriptor_proto=( + _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))), + reflection_pb2.ServerReflectionResponse( + valid_host='', + error_response=reflection_pb2.ErrorResponse( + error_code=grpc.StatusCode.NOT_FOUND.value[0], + error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), + )), + ) + self._assert_sequence_of_proto_equal(expected_responses, responses) + + def testFileBySymbol(self): + requests = ( + reflection_pb2.ServerReflectionRequest( + file_containing_symbol=_EMPTY_PROTO_SYMBOL_NAME), + reflection_pb2.ServerReflectionRequest( + file_containing_symbol='i.donut.exist.co.uk.org.net.me.name.foo' + ), + ) + responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) + expected_responses = ( + reflection_pb2.ServerReflectionResponse( + valid_host='', + file_descriptor_response=reflection_pb2.FileDescriptorResponse( + file_descriptor_proto=( + _file_descriptor_to_proto(empty_pb2.DESCRIPTOR),))), + reflection_pb2.ServerReflectionResponse( + valid_host='', + error_response=reflection_pb2.ErrorResponse( + error_code=grpc.StatusCode.NOT_FOUND.value[0], + error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), + )), + ) + 
self._assert_sequence_of_proto_equal(expected_responses, responses) + + def testFileContainingExtension(self): + requests = ( + reflection_pb2.ServerReflectionRequest( + file_containing_extension=reflection_pb2.ExtensionRequest( + containing_type=_EMPTY_EXTENSIONS_SYMBOL_NAME, + extension_number=125, + ),), + reflection_pb2.ServerReflectionRequest( + file_containing_extension=reflection_pb2.ExtensionRequest( + containing_type='i.donut.exist.co.uk.org.net.me.name.foo', + extension_number=55, + ),), + ) + responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) + expected_responses = ( + reflection_pb2.ServerReflectionResponse( + valid_host='', + file_descriptor_response=reflection_pb2.FileDescriptorResponse( + file_descriptor_proto=(_file_descriptor_to_proto( + empty2_extensions_pb2.DESCRIPTOR),))), + reflection_pb2.ServerReflectionResponse( + valid_host='', + error_response=reflection_pb2.ErrorResponse( + error_code=grpc.StatusCode.NOT_FOUND.value[0], + error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), + )), + ) + self._assert_sequence_of_proto_equal(expected_responses, responses) + + def testExtensionNumbersOfType(self): + requests = ( + reflection_pb2.ServerReflectionRequest( + all_extension_numbers_of_type=_EMPTY_EXTENSIONS_SYMBOL_NAME), + reflection_pb2.ServerReflectionRequest( + all_extension_numbers_of_type='i.donut.exist.co.uk.net.name.foo' + ), + ) + responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) + expected_responses = ( + reflection_pb2.ServerReflectionResponse( + valid_host='', + all_extension_numbers_response=reflection_pb2. + ExtensionNumberResponse( + base_type_name=_EMPTY_EXTENSIONS_SYMBOL_NAME, + extension_number=_EMPTY_EXTENSIONS_NUMBERS)), + reflection_pb2.ServerReflectionResponse( + valid_host='', + error_response=reflection_pb2.ErrorResponse( + error_code=grpc.StatusCode.NOT_FOUND.value[0], + error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), + )), + ) + self._assert_sequence_of_proto_equal(expected_responses, responses) + + def testListServices(self): + requests = (reflection_pb2.ServerReflectionRequest(list_services='',),) + responses = tuple(self._stub.ServerReflectionInfo(iter(requests))) + expected_responses = (reflection_pb2.ServerReflectionResponse( + valid_host='', + list_services_response=reflection_pb2.ListServiceResponse( + service=tuple( + reflection_pb2.ServiceResponse(name=name) + for name in _SERVICE_NAMES))),) + self._assert_sequence_of_proto_equal(expected_responses, responses) + + def testReflectionServiceName(self): + self.assertEqual(reflection.SERVICE_NAME, + 'grpc.reflection.v1alpha.ServerReflection') + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py new file mode 100644 index 00000000000..38fdfc9c5cf --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py new file mode 100644 index 00000000000..54a3b624203 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/status/_grpc_status_test.py @@ -0,0 +1,180 @@ +# Copyright 2018 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests of grpc_status.""" + +# NOTE(lidiz) This module only exists in Bazel BUILD file, for more details +# please refer to comments in the "bazel_namespace_package_hack" module. +try: + from tests import bazel_namespace_package_hack + bazel_namespace_package_hack.sys_path_to_site_dir_hack() +except ImportError: + pass + +import unittest + +import logging +import traceback + +import grpc +from grpc_status import rpc_status + +from tests.unit import test_common + +from google.protobuf import any_pb2 +from google.rpc import code_pb2, status_pb2, error_details_pb2 + +_STATUS_OK = '/test/StatusOK' +_STATUS_NOT_OK = '/test/StatusNotOk' +_ERROR_DETAILS = '/test/ErrorDetails' +_INCONSISTENT = '/test/Inconsistent' +_INVALID_CODE = '/test/InvalidCode' + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x01\x01\x01' + +_GRPC_DETAILS_METADATA_KEY = 'grpc-status-details-bin' + +_STATUS_DETAILS = 'This is an error detail' +_STATUS_DETAILS_ANOTHER = 'This is another error detail' + + +def _ok_unary_unary(request, servicer_context): + return _RESPONSE + + +def _not_ok_unary_unary(request, servicer_context): + servicer_context.abort(grpc.StatusCode.INTERNAL, _STATUS_DETAILS) + + +def _error_details_unary_unary(request, servicer_context): + details = any_pb2.Any() + details.Pack( + error_details_pb2.DebugInfo(stack_entries=traceback.format_stack(), + detail='Intentionally invoked')) + rich_status = status_pb2.Status( + code=code_pb2.INTERNAL, + message=_STATUS_DETAILS, + details=[details], + ) + servicer_context.abort_with_status(rpc_status.to_status(rich_status)) + + +def _inconsistent_unary_unary(request, servicer_context): + rich_status = status_pb2.Status( + code=code_pb2.INTERNAL, + message=_STATUS_DETAILS, + ) + servicer_context.set_code(grpc.StatusCode.NOT_FOUND) + servicer_context.set_details(_STATUS_DETAILS_ANOTHER) + # User put inconsistent status information in trailing metadata + servicer_context.set_trailing_metadata( + ((_GRPC_DETAILS_METADATA_KEY, rich_status.SerializeToString()),)) + + +def _invalid_code_unary_unary(request, servicer_context): + rich_status = status_pb2.Status( + code=42, + message='Invalid code', + ) + servicer_context.abort_with_status(rpc_status.to_status(rich_status)) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _STATUS_OK: + return grpc.unary_unary_rpc_method_handler(_ok_unary_unary) + elif handler_call_details.method == _STATUS_NOT_OK: + return 
grpc.unary_unary_rpc_method_handler(_not_ok_unary_unary) + elif handler_call_details.method == _ERROR_DETAILS: + return grpc.unary_unary_rpc_method_handler( + _error_details_unary_unary) + elif handler_call_details.method == _INCONSISTENT: + return grpc.unary_unary_rpc_method_handler( + _inconsistent_unary_unary) + elif handler_call_details.method == _INVALID_CODE: + return grpc.unary_unary_rpc_method_handler( + _invalid_code_unary_unary) + else: + return None + + +class StatusTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + self._server.add_generic_rpc_handlers((_GenericHandler(),)) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + + def test_status_ok(self): + _, call = self._channel.unary_unary(_STATUS_OK).with_call(_REQUEST) + + # Succeed RPC doesn't have status + status = rpc_status.from_call(call) + self.assertIs(status, None) + + def test_status_not_ok(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_STATUS_NOT_OK).with_call(_REQUEST) + rpc_error = exception_context.exception + + self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) + # Failed RPC doesn't automatically generate status + status = rpc_status.from_call(rpc_error) + self.assertIs(status, None) + + def test_error_details(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_ERROR_DETAILS).with_call(_REQUEST) + rpc_error = exception_context.exception + + status = rpc_status.from_call(rpc_error) + self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) + self.assertEqual(status.code, code_pb2.Code.Value('INTERNAL')) + + # Check if the underlying proto message is intact + self.assertEqual( + status.details[0].Is(error_details_pb2.DebugInfo.DESCRIPTOR), True) + info = error_details_pb2.DebugInfo() + status.details[0].Unpack(info) + self.assertIn('_error_details_unary_unary', info.stack_entries[-1]) + + def test_code_message_validation(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_INCONSISTENT).with_call(_REQUEST) + rpc_error = exception_context.exception + self.assertEqual(rpc_error.code(), grpc.StatusCode.NOT_FOUND) + + # Code/Message validation failed + self.assertRaises(ValueError, rpc_status.from_call, rpc_error) + + def test_invalid_code(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_INVALID_CODE).with_call(_REQUEST) + rpc_error = exception_context.exception + self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN) + # Invalid status code exception raised during coversion + self.assertIn('Invalid status code', rpc_error.details()) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py new file mode 100644 index 00000000000..5772620b602 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py new file mode 100644 index 00000000000..01c14ba3e20 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/client.py @@ -0,0 +1,159 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Entry point for running stress tests.""" + +import argparse +from concurrent import futures +import threading + +import grpc +from six.moves import queue +from src.proto.grpc.testing import metrics_pb2_grpc +from src.proto.grpc.testing import test_pb2_grpc + +from tests.interop import methods +from tests.interop import resources +from tests.qps import histogram +from tests.stress import metrics_server +from tests.stress import test_runner + + +def _args(): + parser = argparse.ArgumentParser( + description='gRPC Python stress test client') + parser.add_argument( + '--server_addresses', + help='comma separated list of hostname:port to run servers on', + default='localhost:8080', + type=str) + parser.add_argument( + '--test_cases', + help='comma separated list of testcase:weighting of tests to run', + default='large_unary:100', + type=str) + parser.add_argument('--test_duration_secs', + help='number of seconds to run the stress test', + default=-1, + type=int) + parser.add_argument('--num_channels_per_server', + help='number of channels per server', + default=1, + type=int) + parser.add_argument('--num_stubs_per_channel', + help='number of stubs to create per channel', + default=1, + type=int) + parser.add_argument('--metrics_port', + help='the port to listen for metrics requests on', + default=8081, + type=int) + parser.add_argument( + '--use_test_ca', + help='Whether to use our fake CA. 
Requires --use_tls=true', + default=False, + type=bool) + parser.add_argument('--use_tls', + help='Whether to use TLS', + default=False, + type=bool) + parser.add_argument('--server_host_override', + help='the server host to which to claim to connect', + type=str) + return parser.parse_args() + + +def _test_case_from_arg(test_case_arg): + for test_case in methods.TestCase: + if test_case_arg == test_case.value: + return test_case + else: + raise ValueError('No test case {}!'.format(test_case_arg)) + + +def _parse_weighted_test_cases(test_case_args): + weighted_test_cases = {} + for test_case_arg in test_case_args.split(','): + name, weight = test_case_arg.split(':', 1) + test_case = _test_case_from_arg(name) + weighted_test_cases[test_case] = int(weight) + return weighted_test_cases + + +def _get_channel(target, args): + if args.use_tls: + if args.use_test_ca: + root_certificates = resources.test_root_certificates() + else: + root_certificates = None # will load default roots. + channel_credentials = grpc.ssl_channel_credentials( + root_certificates=root_certificates) + options = (( + 'grpc.ssl_target_name_override', + args.server_host_override, + ),) + channel = grpc.secure_channel(target, + channel_credentials, + options=options) + else: + channel = grpc.insecure_channel(target) + + # waits for the channel to be ready before we start sending messages + grpc.channel_ready_future(channel).result() + return channel + + +def run_test(args): + test_cases = _parse_weighted_test_cases(args.test_cases) + test_server_targets = args.server_addresses.split(',') + # Propagate any client exceptions with a queue + exception_queue = queue.Queue() + stop_event = threading.Event() + hist = histogram.Histogram(1, 1) + runners = [] + + server = grpc.server(futures.ThreadPoolExecutor(max_workers=25)) + metrics_pb2_grpc.add_MetricsServiceServicer_to_server( + metrics_server.MetricsServer(hist), server) + server.add_insecure_port('[::]:{}'.format(args.metrics_port)) + server.start() + + for test_server_target in test_server_targets: + for _ in range(args.num_channels_per_server): + channel = _get_channel(test_server_target, args) + for _ in range(args.num_stubs_per_channel): + stub = test_pb2_grpc.TestServiceStub(channel) + runner = test_runner.TestRunner(stub, test_cases, hist, + exception_queue, stop_event) + runners.append(runner) + + for runner in runners: + runner.start() + try: + timeout_secs = args.test_duration_secs + if timeout_secs < 0: + timeout_secs = None + raise exception_queue.get(block=True, timeout=timeout_secs) + except queue.Empty: + # No exceptions thrown, success + pass + finally: + stop_event.set() + for runner in runners: + runner.join() + runner = None + server.stop(None) + + +if __name__ == '__main__': + run_test(_args()) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py new file mode 100644 index 00000000000..33a74b4a388 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/metrics_server.py @@ -0,0 +1,45 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""MetricsService for publishing stress test qps data.""" + +import time + +from src.proto.grpc.testing import metrics_pb2 +from src.proto.grpc.testing import metrics_pb2_grpc + +GAUGE_NAME = 'python_overall_qps' + + +class MetricsServer(metrics_pb2_grpc.MetricsServiceServicer): + + def __init__(self, histogram): + self._start_time = time.time() + self._histogram = histogram + + def _get_qps(self): + count = self._histogram.get_data().count + delta = time.time() - self._start_time + self._histogram.reset() + self._start_time = time.time() + return int(count / delta) + + def GetAllGauges(self, request, context): + qps = self._get_qps() + return [metrics_pb2.GaugeResponse(name=GAUGE_NAME, long_value=qps)] + + def GetGauge(self, request, context): + if request.name != GAUGE_NAME: + raise Exception('Gauge {} does not exist'.format(request.name)) + qps = self._get_qps() + return metrics_pb2.GaugeResponse(name=GAUGE_NAME, long_value=qps) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py new file mode 100644 index 00000000000..1b6003fc698 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/test_runner.py @@ -0,0 +1,58 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
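A minimal sketch of polling the stress-test metrics service defined above from another process; the port matches the stress client's default --metrics_port, while MetricsServiceStub and GaugeRequest are assumptions based on the usual generated-code naming for metrics.proto:

import grpc

from src.proto.grpc.testing import metrics_pb2
from src.proto.grpc.testing import metrics_pb2_grpc

with grpc.insecure_channel('localhost:8081') as channel:
    stub = metrics_pb2_grpc.MetricsServiceStub(channel)
    gauge = stub.GetGauge(metrics_pb2.GaugeRequest(name='python_overall_qps'))
    print(gauge.long_value)  # QPS observed since the previous poll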
+"""Thread that sends random weighted requests on a TestService stub.""" + +import random +import threading +import time +import traceback + + +def _weighted_test_case_generator(weighted_cases): + weight_sum = sum(weighted_cases.itervalues()) + + while True: + val = random.uniform(0, weight_sum) + partial_sum = 0 + for case in weighted_cases: + partial_sum += weighted_cases[case] + if val <= partial_sum: + yield case + break + + +class TestRunner(threading.Thread): + + def __init__(self, stub, test_cases, hist, exception_queue, stop_event): + super(TestRunner, self).__init__() + self._exception_queue = exception_queue + self._stop_event = stop_event + self._stub = stub + self._test_cases = _weighted_test_case_generator(test_cases) + self._histogram = hist + + def run(self): + while not self._stop_event.is_set(): + try: + test_case = next(self._test_cases) + start_time = time.time() + test_case.test_interoperability(self._stub, None) + end_time = time.time() + self._histogram.add((end_time - start_time) * 1e9) + except Exception as e: # pylint: disable=broad-except + traceback.print_exc() + self._exception_queue.put( + Exception( + "An exception occurred during test {}".format( + test_case), e)) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py new file mode 100644 index 00000000000..cd872ece29d --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py @@ -0,0 +1,102 @@ +# Copyright 2019 The gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime +import threading +import grpc +import grpc.experimental +import subprocess +import sys +import time +import contextlib + +_PORT = 5741 +_MESSAGE_SIZE = 4 +_RESPONSE_COUNT = 32 * 1024 + +_SERVER_CODE = """ +import datetime +import threading +import grpc +from concurrent import futures +from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2 +from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc + +class Handler(unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceServicer): + + def Benchmark(self, request, context): + payload = b'\\x00\\x01' * int(request.message_size / 2) + for _ in range(request.response_count): + yield unary_stream_benchmark_pb2.BenchmarkResponse(response=payload) + + +server = grpc.server(futures.ThreadPoolExecutor(max_workers=1)) +server.add_insecure_port('[::]:%d') +unary_stream_benchmark_pb2_grpc.add_UnaryStreamBenchmarkServiceServicer_to_server(Handler(), server) +server.start() +server.wait_for_termination() +""" % _PORT + +try: + from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2 + from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc + + _GRPC_CHANNEL_OPTIONS = [ + ('grpc.max_metadata_size', 16 * 1024 * 1024), + ('grpc.max_receive_message_length', 64 * 1024 * 1024), + (grpc.experimental.ChannelOptions.SingleThreadedUnaryStream, 1), + ] + + @contextlib.contextmanager + def _running_server(): + server_process = subprocess.Popen([sys.executable, '-c', _SERVER_CODE], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + try: + yield + finally: + server_process.terminate() + server_process.wait() + sys.stdout.write("stdout: {}".format(server_process.stdout.read())) + sys.stdout.flush() + sys.stdout.write("stderr: {}".format(server_process.stderr.read())) + sys.stdout.flush() + + def profile(message_size, response_count): + request = unary_stream_benchmark_pb2.BenchmarkRequest( + message_size=message_size, response_count=response_count) + with grpc.insecure_channel('[::]:{}'.format(_PORT), + options=_GRPC_CHANNEL_OPTIONS) as channel: + stub = unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceStub( + channel) + start = datetime.datetime.now() + call = stub.Benchmark(request, wait_for_ready=True) + for message in call: + pass + end = datetime.datetime.now() + return end - start + + def main(): + with _running_server(): + for i in range(1000): + latency = profile(_MESSAGE_SIZE, 1024) + sys.stdout.write("{}\n".format(latency.total_seconds())) + sys.stdout.flush() + + if __name__ == '__main__': + main() + +except ImportError: + # NOTE(rbellevi): The test runner should not load this module. + pass diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py new file mode 100644 index 00000000000..1e120359cf9 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
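main() above prints one wall-clock latency in seconds per benchmark iteration, followed by the subprocess's captured stdout/stderr. A hedged companion sketch for summarizing that output offline; the statistics chosen (and the script name in the usage comment) are assumptions, not something the benchmark itself computes.

# Hedged helper: read the per-iteration latencies printed by main() above
# (one float per line on stdin) and print simple summary statistics.
# Usage sketch: python unary_stream_benchmark.py | python summarize_latency.py
import statistics
import sys

latencies = []
for line in sys.stdin:
    try:
        latencies.append(float(line))
    except ValueError:
        continue  # skip the trailing "stdout:"/"stderr:" dump from the server
if latencies:
    print('iterations: {}'.format(len(latencies)))
    print('mean:   {:.6f} s'.format(statistics.mean(latencies)))
    print('median: {:.6f} s'.format(statistics.median(latencies)))
    print('max:    {:.6f} s'.format(max(latencies)))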
+# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py new file mode 100644 index 00000000000..3226d1fb020 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_common.py @@ -0,0 +1,43 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""An example gRPC Python-using application's common code elements.""" + +from tests.testing.proto import requests_pb2 +from tests.testing.proto import services_pb2 + +SERVICE_NAME = 'tests_of_grpc_testing.FirstService' +UNARY_UNARY_METHOD_NAME = 'UnUn' +UNARY_STREAM_METHOD_NAME = 'UnStre' +STREAM_UNARY_METHOD_NAME = 'StreUn' +STREAM_STREAM_METHOD_NAME = 'StreStre' + +UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=2) +ERRONEOUS_UNARY_UNARY_REQUEST = requests_pb2.Up(first_up_field=3) +UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=5) +ERRONEOUS_UNARY_UNARY_RESPONSE = services_pb2.Down(first_down_field=7) +UNARY_STREAM_REQUEST = requests_pb2.Charm(first_charm_field=11) +STREAM_UNARY_REQUEST = requests_pb2.Charm(first_charm_field=13) +STREAM_UNARY_RESPONSE = services_pb2.Strange(first_strange_field=17) +STREAM_STREAM_REQUEST = requests_pb2.Top(first_top_field=19) +STREAM_STREAM_RESPONSE = services_pb2.Bottom(first_bottom_field=23) +TWO_STREAM_STREAM_RESPONSES = (STREAM_STREAM_RESPONSE,) * 2 +ABORT_REQUEST = requests_pb2.Up(first_up_field=42) +ABORT_SUCCESS_QUERY = requests_pb2.Up(first_up_field=43) +ABORT_NO_STATUS_RESPONSE = services_pb2.Down(first_down_field=50) +ABORT_SUCCESS_RESPONSE = services_pb2.Down(first_down_field=51) +ABORT_FAILURE_RESPONSE = services_pb2.Down(first_down_field=52) +STREAM_STREAM_MUTATING_REQUEST = requests_pb2.Top(first_top_field=24601) +STREAM_STREAM_MUTATING_COUNT = 2 + +INFINITE_REQUEST_STREAM_TIMEOUT = 0.2 diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py new file mode 100644 index 00000000000..9c9e485a783 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_application_testing_common.py @@ -0,0 +1,33 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import grpc_testing + +from tests.testing.proto import requests_pb2 +from tests.testing.proto import services_pb2 + +# TODO(https://github.com/grpc/grpc/issues/11657): Eliminate this entirely. +# TODO(https://github.com/google/protobuf/issues/3452): Eliminate this if/else. +if services_pb2.DESCRIPTOR.services_by_name.get('FirstService') is None: + FIRST_SERVICE = 'Fix protobuf issue 3452!' + FIRST_SERVICE_UNUN = 'Fix protobuf issue 3452!' + FIRST_SERVICE_UNSTRE = 'Fix protobuf issue 3452!' + FIRST_SERVICE_STREUN = 'Fix protobuf issue 3452!' + FIRST_SERVICE_STRESTRE = 'Fix protobuf issue 3452!' +else: + FIRST_SERVICE = services_pb2.DESCRIPTOR.services_by_name['FirstService'] + FIRST_SERVICE_UNUN = FIRST_SERVICE.methods_by_name['UnUn'] + FIRST_SERVICE_UNSTRE = FIRST_SERVICE.methods_by_name['UnStre'] + FIRST_SERVICE_STREUN = FIRST_SERVICE.methods_by_name['StreUn'] + FIRST_SERVICE_STRESTRE = FIRST_SERVICE.methods_by_name['StreStre'] diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py new file mode 100644 index 00000000000..57fa5109139 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_application.py @@ -0,0 +1,236 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""An example gRPC Python-using client-side application.""" + +import collections +import enum +import threading +import time + +import grpc +from tests.unit.framework.common import test_constants + +from tests.testing.proto import requests_pb2 +from tests.testing.proto import services_pb2 +from tests.testing.proto import services_pb2_grpc + +from tests.testing import _application_common + + +class Scenario(enum.Enum): + UNARY_UNARY = 'unary unary' + UNARY_STREAM = 'unary stream' + STREAM_UNARY = 'stream unary' + STREAM_STREAM = 'stream stream' + CONCURRENT_STREAM_UNARY = 'concurrent stream unary' + CONCURRENT_STREAM_STREAM = 'concurrent stream stream' + CANCEL_UNARY_UNARY = 'cancel unary unary' + CANCEL_UNARY_STREAM = 'cancel unary stream' + INFINITE_REQUEST_STREAM = 'infinite request stream' + + +class Outcome(collections.namedtuple('Outcome', ('kind', 'code', 'details'))): + """Outcome of a client application scenario. + + Attributes: + kind: A Kind value describing the overall kind of scenario execution. + code: A grpc.StatusCode value. Only valid if kind is Kind.RPC_ERROR. + details: A status details string. Only valid if kind is Kind.RPC_ERROR. 
+ """ + + @enum.unique + class Kind(enum.Enum): + SATISFACTORY = 'satisfactory' + UNSATISFACTORY = 'unsatisfactory' + RPC_ERROR = 'rpc error' + + +_SATISFACTORY_OUTCOME = Outcome(Outcome.Kind.SATISFACTORY, None, None) +_UNSATISFACTORY_OUTCOME = Outcome(Outcome.Kind.UNSATISFACTORY, None, None) + + +class _Pipe(object): + + def __init__(self): + self._condition = threading.Condition() + self._values = [] + self._open = True + + def __iter__(self): + return self + + def _next(self): + with self._condition: + while True: + if self._values: + return self._values.pop(0) + elif not self._open: + raise StopIteration() + else: + self._condition.wait() + + def __next__(self): # (Python 3 Iterator Protocol) + return self._next() + + def next(self): # (Python 2 Iterator Protocol) + return self._next() + + def add(self, value): + with self._condition: + self._values.append(value) + self._condition.notify_all() + + def close(self): + with self._condition: + self._open = False + self._condition.notify_all() + + +def _run_unary_unary(stub): + response = stub.UnUn(_application_common.UNARY_UNARY_REQUEST) + if _application_common.UNARY_UNARY_RESPONSE == response: + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +def _run_unary_stream(stub): + response_iterator = stub.UnStre(_application_common.UNARY_STREAM_REQUEST) + try: + next(response_iterator) + except StopIteration: + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +def _run_stream_unary(stub): + response, call = stub.StreUn.with_call( + iter((_application_common.STREAM_UNARY_REQUEST,) * 3)) + if (_application_common.STREAM_UNARY_RESPONSE == response and + call.code() is grpc.StatusCode.OK): + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +def _run_stream_stream(stub): + request_pipe = _Pipe() + response_iterator = stub.StreStre(iter(request_pipe)) + request_pipe.add(_application_common.STREAM_STREAM_REQUEST) + first_responses = next(response_iterator), next(response_iterator) + request_pipe.add(_application_common.STREAM_STREAM_REQUEST) + second_responses = next(response_iterator), next(response_iterator) + request_pipe.close() + try: + next(response_iterator) + except StopIteration: + unexpected_extra_response = False + else: + unexpected_extra_response = True + if (first_responses == _application_common.TWO_STREAM_STREAM_RESPONSES and + second_responses == _application_common.TWO_STREAM_STREAM_RESPONSES + and not unexpected_extra_response): + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +def _run_concurrent_stream_unary(stub): + future_calls = tuple( + stub.StreUn.future(iter((_application_common.STREAM_UNARY_REQUEST,) * + 3)) + for _ in range(test_constants.THREAD_CONCURRENCY)) + for future_call in future_calls: + if future_call.code() is grpc.StatusCode.OK: + response = future_call.result() + if _application_common.STREAM_UNARY_RESPONSE != response: + return _UNSATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + else: + return _SATISFACTORY_OUTCOME + + +def _run_concurrent_stream_stream(stub): + condition = threading.Condition() + outcomes = [None] * test_constants.RPC_CONCURRENCY + + def run_stream_stream(index): + outcome = _run_stream_stream(stub) + with condition: + outcomes[index] = outcome + condition.notify() + + for index in range(test_constants.RPC_CONCURRENCY): + thread = threading.Thread(target=run_stream_stream, args=(index,)) + thread.start() + with condition: + while True: + if all(outcomes): + for outcome in 
outcomes: + if outcome.kind is not Outcome.Kind.SATISFACTORY: + return _UNSATISFACTORY_OUTCOME + else: + return _SATISFACTORY_OUTCOME + else: + condition.wait() + + +def _run_cancel_unary_unary(stub): + response_future_call = stub.UnUn.future( + _application_common.UNARY_UNARY_REQUEST) + initial_metadata = response_future_call.initial_metadata() + cancelled = response_future_call.cancel() + if initial_metadata is not None and cancelled: + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +def _run_infinite_request_stream(stub): + + def infinite_request_iterator(): + while True: + yield _application_common.STREAM_UNARY_REQUEST + + response_future_call = stub.StreUn.future( + infinite_request_iterator(), + timeout=_application_common.INFINITE_REQUEST_STREAM_TIMEOUT) + if response_future_call.code() is grpc.StatusCode.DEADLINE_EXCEEDED: + return _SATISFACTORY_OUTCOME + else: + return _UNSATISFACTORY_OUTCOME + + +_IMPLEMENTATIONS = { + Scenario.UNARY_UNARY: _run_unary_unary, + Scenario.UNARY_STREAM: _run_unary_stream, + Scenario.STREAM_UNARY: _run_stream_unary, + Scenario.STREAM_STREAM: _run_stream_stream, + Scenario.CONCURRENT_STREAM_UNARY: _run_concurrent_stream_unary, + Scenario.CONCURRENT_STREAM_STREAM: _run_concurrent_stream_stream, + Scenario.CANCEL_UNARY_UNARY: _run_cancel_unary_unary, + Scenario.INFINITE_REQUEST_STREAM: _run_infinite_request_stream, +} + + +def run(scenario, channel): + stub = services_pb2_grpc.FirstServiceStub(channel) + try: + return _IMPLEMENTATIONS[scenario](stub) + except grpc.RpcError as rpc_error: + return Outcome(Outcome.Kind.RPC_ERROR, rpc_error.code(), + rpc_error.details()) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py new file mode 100644 index 00000000000..5b051c39390 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_client_test.py @@ -0,0 +1,308 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from concurrent import futures +import time +import unittest + +import grpc +from grpc.framework.foundation import logging_pool +from tests.unit.framework.common import test_constants +import grpc_testing + +from tests.testing import _application_common +from tests.testing import _application_testing_common +from tests.testing import _client_application +from tests.testing.proto import requests_pb2 +from tests.testing.proto import services_pb2 + + +# TODO(https://github.com/google/protobuf/issues/3452): Drop this skip. + services_pb2.DESCRIPTOR.services_by_name.get('FirstService') is None, + 'Fix protobuf issue 3452!') +class ClientTest(unittest.TestCase): + + def setUp(self): + # In this test the client-side application under test executes in + # a separate thread while we retain use of the test thread to "play + # server". 
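The comment above is the key to this test module: the application code runs on a worker thread against a grpc_testing channel while the test thread takes each outgoing RPC off that channel and plays the server. A condensed, hedged sketch of one unary-unary exchange using the same grpc_testing calls the tests below make; the helper itself is hypothetical.

# Hedged sketch of the "play server" handshake for a single unary-unary RPC.
# `channel` is a grpc_testing channel and `descriptor` a method descriptor,
# created the same way as in setUp and _application_testing_common.
from concurrent import futures

import grpc


def play_server_once(channel, descriptor, application_callable, response):
    with futures.ThreadPoolExecutor(max_workers=1) as pool:
        # Client side: the application under test runs on the worker thread.
        app_future = pool.submit(application_callable, channel)
        # Server side: the current thread answers the RPC the application sent.
        invocation_metadata, request, rpc = channel.take_unary_unary(descriptor)
        rpc.send_initial_metadata(())
        rpc.terminate(response, (), grpc.StatusCode.OK, '')
        return request, app_future.result()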
+ self._client_execution_thread_pool = logging_pool.pool(1) + + self._fake_time = grpc_testing.strict_fake_time(time.time()) + self._real_time = grpc_testing.strict_real_time() + self._fake_time_channel = grpc_testing.channel( + services_pb2.DESCRIPTOR.services_by_name.values(), self._fake_time) + self._real_time_channel = grpc_testing.channel( + services_pb2.DESCRIPTOR.services_by_name.values(), self._real_time) + + def tearDown(self): + self._client_execution_thread_pool.shutdown(wait=True) + + def test_successful_unary_unary(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.UNARY_UNARY, + self._real_time_channel) + invocation_metadata, request, rpc = ( + self._real_time_channel.take_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN)) + rpc.send_initial_metadata(()) + rpc.terminate(_application_common.UNARY_UNARY_RESPONSE, (), + grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_successful_unary_stream(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.UNARY_STREAM, + self._fake_time_channel) + invocation_metadata, request, rpc = ( + self._fake_time_channel.take_unary_stream( + _application_testing_common.FIRST_SERVICE_UNSTRE)) + rpc.send_initial_metadata(()) + rpc.terminate((), grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.UNARY_STREAM_REQUEST, request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_successful_stream_unary(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.STREAM_UNARY, + self._real_time_channel) + invocation_metadata, rpc = self._real_time_channel.take_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN) + rpc.send_initial_metadata(()) + first_request = rpc.take_request() + second_request = rpc.take_request() + third_request = rpc.take_request() + rpc.requests_closed() + rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (), + grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + first_request) + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + second_request) + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + third_request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_successful_stream_stream(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.STREAM_STREAM, + self._fake_time_channel) + invocation_metadata, rpc = self._fake_time_channel.take_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE) + first_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + second_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.requests_closed() + rpc.terminate((), grpc.StatusCode.OK, '') + 
application_return_value = application_future.result() + + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + first_request) + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + second_request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_concurrent_stream_stream(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, + _client_application.Scenario.CONCURRENT_STREAM_STREAM, + self._real_time_channel) + rpcs = [] + for _ in range(test_constants.RPC_CONCURRENCY): + invocation_metadata, rpc = ( + self._real_time_channel.take_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE)) + rpcs.append(rpc) + requests = {} + for rpc in rpcs: + requests[rpc] = [rpc.take_request()] + for rpc in rpcs: + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + for rpc in rpcs: + requests[rpc].append(rpc.take_request()) + for rpc in rpcs: + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + for rpc in rpcs: + rpc.requests_closed() + for rpc in rpcs: + rpc.terminate((), grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + for requests_of_one_rpc in requests.values(): + for request in requests_of_one_rpc: + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_cancelled_unary_unary(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, + _client_application.Scenario.CANCEL_UNARY_UNARY, + self._fake_time_channel) + invocation_metadata, request, rpc = ( + self._fake_time_channel.take_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN)) + rpc.send_initial_metadata(()) + rpc.cancelled() + application_return_value = application_future.result() + + self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + def test_status_stream_unary(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, + _client_application.Scenario.CONCURRENT_STREAM_UNARY, + self._fake_time_channel) + rpcs = tuple( + self._fake_time_channel.take_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN)[1] + for _ in range(test_constants.THREAD_CONCURRENCY)) + for rpc in rpcs: + rpc.take_request() + rpc.take_request() + rpc.take_request() + rpc.requests_closed() + rpc.send_initial_metadata((( + 'my_metadata_key', + 'My Metadata Value!', + ),)) + for rpc in rpcs[:-1]: + rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (), + grpc.StatusCode.OK, '') + rpcs[-1].terminate(_application_common.STREAM_UNARY_RESPONSE, (), + grpc.StatusCode.RESOURCE_EXHAUSTED, + 'nope; not able to handle all those RPCs!') + application_return_value = application_future.result() + + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.UNSATISFACTORY) + + def test_status_stream_stream(self): + code = grpc.StatusCode.DEADLINE_EXCEEDED + details = 'test deadline exceeded!' 
+ + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.STREAM_STREAM, + self._real_time_channel) + invocation_metadata, rpc = self._real_time_channel.take_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE) + first_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + second_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.requests_closed() + rpc.terminate((), code, details) + application_return_value = application_future.result() + + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + first_request) + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + second_request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.RPC_ERROR) + self.assertIs(application_return_value.code, code) + self.assertEqual(application_return_value.details, details) + + def test_misbehaving_server_unary_unary(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.UNARY_UNARY, + self._fake_time_channel) + invocation_metadata, request, rpc = ( + self._fake_time_channel.take_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN)) + rpc.send_initial_metadata(()) + rpc.terminate(_application_common.ERRONEOUS_UNARY_UNARY_RESPONSE, (), + grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.UNARY_UNARY_REQUEST, request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.UNSATISFACTORY) + + def test_misbehaving_server_stream_stream(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, _client_application.Scenario.STREAM_STREAM, + self._real_time_channel) + invocation_metadata, rpc = self._real_time_channel.take_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE) + first_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + second_request = rpc.take_request() + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.send_response(_application_common.STREAM_STREAM_RESPONSE) + rpc.requests_closed() + rpc.terminate((), grpc.StatusCode.OK, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + first_request) + self.assertEqual(_application_common.STREAM_STREAM_REQUEST, + second_request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.UNSATISFACTORY) + + def test_infinite_request_stream_real_time(self): + application_future = self._client_execution_thread_pool.submit( + _client_application.run, + _client_application.Scenario.INFINITE_REQUEST_STREAM, + self._real_time_channel) + invocation_metadata, rpc = self._real_time_channel.take_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN) + rpc.send_initial_metadata(()) + first_request = rpc.take_request() + second_request = rpc.take_request() + third_request = rpc.take_request() + self._real_time.sleep_for( + 
_application_common.INFINITE_REQUEST_STREAM_TIMEOUT) + rpc.terminate(_application_common.STREAM_UNARY_RESPONSE, (), + grpc.StatusCode.DEADLINE_EXCEEDED, '') + application_return_value = application_future.result() + + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + first_request) + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + second_request) + self.assertEqual(_application_common.STREAM_UNARY_REQUEST, + third_request) + self.assertIs(application_return_value.kind, + _client_application.Outcome.Kind.SATISFACTORY) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py new file mode 100644 index 00000000000..51ed977b8fe --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_application.py @@ -0,0 +1,95 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""An example gRPC Python-using server-side application.""" + +import grpc + +import threading + +# requests_pb2 is a semantic dependency of this module. +from tests.testing import _application_common +from tests.testing.proto import requests_pb2 # pylint: disable=unused-import +from tests.testing.proto import services_pb2 +from tests.testing.proto import services_pb2_grpc + + +class FirstServiceServicer(services_pb2_grpc.FirstServiceServicer): + """Services RPCs.""" + + def __init__(self): + self._abort_lock = threading.RLock() + self._abort_response = _application_common.ABORT_NO_STATUS_RESPONSE + + def UnUn(self, request, context): + if request == _application_common.UNARY_UNARY_REQUEST: + return _application_common.UNARY_UNARY_RESPONSE + elif request == _application_common.ABORT_REQUEST: + with self._abort_lock: + try: + context.abort(grpc.StatusCode.PERMISSION_DENIED, + "Denying permission to test abort.") + except Exception as e: # pylint: disable=broad-except + self._abort_response = _application_common.ABORT_SUCCESS_RESPONSE + else: + self._abort_status = _application_common.ABORT_FAILURE_RESPONSE + return None # NOTE: For the linter. 
+ elif request == _application_common.ABORT_SUCCESS_QUERY: + with self._abort_lock: + return self._abort_response + else: + context.set_code(grpc.StatusCode.INVALID_ARGUMENT) + context.set_details('Something is wrong with your request!') + return services_pb2.Down() + + def UnStre(self, request, context): + if _application_common.UNARY_STREAM_REQUEST != request: + context.set_code(grpc.StatusCode.INVALID_ARGUMENT) + context.set_details('Something is wrong with your request!') + return + yield services_pb2.Strange() # pylint: disable=unreachable + + def StreUn(self, request_iterator, context): + context.send_initial_metadata((( + 'server_application_metadata_key', + 'Hi there!', + ),)) + for request in request_iterator: + if request != _application_common.STREAM_UNARY_REQUEST: + context.set_code(grpc.StatusCode.INVALID_ARGUMENT) + context.set_details('Something is wrong with your request!') + return services_pb2.Strange() + elif not context.is_active(): + return services_pb2.Strange() + else: + return _application_common.STREAM_UNARY_RESPONSE + + def StreStre(self, request_iterator, context): + valid_requests = (_application_common.STREAM_STREAM_REQUEST, + _application_common.STREAM_STREAM_MUTATING_REQUEST) + for request in request_iterator: + if request not in valid_requests: + context.set_code(grpc.StatusCode.INVALID_ARGUMENT) + context.set_details('Something is wrong with your request!') + return + elif not context.is_active(): + return + elif request == _application_common.STREAM_STREAM_REQUEST: + yield _application_common.STREAM_STREAM_RESPONSE + yield _application_common.STREAM_STREAM_RESPONSE + elif request == _application_common.STREAM_STREAM_MUTATING_REQUEST: + response = services_pb2.Bottom() + for i in range( + _application_common.STREAM_STREAM_MUTATING_COUNT): + response.first_bottom_field = i + yield response diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py new file mode 100644 index 00000000000..617a41b7e54 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_server_test.py @@ -0,0 +1,207 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
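FirstServiceServicer above is driven below through grpc_testing rather than a real server, but the same servicer can be hosted normally. A hedged sketch, assuming the conventionally generated add_FirstServiceServicer_to_server helper in services_pb2_grpc and an arbitrary port:

# Hedged sketch: hosting the FirstServiceServicer above on a real gRPC server.
# add_FirstServiceServicer_to_server is the standard protoc-generated helper
# name; treat it and the port as assumptions.
from concurrent import futures

import grpc

from tests.testing import _server_application
from tests.testing.proto import services_pb2_grpc


def serve(port=50051):
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    services_pb2_grpc.add_FirstServiceServicer_to_server(
        _server_application.FirstServiceServicer(), server)
    server.add_insecure_port('[::]:{}'.format(port))
    server.start()
    return server  # caller owns wait_for_termination()/stop()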
+ +import time +import unittest + +import grpc +import grpc_testing + +from tests.testing import _application_common +from tests.testing import _application_testing_common +from tests.testing import _server_application +from tests.testing.proto import services_pb2 + + +class FirstServiceServicerTest(unittest.TestCase): + + def setUp(self): + self._real_time = grpc_testing.strict_real_time() + self._fake_time = grpc_testing.strict_fake_time(time.time()) + servicer = _server_application.FirstServiceServicer() + descriptors_to_servicers = { + _application_testing_common.FIRST_SERVICE: servicer + } + self._real_time_server = grpc_testing.server_from_dictionary( + descriptors_to_servicers, self._real_time) + self._fake_time_server = grpc_testing.server_from_dictionary( + descriptors_to_servicers, self._fake_time) + + def test_successful_unary_unary(self): + rpc = self._real_time_server.invoke_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN, (), + _application_common.UNARY_UNARY_REQUEST, None) + initial_metadata = rpc.initial_metadata() + response, trailing_metadata, code, details = rpc.termination() + + self.assertEqual(_application_common.UNARY_UNARY_RESPONSE, response) + self.assertIs(code, grpc.StatusCode.OK) + + def test_successful_unary_stream(self): + rpc = self._real_time_server.invoke_unary_stream( + _application_testing_common.FIRST_SERVICE_UNSTRE, (), + _application_common.UNARY_STREAM_REQUEST, None) + initial_metadata = rpc.initial_metadata() + trailing_metadata, code, details = rpc.termination() + + self.assertIs(code, grpc.StatusCode.OK) + + def test_successful_stream_unary(self): + rpc = self._real_time_server.invoke_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN, (), None) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.requests_closed() + initial_metadata = rpc.initial_metadata() + response, trailing_metadata, code, details = rpc.termination() + + self.assertEqual(_application_common.STREAM_UNARY_RESPONSE, response) + self.assertIs(code, grpc.StatusCode.OK) + + def test_successful_stream_stream(self): + rpc = self._real_time_server.invoke_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE, (), None) + rpc.send_request(_application_common.STREAM_STREAM_REQUEST) + initial_metadata = rpc.initial_metadata() + responses = [ + rpc.take_response(), + rpc.take_response(), + ] + rpc.send_request(_application_common.STREAM_STREAM_REQUEST) + rpc.send_request(_application_common.STREAM_STREAM_REQUEST) + responses.extend([ + rpc.take_response(), + rpc.take_response(), + rpc.take_response(), + rpc.take_response(), + ]) + rpc.requests_closed() + trailing_metadata, code, details = rpc.termination() + + for response in responses: + self.assertEqual(_application_common.STREAM_STREAM_RESPONSE, + response) + self.assertIs(code, grpc.StatusCode.OK) + + def test_mutating_stream_stream(self): + rpc = self._real_time_server.invoke_stream_stream( + _application_testing_common.FIRST_SERVICE_STRESTRE, (), None) + rpc.send_request(_application_common.STREAM_STREAM_MUTATING_REQUEST) + initial_metadata = rpc.initial_metadata() + responses = [ + rpc.take_response() + for _ in range(_application_common.STREAM_STREAM_MUTATING_COUNT) + ] + rpc.send_request(_application_common.STREAM_STREAM_MUTATING_REQUEST) + responses.extend([ + rpc.take_response() + for _ in range(_application_common.STREAM_STREAM_MUTATING_COUNT) + ]) + 
rpc.requests_closed() + _, _, _ = rpc.termination() + expected_responses = ( + services_pb2.Bottom(first_bottom_field=0), + services_pb2.Bottom(first_bottom_field=1), + services_pb2.Bottom(first_bottom_field=0), + services_pb2.Bottom(first_bottom_field=1), + ) + self.assertSequenceEqual(expected_responses, responses) + + def test_server_rpc_idempotence(self): + rpc = self._real_time_server.invoke_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN, (), + _application_common.UNARY_UNARY_REQUEST, None) + first_initial_metadata = rpc.initial_metadata() + second_initial_metadata = rpc.initial_metadata() + third_initial_metadata = rpc.initial_metadata() + first_termination = rpc.termination() + second_termination = rpc.termination() + third_termination = rpc.termination() + + for later_initial_metadata in ( + second_initial_metadata, + third_initial_metadata, + ): + self.assertEqual(first_initial_metadata, later_initial_metadata) + response = first_termination[0] + terminal_metadata = first_termination[1] + code = first_termination[2] + details = first_termination[3] + for later_termination in ( + second_termination, + third_termination, + ): + self.assertEqual(response, later_termination[0]) + self.assertEqual(terminal_metadata, later_termination[1]) + self.assertIs(code, later_termination[2]) + self.assertEqual(details, later_termination[3]) + self.assertEqual(_application_common.UNARY_UNARY_RESPONSE, response) + self.assertIs(code, grpc.StatusCode.OK) + + def test_misbehaving_client_unary_unary(self): + rpc = self._real_time_server.invoke_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN, (), + _application_common.ERRONEOUS_UNARY_UNARY_REQUEST, None) + initial_metadata = rpc.initial_metadata() + response, trailing_metadata, code, details = rpc.termination() + + self.assertIsNot(code, grpc.StatusCode.OK) + + def test_infinite_request_stream_real_time(self): + rpc = self._real_time_server.invoke_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN, (), + _application_common.INFINITE_REQUEST_STREAM_TIMEOUT) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + initial_metadata = rpc.initial_metadata() + self._real_time.sleep_for( + _application_common.INFINITE_REQUEST_STREAM_TIMEOUT * 2) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + response, trailing_metadata, code, details = rpc.termination() + + self.assertIs(code, grpc.StatusCode.DEADLINE_EXCEEDED) + + def test_infinite_request_stream_fake_time(self): + rpc = self._fake_time_server.invoke_stream_unary( + _application_testing_common.FIRST_SERVICE_STREUN, (), + _application_common.INFINITE_REQUEST_STREAM_TIMEOUT) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + initial_metadata = rpc.initial_metadata() + self._fake_time.sleep_for( + _application_common.INFINITE_REQUEST_STREAM_TIMEOUT * 2) + rpc.send_request(_application_common.STREAM_UNARY_REQUEST) + response, trailing_metadata, code, details = rpc.termination() + + self.assertIs(code, grpc.StatusCode.DEADLINE_EXCEEDED) + + def test_servicer_context_abort(self): + rpc = self._real_time_server.invoke_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN, (), + _application_common.ABORT_REQUEST, None) + _, _, code, _ = rpc.termination() + self.assertIs(code, 
grpc.StatusCode.PERMISSION_DENIED) + rpc = self._real_time_server.invoke_unary_unary( + _application_testing_common.FIRST_SERVICE_UNUN, (), + _application_common.ABORT_SUCCESS_QUERY, None) + response, _, code, _ = rpc.termination() + self.assertEqual(_application_common.ABORT_SUCCESS_RESPONSE, response) + self.assertIs(code, grpc.StatusCode.OK) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py new file mode 100644 index 00000000000..cab665c045c --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/_time_test.py @@ -0,0 +1,165 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import random +import threading +import time +import unittest + +import grpc_testing + +_QUANTUM = 0.3 +_MANY = 10000 +# Tests that run in real time can either wait for the scheduler to +# eventually run what needs to be run (and risk timing out) or declare +# that the scheduler didn't schedule work reasonably fast enough. We +# choose the latter for this test. +_PATHOLOGICAL_SCHEDULING = 'pathological thread scheduling!' + + +class _TimeNoter(object): + + def __init__(self, time): + self._condition = threading.Condition() + self._time = time + self._call_times = [] + + def __call__(self): + with self._condition: + self._call_times.append(self._time.time()) + + def call_times(self): + with self._condition: + return tuple(self._call_times) + + +class TimeTest(object): + + def test_sleep_for(self): + start_time = self._time.time() + self._time.sleep_for(_QUANTUM) + end_time = self._time.time() + + self.assertLessEqual(start_time + _QUANTUM, end_time) + + def test_sleep_until(self): + start_time = self._time.time() + self._time.sleep_until(start_time + _QUANTUM) + end_time = self._time.time() + + self.assertLessEqual(start_time + _QUANTUM, end_time) + + def test_call_in(self): + time_noter = _TimeNoter(self._time) + + start_time = self._time.time() + self._time.call_in(time_noter, _QUANTUM) + self._time.sleep_for(_QUANTUM * 2) + call_times = time_noter.call_times() + + self.assertTrue(call_times, msg=_PATHOLOGICAL_SCHEDULING) + self.assertLessEqual(start_time + _QUANTUM, call_times[0]) + + def test_call_at(self): + time_noter = _TimeNoter(self._time) + + start_time = self._time.time() + self._time.call_at(time_noter, self._time.time() + _QUANTUM) + self._time.sleep_for(_QUANTUM * 2) + call_times = time_noter.call_times() + + self.assertTrue(call_times, msg=_PATHOLOGICAL_SCHEDULING) + self.assertLessEqual(start_time + _QUANTUM, call_times[0]) + + def test_cancel(self): + time_noter = _TimeNoter(self._time) + + future = self._time.call_in(time_noter, _QUANTUM * 2) + self._time.sleep_for(_QUANTUM) + cancelled = future.cancel() + self._time.sleep_for(_QUANTUM * 2) + call_times = time_noter.call_times() + + self.assertFalse(call_times, msg=_PATHOLOGICAL_SCHEDULING) + self.assertTrue(cancelled) + 
self.assertTrue(future.cancelled()) + + def test_many(self): + test_events = tuple(threading.Event() for _ in range(_MANY)) + possibly_cancelled_futures = {} + background_noise_futures = [] + + for test_event in test_events: + possibly_cancelled_futures[test_event] = self._time.call_in( + test_event.set, _QUANTUM * (2 + random.random())) + for _ in range(_MANY): + background_noise_futures.append( + self._time.call_in(threading.Event().set, + _QUANTUM * 1000 * random.random())) + self._time.sleep_for(_QUANTUM) + cancelled = set() + for test_event, test_future in possibly_cancelled_futures.items(): + if bool(random.randint(0, 1)) and test_future.cancel(): + cancelled.add(test_event) + self._time.sleep_for(_QUANTUM * 3) + + for test_event in test_events: + (self.assertFalse if test_event in cancelled else self.assertTrue)( + test_event.is_set()) + for background_noise_future in background_noise_futures: + background_noise_future.cancel() + + def test_same_behavior_used_several_times(self): + time_noter = _TimeNoter(self._time) + + start_time = self._time.time() + first_future_at_one = self._time.call_in(time_noter, _QUANTUM) + second_future_at_one = self._time.call_in(time_noter, _QUANTUM) + first_future_at_three = self._time.call_in(time_noter, _QUANTUM * 3) + second_future_at_three = self._time.call_in(time_noter, _QUANTUM * 3) + self._time.sleep_for(_QUANTUM * 2) + first_future_at_one_cancelled = first_future_at_one.cancel() + second_future_at_one_cancelled = second_future_at_one.cancel() + first_future_at_three_cancelled = first_future_at_three.cancel() + self._time.sleep_for(_QUANTUM * 2) + second_future_at_three_cancelled = second_future_at_three.cancel() + first_future_at_three_cancelled_again = first_future_at_three.cancel() + call_times = time_noter.call_times() + + self.assertEqual(3, len(call_times), msg=_PATHOLOGICAL_SCHEDULING) + self.assertFalse(first_future_at_one_cancelled) + self.assertFalse(second_future_at_one_cancelled) + self.assertTrue(first_future_at_three_cancelled) + self.assertFalse(second_future_at_three_cancelled) + self.assertTrue(first_future_at_three_cancelled_again) + self.assertLessEqual(start_time + _QUANTUM, call_times[0]) + self.assertLessEqual(start_time + _QUANTUM, call_times[1]) + self.assertLessEqual(start_time + _QUANTUM * 3, call_times[2]) + + +class StrictRealTimeTest(TimeTest, unittest.TestCase): + + def setUp(self): + self._time = grpc_testing.strict_real_time() + + +class StrictFakeTimeTest(TimeTest, unittest.TestCase): + + def setUp(self): + self._time = grpc_testing.strict_fake_time( + random.randint(0, int(time.time()))) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py new file mode 100644 index 00000000000..1e120359cf9 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/testing/proto/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
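StrictFakeTimeTest above runs the same TimeTest methods without any real waiting: grpc_testing's fake time only advances when the test calls sleep_for or sleep_until. A minimal sketch of that behaviour, using only calls the tests above also use:

# Minimal sketch of grpc_testing fake time: sleep_for returns immediately and
# simply advances the fake clock; pending callbacks can still be cancelled.
import time

import grpc_testing

fake_time = grpc_testing.strict_fake_time(time.time())
start = fake_time.time()
fake_time.sleep_for(60.0)                 # no real minute passes
assert fake_time.time() >= start + 60.0   # but the fake clock moved forward
pending = fake_time.call_in(lambda: None, 5.0)
pending.cancel()                          # not yet due, so it can be cancelled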
+# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/__init__.py new file mode 100644 index 00000000000..5fb4f3c3cfd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py new file mode 100644 index 00000000000..d2eaf97d5f4 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_abort_test.py @@ -0,0 +1,154 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests server context abort mechanism""" + +import unittest +import collections +import gc +import logging +import weakref + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_ABORT = '/test/abort' +_ABORT_WITH_STATUS = '/test/AbortWithStatus' +_INVALID_CODE = '/test/InvalidCode' + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_ABORT_DETAILS = 'Abandon ship!' 
+_ABORT_METADATA = (('a-trailing-metadata', '42'),) + + +class _Status( + collections.namedtuple('_Status', + ('code', 'details', 'trailing_metadata')), + grpc.Status): + pass + + +class _Object(object): + pass + + +do_not_leak_me = _Object() + + +def abort_unary_unary(request, servicer_context): + this_should_not_be_leaked = do_not_leak_me + servicer_context.abort( + grpc.StatusCode.INTERNAL, + _ABORT_DETAILS, + ) + raise Exception('This line should not be executed!') + + +def abort_with_status_unary_unary(request, servicer_context): + servicer_context.abort_with_status( + _Status( + code=grpc.StatusCode.INTERNAL, + details=_ABORT_DETAILS, + trailing_metadata=_ABORT_METADATA, + )) + raise Exception('This line should not be executed!') + + +def invalid_code_unary_unary(request, servicer_context): + servicer_context.abort( + 42, + _ABORT_DETAILS, + ) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _ABORT: + return grpc.unary_unary_rpc_method_handler(abort_unary_unary) + elif handler_call_details.method == _ABORT_WITH_STATUS: + return grpc.unary_unary_rpc_method_handler( + abort_with_status_unary_unary) + elif handler_call_details.method == _INVALID_CODE: + return grpc.stream_stream_rpc_method_handler( + invalid_code_unary_unary) + else: + return None + + +class AbortTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + port = self._server.add_insecure_port('[::]:0') + self._server.add_generic_rpc_handlers((_GenericHandler(),)) + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._channel.close() + self._server.stop(0) + + def test_abort(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_ABORT)(_REQUEST) + rpc_error = exception_context.exception + + self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) + self.assertEqual(rpc_error.details(), _ABORT_DETAILS) + + # This test ensures that abort() does not store the raised exception, which + # on Python 3 (via the `__traceback__` attribute) holds a reference to + # all local vars. Storing the raised exception can prevent GC and stop the + # grpc_call from being unref'ed, even after server shutdown. + @unittest.skip("https://github.com/grpc/grpc/issues/17927") + def test_abort_does_not_leak_local_vars(self): + global do_not_leak_me # pylint: disable=global-statement + weak_ref = weakref.ref(do_not_leak_me) + + # Servicer will abort() after creating a local ref to do_not_leak_me. + with self.assertRaises(grpc.RpcError): + self._channel.unary_unary(_ABORT)(_REQUEST) + + # Server may still have a stack frame reference to the exception even + # after client sees error, so ensure server has shutdown. 
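The comment above hinges on a Python 3 detail: a stored exception keeps its __traceback__, the traceback keeps each frame, and the frames keep their locals, so holding on to the abort exception would pin the servicer's locals (and, through them, the call). A minimal, gRPC-free illustration of that reference chain:

# Minimal illustration (no gRPC involved) of why storing a raised exception
# can leak: exc.__traceback__ -> frame -> locals keeps `payload` alive.
import gc
import weakref


class _Payload(object):
    pass


def _raise_with_local():
    payload = _Payload()
    ref = weakref.ref(payload)
    try:
        raise RuntimeError('boom')
    except RuntimeError as exc:
        return exc, ref


exc, ref = _raise_with_local()
assert ref() is not None  # the traceback still references the frame's locals
assert 'payload' in exc.__traceback__.tb_frame.f_locals
exc = None                # drop the exception...
gc.collect()
assert ref() is None      # ...and the local becomes collectible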
+ self._server.stop(None) + do_not_leak_me = None + self.assertIsNone(weak_ref()) + + def test_abort_with_status(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_ABORT_WITH_STATUS)(_REQUEST) + rpc_error = exception_context.exception + + self.assertEqual(rpc_error.code(), grpc.StatusCode.INTERNAL) + self.assertEqual(rpc_error.details(), _ABORT_DETAILS) + self.assertEqual(rpc_error.trailing_metadata(), _ABORT_METADATA) + + def test_invalid_code(self): + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary(_INVALID_CODE)(_REQUEST) + rpc_error = exception_context.exception + + self.assertEqual(rpc_error.code(), grpc.StatusCode.UNKNOWN) + self.assertEqual(rpc_error.details(), _ABORT_DETAILS) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py new file mode 100644 index 00000000000..a459ee6e192 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_api_test.py @@ -0,0 +1,118 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test of gRPC Python's application-layer API.""" + +import unittest +import logging + +import six + +import grpc + +from tests.unit import _from_grpc_import_star + + +class AllTest(unittest.TestCase): + + def testAll(self): + expected_grpc_code_elements = ( + 'FutureTimeoutError', + 'FutureCancelledError', + 'Future', + 'ChannelConnectivity', + 'Compression', + 'StatusCode', + 'Status', + 'RpcError', + 'RpcContext', + 'Call', + 'ChannelCredentials', + 'CallCredentials', + 'AuthMetadataContext', + 'AuthMetadataPluginCallback', + 'AuthMetadataPlugin', + 'ServerCertificateConfiguration', + 'ServerCredentials', + 'UnaryUnaryMultiCallable', + 'UnaryStreamMultiCallable', + 'StreamUnaryMultiCallable', + 'StreamStreamMultiCallable', + 'UnaryUnaryClientInterceptor', + 'UnaryStreamClientInterceptor', + 'StreamUnaryClientInterceptor', + 'StreamStreamClientInterceptor', + 'Channel', + 'ServicerContext', + 'RpcMethodHandler', + 'HandlerCallDetails', + 'GenericRpcHandler', + 'ServiceRpcHandler', + 'Server', + 'ServerInterceptor', + 'LocalConnectionType', + 'local_channel_credentials', + 'local_server_credentials', + 'alts_channel_credentials', + 'alts_server_credentials', + 'unary_unary_rpc_method_handler', + 'unary_stream_rpc_method_handler', + 'stream_unary_rpc_method_handler', + 'ClientCallDetails', + 'stream_stream_rpc_method_handler', + 'method_handlers_generic_handler', + 'ssl_channel_credentials', + 'metadata_call_credentials', + 'access_token_call_credentials', + 'composite_call_credentials', + 'composite_channel_credentials', + 'ssl_server_credentials', + 'ssl_server_certificate_configuration', + 'dynamic_ssl_server_credentials', + 'channel_ready_future', + 'insecure_channel', + 'secure_channel', + 'intercept_channel', + 'server', + 'protos', + 'services', + 
'protos_and_services', + ) + + six.assertCountEqual(self, expected_grpc_code_elements, + _from_grpc_import_star.GRPC_ELEMENTS) + + +class ChannelConnectivityTest(unittest.TestCase): + + def testChannelConnectivity(self): + self.assertSequenceEqual(( + grpc.ChannelConnectivity.IDLE, + grpc.ChannelConnectivity.CONNECTING, + grpc.ChannelConnectivity.READY, + grpc.ChannelConnectivity.TRANSIENT_FAILURE, + grpc.ChannelConnectivity.SHUTDOWN, + ), tuple(grpc.ChannelConnectivity)) + + +class ChannelTest(unittest.TestCase): + + def test_secure_channel(self): + channel_credentials = grpc.ssl_channel_credentials() + channel = grpc.secure_channel('google.com:443', channel_credentials) + channel.close() + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py new file mode 100644 index 00000000000..817c528237b --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_context_test.py @@ -0,0 +1,193 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests exposure of SSL auth context""" + +import pickle +import unittest +import logging + +import grpc +from grpc import _channel +from grpc.experimental import session_cache +import six + +from tests.unit import test_common +from tests.unit import resources + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_UNARY_UNARY = '/test/UnaryUnary' + +_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' +_CLIENT_IDS = ( + b'*.test.google.fr', + b'waterzooi.test.google.be', + b'*.test.youtube.com', + b'192.168.1.3', +) +_ID = 'id' +_ID_KEY = 'id_key' +_AUTH_CTX = 'auth_ctx' + +_PRIVATE_KEY = resources.private_key() +_CERTIFICATE_CHAIN = resources.certificate_chain() +_TEST_ROOT_CERTIFICATES = resources.test_root_certificates() +_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),) +_PROPERTY_OPTIONS = (( + 'grpc.ssl_target_name_override', + _SERVER_HOST_OVERRIDE, +),) + + +def handle_unary_unary(request, servicer_context): + return pickle.dumps({ + _ID: servicer_context.peer_identities(), + _ID_KEY: servicer_context.peer_identity_key(), + _AUTH_CTX: servicer_context.auth_context() + }) + + +class AuthContextTest(unittest.TestCase): + + def testInsecure(self): + handler = grpc.method_handlers_generic_handler('test', { + 'UnaryUnary': + grpc.unary_unary_rpc_method_handler(handle_unary_unary) + }) + server = test_common.test_server() + server.add_generic_rpc_handlers((handler,)) + port = server.add_insecure_port('[::]:0') + server.start() + + with grpc.insecure_channel('localhost:%d' % port) as channel: + response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) + server.stop(None) + + auth_data = pickle.loads(response) + self.assertIsNone(auth_data[_ID]) + self.assertIsNone(auth_data[_ID_KEY]) + self.assertDictEqual({}, auth_data[_AUTH_CTX]) + + def testSecureNoCert(self): + handler = grpc.method_handlers_generic_handler('test', 
{ + 'UnaryUnary': + grpc.unary_unary_rpc_method_handler(handle_unary_unary) + }) + server = test_common.test_server() + server.add_generic_rpc_handlers((handler,)) + server_cred = grpc.ssl_server_credentials(_SERVER_CERTS) + port = server.add_secure_port('[::]:0', server_cred) + server.start() + + channel_creds = grpc.ssl_channel_credentials( + root_certificates=_TEST_ROOT_CERTIFICATES) + channel = grpc.secure_channel('localhost:{}'.format(port), + channel_creds, + options=_PROPERTY_OPTIONS) + response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) + channel.close() + server.stop(None) + + auth_data = pickle.loads(response) + self.assertIsNone(auth_data[_ID]) + self.assertIsNone(auth_data[_ID_KEY]) + self.assertDictEqual( + { + 'security_level': [b'TSI_PRIVACY_AND_INTEGRITY'], + 'transport_security_type': [b'ssl'], + 'ssl_session_reused': [b'false'], + }, auth_data[_AUTH_CTX]) + + def testSecureClientCert(self): + handler = grpc.method_handlers_generic_handler('test', { + 'UnaryUnary': + grpc.unary_unary_rpc_method_handler(handle_unary_unary) + }) + server = test_common.test_server() + server.add_generic_rpc_handlers((handler,)) + server_cred = grpc.ssl_server_credentials( + _SERVER_CERTS, + root_certificates=_TEST_ROOT_CERTIFICATES, + require_client_auth=True) + port = server.add_secure_port('[::]:0', server_cred) + server.start() + + channel_creds = grpc.ssl_channel_credentials( + root_certificates=_TEST_ROOT_CERTIFICATES, + private_key=_PRIVATE_KEY, + certificate_chain=_CERTIFICATE_CHAIN) + channel = grpc.secure_channel('localhost:{}'.format(port), + channel_creds, + options=_PROPERTY_OPTIONS) + + response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) + channel.close() + server.stop(None) + + auth_data = pickle.loads(response) + auth_ctx = auth_data[_AUTH_CTX] + six.assertCountEqual(self, _CLIENT_IDS, auth_data[_ID]) + self.assertEqual('x509_subject_alternative_name', auth_data[_ID_KEY]) + self.assertSequenceEqual([b'ssl'], auth_ctx['transport_security_type']) + self.assertSequenceEqual([b'*.test.google.com'], + auth_ctx['x509_common_name']) + + def _do_one_shot_client_rpc(self, channel_creds, channel_options, port, + expect_ssl_session_reused): + channel = grpc.secure_channel('localhost:{}'.format(port), + channel_creds, + options=channel_options) + response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) + auth_data = pickle.loads(response) + self.assertEqual(expect_ssl_session_reused, + auth_data[_AUTH_CTX]['ssl_session_reused']) + channel.close() + + def testSessionResumption(self): + # Set up a secure server + handler = grpc.method_handlers_generic_handler('test', { + 'UnaryUnary': + grpc.unary_unary_rpc_method_handler(handle_unary_unary) + }) + server = test_common.test_server() + server.add_generic_rpc_handlers((handler,)) + server_cred = grpc.ssl_server_credentials(_SERVER_CERTS) + port = server.add_secure_port('[::]:0', server_cred) + server.start() + + # Create a cache for TLS session tickets + cache = session_cache.ssl_session_cache_lru(1) + channel_creds = grpc.ssl_channel_credentials( + root_certificates=_TEST_ROOT_CERTIFICATES) + channel_options = _PROPERTY_OPTIONS + ( + ('grpc.ssl_session_cache', cache),) + + # Initial connection has no session to resume + self._do_one_shot_client_rpc(channel_creds, + channel_options, + port, + expect_ssl_session_reused=[b'false']) + + # Subsequent connections resume sessions + self._do_one_shot_client_rpc(channel_creds, + channel_options, + port, + expect_ssl_session_reused=[b'true']) + server.stop(None) + + +if __name__ == '__main__': + 
logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_test.py new file mode 100644 index 00000000000..d9df2add4f2 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_auth_test.py @@ -0,0 +1,82 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests of standard AuthMetadataPlugins.""" + +import collections +import threading +import unittest +import logging + +from grpc import _auth + + +class MockGoogleCreds(object): + + def get_access_token(self): + token = collections.namedtuple('MockAccessTokenInfo', + ('access_token', 'expires_in')) + token.access_token = 'token' + return token + + +class MockExceptionGoogleCreds(object): + + def get_access_token(self): + raise Exception() + + +class GoogleCallCredentialsTest(unittest.TestCase): + + def test_google_call_credentials_success(self): + callback_event = threading.Event() + + def mock_callback(metadata, error): + self.assertEqual(metadata, (('authorization', 'Bearer token'),)) + self.assertIsNone(error) + callback_event.set() + + call_creds = _auth.GoogleCallCredentials(MockGoogleCreds()) + call_creds(None, mock_callback) + self.assertTrue(callback_event.wait(1.0)) + + def test_google_call_credentials_error(self): + callback_event = threading.Event() + + def mock_callback(metadata, error): + self.assertIsNotNone(error) + callback_event.set() + + call_creds = _auth.GoogleCallCredentials(MockExceptionGoogleCreds()) + call_creds(None, mock_callback) + self.assertTrue(callback_event.wait(1.0)) + + +class AccessTokenAuthMetadataPluginTest(unittest.TestCase): + + def test_google_call_credentials_success(self): + callback_event = threading.Event() + + def mock_callback(metadata, error): + self.assertEqual(metadata, (('authorization', 'Bearer token'),)) + self.assertIsNone(error) + callback_event.set() + + metadata_plugin = _auth.AccessTokenAuthMetadataPlugin('token') + metadata_plugin(None, mock_callback) + self.assertTrue(callback_event.wait(1.0)) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py new file mode 100644 index 00000000000..2f2eea61dbd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_args_test.py @@ -0,0 +1,65 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests of channel arguments on client/server side.""" + +from concurrent import futures +import unittest +import logging + +import grpc + + +class TestPointerWrapper(object): + + def __int__(self): + return 123456 + + +TEST_CHANNEL_ARGS = ( + ('arg1', b'bytes_val'), + ('arg2', 'str_val'), + ('arg3', 1), + (b'arg4', 'str_val'), + ('arg6', TestPointerWrapper()), +) + +INVALID_TEST_CHANNEL_ARGS = [ + { + 'foo': 'bar' + }, + (('key',),), + 'str', +] + + +class ChannelArgsTest(unittest.TestCase): + + def test_client(self): + grpc.insecure_channel('localhost:8080', options=TEST_CHANNEL_ARGS) + + def test_server(self): + grpc.server(futures.ThreadPoolExecutor(max_workers=1), + options=TEST_CHANNEL_ARGS) + + def test_invalid_client_args(self): + for invalid_arg in INVALID_TEST_CHANNEL_ARGS: + self.assertRaises(ValueError, + grpc.insecure_channel, + 'localhost:8080', + options=invalid_arg) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_close_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_close_test.py new file mode 100644 index 00000000000..47f52b4890e --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_close_test.py @@ -0,0 +1,220 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
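The _channel_close_test.py cases below close a channel while streaming calls are still in flight and expect those calls to finish with CANCELLED. A minimal sketch of the client-side pattern being exercised, with an illustrative target and method name rather than anything taken from the tests:

import grpc

channel = grpc.insecure_channel('localhost:50051')
try:
    multi_callable = channel.unary_unary('/test/UnaryUnary')
    response = multi_callable(b'request-bytes')
finally:
    # close() releases the channel's resources; per the tests below, any RPC
    # still in progress on the channel terminates with grpc.StatusCode.CANCELLED.
    # Using the channel as a context manager closes it the same way.
    channel.close()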
+"""Tests server and client side compression.""" + +import itertools +import logging +import threading +import time +import unittest + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_BEAT = 0.5 +_SOME_TIME = 5 +_MORE_TIME = 10 + +_STREAM_URI = 'Meffod' +_UNARY_URI = 'MeffodMan' + + +class _StreamingMethodHandler(grpc.RpcMethodHandler): + + request_streaming = True + response_streaming = True + request_deserializer = None + response_serializer = None + + def stream_stream(self, request_iterator, servicer_context): + for request in request_iterator: + yield request * 2 + + +class _UnaryMethodHandler(grpc.RpcMethodHandler): + + request_streaming = False + response_streaming = False + request_deserializer = None + response_serializer = None + + def unary_unary(self, request, servicer_context): + return request * 2 + + +_STREAMING_METHOD_HANDLER = _StreamingMethodHandler() +_UNARY_METHOD_HANDLER = _UnaryMethodHandler() + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _STREAM_URI: + return _STREAMING_METHOD_HANDLER + else: + return _UNARY_METHOD_HANDLER + + +_GENERIC_HANDLER = _GenericHandler() + + +class _Pipe(object): + + def __init__(self, values): + self._condition = threading.Condition() + self._values = list(values) + self._open = True + + def __iter__(self): + return self + + def _next(self): + with self._condition: + while not self._values and self._open: + self._condition.wait() + if self._values: + return self._values.pop(0) + else: + raise StopIteration() + + def next(self): + return self._next() + + def __next__(self): + return self._next() + + def add(self, value): + with self._condition: + self._values.append(value) + self._condition.notify() + + def close(self): + with self._condition: + self._open = False + self._condition.notify() + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + +class ChannelCloseTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server( + max_workers=test_constants.THREAD_CONCURRENCY) + self._server.add_generic_rpc_handlers((_GENERIC_HANDLER,)) + self._port = self._server.add_insecure_port('[::]:0') + self._server.start() + + def tearDown(self): + self._server.stop(None) + + def test_close_immediately_after_call_invocation(self): + channel = grpc.insecure_channel('localhost:{}'.format(self._port)) + multi_callable = channel.stream_stream(_STREAM_URI) + request_iterator = _Pipe(()) + response_iterator = multi_callable(request_iterator) + channel.close() + request_iterator.close() + + self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) + + def test_close_while_call_active(self): + channel = grpc.insecure_channel('localhost:{}'.format(self._port)) + multi_callable = channel.stream_stream(_STREAM_URI) + request_iterator = _Pipe((b'abc',)) + response_iterator = multi_callable(request_iterator) + next(response_iterator) + channel.close() + request_iterator.close() + + self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) + + def test_context_manager_close_while_call_active(self): + with grpc.insecure_channel('localhost:{}'.format( + self._port)) as channel: # pylint: disable=bad-continuation + multi_callable = channel.stream_stream(_STREAM_URI) + request_iterator = _Pipe((b'abc',)) + response_iterator = multi_callable(request_iterator) + next(response_iterator) + request_iterator.close() + + 
self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) + + def test_context_manager_close_while_many_calls_active(self): + with grpc.insecure_channel('localhost:{}'.format( + self._port)) as channel: # pylint: disable=bad-continuation + multi_callable = channel.stream_stream(_STREAM_URI) + request_iterators = tuple( + _Pipe((b'abc',)) + for _ in range(test_constants.THREAD_CONCURRENCY)) + response_iterators = [] + for request_iterator in request_iterators: + response_iterator = multi_callable(request_iterator) + next(response_iterator) + response_iterators.append(response_iterator) + for request_iterator in request_iterators: + request_iterator.close() + + for response_iterator in response_iterators: + self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) + + def test_many_concurrent_closes(self): + channel = grpc.insecure_channel('localhost:{}'.format(self._port)) + multi_callable = channel.stream_stream(_STREAM_URI) + request_iterator = _Pipe((b'abc',)) + response_iterator = multi_callable(request_iterator) + next(response_iterator) + start = time.time() + end = start + _MORE_TIME + + def sleep_some_time_then_close(): + time.sleep(_SOME_TIME) + channel.close() + + for _ in range(test_constants.THREAD_CONCURRENCY): + close_thread = threading.Thread(target=sleep_some_time_then_close) + close_thread.start() + while True: + request_iterator.add(b'def') + time.sleep(_BEAT) + if end < time.time(): + break + request_iterator.close() + + self.assertIs(response_iterator.code(), grpc.StatusCode.CANCELLED) + + def test_exception_in_callback(self): + with grpc.insecure_channel('localhost:{}'.format( + self._port)) as channel: + stream_multi_callable = channel.stream_stream(_STREAM_URI) + endless_iterator = itertools.repeat(b'abc') + stream_response_iterator = stream_multi_callable(endless_iterator) + future = channel.unary_unary(_UNARY_URI).future(b'abc') + + def on_done_callback(future): + raise Exception("This should not cause a deadlock.") + + future.add_done_callback(on_done_callback) + future.result() + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py new file mode 100644 index 00000000000..d1b4c3c932f --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_connectivity_test.py @@ -0,0 +1,155 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
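The connectivity tests below drive grpc.Channel.subscribe through IDLE, CONNECTING and READY. A minimal sketch of that subscription API, assuming a server is reachable at the illustrative address; the callback name is made up:

import grpc

def on_state_change(connectivity):
    # Invoked with a grpc.ChannelConnectivity value on every state change.
    print('channel state:', connectivity)

channel = grpc.insecure_channel('localhost:50051')
# try_to_connect=True asks the channel to actually attempt a connection
# instead of merely reporting its current (initially IDLE) state.
channel.subscribe(on_state_change, try_to_connect=True)
# ... issue RPCs ...
channel.unsubscribe(on_state_change)
channel.close()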
+"""Tests of grpc._channel.Channel connectivity.""" + +import logging +import threading +import time +import unittest + +import grpc +from tests.unit.framework.common import test_constants +from tests.unit import thread_pool + + +def _ready_in_connectivities(connectivities): + return grpc.ChannelConnectivity.READY in connectivities + + +def _last_connectivity_is_not_ready(connectivities): + return connectivities[-1] is not grpc.ChannelConnectivity.READY + + +class _Callback(object): + + def __init__(self): + self._condition = threading.Condition() + self._connectivities = [] + + def update(self, connectivity): + with self._condition: + self._connectivities.append(connectivity) + self._condition.notify() + + def connectivities(self): + with self._condition: + return tuple(self._connectivities) + + def block_until_connectivities_satisfy(self, predicate): + with self._condition: + while True: + connectivities = tuple(self._connectivities) + if predicate(connectivities): + return connectivities + else: + self._condition.wait() + + +class ChannelConnectivityTest(unittest.TestCase): + + def test_lonely_channel_connectivity(self): + callback = _Callback() + + channel = grpc.insecure_channel('localhost:12345') + channel.subscribe(callback.update, try_to_connect=False) + first_connectivities = callback.block_until_connectivities_satisfy(bool) + channel.subscribe(callback.update, try_to_connect=True) + second_connectivities = callback.block_until_connectivities_satisfy( + lambda connectivities: 2 <= len(connectivities)) + # Wait for a connection that will never happen. + time.sleep(test_constants.SHORT_TIMEOUT) + third_connectivities = callback.connectivities() + channel.unsubscribe(callback.update) + fourth_connectivities = callback.connectivities() + channel.unsubscribe(callback.update) + fifth_connectivities = callback.connectivities() + + channel.close() + + self.assertSequenceEqual((grpc.ChannelConnectivity.IDLE,), + first_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.READY, second_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.READY, third_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.READY, fourth_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.READY, fifth_connectivities) + + def test_immediately_connectable_channel_connectivity(self): + recording_thread_pool = thread_pool.RecordingThreadPool( + max_workers=None) + server = grpc.server(recording_thread_pool, + options=(('grpc.so_reuseport', 0),)) + port = server.add_insecure_port('[::]:0') + server.start() + first_callback = _Callback() + second_callback = _Callback() + + channel = grpc.insecure_channel('localhost:{}'.format(port)) + channel.subscribe(first_callback.update, try_to_connect=False) + first_connectivities = first_callback.block_until_connectivities_satisfy( + bool) + # Wait for a connection that will never happen because try_to_connect=True + # has not yet been passed. + time.sleep(test_constants.SHORT_TIMEOUT) + second_connectivities = first_callback.connectivities() + channel.subscribe(second_callback.update, try_to_connect=True) + third_connectivities = first_callback.block_until_connectivities_satisfy( + lambda connectivities: 2 <= len(connectivities)) + fourth_connectivities = second_callback.block_until_connectivities_satisfy( + bool) + # Wait for a connection that will happen (or may already have happened). 
+ first_callback.block_until_connectivities_satisfy( + _ready_in_connectivities) + second_callback.block_until_connectivities_satisfy( + _ready_in_connectivities) + channel.close() + server.stop(None) + + self.assertSequenceEqual((grpc.ChannelConnectivity.IDLE,), + first_connectivities) + self.assertSequenceEqual((grpc.ChannelConnectivity.IDLE,), + second_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.TRANSIENT_FAILURE, + third_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.SHUTDOWN, + third_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.TRANSIENT_FAILURE, + fourth_connectivities) + self.assertNotIn(grpc.ChannelConnectivity.SHUTDOWN, + fourth_connectivities) + self.assertFalse(recording_thread_pool.was_used()) + + def test_reachable_then_unreachable_channel_connectivity(self): + recording_thread_pool = thread_pool.RecordingThreadPool( + max_workers=None) + server = grpc.server(recording_thread_pool, + options=(('grpc.so_reuseport', 0),)) + port = server.add_insecure_port('[::]:0') + server.start() + callback = _Callback() + + channel = grpc.insecure_channel('localhost:{}'.format(port)) + channel.subscribe(callback.update, try_to_connect=True) + callback.block_until_connectivities_satisfy(_ready_in_connectivities) + # Now take down the server and confirm that channel readiness is repudiated. + server.stop(None) + callback.block_until_connectivities_satisfy( + _last_connectivity_is_not_ready) + channel.unsubscribe(callback.update) + channel.close() + self.assertFalse(recording_thread_pool.was_used()) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py new file mode 100644 index 00000000000..ca9ebc16fe9 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_channel_ready_future_test.py @@ -0,0 +1,97 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
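grpc.channel_ready_future, covered by the next file, wraps the same connectivity machinery in a grpc.Future. A minimal blocking-usage sketch with an illustrative target and timeout:

import grpc

channel = grpc.insecure_channel('localhost:50051')
ready_future = grpc.channel_ready_future(channel)
try:
    # Blocks until the channel reaches READY, or raises FutureTimeoutError.
    ready_future.result(timeout=5.0)
except grpc.FutureTimeoutError:
    print('server not reachable within 5 seconds')
finally:
    channel.close()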
+"""Tests of grpc.channel_ready_future.""" + +import threading +import unittest +import logging + +import grpc +from tests.unit.framework.common import test_constants +from tests.unit import thread_pool + + +class _Callback(object): + + def __init__(self): + self._condition = threading.Condition() + self._value = None + + def accept_value(self, value): + with self._condition: + self._value = value + self._condition.notify_all() + + def block_until_called(self): + with self._condition: + while self._value is None: + self._condition.wait() + return self._value + + +class ChannelReadyFutureTest(unittest.TestCase): + + def test_lonely_channel_connectivity(self): + channel = grpc.insecure_channel('localhost:12345') + callback = _Callback() + + ready_future = grpc.channel_ready_future(channel) + ready_future.add_done_callback(callback.accept_value) + with self.assertRaises(grpc.FutureTimeoutError): + ready_future.result(timeout=test_constants.SHORT_TIMEOUT) + self.assertFalse(ready_future.cancelled()) + self.assertFalse(ready_future.done()) + self.assertTrue(ready_future.running()) + ready_future.cancel() + value_passed_to_callback = callback.block_until_called() + self.assertIs(ready_future, value_passed_to_callback) + self.assertTrue(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + + channel.close() + + def test_immediately_connectable_channel_connectivity(self): + recording_thread_pool = thread_pool.RecordingThreadPool( + max_workers=None) + server = grpc.server(recording_thread_pool, + options=(('grpc.so_reuseport', 0),)) + port = server.add_insecure_port('[::]:0') + server.start() + channel = grpc.insecure_channel('localhost:{}'.format(port)) + callback = _Callback() + + ready_future = grpc.channel_ready_future(channel) + ready_future.add_done_callback(callback.accept_value) + self.assertIsNone( + ready_future.result(timeout=test_constants.LONG_TIMEOUT)) + value_passed_to_callback = callback.block_until_called() + self.assertIs(ready_future, value_passed_to_callback) + self.assertFalse(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + # Cancellation after maturity has no effect. + ready_future.cancel() + self.assertFalse(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + self.assertFalse(recording_thread_pool.was_used()) + + channel.close() + server.stop(None) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py new file mode 100644 index 00000000000..bc58e1032ca --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_compression_test.py @@ -0,0 +1,382 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests server and client side compression.""" + +import unittest + +import contextlib +from concurrent import futures +import functools +import itertools +import logging +import os + +import grpc +from grpc import _grpcio_metadata + +from tests.unit import test_common +from tests.unit.framework.common import test_constants +from tests.unit import _tcp_proxy + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + +# Cut down on test time. +_STREAM_LENGTH = test_constants.STREAM_LENGTH // 16 + +_HOST = 'localhost' + +_REQUEST = b'\x00' * 100 +_COMPRESSION_RATIO_THRESHOLD = 0.05 +_COMPRESSION_METHODS = ( + None, + # Disabled for test tractability. + # grpc.Compression.NoCompression, + # grpc.Compression.Deflate, + grpc.Compression.Gzip, +) +_COMPRESSION_NAMES = { + None: 'Uncompressed', + grpc.Compression.NoCompression: 'NoCompression', + grpc.Compression.Deflate: 'DeflateCompression', + grpc.Compression.Gzip: 'GzipCompression', +} + +_TEST_OPTIONS = { + 'client_streaming': (True, False), + 'server_streaming': (True, False), + 'channel_compression': _COMPRESSION_METHODS, + 'multicallable_compression': _COMPRESSION_METHODS, + 'server_compression': _COMPRESSION_METHODS, + 'server_call_compression': _COMPRESSION_METHODS, +} + + +def _make_handle_unary_unary(pre_response_callback): + + def _handle_unary(request, servicer_context): + if pre_response_callback: + pre_response_callback(request, servicer_context) + return request + + return _handle_unary + + +def _make_handle_unary_stream(pre_response_callback): + + def _handle_unary_stream(request, servicer_context): + if pre_response_callback: + pre_response_callback(request, servicer_context) + for _ in range(_STREAM_LENGTH): + yield request + + return _handle_unary_stream + + +def _make_handle_stream_unary(pre_response_callback): + + def _handle_stream_unary(request_iterator, servicer_context): + if pre_response_callback: + pre_response_callback(request_iterator, servicer_context) + response = None + for request in request_iterator: + if not response: + response = request + return response + + return _handle_stream_unary + + +def _make_handle_stream_stream(pre_response_callback): + + def _handle_stream(request_iterator, servicer_context): + # TODO(issue:#6891) We should be able to remove this loop, + # and replace with return; yield + for request in request_iterator: + if pre_response_callback: + pre_response_callback(request, servicer_context) + yield request + + return _handle_stream + + +def set_call_compression(compression_method, request_or_iterator, + servicer_context): + del request_or_iterator + servicer_context.set_compression(compression_method) + + +def disable_next_compression(request, servicer_context): + del request + servicer_context.disable_next_message_compression() + + +def disable_first_compression(request, servicer_context): + if int(request.decode('ascii')) == 0: + servicer_context.disable_next_message_compression() + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming, + pre_response_callback): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + + if self.request_streaming and self.response_streaming: + self.stream_stream = _make_handle_stream_stream( + 
pre_response_callback) + elif not self.request_streaming and not self.response_streaming: + self.unary_unary = _make_handle_unary_unary(pre_response_callback) + elif not self.request_streaming and self.response_streaming: + self.unary_stream = _make_handle_unary_stream(pre_response_callback) + else: + self.stream_unary = _make_handle_stream_unary(pre_response_callback) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, pre_response_callback): + self._pre_response_callback = pre_response_callback + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(False, False, self._pre_response_callback) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(False, True, self._pre_response_callback) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(True, False, self._pre_response_callback) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(True, True, self._pre_response_callback) + else: + return None + + +def _instrumented_client_server_pair(channel_kwargs, server_kwargs, + server_handler): + server = grpc.server(futures.ThreadPoolExecutor(), **server_kwargs) + server.add_generic_rpc_handlers((server_handler,)) + server_port = server.add_insecure_port('{}:0'.format(_HOST)) + server.start() + with _tcp_proxy.TcpProxy(_HOST, _HOST, server_port) as proxy: + proxy_port = proxy.get_port() + with grpc.insecure_channel('{}:{}'.format(_HOST, proxy_port), + **channel_kwargs) as client_channel: + try: + yield client_channel, proxy, server + finally: + server.stop(None) + + +def _get_byte_counts(channel_kwargs, multicallable_kwargs, client_function, + server_kwargs, server_handler, message): + with _instrumented_client_server_pair(channel_kwargs, server_kwargs, + server_handler) as pipeline: + client_channel, proxy, server = pipeline + client_function(client_channel, multicallable_kwargs, message) + return proxy.get_byte_count() + + +def _get_compression_ratios(client_function, first_channel_kwargs, + first_multicallable_kwargs, first_server_kwargs, + first_server_handler, second_channel_kwargs, + second_multicallable_kwargs, second_server_kwargs, + second_server_handler, message): + try: + # This test requires the byte length of each connection to be deterministic. As + # it turns out, flow control puts bytes on the wire in a nondeterministic + # manner. We disable it here in order to measure compression ratios + # deterministically. 
+ os.environ['GRPC_EXPERIMENTAL_DISABLE_FLOW_CONTROL'] = 'true' + first_bytes_sent, first_bytes_received = _get_byte_counts( + first_channel_kwargs, first_multicallable_kwargs, client_function, + first_server_kwargs, first_server_handler, message) + second_bytes_sent, second_bytes_received = _get_byte_counts( + second_channel_kwargs, second_multicallable_kwargs, client_function, + second_server_kwargs, second_server_handler, message) + return ((second_bytes_sent - first_bytes_sent) / + float(first_bytes_sent), + (second_bytes_received - first_bytes_received) / + float(first_bytes_received)) + finally: + del os.environ['GRPC_EXPERIMENTAL_DISABLE_FLOW_CONTROL'] + + +def _unary_unary_client(channel, multicallable_kwargs, message): + multi_callable = channel.unary_unary(_UNARY_UNARY) + response = multi_callable(message, **multicallable_kwargs) + if response != message: + raise RuntimeError("Request '{}' != Response '{}'".format( + message, response)) + + +def _unary_stream_client(channel, multicallable_kwargs, message): + multi_callable = channel.unary_stream(_UNARY_STREAM) + response_iterator = multi_callable(message, **multicallable_kwargs) + for response in response_iterator: + if response != message: + raise RuntimeError("Request '{}' != Response '{}'".format( + message, response)) + + +def _stream_unary_client(channel, multicallable_kwargs, message): + multi_callable = channel.stream_unary(_STREAM_UNARY) + requests = (_REQUEST for _ in range(_STREAM_LENGTH)) + response = multi_callable(requests, **multicallable_kwargs) + if response != message: + raise RuntimeError("Request '{}' != Response '{}'".format( + message, response)) + + +def _stream_stream_client(channel, multicallable_kwargs, message): + multi_callable = channel.stream_stream(_STREAM_STREAM) + request_prefix = str(0).encode('ascii') * 100 + requests = ( + request_prefix + str(i).encode('ascii') for i in range(_STREAM_LENGTH)) + response_iterator = multi_callable(requests, **multicallable_kwargs) + for i, response in enumerate(response_iterator): + if int(response.decode('ascii')) != i: + raise RuntimeError("Request '{}' != Response '{}'".format( + i, response)) + + +class CompressionTest(unittest.TestCase): + + def assertCompressed(self, compression_ratio): + self.assertLess( + compression_ratio, + -1.0 * _COMPRESSION_RATIO_THRESHOLD, + msg='Actual compression ratio: {}'.format(compression_ratio)) + + def assertNotCompressed(self, compression_ratio): + self.assertGreaterEqual( + compression_ratio, + -1.0 * _COMPRESSION_RATIO_THRESHOLD, + msg='Actual compession ratio: {}'.format(compression_ratio)) + + def assertConfigurationCompressed(self, client_streaming, server_streaming, + channel_compression, + multicallable_compression, + server_compression, + server_call_compression): + client_side_compressed = channel_compression or multicallable_compression + server_side_compressed = server_compression or server_call_compression + channel_kwargs = { + 'compression': channel_compression, + } if channel_compression else {} + multicallable_kwargs = { + 'compression': multicallable_compression, + } if multicallable_compression else {} + + client_function = None + if not client_streaming and not server_streaming: + client_function = _unary_unary_client + elif not client_streaming and server_streaming: + client_function = _unary_stream_client + elif client_streaming and not server_streaming: + client_function = _stream_unary_client + else: + client_function = _stream_stream_client + + server_kwargs = { + 'compression': server_compression, + 
} if server_compression else {} + server_handler = _GenericHandler( + functools.partial(set_call_compression, grpc.Compression.Gzip) + ) if server_call_compression else _GenericHandler(None) + sent_ratio, received_ratio = _get_compression_ratios( + client_function, {}, {}, {}, _GenericHandler(None), channel_kwargs, + multicallable_kwargs, server_kwargs, server_handler, _REQUEST) + + if client_side_compressed: + self.assertCompressed(sent_ratio) + else: + self.assertNotCompressed(sent_ratio) + + if server_side_compressed: + self.assertCompressed(received_ratio) + else: + self.assertNotCompressed(received_ratio) + + def testDisableNextCompressionStreaming(self): + server_kwargs = { + 'compression': grpc.Compression.Deflate, + } + _, received_ratio = _get_compression_ratios( + _stream_stream_client, {}, {}, {}, _GenericHandler(None), {}, {}, + server_kwargs, _GenericHandler(disable_next_compression), _REQUEST) + self.assertNotCompressed(received_ratio) + + def testDisableNextCompressionStreamingResets(self): + server_kwargs = { + 'compression': grpc.Compression.Deflate, + } + _, received_ratio = _get_compression_ratios( + _stream_stream_client, {}, {}, {}, _GenericHandler(None), {}, {}, + server_kwargs, _GenericHandler(disable_first_compression), _REQUEST) + self.assertCompressed(received_ratio) + + +def _get_compression_str(name, value): + return '{}{}'.format(name, _COMPRESSION_NAMES[value]) + + +def _get_compression_test_name(client_streaming, server_streaming, + channel_compression, multicallable_compression, + server_compression, server_call_compression): + client_arity = 'Stream' if client_streaming else 'Unary' + server_arity = 'Stream' if server_streaming else 'Unary' + arity = '{}{}'.format(client_arity, server_arity) + channel_compression_str = _get_compression_str('Channel', + channel_compression) + multicallable_compression_str = _get_compression_str( + 'Multicallable', multicallable_compression) + server_compression_str = _get_compression_str('Server', server_compression) + server_call_compression_str = _get_compression_str('ServerCall', + server_call_compression) + return 'test{}{}{}{}{}'.format(arity, channel_compression_str, + multicallable_compression_str, + server_compression_str, + server_call_compression_str) + + +def _test_options(): + for test_parameters in itertools.product(*_TEST_OPTIONS.values()): + yield dict(zip(_TEST_OPTIONS.keys(), test_parameters)) + + +for options in _test_options(): + + def test_compression(**kwargs): + + def _test_compression(self): + self.assertConfigurationCompressed(**kwargs) + + return _test_compression + + setattr(CompressionTest, _get_compression_test_name(**options), + test_compression(**options)) + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_contextvars_propagation_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_contextvars_propagation_test.py new file mode 100644 index 00000000000..fec0fbd7df4 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_contextvars_propagation_test.py @@ -0,0 +1,118 @@ +# Copyright 2020 The gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test of propagation of contextvars to AuthMetadataPlugin threads.""" + +import contextlib +import logging +import os +import sys +import unittest + +import grpc + +from tests.unit import test_common + +_UNARY_UNARY = "/test/UnaryUnary" +_REQUEST = b"0000" + + +def _unary_unary_handler(request, context): + return request + + +def contextvars_supported(): + try: + import contextvars + return True + except ImportError: + return False + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return grpc.unary_unary_rpc_method_handler(_unary_unary_handler) + else: + raise NotImplementedError() + + +@contextlib.contextmanager +def _server(): + try: + server = test_common.test_server() + target = 'localhost:0' + port = server.add_insecure_port(target) + server.add_generic_rpc_handlers((_GenericHandler(),)) + server.start() + yield port + finally: + server.stop(None) + + +if contextvars_supported(): + import contextvars + + _EXPECTED_VALUE = 24601 + test_var = contextvars.ContextVar("test_var", default=None) + + def set_up_expected_context(): + test_var.set(_EXPECTED_VALUE) + + class TestCallCredentials(grpc.AuthMetadataPlugin): + + def __call__(self, context, callback): + if test_var.get() != _EXPECTED_VALUE: + raise AssertionError("{} != {}".format(test_var.get(), + _EXPECTED_VALUE)) + callback((), None) + + def assert_called(self, test): + test.assertTrue(self._invoked) + test.assertEqual(_EXPECTED_VALUE, self._recorded_value) + +else: + + def set_up_expected_context(): + pass + + class TestCallCredentials(grpc.AuthMetadataPlugin): + + def __call__(self, context, callback): + callback((), None) + + +# TODO(https://github.com/grpc/grpc/issues/22257) +@unittest.skipIf(os.name == "nt", "LocalCredentials not supported on Windows.") +class ContextVarsPropagationTest(unittest.TestCase): + + def test_propagation_to_auth_plugin(self): + set_up_expected_context() + with _server() as port: + target = "localhost:{}".format(port) + local_credentials = grpc.local_channel_credentials() + test_call_credentials = TestCallCredentials() + call_credentials = grpc.metadata_call_credentials( + test_call_credentials, "test call credentials") + composite_credentials = grpc.composite_channel_credentials( + local_credentials, call_credentials) + with grpc.secure_channel(target, composite_credentials) as channel: + stub = channel.unary_unary(_UNARY_UNARY) + response = stub(_REQUEST, wait_for_ready=True) + self.assertEqual(_REQUEST, response) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_credentials_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_credentials_test.py new file mode 100644 index 00000000000..187a6f03881 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_credentials_test.py @@ -0,0 +1,70 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests of credentials.""" + +import unittest +import logging +import six + +import grpc + + +class CredentialsTest(unittest.TestCase): + + def test_call_credentials_composition(self): + first = grpc.access_token_call_credentials('abc') + second = grpc.access_token_call_credentials('def') + third = grpc.access_token_call_credentials('ghi') + + first_and_second = grpc.composite_call_credentials(first, second) + first_second_and_third = grpc.composite_call_credentials( + first, second, third) + + self.assertIsInstance(first_and_second, grpc.CallCredentials) + self.assertIsInstance(first_second_and_third, grpc.CallCredentials) + + def test_channel_credentials_composition(self): + first_call_credentials = grpc.access_token_call_credentials('abc') + second_call_credentials = grpc.access_token_call_credentials('def') + third_call_credentials = grpc.access_token_call_credentials('ghi') + channel_credentials = grpc.ssl_channel_credentials() + + channel_and_first = grpc.composite_channel_credentials( + channel_credentials, first_call_credentials) + channel_first_and_second = grpc.composite_channel_credentials( + channel_credentials, first_call_credentials, + second_call_credentials) + channel_first_second_and_third = grpc.composite_channel_credentials( + channel_credentials, first_call_credentials, + second_call_credentials, third_call_credentials) + + self.assertIsInstance(channel_and_first, grpc.ChannelCredentials) + self.assertIsInstance(channel_first_and_second, grpc.ChannelCredentials) + self.assertIsInstance(channel_first_second_and_third, + grpc.ChannelCredentials) + + @unittest.skipIf(six.PY2, 'only invalid in Python3') + def test_invalid_string_certificate(self): + self.assertRaises( + TypeError, + grpc.ssl_channel_credentials, + root_certificates='A Certificate', + private_key=None, + certificate_chain=None, + ) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/__init__.py new file mode 100644 index 00000000000..5fb4f3c3cfd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
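The composition APIs exercised in _credentials_test.py above are typically used to attach per-call token credentials to a TLS channel. A minimal sketch; the target and token are illustrative and no particular service is implied:

import grpc

call_credentials = grpc.access_token_call_credentials('example-token')
channel_credentials = grpc.ssl_channel_credentials()
composite_credentials = grpc.composite_channel_credentials(
    channel_credentials, call_credentials)
channel = grpc.secure_channel('example.com:443', composite_credentials)
channel.close()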
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py new file mode 100644 index 00000000000..b279f3d07c5 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_cancel_many_calls_test.py @@ -0,0 +1,223 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test making many calls and immediately cancelling most of them.""" + +import threading +import unittest + +from grpc._cython import cygrpc +from grpc.framework.foundation import logging_pool +from tests.unit.framework.common import test_constants +from tests.unit._cython import test_utilities + +_EMPTY_FLAGS = 0 +_EMPTY_METADATA = () + +_SERVER_SHUTDOWN_TAG = 'server_shutdown' +_REQUEST_CALL_TAG = 'request_call' +_RECEIVE_CLOSE_ON_SERVER_TAG = 'receive_close_on_server' +_RECEIVE_MESSAGE_TAG = 'receive_message' +_SERVER_COMPLETE_CALL_TAG = 'server_complete_call' + +_SUCCESS_CALL_FRACTION = 1.0 / 8.0 +_SUCCESSFUL_CALLS = int(test_constants.RPC_CONCURRENCY * _SUCCESS_CALL_FRACTION) +_UNSUCCESSFUL_CALLS = test_constants.RPC_CONCURRENCY - _SUCCESSFUL_CALLS + + +class _State(object): + + def __init__(self): + self.condition = threading.Condition() + self.handlers_released = False + self.parked_handlers = 0 + self.handled_rpcs = 0 + + +def _is_cancellation_event(event): + return (event.tag is _RECEIVE_CLOSE_ON_SERVER_TAG and + event.batch_operations[0].cancelled()) + + +class _Handler(object): + + def __init__(self, state, completion_queue, rpc_event): + self._state = state + self._lock = threading.Lock() + self._completion_queue = completion_queue + self._call = rpc_event.call + + def __call__(self): + with self._state.condition: + self._state.parked_handlers += 1 + if self._state.parked_handlers == test_constants.THREAD_CONCURRENCY: + self._state.condition.notify_all() + while not self._state.handlers_released: + self._state.condition.wait() + + with self._lock: + self._call.start_server_batch( + (cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),), + _RECEIVE_CLOSE_ON_SERVER_TAG) + self._call.start_server_batch( + (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),), + _RECEIVE_MESSAGE_TAG) + first_event = self._completion_queue.poll() + if _is_cancellation_event(first_event): + self._completion_queue.poll() + else: + with self._lock: + operations = ( + cygrpc.SendInitialMetadataOperation(_EMPTY_METADATA, + _EMPTY_FLAGS), + cygrpc.SendMessageOperation(b'\x79\x57', _EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + _EMPTY_METADATA, cygrpc.StatusCode.ok, b'test details!', + _EMPTY_FLAGS), + ) + self._call.start_server_batch(operations, + _SERVER_COMPLETE_CALL_TAG) + self._completion_queue.poll() + self._completion_queue.poll() + + +def _serve(state, server, server_completion_queue, thread_pool): + for _ in range(test_constants.RPC_CONCURRENCY): + call_completion_queue = cygrpc.CompletionQueue() + server.request_call(call_completion_queue, 
server_completion_queue, + _REQUEST_CALL_TAG) + rpc_event = server_completion_queue.poll() + thread_pool.submit(_Handler(state, call_completion_queue, rpc_event)) + with state.condition: + state.handled_rpcs += 1 + if test_constants.RPC_CONCURRENCY <= state.handled_rpcs: + state.condition.notify_all() + server_completion_queue.poll() + + +class _QueueDriver(object): + + def __init__(self, condition, completion_queue, due): + self._condition = condition + self._completion_queue = completion_queue + self._due = due + self._events = [] + self._returned = False + + def start(self): + + def in_thread(): + while True: + event = self._completion_queue.poll() + with self._condition: + self._events.append(event) + self._due.remove(event.tag) + self._condition.notify_all() + if not self._due: + self._returned = True + return + + thread = threading.Thread(target=in_thread) + thread.start() + + def events(self, at_least): + with self._condition: + while len(self._events) < at_least: + self._condition.wait() + return tuple(self._events) + + +class CancelManyCallsTest(unittest.TestCase): + + def testCancelManyCalls(self): + server_thread_pool = logging_pool.pool( + test_constants.THREAD_CONCURRENCY) + + server_completion_queue = cygrpc.CompletionQueue() + server = cygrpc.Server([ + ( + b'grpc.so_reuseport', + 0, + ), + ]) + server.register_completion_queue(server_completion_queue) + port = server.add_http2_port(b'[::]:0') + server.start() + channel = cygrpc.Channel('localhost:{}'.format(port).encode(), None, + None) + + state = _State() + + server_thread_args = ( + state, + server, + server_completion_queue, + server_thread_pool, + ) + server_thread = threading.Thread(target=_serve, args=server_thread_args) + server_thread.start() + + client_condition = threading.Condition() + client_due = set() + + with client_condition: + client_calls = [] + for index in range(test_constants.RPC_CONCURRENCY): + tag = 'client_complete_call_{0:04d}_tag'.format(index) + client_call = channel.integrated_call( + _EMPTY_FLAGS, b'/twinkies', None, None, _EMPTY_METADATA, + None, (( + ( + cygrpc.SendInitialMetadataOperation( + _EMPTY_METADATA, _EMPTY_FLAGS), + cygrpc.SendMessageOperation(b'\x45\x56', + _EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + cygrpc.ReceiveInitialMetadataOperation( + _EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ), + tag, + ),)) + client_due.add(tag) + client_calls.append(client_call) + + client_events_future = test_utilities.SimpleFuture(lambda: tuple( + channel.next_call_event() for _ in range(_SUCCESSFUL_CALLS))) + + with state.condition: + while True: + if state.parked_handlers < test_constants.THREAD_CONCURRENCY: + state.condition.wait() + elif state.handled_rpcs < test_constants.RPC_CONCURRENCY: + state.condition.wait() + else: + state.handlers_released = True + state.condition.notify_all() + break + + client_events_future.result() + with client_condition: + for client_call in client_calls: + client_call.cancel(cygrpc.StatusCode.cancelled, 'Cancelled!') + for _ in range(_UNSUCCESSFUL_CALLS): + channel.next_call_event() + + channel.close(cygrpc.StatusCode.unknown, 'Cancelled on channel close!') + with state.condition: + server.shutdown(server_completion_queue, _SERVER_SHUTDOWN_TAG) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_channel_test.py 
b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_channel_test.py new file mode 100644 index 00000000000..54f620523ea --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_channel_test.py @@ -0,0 +1,70 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import threading +import unittest + +from grpc._cython import cygrpc + +from tests.unit.framework.common import test_constants + + +def _channel(): + return cygrpc.Channel(b'localhost:54321', (), None) + + +def _connectivity_loop(channel): + for _ in range(100): + connectivity = channel.check_connectivity_state(True) + channel.watch_connectivity_state(connectivity, time.time() + 0.2) + + +def _create_loop_destroy(): + channel = _channel() + _connectivity_loop(channel) + channel.close(cygrpc.StatusCode.ok, 'Channel close!') + + +def _in_parallel(behavior, arguments): + threads = tuple( + threading.Thread(target=behavior, args=arguments) + for _ in range(test_constants.THREAD_CONCURRENCY)) + for thread in threads: + thread.start() + for thread in threads: + thread.join() + + +class ChannelTest(unittest.TestCase): + + def test_single_channel_lonely_connectivity(self): + channel = _channel() + _connectivity_loop(channel) + channel.close(cygrpc.StatusCode.ok, 'Channel close!') + + def test_multiple_channels_lonely_connectivity(self): + _in_parallel(_create_loop_destroy, ()) + + def test_negative_deadline_connectivity(self): + channel = _channel() + connectivity = channel.check_connectivity_state(True) + channel.watch_connectivity_state(connectivity, -3.14) + channel.close(cygrpc.StatusCode.ok, 'Channel close!') + # NOTE(lidiz) The negative timeout should not trigger SIGABRT. + # Bug report: https://github.com/grpc/grpc/issues/18244 + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_common.py new file mode 100644 index 00000000000..d8210f36f80 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_common.py @@ -0,0 +1,123 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Common utilities for tests of the Cython layer of gRPC Python.""" + +import collections +import threading + +from grpc._cython import cygrpc + +RPC_COUNT = 4000 + +EMPTY_FLAGS = 0 + +INVOCATION_METADATA = ( + ('client-md-key', 'client-md-key'), + ('client-md-key-bin', b'\x00\x01' * 3000), +) + +INITIAL_METADATA = ( + ('server-initial-md-key', 'server-initial-md-value'), + ('server-initial-md-key-bin', b'\x00\x02' * 3000), +) + +TRAILING_METADATA = ( + ('server-trailing-md-key', 'server-trailing-md-value'), + ('server-trailing-md-key-bin', b'\x00\x03' * 3000), +) + + +class QueueDriver(object): + + def __init__(self, condition, completion_queue): + self._condition = condition + self._completion_queue = completion_queue + self._due = collections.defaultdict(int) + self._events = collections.defaultdict(list) + + def add_due(self, tags): + if not self._due: + + def in_thread(): + while True: + event = self._completion_queue.poll() + with self._condition: + self._events[event.tag].append(event) + self._due[event.tag] -= 1 + self._condition.notify_all() + if self._due[event.tag] <= 0: + self._due.pop(event.tag) + if not self._due: + return + + thread = threading.Thread(target=in_thread) + thread.start() + for tag in tags: + self._due[tag] += 1 + + def event_with_tag(self, tag): + with self._condition: + while True: + if self._events[tag]: + return self._events[tag].pop(0) + else: + self._condition.wait() + + +def execute_many_times(behavior): + return tuple(behavior() for _ in range(RPC_COUNT)) + + +class OperationResult( + collections.namedtuple('OperationResult', ( + 'start_batch_result', + 'completion_type', + 'success', + ))): + pass + + +SUCCESSFUL_OPERATION_RESULT = OperationResult( + cygrpc.CallError.ok, cygrpc.CompletionType.operation_complete, True) + + +class RpcTest(object): + + def setUp(self): + self.server_completion_queue = cygrpc.CompletionQueue() + self.server = cygrpc.Server([(b'grpc.so_reuseport', 0)]) + self.server.register_completion_queue(self.server_completion_queue) + port = self.server.add_http2_port(b'[::]:0') + self.server.start() + self.channel = cygrpc.Channel('localhost:{}'.format(port).encode(), [], + None) + + self._server_shutdown_tag = 'server_shutdown_tag' + self.server_condition = threading.Condition() + self.server_driver = QueueDriver(self.server_condition, + self.server_completion_queue) + with self.server_condition: + self.server_driver.add_due({ + self._server_shutdown_tag, + }) + + self.client_condition = threading.Condition() + self.client_completion_queue = cygrpc.CompletionQueue() + self.client_driver = QueueDriver(self.client_condition, + self.client_completion_queue) + + def tearDown(self): + self.server.shutdown(self.server_completion_queue, + self._server_shutdown_tag) + self.server.cancel_all_calls() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_fork_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_fork_test.py new file mode 100644 index 00000000000..5a5dedd5f26 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_fork_test.py @@ -0,0 +1,72 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import threading +import unittest + +from grpc._cython import cygrpc + + +def _get_number_active_threads(): + return cygrpc._fork_state.active_thread_count._num_active_threads + + +@unittest.skipIf(os.name == 'nt', 'Posix-specific tests') +class ForkPosixTester(unittest.TestCase): + + def setUp(self): + self._saved_fork_support_flag = cygrpc._GRPC_ENABLE_FORK_SUPPORT + cygrpc._GRPC_ENABLE_FORK_SUPPORT = True + + def testForkManagedThread(self): + + def cb(): + self.assertEqual(1, _get_number_active_threads()) + + thread = cygrpc.ForkManagedThread(cb) + thread.start() + thread.join() + self.assertEqual(0, _get_number_active_threads()) + + def testForkManagedThreadThrowsException(self): + + def cb(): + self.assertEqual(1, _get_number_active_threads()) + raise Exception("expected exception") + + thread = cygrpc.ForkManagedThread(cb) + thread.start() + thread.join() + self.assertEqual(0, _get_number_active_threads()) + + def tearDown(self): + cygrpc._GRPC_ENABLE_FORK_SUPPORT = self._saved_fork_support_flag + + +@unittest.skipUnless(os.name == 'nt', 'Windows-specific tests') +class ForkWindowsTester(unittest.TestCase): + + def testForkManagedThreadIsNoOp(self): + + def cb(): + pass + + thread = cygrpc.ForkManagedThread(cb) + thread.start() + thread.join() + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py new file mode 100644 index 00000000000..144a2fcae3f --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_server_completion_queue_per_call_test.py @@ -0,0 +1,132 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
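The fork tests above toggle the internal cygrpc._GRPC_ENABLE_FORK_SUPPORT flag directly. In application code the same switch is normally driven by the GRPC_ENABLE_FORK_SUPPORT environment variable, which must be set before grpc is imported; a minimal, purely illustrative sketch:

import os

os.environ['GRPC_ENABLE_FORK_SUPPORT'] = '1'  # read when gRPC initializes

import grpc  # imported only after the environment variable is set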
+"""Test a corner-case at the level of the Cython API.""" + +import threading +import unittest + +from grpc._cython import cygrpc + +from tests.unit._cython import _common +from tests.unit._cython import test_utilities + + +class Test(_common.RpcTest, unittest.TestCase): + + def _do_rpcs(self): + server_call_condition = threading.Condition() + server_call_completion_queue = cygrpc.CompletionQueue() + server_call_driver = _common.QueueDriver(server_call_condition, + server_call_completion_queue) + + server_request_call_tag = 'server_request_call_tag' + server_send_initial_metadata_tag = 'server_send_initial_metadata_tag' + server_complete_rpc_tag = 'server_complete_rpc_tag' + + with self.server_condition: + server_request_call_start_batch_result = self.server.request_call( + server_call_completion_queue, self.server_completion_queue, + server_request_call_tag) + self.server_driver.add_due({ + server_request_call_tag, + }) + + client_receive_initial_metadata_tag = 'client_receive_initial_metadata_tag' + client_complete_rpc_tag = 'client_complete_rpc_tag' + client_call = self.channel.integrated_call( + _common.EMPTY_FLAGS, b'/twinkies', None, None, + _common.INVOCATION_METADATA, None, [( + [ + cygrpc.ReceiveInitialMetadataOperation(_common.EMPTY_FLAGS), + ], + client_receive_initial_metadata_tag, + )]) + client_call.operate([ + cygrpc.SendInitialMetadataOperation(_common.INVOCATION_METADATA, + _common.EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation(_common.EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_common.EMPTY_FLAGS), + ], client_complete_rpc_tag) + + client_events_future = test_utilities.SimpleFuture(lambda: [ + self.channel.next_call_event(), + self.channel.next_call_event(), + ]) + + server_request_call_event = self.server_driver.event_with_tag( + server_request_call_tag) + + with server_call_condition: + server_send_initial_metadata_start_batch_result = ( + server_request_call_event.call.start_server_batch([ + cygrpc.SendInitialMetadataOperation( + _common.INITIAL_METADATA, _common.EMPTY_FLAGS), + ], server_send_initial_metadata_tag)) + server_call_driver.add_due({ + server_send_initial_metadata_tag, + }) + server_send_initial_metadata_event = server_call_driver.event_with_tag( + server_send_initial_metadata_tag) + + with server_call_condition: + server_complete_rpc_start_batch_result = ( + server_request_call_event.call.start_server_batch([ + cygrpc.ReceiveCloseOnServerOperation(_common.EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + _common.TRAILING_METADATA, cygrpc.StatusCode.ok, + b'test details', _common.EMPTY_FLAGS), + ], server_complete_rpc_tag)) + server_call_driver.add_due({ + server_complete_rpc_tag, + }) + server_complete_rpc_event = server_call_driver.event_with_tag( + server_complete_rpc_tag) + + client_events = client_events_future.result() + if client_events[0].tag is client_receive_initial_metadata_tag: + client_receive_initial_metadata_event = client_events[0] + client_complete_rpc_event = client_events[1] + else: + client_complete_rpc_event = client_events[0] + client_receive_initial_metadata_event = client_events[1] + + return ( + _common.OperationResult(server_request_call_start_batch_result, + server_request_call_event.completion_type, + server_request_call_event.success), + _common.OperationResult( + cygrpc.CallError.ok, + client_receive_initial_metadata_event.completion_type, + client_receive_initial_metadata_event.success), + _common.OperationResult(cygrpc.CallError.ok, + client_complete_rpc_event.completion_type, + 
client_complete_rpc_event.success), + _common.OperationResult( + server_send_initial_metadata_start_batch_result, + server_send_initial_metadata_event.completion_type, + server_send_initial_metadata_event.success), + _common.OperationResult(server_complete_rpc_start_batch_result, + server_complete_rpc_event.completion_type, + server_complete_rpc_event.success), + ) + + def test_rpcs(self): + expecteds = [(_common.SUCCESSFUL_OPERATION_RESULT,) * 5 + ] * _common.RPC_COUNT + actuallys = _common.execute_many_times(self._do_rpcs) + self.assertSequenceEqual(expecteds, actuallys) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py new file mode 100644 index 00000000000..38964768db7 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_no_messages_single_server_completion_queue_test.py @@ -0,0 +1,126 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test a corner-case at the level of the Cython API.""" + +import threading +import unittest + +from grpc._cython import cygrpc + +from tests.unit._cython import _common +from tests.unit._cython import test_utilities + + +class Test(_common.RpcTest, unittest.TestCase): + + def _do_rpcs(self): + server_request_call_tag = 'server_request_call_tag' + server_send_initial_metadata_tag = 'server_send_initial_metadata_tag' + server_complete_rpc_tag = 'server_complete_rpc_tag' + + with self.server_condition: + server_request_call_start_batch_result = self.server.request_call( + self.server_completion_queue, self.server_completion_queue, + server_request_call_tag) + self.server_driver.add_due({ + server_request_call_tag, + }) + + client_receive_initial_metadata_tag = 'client_receive_initial_metadata_tag' + client_complete_rpc_tag = 'client_complete_rpc_tag' + client_call = self.channel.integrated_call( + _common.EMPTY_FLAGS, b'/twinkies', None, None, + _common.INVOCATION_METADATA, None, [ + ( + [ + cygrpc.SendInitialMetadataOperation( + _common.INVOCATION_METADATA, _common.EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation( + _common.EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation( + _common.EMPTY_FLAGS), + ], + client_complete_rpc_tag, + ), + ]) + client_call.operate([ + cygrpc.ReceiveInitialMetadataOperation(_common.EMPTY_FLAGS), + ], client_receive_initial_metadata_tag) + + client_events_future = test_utilities.SimpleFuture(lambda: [ + self.channel.next_call_event(), + self.channel.next_call_event(), + ]) + server_request_call_event = self.server_driver.event_with_tag( + server_request_call_tag) + + with self.server_condition: + server_send_initial_metadata_start_batch_result = ( + server_request_call_event.call.start_server_batch([ + cygrpc.SendInitialMetadataOperation( + _common.INITIAL_METADATA, _common.EMPTY_FLAGS), + ], 
server_send_initial_metadata_tag)) + self.server_driver.add_due({ + server_send_initial_metadata_tag, + }) + server_send_initial_metadata_event = self.server_driver.event_with_tag( + server_send_initial_metadata_tag) + + with self.server_condition: + server_complete_rpc_start_batch_result = ( + server_request_call_event.call.start_server_batch([ + cygrpc.ReceiveCloseOnServerOperation(_common.EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + _common.TRAILING_METADATA, cygrpc.StatusCode.ok, + 'test details', _common.EMPTY_FLAGS), + ], server_complete_rpc_tag)) + self.server_driver.add_due({ + server_complete_rpc_tag, + }) + server_complete_rpc_event = self.server_driver.event_with_tag( + server_complete_rpc_tag) + + client_events = client_events_future.result() + client_receive_initial_metadata_event = client_events[0] + client_complete_rpc_event = client_events[1] + + return ( + _common.OperationResult(server_request_call_start_batch_result, + server_request_call_event.completion_type, + server_request_call_event.success), + _common.OperationResult( + cygrpc.CallError.ok, + client_receive_initial_metadata_event.completion_type, + client_receive_initial_metadata_event.success), + _common.OperationResult(cygrpc.CallError.ok, + client_complete_rpc_event.completion_type, + client_complete_rpc_event.success), + _common.OperationResult( + server_send_initial_metadata_start_batch_result, + server_send_initial_metadata_event.completion_type, + server_send_initial_metadata_event.success), + _common.OperationResult(server_complete_rpc_start_batch_result, + server_complete_rpc_event.completion_type, + server_complete_rpc_event.success), + ) + + def test_rpcs(self): + expecteds = [(_common.SUCCESSFUL_OPERATION_RESULT,) * 5 + ] * _common.RPC_COUNT + actuallys = _common.execute_many_times(self._do_rpcs) + self.assertSequenceEqual(expecteds, actuallys) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py new file mode 100644 index 00000000000..8a903bfaf91 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_read_some_but_not_all_responses_test.py @@ -0,0 +1,240 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test a corner-case at the level of the Cython API.""" + +import threading +import unittest + +from grpc._cython import cygrpc +from tests.unit._cython import test_utilities + +_EMPTY_FLAGS = 0 +_EMPTY_METADATA = () + + +class _ServerDriver(object): + + def __init__(self, completion_queue, shutdown_tag): + self._condition = threading.Condition() + self._completion_queue = completion_queue + self._shutdown_tag = shutdown_tag + self._events = [] + self._saw_shutdown_tag = False + + def start(self): + + def in_thread(): + while True: + event = self._completion_queue.poll() + with self._condition: + self._events.append(event) + self._condition.notify() + if event.tag is self._shutdown_tag: + self._saw_shutdown_tag = True + break + + thread = threading.Thread(target=in_thread) + thread.start() + + def done(self): + with self._condition: + return self._saw_shutdown_tag + + def first_event(self): + with self._condition: + while not self._events: + self._condition.wait() + return self._events[0] + + def events(self): + with self._condition: + while not self._saw_shutdown_tag: + self._condition.wait() + return tuple(self._events) + + +class _QueueDriver(object): + + def __init__(self, condition, completion_queue, due): + self._condition = condition + self._completion_queue = completion_queue + self._due = due + self._events = [] + self._returned = False + + def start(self): + + def in_thread(): + while True: + event = self._completion_queue.poll() + with self._condition: + self._events.append(event) + self._due.remove(event.tag) + self._condition.notify_all() + if not self._due: + self._returned = True + return + + thread = threading.Thread(target=in_thread) + thread.start() + + def done(self): + with self._condition: + return self._returned + + def event_with_tag(self, tag): + with self._condition: + while True: + for event in self._events: + if event.tag is tag: + return event + self._condition.wait() + + def events(self): + with self._condition: + while not self._returned: + self._condition.wait() + return tuple(self._events) + + +class ReadSomeButNotAllResponsesTest(unittest.TestCase): + + def testReadSomeButNotAllResponses(self): + server_completion_queue = cygrpc.CompletionQueue() + server = cygrpc.Server([( + b'grpc.so_reuseport', + 0, + )]) + server.register_completion_queue(server_completion_queue) + port = server.add_http2_port(b'[::]:0') + server.start() + channel = cygrpc.Channel('localhost:{}'.format(port).encode(), set(), + None) + + server_shutdown_tag = 'server_shutdown_tag' + server_driver = _ServerDriver(server_completion_queue, + server_shutdown_tag) + server_driver.start() + + client_condition = threading.Condition() + client_due = set() + + server_call_condition = threading.Condition() + server_send_initial_metadata_tag = 'server_send_initial_metadata_tag' + server_send_first_message_tag = 'server_send_first_message_tag' + server_send_second_message_tag = 'server_send_second_message_tag' + server_complete_rpc_tag = 'server_complete_rpc_tag' + server_call_due = set(( + server_send_initial_metadata_tag, + server_send_first_message_tag, + server_send_second_message_tag, + server_complete_rpc_tag, + )) + server_call_completion_queue = cygrpc.CompletionQueue() + server_call_driver = _QueueDriver(server_call_condition, + server_call_completion_queue, + server_call_due) + server_call_driver.start() + + server_rpc_tag = 'server_rpc_tag' + request_call_result = server.request_call(server_call_completion_queue, + server_completion_queue, + server_rpc_tag) + + 
client_receive_initial_metadata_tag = 'client_receive_initial_metadata_tag' + client_complete_rpc_tag = 'client_complete_rpc_tag' + client_call = channel.segregated_call( + _EMPTY_FLAGS, b'/twinkies', None, None, _EMPTY_METADATA, None, ( + ( + [ + cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS), + ], + client_receive_initial_metadata_tag, + ), + ( + [ + cygrpc.SendInitialMetadataOperation( + _EMPTY_METADATA, _EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ], + client_complete_rpc_tag, + ), + )) + client_receive_initial_metadata_event_future = test_utilities.SimpleFuture( + client_call.next_event) + + server_rpc_event = server_driver.first_event() + + with server_call_condition: + server_send_initial_metadata_start_batch_result = ( + server_rpc_event.call.start_server_batch([ + cygrpc.SendInitialMetadataOperation(_EMPTY_METADATA, + _EMPTY_FLAGS), + ], server_send_initial_metadata_tag)) + server_send_first_message_start_batch_result = ( + server_rpc_event.call.start_server_batch([ + cygrpc.SendMessageOperation(b'\x07', _EMPTY_FLAGS), + ], server_send_first_message_tag)) + server_send_initial_metadata_event = server_call_driver.event_with_tag( + server_send_initial_metadata_tag) + server_send_first_message_event = server_call_driver.event_with_tag( + server_send_first_message_tag) + with server_call_condition: + server_send_second_message_start_batch_result = ( + server_rpc_event.call.start_server_batch([ + cygrpc.SendMessageOperation(b'\x07', _EMPTY_FLAGS), + ], server_send_second_message_tag)) + server_complete_rpc_start_batch_result = ( + server_rpc_event.call.start_server_batch([ + cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + (), cygrpc.StatusCode.ok, b'test details', + _EMPTY_FLAGS), + ], server_complete_rpc_tag)) + server_send_second_message_event = server_call_driver.event_with_tag( + server_send_second_message_tag) + server_complete_rpc_event = server_call_driver.event_with_tag( + server_complete_rpc_tag) + server_call_driver.events() + + client_recieve_initial_metadata_event = client_receive_initial_metadata_event_future.result( + ) + + client_receive_first_message_tag = 'client_receive_first_message_tag' + client_call.operate([ + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + ], client_receive_first_message_tag) + client_receive_first_message_event = client_call.next_event() + + client_call_cancel_result = client_call.cancel( + cygrpc.StatusCode.cancelled, 'Cancelled during test!') + client_complete_rpc_event = client_call.next_event() + + channel.close(cygrpc.StatusCode.unknown, 'Channel closed!') + server.shutdown(server_completion_queue, server_shutdown_tag) + server.cancel_all_calls() + server_driver.events() + + self.assertEqual(cygrpc.CallError.ok, request_call_result) + self.assertEqual(cygrpc.CallError.ok, + server_send_initial_metadata_start_batch_result) + self.assertIs(server_rpc_tag, server_rpc_event.tag) + self.assertEqual(cygrpc.CompletionType.operation_complete, + server_rpc_event.completion_type) + self.assertIsInstance(server_rpc_event.call, cygrpc.Call) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_server_test.py new file mode 100644 index 00000000000..bbd25457b3e --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/_server_test.py @@ -0,0 
+1,49 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test servers at the level of the Cython API.""" + +import threading +import time +import unittest + +from grpc._cython import cygrpc + + +class Test(unittest.TestCase): + + def test_lonely_server(self): + server_call_completion_queue = cygrpc.CompletionQueue() + server_shutdown_completion_queue = cygrpc.CompletionQueue() + server = cygrpc.Server(None) + server.register_completion_queue(server_call_completion_queue) + server.register_completion_queue(server_shutdown_completion_queue) + port = server.add_http2_port(b'[::]:0') + server.start() + + server_request_call_tag = 'server_request_call_tag' + server_request_call_start_batch_result = server.request_call( + server_call_completion_queue, server_call_completion_queue, + server_request_call_tag) + + time.sleep(4) + + server_shutdown_tag = 'server_shutdown_tag' + server_shutdown_result = server.shutdown( + server_shutdown_completion_queue, server_shutdown_tag) + server_request_call_event = server_call_completion_queue.poll() + server_shutdown_event = server_shutdown_completion_queue.poll() + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py new file mode 100644 index 00000000000..1182f83a425 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/cygrpc_test.py @@ -0,0 +1,422 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import time +import threading +import unittest +import platform + +from grpc._cython import cygrpc +from tests.unit._cython import test_utilities +from tests.unit import test_common +from tests.unit import resources + +_SSL_HOST_OVERRIDE = b'foo.test.google.fr' +_CALL_CREDENTIALS_METADATA_KEY = 'call-creds-key' +_CALL_CREDENTIALS_METADATA_VALUE = 'call-creds-value' +_EMPTY_FLAGS = 0 + + +def _metadata_plugin(context, callback): + callback((( + _CALL_CREDENTIALS_METADATA_KEY, + _CALL_CREDENTIALS_METADATA_VALUE, + ),), cygrpc.StatusCode.ok, b'') + + +class TypeSmokeTest(unittest.TestCase): + + def testCompletionQueueUpDown(self): + completion_queue = cygrpc.CompletionQueue() + del completion_queue + + def testServerUpDown(self): + server = cygrpc.Server(set([ + ( + b'grpc.so_reuseport', + 0, + ), + ])) + del server + + def testChannelUpDown(self): + channel = cygrpc.Channel(b'[::]:0', None, None) + channel.close(cygrpc.StatusCode.cancelled, 'Test method anyway!') + + def test_metadata_plugin_call_credentials_up_down(self): + cygrpc.MetadataPluginCallCredentials(_metadata_plugin, + b'test plugin name!') + + def testServerStartNoExplicitShutdown(self): + server = cygrpc.Server([ + ( + b'grpc.so_reuseport', + 0, + ), + ]) + completion_queue = cygrpc.CompletionQueue() + server.register_completion_queue(completion_queue) + port = server.add_http2_port(b'[::]:0') + self.assertIsInstance(port, int) + server.start() + del server + + def testServerStartShutdown(self): + completion_queue = cygrpc.CompletionQueue() + server = cygrpc.Server([ + ( + b'grpc.so_reuseport', + 0, + ), + ]) + server.add_http2_port(b'[::]:0') + server.register_completion_queue(completion_queue) + server.start() + shutdown_tag = object() + server.shutdown(completion_queue, shutdown_tag) + event = completion_queue.poll() + self.assertEqual(cygrpc.CompletionType.operation_complete, + event.completion_type) + self.assertIs(shutdown_tag, event.tag) + del server + del completion_queue + + +class ServerClientMixin(object): + + def setUpMixin(self, server_credentials, client_credentials, host_override): + self.server_completion_queue = cygrpc.CompletionQueue() + self.server = cygrpc.Server([ + ( + b'grpc.so_reuseport', + 0, + ), + ]) + self.server.register_completion_queue(self.server_completion_queue) + if server_credentials: + self.port = self.server.add_http2_port(b'[::]:0', + server_credentials) + else: + self.port = self.server.add_http2_port(b'[::]:0') + self.server.start() + self.client_completion_queue = cygrpc.CompletionQueue() + if client_credentials: + client_channel_arguments = (( + cygrpc.ChannelArgKey.ssl_target_name_override, + host_override, + ),) + self.client_channel = cygrpc.Channel( + 'localhost:{}'.format(self.port).encode(), + client_channel_arguments, client_credentials) + else: + self.client_channel = cygrpc.Channel( + 'localhost:{}'.format(self.port).encode(), set(), None) + if host_override: + self.host_argument = None # default host + self.expected_host = host_override + else: + # arbitrary host name necessitating no further identification + self.host_argument = b'hostess' + self.expected_host = self.host_argument + + def tearDownMixin(self): + self.client_channel.close(cygrpc.StatusCode.ok, 'test being torn down!') + del self.client_channel + del self.server + del self.client_completion_queue + del self.server_completion_queue + + def _perform_queue_operations(self, operations, call, queue, deadline, + description): + """Perform the operations with given call, queue, and deadline. 
+ + Invocation errors are reported with as an exception with `description` + in the message. Performs the operations asynchronously, returning a + future. + """ + + def performer(): + tag = object() + try: + call_result = call.start_client_batch(operations, tag) + self.assertEqual(cygrpc.CallError.ok, call_result) + event = queue.poll(deadline=deadline) + self.assertEqual(cygrpc.CompletionType.operation_complete, + event.completion_type) + self.assertTrue(event.success) + self.assertIs(tag, event.tag) + except Exception as error: + raise Exception("Error in '{}': {}".format( + description, error.message)) + return event + + return test_utilities.SimpleFuture(performer) + + def test_echo(self): + DEADLINE = time.time() + 5 + DEADLINE_TOLERANCE = 0.25 + CLIENT_METADATA_ASCII_KEY = 'key' + CLIENT_METADATA_ASCII_VALUE = 'val' + CLIENT_METADATA_BIN_KEY = 'key-bin' + CLIENT_METADATA_BIN_VALUE = b'\0' * 1000 + SERVER_INITIAL_METADATA_KEY = 'init_me_me_me' + SERVER_INITIAL_METADATA_VALUE = 'whodawha?' + SERVER_TRAILING_METADATA_KEY = 'california_is_in_a_drought' + SERVER_TRAILING_METADATA_VALUE = 'zomg it is' + SERVER_STATUS_CODE = cygrpc.StatusCode.ok + SERVER_STATUS_DETAILS = 'our work is never over' + REQUEST = b'in death a member of project mayhem has a name' + RESPONSE = b'his name is robert paulson' + METHOD = b'twinkies' + + server_request_tag = object() + request_call_result = self.server.request_call( + self.server_completion_queue, self.server_completion_queue, + server_request_tag) + + self.assertEqual(cygrpc.CallError.ok, request_call_result) + + client_call_tag = object() + client_initial_metadata = ( + ( + CLIENT_METADATA_ASCII_KEY, + CLIENT_METADATA_ASCII_VALUE, + ), + ( + CLIENT_METADATA_BIN_KEY, + CLIENT_METADATA_BIN_VALUE, + ), + ) + client_call = self.client_channel.integrated_call( + 0, METHOD, self.host_argument, DEADLINE, client_initial_metadata, + None, [ + ( + [ + cygrpc.SendInitialMetadataOperation( + client_initial_metadata, _EMPTY_FLAGS), + cygrpc.SendMessageOperation(REQUEST, _EMPTY_FLAGS), + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ], + client_call_tag, + ), + ]) + client_event_future = test_utilities.SimpleFuture( + self.client_channel.next_call_event) + + request_event = self.server_completion_queue.poll(deadline=DEADLINE) + self.assertEqual(cygrpc.CompletionType.operation_complete, + request_event.completion_type) + self.assertIsInstance(request_event.call, cygrpc.Call) + self.assertIs(server_request_tag, request_event.tag) + self.assertTrue( + test_common.metadata_transmitted(client_initial_metadata, + request_event.invocation_metadata)) + self.assertEqual(METHOD, request_event.call_details.method) + self.assertEqual(self.expected_host, request_event.call_details.host) + self.assertLess(abs(DEADLINE - request_event.call_details.deadline), + DEADLINE_TOLERANCE) + + server_call_tag = object() + server_call = request_event.call + server_initial_metadata = (( + SERVER_INITIAL_METADATA_KEY, + SERVER_INITIAL_METADATA_VALUE, + ),) + server_trailing_metadata = (( + SERVER_TRAILING_METADATA_KEY, + SERVER_TRAILING_METADATA_VALUE, + ),) + server_start_batch_result = server_call.start_server_batch([ + cygrpc.SendInitialMetadataOperation(server_initial_metadata, + _EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + cygrpc.SendMessageOperation(RESPONSE, _EMPTY_FLAGS), + 
cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + server_trailing_metadata, SERVER_STATUS_CODE, + SERVER_STATUS_DETAILS, _EMPTY_FLAGS) + ], server_call_tag) + self.assertEqual(cygrpc.CallError.ok, server_start_batch_result) + + server_event = self.server_completion_queue.poll(deadline=DEADLINE) + client_event = client_event_future.result() + + self.assertEqual(6, len(client_event.batch_operations)) + found_client_op_types = set() + for client_result in client_event.batch_operations: + # we expect each op type to be unique + self.assertNotIn(client_result.type(), found_client_op_types) + found_client_op_types.add(client_result.type()) + if client_result.type( + ) == cygrpc.OperationType.receive_initial_metadata: + self.assertTrue( + test_common.metadata_transmitted( + server_initial_metadata, + client_result.initial_metadata())) + elif client_result.type() == cygrpc.OperationType.receive_message: + self.assertEqual(RESPONSE, client_result.message()) + elif client_result.type( + ) == cygrpc.OperationType.receive_status_on_client: + self.assertTrue( + test_common.metadata_transmitted( + server_trailing_metadata, + client_result.trailing_metadata())) + self.assertEqual(SERVER_STATUS_DETAILS, client_result.details()) + self.assertEqual(SERVER_STATUS_CODE, client_result.code()) + self.assertEqual( + set([ + cygrpc.OperationType.send_initial_metadata, + cygrpc.OperationType.send_message, + cygrpc.OperationType.send_close_from_client, + cygrpc.OperationType.receive_initial_metadata, + cygrpc.OperationType.receive_message, + cygrpc.OperationType.receive_status_on_client + ]), found_client_op_types) + + self.assertEqual(5, len(server_event.batch_operations)) + found_server_op_types = set() + for server_result in server_event.batch_operations: + self.assertNotIn(server_result.type(), found_server_op_types) + found_server_op_types.add(server_result.type()) + if server_result.type() == cygrpc.OperationType.receive_message: + self.assertEqual(REQUEST, server_result.message()) + elif server_result.type( + ) == cygrpc.OperationType.receive_close_on_server: + self.assertFalse(server_result.cancelled()) + self.assertEqual( + set([ + cygrpc.OperationType.send_initial_metadata, + cygrpc.OperationType.receive_message, + cygrpc.OperationType.send_message, + cygrpc.OperationType.receive_close_on_server, + cygrpc.OperationType.send_status_from_server + ]), found_server_op_types) + + del client_call + del server_call + + def test_6522(self): + DEADLINE = time.time() + 5 + DEADLINE_TOLERANCE = 0.25 + METHOD = b'twinkies' + + empty_metadata = () + + # Prologue + server_request_tag = object() + self.server.request_call(self.server_completion_queue, + self.server_completion_queue, + server_request_tag) + client_call = self.client_channel.segregated_call( + 0, METHOD, self.host_argument, DEADLINE, None, None, + ([( + [ + cygrpc.SendInitialMetadataOperation(empty_metadata, + _EMPTY_FLAGS), + cygrpc.ReceiveInitialMetadataOperation(_EMPTY_FLAGS), + ], + object(), + ), + ( + [ + cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS), + ], + object(), + )])) + + client_initial_metadata_event_future = test_utilities.SimpleFuture( + client_call.next_event) + + request_event = self.server_completion_queue.poll(deadline=DEADLINE) + server_call = request_event.call + + def perform_server_operations(operations, description): + return self._perform_queue_operations(operations, server_call, + self.server_completion_queue, + DEADLINE, description) + + server_event_future = 
perform_server_operations([ + cygrpc.SendInitialMetadataOperation(empty_metadata, _EMPTY_FLAGS), + ], "Server prologue") + + client_initial_metadata_event_future.result() # force completion + server_event_future.result() + + # Messaging + for _ in range(10): + client_call.operate([ + cygrpc.SendMessageOperation(b'', _EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + ], "Client message") + client_message_event_future = test_utilities.SimpleFuture( + client_call.next_event) + server_event_future = perform_server_operations([ + cygrpc.SendMessageOperation(b'', _EMPTY_FLAGS), + cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS), + ], "Server receive") + + client_message_event_future.result() # force completion + server_event_future.result() + + # Epilogue + client_call.operate([ + cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS), + ], "Client epilogue") + # One for ReceiveStatusOnClient, one for SendCloseFromClient. + client_events_future = test_utilities.SimpleFuture(lambda: { + client_call.next_event(), + client_call.next_event(), + }) + + server_event_future = perform_server_operations([ + cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS), + cygrpc.SendStatusFromServerOperation( + empty_metadata, cygrpc.StatusCode.ok, b'', _EMPTY_FLAGS) + ], "Server epilogue") + + client_events_future.result() # force completion + server_event_future.result() + + +class InsecureServerInsecureClient(unittest.TestCase, ServerClientMixin): + + def setUp(self): + self.setUpMixin(None, None, None) + + def tearDown(self): + self.tearDownMixin() + + +class SecureServerSecureClient(unittest.TestCase, ServerClientMixin): + + def setUp(self): + server_credentials = cygrpc.server_credentials_ssl( + None, [ + cygrpc.SslPemKeyCertPair(resources.private_key(), + resources.certificate_chain()) + ], False) + client_credentials = cygrpc.SSLChannelCredentials( + resources.test_root_certificates(), None, None) + self.setUpMixin(server_credentials, client_credentials, + _SSL_HOST_OVERRIDE) + + def tearDown(self): + self.tearDownMixin() + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/test_utilities.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/test_utilities.py new file mode 100644 index 00000000000..7d5eaaaa842 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_cython/test_utilities.py @@ -0,0 +1,52 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import threading + +from grpc._cython import cygrpc + + +class SimpleFuture(object): + """A simple future mechanism.""" + + def __init__(self, function, *args, **kwargs): + + def wrapped_function(): + try: + self._result = function(*args, **kwargs) + except Exception as error: # pylint: disable=broad-except + self._error = error + + self._result = None + self._error = None + self._thread = threading.Thread(target=wrapped_function) + self._thread.start() + + def result(self): + """The resulting value of this future. + + Re-raises any exceptions. + """ + self._thread.join() + if self._error: + # TODO(atash): re-raise exceptions in a way that preserves tracebacks + raise self._error # pylint: disable=raising-bad-type + return self._result + + +class CompletionQueuePollFuture(SimpleFuture): + + def __init__(self, completion_queue, deadline): + super(CompletionQueuePollFuture, + self).__init__(lambda: completion_queue.poll(deadline=deadline)) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py new file mode 100644 index 00000000000..43141255f1c --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dns_resolver_test.py @@ -0,0 +1,63 @@ +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for an actual dns resolution.""" + +import unittest +import logging +import six + +import grpc +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_METHOD = '/ANY/METHOD' +_REQUEST = b'\x00\x00\x00' +_RESPONSE = _REQUEST + + +class GenericHandler(grpc.GenericRpcHandler): + + def service(self, unused_handler_details): + return grpc.unary_unary_rpc_method_handler( + lambda request, unused_context: request, + ) + + +class DNSResolverTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + self._server.add_generic_rpc_handlers((GenericHandler(),)) + self._port = self._server.add_insecure_port('[::]:0') + self._server.start() + + def tearDown(self): + self._server.stop(None) + + def test_connect_loopback(self): + # NOTE(https://github.com/grpc/grpc/issues/18422) + # In short, Gevent + C-Ares = Segfault. The C-Ares driver is not + # supported by custom io manager like "gevent" or "libuv". + with grpc.insecure_channel('loopback4.unittest.grpc.io:%d' % + self._port) as channel: + self.assertEqual( + channel.unary_unary(_METHOD)( + _REQUEST, + timeout=test_constants.SHORT_TIMEOUT, + ), _RESPONSE) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dynamic_stubs_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dynamic_stubs_test.py new file mode 100644 index 00000000000..d2d8ce9f60b --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_dynamic_stubs_test.py @@ -0,0 +1,119 @@ +# Copyright 2019 The gRPC authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test of dynamic stub import API.""" + +import contextlib +import functools +import logging +import multiprocessing +import os +import sys +import unittest + + +@contextlib.contextmanager +def _grpc_tools_unimportable(): + original_sys_path = sys.path + sys.path = [path for path in sys.path if "grpcio_tools" not in path] + try: + import grpc_tools + except ImportError: + pass + else: + del grpc_tools + sys.path = original_sys_path + raise unittest.SkipTest("Failed to make grpc_tools unimportable.") + try: + yield + finally: + sys.path = original_sys_path + + +def _collect_errors(fn): + + @functools.wraps(fn) + def _wrapped(error_queue): + try: + fn() + except Exception as e: + error_queue.put(e) + raise + + return _wrapped + + +def _run_in_subprocess(test_case): + sys.path.insert( + 0, os.path.join(os.path.realpath(os.path.dirname(__file__)), "..")) + error_queue = multiprocessing.Queue() + proc = multiprocessing.Process(target=test_case, args=(error_queue,)) + proc.start() + proc.join() + sys.path.pop(0) + if not error_queue.empty(): + raise error_queue.get() + assert proc.exitcode == 0, "Process exited with code {}".format( + proc.exitcode) + + +def _assert_unimplemented(msg_substr): + import grpc + try: + protos, services = grpc.protos_and_services( + "tests/unit/data/foo/bar.proto") + except NotImplementedError as e: + assert msg_substr in str(e), "{} was not in '{}'".format( + msg_substr, str(e)) + else: + assert False, "Did not raise NotImplementedError" + + +@_collect_errors +def _test_sunny_day(): + if sys.version_info[0] == 3: + import grpc + protos, services = grpc.protos_and_services( + os.path.join("tests", "unit", "data", "foo", "bar.proto")) + assert protos.BarMessage is not None + assert services.BarStub is not None + else: + _assert_unimplemented("Python 3") + + +@_collect_errors +def _test_grpc_tools_unimportable(): + with _grpc_tools_unimportable(): + if sys.version_info[0] == 3: + _assert_unimplemented("grpcio-tools") + else: + _assert_unimplemented("Python 3") + + +# NOTE(rbellevi): multiprocessing.Process fails to pickle function objects +# when they do not come from the "__main__" module, so this test passes +# if run directly on Windows, but not if started by the test runner. +@unittest.skipIf(os.name == "nt", "Windows multiprocessing unsupported") +class DynamicStubTest(unittest.TestCase): + + @unittest.skip('grpcio-tools package required') + def test_sunny_day(self): + _run_in_subprocess(_test_sunny_day) + + def test_grpc_tools_unimportable(self): + _run_in_subprocess(_test_grpc_tools_unimportable) + + +if __name__ == "__main__": + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_empty_message_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_empty_message_test.py new file mode 100644 index 00000000000..f27ea422d0c --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_empty_message_test.py @@ -0,0 +1,124 @@ +# Copyright 2016 gRPC authors.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import logging + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_REQUEST = b'' +_RESPONSE = b'' + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + + +def handle_unary_unary(request, servicer_context): + return _RESPONSE + + +def handle_unary_stream(request, servicer_context): + for _ in range(test_constants.STREAM_LENGTH): + yield _RESPONSE + + +def handle_stream_unary(request_iterator, servicer_context): + for request in request_iterator: + pass + return _RESPONSE + + +def handle_stream_stream(request_iterator, servicer_context): + for request in request_iterator: + yield _RESPONSE + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + if self.request_streaming and self.response_streaming: + self.stream_stream = handle_stream_stream + elif self.request_streaming: + self.stream_unary = handle_stream_unary + elif self.response_streaming: + self.unary_stream = handle_unary_stream + else: + self.unary_unary = handle_unary_unary + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(False, False) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(False, True) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(True, False) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(True, True) + else: + return None + + +class EmptyMessageTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + self._server.add_generic_rpc_handlers((_GenericHandler(),)) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(0) + self._channel.close() + + def testUnaryUnary(self): + response = self._channel.unary_unary(_UNARY_UNARY)(_REQUEST) + self.assertEqual(_RESPONSE, response) + + def testUnaryStream(self): + response_iterator = self._channel.unary_stream(_UNARY_STREAM)(_REQUEST) + self.assertSequenceEqual([_RESPONSE] * test_constants.STREAM_LENGTH, + list(response_iterator)) + + def testStreamUnary(self): + response = self._channel.stream_unary(_STREAM_UNARY)(iter( + [_REQUEST] * test_constants.STREAM_LENGTH)) + self.assertEqual(_RESPONSE, response) + + def testStreamStream(self): + response_iterator = self._channel.stream_stream(_STREAM_STREAM)(iter( + [_REQUEST] * test_constants.STREAM_LENGTH)) + 
self.assertSequenceEqual([_RESPONSE] * test_constants.STREAM_LENGTH, + list(response_iterator)) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py new file mode 100644 index 00000000000..e58007ad3ed --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_error_message_encoding_test.py @@ -0,0 +1,87 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests 'utf-8' encoded error message.""" + +import unittest +import weakref + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_UNICODE_ERROR_MESSAGES = [ + b'\xe2\x80\x9d'.decode('utf-8'), + b'abc\x80\xd0\xaf'.decode('latin-1'), + b'\xc3\xa9'.decode('utf-8'), +] + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_UNARY_UNARY = '/test/UnaryUnary' + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming=None, response_streaming=None): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + + def unary_unary(self, request, servicer_context): + servicer_context.set_code(grpc.StatusCode.UNKNOWN) + servicer_context.set_details(request.decode('utf-8')) + return _RESPONSE + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, test): + self._test = test + + def service(self, handler_call_details): + return _MethodHandler() + + +class ErrorMessageEncodingTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + self._server.add_generic_rpc_handlers( + (_GenericHandler(weakref.proxy(self)),)) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(0) + self._channel.close() + + def testMessageEncoding(self): + for message in _UNICODE_ERROR_MESSAGES: + multi_callable = self._channel.unary_unary(_UNARY_UNARY) + with self.assertRaises(grpc.RpcError) as cm: + multi_callable(message.encode('utf-8')) + + self.assertEqual(cm.exception.code(), grpc.StatusCode.UNKNOWN) + self.assertEqual(cm.exception.details(), message) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py new file mode 100644 index 00000000000..48ea054d2d7 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_scenarios.py @@ -0,0 +1,236 @@ +# Copyright 2016 gRPC authors. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Defines a number of module-scope gRPC scenarios to test clean exit.""" + +import argparse +import threading +import time +import logging + +import grpc + +from tests.unit.framework.common import test_constants + +WAIT_TIME = 1000 + +REQUEST = b'request' + +UNSTARTED_SERVER = 'unstarted_server' +RUNNING_SERVER = 'running_server' +POLL_CONNECTIVITY_NO_SERVER = 'poll_connectivity_no_server' +POLL_CONNECTIVITY = 'poll_connectivity' +IN_FLIGHT_UNARY_UNARY_CALL = 'in_flight_unary_unary_call' +IN_FLIGHT_UNARY_STREAM_CALL = 'in_flight_unary_stream_call' +IN_FLIGHT_STREAM_UNARY_CALL = 'in_flight_stream_unary_call' +IN_FLIGHT_STREAM_STREAM_CALL = 'in_flight_stream_stream_call' +IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL = 'in_flight_partial_unary_stream_call' +IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL = 'in_flight_partial_stream_unary_call' +IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL = 'in_flight_partial_stream_stream_call' + +UNARY_UNARY = b'/test/UnaryUnary' +UNARY_STREAM = b'/test/UnaryStream' +STREAM_UNARY = b'/test/StreamUnary' +STREAM_STREAM = b'/test/StreamStream' +PARTIAL_UNARY_STREAM = b'/test/PartialUnaryStream' +PARTIAL_STREAM_UNARY = b'/test/PartialStreamUnary' +PARTIAL_STREAM_STREAM = b'/test/PartialStreamStream' + +TEST_TO_METHOD = { + IN_FLIGHT_UNARY_UNARY_CALL: UNARY_UNARY, + IN_FLIGHT_UNARY_STREAM_CALL: UNARY_STREAM, + IN_FLIGHT_STREAM_UNARY_CALL: STREAM_UNARY, + IN_FLIGHT_STREAM_STREAM_CALL: STREAM_STREAM, + IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL: PARTIAL_UNARY_STREAM, + IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL: PARTIAL_STREAM_UNARY, + IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL: PARTIAL_STREAM_STREAM, +} + + +def hang_unary_unary(request, servicer_context): + time.sleep(WAIT_TIME) + + +def hang_unary_stream(request, servicer_context): + time.sleep(WAIT_TIME) + + +def hang_partial_unary_stream(request, servicer_context): + for _ in range(test_constants.STREAM_LENGTH // 2): + yield request + time.sleep(WAIT_TIME) + + +def hang_stream_unary(request_iterator, servicer_context): + time.sleep(WAIT_TIME) + + +def hang_partial_stream_unary(request_iterator, servicer_context): + for _ in range(test_constants.STREAM_LENGTH // 2): + next(request_iterator) + time.sleep(WAIT_TIME) + + +def hang_stream_stream(request_iterator, servicer_context): + time.sleep(WAIT_TIME) + + +def hang_partial_stream_stream(request_iterator, servicer_context): + for _ in range(test_constants.STREAM_LENGTH // 2): + yield next(request_iterator) #pylint: disable=stop-iteration-return + time.sleep(WAIT_TIME) + + +class MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming, partial_hang): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + if self.request_streaming and self.response_streaming: + if partial_hang: + self.stream_stream = 
hang_partial_stream_stream + else: + self.stream_stream = hang_stream_stream + elif self.request_streaming: + if partial_hang: + self.stream_unary = hang_partial_stream_unary + else: + self.stream_unary = hang_stream_unary + elif self.response_streaming: + if partial_hang: + self.unary_stream = hang_partial_unary_stream + else: + self.unary_stream = hang_unary_stream + else: + self.unary_unary = hang_unary_unary + + +class GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == UNARY_UNARY: + return MethodHandler(False, False, False) + elif handler_call_details.method == UNARY_STREAM: + return MethodHandler(False, True, False) + elif handler_call_details.method == STREAM_UNARY: + return MethodHandler(True, False, False) + elif handler_call_details.method == STREAM_STREAM: + return MethodHandler(True, True, False) + elif handler_call_details.method == PARTIAL_UNARY_STREAM: + return MethodHandler(False, True, True) + elif handler_call_details.method == PARTIAL_STREAM_UNARY: + return MethodHandler(True, False, True) + elif handler_call_details.method == PARTIAL_STREAM_STREAM: + return MethodHandler(True, True, True) + else: + return None + + +# Traditional executors will not exit until all their +# current jobs complete. Because we submit jobs that will +# never finish, we don't want to block exit on these jobs. +class DaemonPool(object): + + def submit(self, fn, *args, **kwargs): + thread = threading.Thread(target=fn, args=args, kwargs=kwargs) + thread.daemon = True + thread.start() + + def shutdown(self, wait=True): + pass + + +def infinite_request_iterator(): + while True: + yield REQUEST + + +if __name__ == '__main__': + logging.basicConfig() + parser = argparse.ArgumentParser() + parser.add_argument('scenario', type=str) + parser.add_argument('--wait_for_interrupt', + dest='wait_for_interrupt', + action='store_true') + args = parser.parse_args() + + if args.scenario == UNSTARTED_SERVER: + server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) + if args.wait_for_interrupt: + time.sleep(WAIT_TIME) + elif args.scenario == RUNNING_SERVER: + server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) + port = server.add_insecure_port('[::]:0') + server.start() + if args.wait_for_interrupt: + time.sleep(WAIT_TIME) + elif args.scenario == POLL_CONNECTIVITY_NO_SERVER: + channel = grpc.insecure_channel('localhost:12345') + + def connectivity_callback(connectivity): + pass + + channel.subscribe(connectivity_callback, try_to_connect=True) + if args.wait_for_interrupt: + time.sleep(WAIT_TIME) + elif args.scenario == POLL_CONNECTIVITY: + server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) + port = server.add_insecure_port('[::]:0') + server.start() + channel = grpc.insecure_channel('localhost:%d' % port) + + def connectivity_callback(connectivity): + pass + + channel.subscribe(connectivity_callback, try_to_connect=True) + if args.wait_for_interrupt: + time.sleep(WAIT_TIME) + + else: + handler = GenericHandler() + server = grpc.server(DaemonPool(), options=(('grpc.so_reuseport', 0),)) + port = server.add_insecure_port('[::]:0') + server.add_generic_rpc_handlers((handler,)) + server.start() + channel = grpc.insecure_channel('localhost:%d' % port) + + method = TEST_TO_METHOD[args.scenario] + + if args.scenario == IN_FLIGHT_UNARY_UNARY_CALL: + multi_callable = channel.unary_unary(method) + future = multi_callable.future(REQUEST) + result, call = multi_callable.with_call(REQUEST) + elif 
(args.scenario == IN_FLIGHT_UNARY_STREAM_CALL or + args.scenario == IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL): + multi_callable = channel.unary_stream(method) + response_iterator = multi_callable(REQUEST) + for response in response_iterator: + pass + elif (args.scenario == IN_FLIGHT_STREAM_UNARY_CALL or + args.scenario == IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL): + multi_callable = channel.stream_unary(method) + future = multi_callable.future(infinite_request_iterator()) + result, call = multi_callable.with_call( + iter([REQUEST] * test_constants.STREAM_LENGTH)) + elif (args.scenario == IN_FLIGHT_STREAM_STREAM_CALL or + args.scenario == IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL): + multi_callable = channel.stream_stream(method) + response_iterator = multi_callable(infinite_request_iterator()) + for response in response_iterator: + pass diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py new file mode 100644 index 00000000000..4cf5ab63bdf --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_exit_test.py @@ -0,0 +1,261 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests clean exit of server/client on Python Interpreter exit/sigint. + +The tests in this module spawn a subprocess for each test case, the +test is considered successful if it doesn't hang/timeout. 
+""" + +import atexit +import os +import signal +import six +import subprocess +import sys +import threading +import datetime +import time +import unittest +import logging + +from tests.unit import _exit_scenarios + +# SCENARIO_FILE = os.path.abspath( +# os.path.join(os.path.dirname(os.path.realpath(__file__)), +# '_exit_scenarios.py')) +INTERPRETER = sys.executable +BASE_COMMAND = [INTERPRETER, '-m', 'tests.unit._exit_scenarios'] +BASE_SIGTERM_COMMAND = BASE_COMMAND + ['--wait_for_interrupt'] + +INIT_TIME = datetime.timedelta(seconds=1) +WAIT_CHECK_INTERVAL = datetime.timedelta(milliseconds=100) +WAIT_CHECK_DEFAULT_TIMEOUT = datetime.timedelta(seconds=5) + +processes = [] +process_lock = threading.Lock() + + +# Make sure we attempt to clean up any +# processes we may have left running +def cleanup_processes(): + with process_lock: + for process in processes: + try: + process.kill() + except Exception: # pylint: disable=broad-except + pass + + +atexit.register(cleanup_processes) + + +def _process_wait_with_timeout(process, timeout=WAIT_CHECK_DEFAULT_TIMEOUT): + """A funciton to mimic 3.3+ only timeout argument in process.wait.""" + deadline = datetime.datetime.now() + timeout + while (process.poll() is None) and (datetime.datetime.now() < deadline): + time.sleep(WAIT_CHECK_INTERVAL.total_seconds()) + if process.returncode is None: + raise RuntimeError('Process failed to exit within %s' % timeout) + + +def interrupt_and_wait(process): + with process_lock: + processes.append(process) + time.sleep(INIT_TIME.total_seconds()) + os.kill(process.pid, signal.SIGINT) + _process_wait_with_timeout(process) + + +def wait(process): + with process_lock: + processes.append(process) + _process_wait_with_timeout(process) + + +# TODO(lidiz) enable exit tests once the root cause found. 
+@unittest.skip('https://github.com/grpc/grpc/issues/23982') +@unittest.skip('https://github.com/grpc/grpc/issues/23028') +class ExitTest(unittest.TestCase): + + def test_unstarted_server(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen(BASE_COMMAND + + [_exit_scenarios.UNSTARTED_SERVER], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + def test_unstarted_server_terminate(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen(BASE_SIGTERM_COMMAND + + [_exit_scenarios.UNSTARTED_SERVER], + stdout=sys.stdout, + env=env) + interrupt_and_wait(process) + + def test_running_server(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen(BASE_COMMAND + + [_exit_scenarios.RUNNING_SERVER], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + def test_running_server_terminate(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen(BASE_SIGTERM_COMMAND + + [_exit_scenarios.RUNNING_SERVER], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + def test_poll_connectivity_no_server(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_exit_scenarios.POLL_CONNECTIVITY_NO_SERVER], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + def test_poll_connectivity_no_server_terminate(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_SIGTERM_COMMAND + + [_exit_scenarios.POLL_CONNECTIVITY_NO_SERVER], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + def test_poll_connectivity(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen(BASE_COMMAND + + [_exit_scenarios.POLL_CONNECTIVITY], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + def test_poll_connectivity_terminate(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen(BASE_SIGTERM_COMMAND + + [_exit_scenarios.POLL_CONNECTIVITY], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_unary_unary_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen(BASE_COMMAND + + [_exit_scenarios.IN_FLIGHT_UNARY_UNARY_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_unary_stream_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_UNARY_STREAM_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_stream_unary_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_STREAM_UNARY_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not 
have required permission on Windows') + def test_in_flight_stream_stream_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_exit_scenarios.IN_FLIGHT_STREAM_STREAM_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_partial_unary_stream_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + + [_exit_scenarios.IN_FLIGHT_PARTIAL_UNARY_STREAM_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_partial_stream_unary_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + + [_exit_scenarios.IN_FLIGHT_PARTIAL_STREAM_UNARY_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + @unittest.skipIf(os.name == 'nt', + 'os.kill does not have required permission on Windows') + def test_in_flight_partial_stream_stream_call(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + + [_exit_scenarios.IN_FLIGHT_PARTIAL_STREAM_STREAM_CALL], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + interrupt_and_wait(process) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.DEBUG) + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_from_grpc_import_star.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_from_grpc_import_star.py new file mode 100644 index 00000000000..1ada25382de --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_from_grpc_import_star.py @@ -0,0 +1,23 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +_BEFORE_IMPORT = tuple(globals()) + +from grpc import * # pylint: disable=wildcard-import,unused-wildcard-import + +_AFTER_IMPORT = tuple(globals()) + +GRPC_ELEMENTS = tuple( + element for element in _AFTER_IMPORT + if element not in _BEFORE_IMPORT and element != '_BEFORE_IMPORT') diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py new file mode 100644 index 00000000000..1c4890b97f1 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_grpc_shutdown_test.py @@ -0,0 +1,54 @@ +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests the gRPC Core shutdown path.""" + +import time +import threading +import unittest +import datetime + +import grpc + +_TIMEOUT_FOR_SEGFAULT = datetime.timedelta(seconds=10) + + +class GrpcShutdownTest(unittest.TestCase): + + def test_channel_close_with_connectivity_watcher(self): + """Originated by https://github.com/grpc/grpc/issues/20299. + + The grpc_shutdown happens synchronously, but there might be Core object + references left in Cython which might lead to ABORT or SIGSEGV. + """ + connection_failed = threading.Event() + + def on_state_change(state): + if state in (grpc.ChannelConnectivity.TRANSIENT_FAILURE, + grpc.ChannelConnectivity.SHUTDOWN): + connection_failed.set() + + # Connects to an void address, and subscribes state changes + channel = grpc.insecure_channel("0.1.1.1:12345") + channel.subscribe(on_state_change, True) + + deadline = datetime.datetime.now() + _TIMEOUT_FOR_SEGFAULT + + while datetime.datetime.now() < deadline: + time.sleep(0.1) + if connection_failed.is_set(): + channel.close() + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py new file mode 100644 index 00000000000..619db7b3ffd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_interceptor_test.py @@ -0,0 +1,708 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test of gRPC Python interceptors.""" + +import collections +import itertools +import threading +import unittest +import logging +import os +from concurrent import futures + +import grpc +from grpc.framework.foundation import logging_pool + +from tests.unit import test_common +from tests.unit.framework.common import test_constants +from tests.unit.framework.common import test_control + +_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2 +_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:] +_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3 +_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3] + +_EXCEPTION_REQUEST = b'\x09\x0a' + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + + +class _ApplicationErrorStandin(Exception): + pass + + +class _Callback(object): + + def __init__(self): + self._condition = threading.Condition() + self._value = None + self._called = False + + def __call__(self, value): + with self._condition: + self._value = value + self._called = True + self._condition.notify_all() + + def value(self): + with self._condition: + while not self._called: + self._condition.wait() + return self._value + + +class _Handler(object): + + def __init__(self, control): + self._control = control + + def handle_unary_unary(self, request, servicer_context): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + if request == _EXCEPTION_REQUEST: + raise _ApplicationErrorStandin() + return request + + def handle_unary_stream(self, request, servicer_context): + if request == _EXCEPTION_REQUEST: + raise _ApplicationErrorStandin() + for _ in range(test_constants.STREAM_LENGTH): + self._control.control() + yield request + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + + def handle_stream_unary(self, request_iterator, servicer_context): + if servicer_context is not None: + servicer_context.invocation_metadata() + self._control.control() + response_elements = [] + for request in request_iterator: + self._control.control() + response_elements.append(request) + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + if _EXCEPTION_REQUEST in response_elements: + raise _ApplicationErrorStandin() + return b''.join(response_elements) + + def handle_stream_stream(self, request_iterator, servicer_context): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + for request in request_iterator: + if request == _EXCEPTION_REQUEST: + raise _ApplicationErrorStandin() + self._control.control() + yield request + self._control.control() + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming, + request_deserializer, response_serializer, unary_unary, + unary_stream, stream_unary, stream_stream): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = request_deserializer + self.response_serializer = response_serializer + self.unary_unary = unary_unary + self.unary_stream = unary_stream + self.stream_unary = stream_unary + self.stream_stream = stream_stream + + +class 
_GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, handler): + self._handler = handler + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(False, False, None, None, + self._handler.handle_unary_unary, None, None, + None) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(False, True, _DESERIALIZE_REQUEST, + _SERIALIZE_RESPONSE, None, + self._handler.handle_unary_stream, None, None) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(True, False, _DESERIALIZE_REQUEST, + _SERIALIZE_RESPONSE, None, None, + self._handler.handle_stream_unary, None) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(True, True, None, None, None, None, None, + self._handler.handle_stream_stream) + else: + return None + + +def _unary_unary_multi_callable(channel): + return channel.unary_unary(_UNARY_UNARY) + + +def _unary_stream_multi_callable(channel): + return channel.unary_stream(_UNARY_STREAM, + request_serializer=_SERIALIZE_REQUEST, + response_deserializer=_DESERIALIZE_RESPONSE) + + +def _stream_unary_multi_callable(channel): + return channel.stream_unary(_STREAM_UNARY, + request_serializer=_SERIALIZE_REQUEST, + response_deserializer=_DESERIALIZE_RESPONSE) + + +def _stream_stream_multi_callable(channel): + return channel.stream_stream(_STREAM_STREAM) + + +class _ClientCallDetails( + collections.namedtuple( + '_ClientCallDetails', + ('method', 'timeout', 'metadata', 'credentials')), + grpc.ClientCallDetails): + pass + + +class _GenericClientInterceptor(grpc.UnaryUnaryClientInterceptor, + grpc.UnaryStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, + grpc.StreamStreamClientInterceptor): + + def __init__(self, interceptor_function): + self._fn = interceptor_function + + def intercept_unary_unary(self, continuation, client_call_details, request): + new_details, new_request_iterator, postprocess = self._fn( + client_call_details, iter((request,)), False, False) + response = continuation(new_details, next(new_request_iterator)) + return postprocess(response) if postprocess else response + + def intercept_unary_stream(self, continuation, client_call_details, + request): + new_details, new_request_iterator, postprocess = self._fn( + client_call_details, iter((request,)), False, True) + response_it = continuation(new_details, new_request_iterator) + return postprocess(response_it) if postprocess else response_it + + def intercept_stream_unary(self, continuation, client_call_details, + request_iterator): + new_details, new_request_iterator, postprocess = self._fn( + client_call_details, request_iterator, True, False) + response = continuation(new_details, next(new_request_iterator)) + return postprocess(response) if postprocess else response + + def intercept_stream_stream(self, continuation, client_call_details, + request_iterator): + new_details, new_request_iterator, postprocess = self._fn( + client_call_details, request_iterator, True, True) + response_it = continuation(new_details, new_request_iterator) + return postprocess(response_it) if postprocess else response_it + + +class _LoggingInterceptor(grpc.ServerInterceptor, + grpc.UnaryUnaryClientInterceptor, + grpc.UnaryStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, + grpc.StreamStreamClientInterceptor): + + def __init__(self, tag, record): + self.tag = tag + self.record = record + + def intercept_service(self, continuation, handler_call_details): + self.record.append(self.tag + 
':intercept_service') + return continuation(handler_call_details) + + def intercept_unary_unary(self, continuation, client_call_details, request): + self.record.append(self.tag + ':intercept_unary_unary') + result = continuation(client_call_details, request) + assert isinstance( + result, + grpc.Call), '{} ({}) is not an instance of grpc.Call'.format( + result, type(result)) + assert isinstance( + result, + grpc.Future), '{} ({}) is not an instance of grpc.Future'.format( + result, type(result)) + return result + + def intercept_unary_stream(self, continuation, client_call_details, + request): + self.record.append(self.tag + ':intercept_unary_stream') + return continuation(client_call_details, request) + + def intercept_stream_unary(self, continuation, client_call_details, + request_iterator): + self.record.append(self.tag + ':intercept_stream_unary') + result = continuation(client_call_details, request_iterator) + assert isinstance( + result, + grpc.Call), '{} is not an instance of grpc.Call'.format(result) + assert isinstance( + result, + grpc.Future), '{} is not an instance of grpc.Future'.format(result) + return result + + def intercept_stream_stream(self, continuation, client_call_details, + request_iterator): + self.record.append(self.tag + ':intercept_stream_stream') + return continuation(client_call_details, request_iterator) + + +class _DefectiveClientInterceptor(grpc.UnaryUnaryClientInterceptor): + + def intercept_unary_unary(self, ignored_continuation, + ignored_client_call_details, ignored_request): + raise test_control.Defect() + + +def _wrap_request_iterator_stream_interceptor(wrapper): + + def intercept_call(client_call_details, request_iterator, request_streaming, + ignored_response_streaming): + if request_streaming: + return client_call_details, wrapper(request_iterator), None + else: + return client_call_details, request_iterator, None + + return _GenericClientInterceptor(intercept_call) + + +def _append_request_header_interceptor(header, value): + + def intercept_call(client_call_details, request_iterator, + ignored_request_streaming, ignored_response_streaming): + metadata = [] + if client_call_details.metadata: + metadata = list(client_call_details.metadata) + metadata.append(( + header, + value, + )) + client_call_details = _ClientCallDetails( + client_call_details.method, client_call_details.timeout, metadata, + client_call_details.credentials) + return client_call_details, request_iterator, None + + return _GenericClientInterceptor(intercept_call) + + +class _GenericServerInterceptor(grpc.ServerInterceptor): + + def __init__(self, fn): + self._fn = fn + + def intercept_service(self, continuation, handler_call_details): + return self._fn(continuation, handler_call_details) + + +def _filter_server_interceptor(condition, interceptor): + + def intercept_service(continuation, handler_call_details): + if condition(handler_call_details): + return interceptor.intercept_service(continuation, + handler_call_details) + return continuation(handler_call_details) + + return _GenericServerInterceptor(intercept_service) + + +class InterceptorTest(unittest.TestCase): + + def setUp(self): + self._control = test_control.PauseFailControl() + self._handler = _Handler(self._control) + self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) + + self._record = [] + conditional_interceptor = _filter_server_interceptor( + lambda x: ('secret', '42') in x.invocation_metadata, + _LoggingInterceptor('s3', self._record)) + + self._server = grpc.server(self._server_pool, + 
options=(('grpc.so_reuseport', 0),), + interceptors=( + _LoggingInterceptor('s1', self._record), + conditional_interceptor, + _LoggingInterceptor('s2', self._record), + )) + port = self._server.add_insecure_port('[::]:0') + self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),)) + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(None) + self._server_pool.shutdown(wait=True) + self._channel.close() + + def testTripleRequestMessagesClientInterceptor(self): + + def triple(request_iterator): + while True: + try: + item = next(request_iterator) + yield item + yield item + yield item + except StopIteration: + break + + interceptor = _wrap_request_iterator_stream_interceptor(triple) + channel = grpc.intercept_channel(self._channel, interceptor) + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + + multi_callable = _stream_stream_multi_callable(channel) + response_iterator = multi_callable( + iter(requests), + metadata=( + ('test', + 'InterceptedStreamRequestBlockingUnaryResponseWithCall'),)) + + responses = tuple(response_iterator) + self.assertEqual(len(responses), 3 * test_constants.STREAM_LENGTH) + + multi_callable = _stream_stream_multi_callable(self._channel) + response_iterator = multi_callable( + iter(requests), + metadata=( + ('test', + 'InterceptedStreamRequestBlockingUnaryResponseWithCall'),)) + + responses = tuple(response_iterator) + self.assertEqual(len(responses), test_constants.STREAM_LENGTH) + + def testDefectiveClientInterceptor(self): + interceptor = _DefectiveClientInterceptor() + defective_channel = grpc.intercept_channel(self._channel, interceptor) + + request = b'\x07\x08' + + multi_callable = _unary_unary_multi_callable(defective_channel) + call_future = multi_callable.future( + request, + metadata=(('test', + 'InterceptedUnaryRequestBlockingUnaryResponse'),)) + + self.assertIsNotNone(call_future.exception()) + self.assertEqual(call_future.code(), grpc.StatusCode.INTERNAL) + + def testInterceptedHeaderManipulationWithServerSideVerification(self): + request = b'\x07\x08' + + channel = grpc.intercept_channel( + self._channel, _append_request_header_interceptor('secret', '42')) + channel = grpc.intercept_channel( + channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + self._record[:] = [] + + multi_callable = _unary_unary_multi_callable(channel) + multi_callable.with_call( + request, + metadata=( + ('test', + 'InterceptedUnaryRequestBlockingUnaryResponseWithCall'),)) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', + 's1:intercept_service', 's3:intercept_service', + 's2:intercept_service' + ]) + + def testInterceptedUnaryRequestBlockingUnaryResponse(self): + request = b'\x07\x08' + + self._record[:] = [] + + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + multi_callable = _unary_unary_multi_callable(channel) + multi_callable( + request, + metadata=(('test', + 'InterceptedUnaryRequestBlockingUnaryResponse'),)) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedUnaryRequestBlockingUnaryResponseWithError(self): + request = _EXCEPTION_REQUEST + + self._record[:] = [] + + channel = grpc.intercept_channel( + self._channel, 
_LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + multi_callable = _unary_unary_multi_callable(channel) + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable( + request, + metadata=(('test', + 'InterceptedUnaryRequestBlockingUnaryResponse'),)) + exception = exception_context.exception + self.assertFalse(exception.cancelled()) + self.assertFalse(exception.running()) + self.assertTrue(exception.done()) + with self.assertRaises(grpc.RpcError): + exception.result() + self.assertIsInstance(exception.exception(), grpc.RpcError) + + def testInterceptedUnaryRequestBlockingUnaryResponseWithCall(self): + request = b'\x07\x08' + + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + self._record[:] = [] + + multi_callable = _unary_unary_multi_callable(channel) + multi_callable.with_call( + request, + metadata=( + ('test', + 'InterceptedUnaryRequestBlockingUnaryResponseWithCall'),)) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedUnaryRequestFutureUnaryResponse(self): + request = b'\x07\x08' + + self._record[:] = [] + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + multi_callable = _unary_unary_multi_callable(channel) + response_future = multi_callable.future( + request, + metadata=(('test', 'InterceptedUnaryRequestFutureUnaryResponse'),)) + response_future.result() + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_unary_unary', 'c2:intercept_unary_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedUnaryRequestStreamResponse(self): + request = b'\x37\x58' + + self._record[:] = [] + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + multi_callable = _unary_stream_multi_callable(channel) + response_iterator = multi_callable( + request, + metadata=(('test', 'InterceptedUnaryRequestStreamResponse'),)) + tuple(response_iterator) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_unary_stream', 'c2:intercept_unary_stream', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedUnaryRequestStreamResponseWithError(self): + request = _EXCEPTION_REQUEST + + self._record[:] = [] + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + multi_callable = _unary_stream_multi_callable(channel) + response_iterator = multi_callable( + request, + metadata=(('test', 'InterceptedUnaryRequestStreamResponse'),)) + with self.assertRaises(grpc.RpcError) as exception_context: + tuple(response_iterator) + exception = exception_context.exception + self.assertFalse(exception.cancelled()) + self.assertFalse(exception.running()) + self.assertTrue(exception.done()) + with self.assertRaises(grpc.RpcError): + exception.result() + self.assertIsInstance(exception.exception(), grpc.RpcError) + + def testInterceptedStreamRequestBlockingUnaryResponse(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) 
+ + multi_callable = _stream_unary_multi_callable(channel) + multi_callable( + request_iterator, + metadata=(('test', + 'InterceptedStreamRequestBlockingUnaryResponse'),)) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_stream_unary', 'c2:intercept_stream_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedStreamRequestBlockingUnaryResponseWithCall(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + multi_callable = _stream_unary_multi_callable(channel) + multi_callable.with_call( + request_iterator, + metadata=( + ('test', + 'InterceptedStreamRequestBlockingUnaryResponseWithCall'),)) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_stream_unary', 'c2:intercept_stream_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedStreamRequestFutureUnaryResponse(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + multi_callable = _stream_unary_multi_callable(channel) + response_future = multi_callable.future( + request_iterator, + metadata=(('test', 'InterceptedStreamRequestFutureUnaryResponse'),)) + response_future.result() + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_stream_unary', 'c2:intercept_stream_unary', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedStreamRequestFutureUnaryResponseWithError(self): + requests = tuple( + _EXCEPTION_REQUEST for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + multi_callable = _stream_unary_multi_callable(channel) + response_future = multi_callable.future( + request_iterator, + metadata=(('test', 'InterceptedStreamRequestFutureUnaryResponse'),)) + with self.assertRaises(grpc.RpcError) as exception_context: + response_future.result() + exception = exception_context.exception + self.assertFalse(exception.cancelled()) + self.assertFalse(exception.running()) + self.assertTrue(exception.done()) + with self.assertRaises(grpc.RpcError): + exception.result() + self.assertIsInstance(exception.exception(), grpc.RpcError) + + def testInterceptedStreamRequestStreamResponse(self): + requests = tuple( + b'\x77\x58' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + self._record[:] = [] + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + multi_callable = _stream_stream_multi_callable(channel) + response_iterator = multi_callable( + request_iterator, + metadata=(('test', 'InterceptedStreamRequestStreamResponse'),)) + tuple(response_iterator) + + self.assertSequenceEqual(self._record, [ + 'c1:intercept_stream_stream', 'c2:intercept_stream_stream', + 's1:intercept_service', 's2:intercept_service' + ]) + + def testInterceptedStreamRequestStreamResponseWithError(self): + requests = tuple( + _EXCEPTION_REQUEST for _ in range(test_constants.STREAM_LENGTH)) + 
request_iterator = iter(requests) + + self._record[:] = [] + channel = grpc.intercept_channel( + self._channel, _LoggingInterceptor('c1', self._record), + _LoggingInterceptor('c2', self._record)) + + multi_callable = _stream_stream_multi_callable(channel) + response_iterator = multi_callable( + request_iterator, + metadata=(('test', 'InterceptedStreamRequestStreamResponse'),)) + with self.assertRaises(grpc.RpcError) as exception_context: + tuple(response_iterator) + exception = exception_context.exception + self.assertFalse(exception.cancelled()) + self.assertFalse(exception.running()) + self.assertTrue(exception.done()) + with self.assertRaises(grpc.RpcError): + exception.result() + self.assertIsInstance(exception.exception(), grpc.RpcError) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py new file mode 100644 index 00000000000..d1f1499d8cd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invalid_metadata_test.py @@ -0,0 +1,140 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test of RPCs made against gRPC Python's application-layer API.""" + +import unittest +import logging + +import grpc + +from tests.unit.framework.common import test_constants + +_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2 +_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:] +_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3 +_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3] + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + + +def _unary_unary_multi_callable(channel): + return channel.unary_unary(_UNARY_UNARY) + + +def _unary_stream_multi_callable(channel): + return channel.unary_stream(_UNARY_STREAM, + request_serializer=_SERIALIZE_REQUEST, + response_deserializer=_DESERIALIZE_RESPONSE) + + +def _stream_unary_multi_callable(channel): + return channel.stream_unary(_STREAM_UNARY, + request_serializer=_SERIALIZE_REQUEST, + response_deserializer=_DESERIALIZE_RESPONSE) + + +def _stream_stream_multi_callable(channel): + return channel.stream_stream(_STREAM_STREAM) + + +class InvalidMetadataTest(unittest.TestCase): + + def setUp(self): + self._channel = grpc.insecure_channel('localhost:8080') + self._unary_unary = _unary_unary_multi_callable(self._channel) + self._unary_stream = _unary_stream_multi_callable(self._channel) + self._stream_unary = _stream_unary_multi_callable(self._channel) + self._stream_stream = _stream_stream_multi_callable(self._channel) + + def tearDown(self): + self._channel.close() + + def testUnaryRequestBlockingUnaryResponse(self): + request = b'\x07\x08' + metadata = (('InVaLiD', 'UnaryRequestBlockingUnaryResponse'),) + expected_error_details = "metadata was invalid: %s" 
% metadata + with self.assertRaises(ValueError) as exception_context: + self._unary_unary(request, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testUnaryRequestBlockingUnaryResponseWithCall(self): + request = b'\x07\x08' + metadata = (('InVaLiD', 'UnaryRequestBlockingUnaryResponseWithCall'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._unary_unary.with_call(request, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testUnaryRequestFutureUnaryResponse(self): + request = b'\x07\x08' + metadata = (('InVaLiD', 'UnaryRequestFutureUnaryResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._unary_unary.future(request, metadata=metadata) + + def testUnaryRequestStreamResponse(self): + request = b'\x37\x58' + metadata = (('InVaLiD', 'UnaryRequestStreamResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._unary_stream(request, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testStreamRequestBlockingUnaryResponse(self): + request_iterator = ( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + metadata = (('InVaLiD', 'StreamRequestBlockingUnaryResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._stream_unary(request_iterator, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testStreamRequestBlockingUnaryResponseWithCall(self): + request_iterator = ( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + metadata = (('InVaLiD', 'StreamRequestBlockingUnaryResponseWithCall'),) + expected_error_details = "metadata was invalid: %s" % metadata + multi_callable = _stream_unary_multi_callable(self._channel) + with self.assertRaises(ValueError) as exception_context: + multi_callable.with_call(request_iterator, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testStreamRequestFutureUnaryResponse(self): + request_iterator = ( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + metadata = (('InVaLiD', 'StreamRequestFutureUnaryResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._stream_unary.future(request_iterator, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testStreamRequestStreamResponse(self): + request_iterator = ( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + metadata = (('InVaLiD', 'StreamRequestStreamResponse'),) + expected_error_details = "metadata was invalid: %s" % metadata + with self.assertRaises(ValueError) as exception_context: + self._stream_stream(request_iterator, metadata=metadata) + self.assertIn(expected_error_details, str(exception_context.exception)) + + def testInvalidMetadata(self): + self.assertRaises(TypeError, self._unary_unary, b'', metadata=42) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py 
b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py new file mode 100644 index 00000000000..a0208b51df4 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_invocation_defects_test.py @@ -0,0 +1,266 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import logging + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants +from tests.unit.framework.common import test_control + +_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2 +_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:] +_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3 +_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3] + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' +_DEFECTIVE_GENERIC_RPC_HANDLER = '/test/DefectiveGenericRpcHandler' + + +class _Handler(object): + + def __init__(self, control): + self._control = control + + def handle_unary_unary(self, request, servicer_context): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + return request + + def handle_unary_stream(self, request, servicer_context): + for _ in range(test_constants.STREAM_LENGTH): + self._control.control() + yield request + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + + def handle_stream_unary(self, request_iterator, servicer_context): + if servicer_context is not None: + servicer_context.invocation_metadata() + self._control.control() + response_elements = [] + for request in request_iterator: + self._control.control() + response_elements.append(request) + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + return b''.join(response_elements) + + def handle_stream_stream(self, request_iterator, servicer_context): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + for request in request_iterator: + self._control.control() + yield request + self._control.control() + + def defective_generic_rpc_handler(self): + raise test_control.Defect() + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming, + request_deserializer, response_serializer, unary_unary, + unary_stream, stream_unary, stream_stream): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = request_deserializer + self.response_serializer = response_serializer + self.unary_unary = unary_unary + self.unary_stream = unary_stream + self.stream_unary = stream_unary + self.stream_stream = stream_stream + + +class 
_GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, handler): + self._handler = handler + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(False, False, None, None, + self._handler.handle_unary_unary, None, None, + None) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(False, True, _DESERIALIZE_REQUEST, + _SERIALIZE_RESPONSE, None, + self._handler.handle_unary_stream, None, None) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(True, False, _DESERIALIZE_REQUEST, + _SERIALIZE_RESPONSE, None, None, + self._handler.handle_stream_unary, None) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(True, True, None, None, None, None, None, + self._handler.handle_stream_stream) + elif handler_call_details.method == _DEFECTIVE_GENERIC_RPC_HANDLER: + return self._handler.defective_generic_rpc_handler() + else: + return None + + +class FailAfterFewIterationsCounter(object): + + def __init__(self, high, bytestring): + self._current = 0 + self._high = high + self._bytestring = bytestring + + def __iter__(self): + return self + + def __next__(self): + if self._current >= self._high: + raise test_control.Defect() + else: + self._current += 1 + return self._bytestring + + next = __next__ + + +def _unary_unary_multi_callable(channel): + return channel.unary_unary(_UNARY_UNARY) + + +def _unary_stream_multi_callable(channel): + return channel.unary_stream(_UNARY_STREAM, + request_serializer=_SERIALIZE_REQUEST, + response_deserializer=_DESERIALIZE_RESPONSE) + + +def _stream_unary_multi_callable(channel): + return channel.stream_unary(_STREAM_UNARY, + request_serializer=_SERIALIZE_REQUEST, + response_deserializer=_DESERIALIZE_RESPONSE) + + +def _stream_stream_multi_callable(channel): + return channel.stream_stream(_STREAM_STREAM) + + +def _defective_handler_multi_callable(channel): + return channel.unary_unary(_DEFECTIVE_GENERIC_RPC_HANDLER) + + +class InvocationDefectsTest(unittest.TestCase): + """Tests the handling of exception-raising user code on the client-side.""" + + def setUp(self): + self._control = test_control.PauseFailControl() + self._handler = _Handler(self._control) + + self._server = test_common.test_server() + port = self._server.add_insecure_port('[::]:0') + self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),)) + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(0) + self._channel.close() + + def testIterableStreamRequestBlockingUnaryResponse(self): + requests = object() + multi_callable = _stream_unary_multi_callable(self._channel) + + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable( + requests, + metadata=(('test', + 'IterableStreamRequestBlockingUnaryResponse'),)) + + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + + def testIterableStreamRequestFutureUnaryResponse(self): + requests = object() + multi_callable = _stream_unary_multi_callable(self._channel) + response_future = multi_callable.future( + requests, + metadata=(('test', 'IterableStreamRequestFutureUnaryResponse'),)) + + with self.assertRaises(grpc.RpcError) as exception_context: + response_future.result() + + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + + def testIterableStreamRequestStreamResponse(self): + requests = object() + multi_callable = 
_stream_stream_multi_callable(self._channel) + response_iterator = multi_callable( + requests, + metadata=(('test', 'IterableStreamRequestStreamResponse'),)) + + with self.assertRaises(grpc.RpcError) as exception_context: + next(response_iterator) + + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + + def testIteratorStreamRequestStreamResponse(self): + requests_iterator = FailAfterFewIterationsCounter( + test_constants.STREAM_LENGTH // 2, b'\x07\x08') + multi_callable = _stream_stream_multi_callable(self._channel) + response_iterator = multi_callable( + requests_iterator, + metadata=(('test', 'IteratorStreamRequestStreamResponse'),)) + + with self.assertRaises(grpc.RpcError) as exception_context: + for _ in range(test_constants.STREAM_LENGTH // 2 + 1): + next(response_iterator) + + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + + def testDefectiveGenericRpcHandlerUnaryResponse(self): + request = b'\x07\x08' + multi_callable = _defective_handler_multi_callable(self._channel) + + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable(request, + metadata=(('test', + 'DefectiveGenericRpcHandlerUnary'),)) + + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py new file mode 100644 index 00000000000..cd1f71dbeed --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_local_credentials_test.py @@ -0,0 +1,77 @@ +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test of RPCs made using local credentials.""" + +import unittest +import os +from concurrent.futures import ThreadPoolExecutor +import grpc + + +class _GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + return grpc.unary_unary_rpc_method_handler( + lambda request, unused_context: request) + + +class LocalCredentialsTest(unittest.TestCase): + + def _create_server(self): + server = grpc.server(ThreadPoolExecutor()) + server.add_generic_rpc_handlers((_GenericHandler(),)) + return server + + @unittest.skipIf(os.name == 'nt', + 'TODO(https://github.com/grpc/grpc/issues/20078)') + def test_local_tcp(self): + server_addr = 'localhost:{}' + channel_creds = grpc.local_channel_credentials( + grpc.LocalConnectionType.LOCAL_TCP) + server_creds = grpc.local_server_credentials( + grpc.LocalConnectionType.LOCAL_TCP) + + server = self._create_server() + port = server.add_secure_port(server_addr.format(0), server_creds) + server.start() + with grpc.secure_channel(server_addr.format(port), + channel_creds) as channel: + self.assertEqual( + b'abc', + channel.unary_unary('/test/method')(b'abc', + wait_for_ready=True)) + server.stop(None) + + @unittest.skipIf(os.name == 'nt', + 'Unix Domain Socket is not supported on Windows') + def test_uds(self): + server_addr = 'unix:/tmp/grpc_fullstack_test' + channel_creds = grpc.local_channel_credentials( + grpc.LocalConnectionType.UDS) + server_creds = grpc.local_server_credentials( + grpc.LocalConnectionType.UDS) + + server = self._create_server() + server.add_secure_port(server_addr, server_creds) + server.start() + with grpc.secure_channel(server_addr, channel_creds) as channel: + self.assertEqual( + b'abc', + channel.unary_unary('/test/method')(b'abc', + wait_for_ready=True)) + server.stop(None) + + +if __name__ == '__main__': + unittest.main() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py new file mode 100644 index 00000000000..1304bb55879 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_logging_test.py @@ -0,0 +1,103 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test of gRPC Python's interaction with the python logging module""" + +import unittest +import logging +import grpc +import os +import subprocess +import sys + +INTERPRETER = sys.executable + + +class LoggingTest(unittest.TestCase): + + def test_logger_not_occupied(self): + script = """if True: + import logging + + import grpc + + if len(logging.getLogger().handlers) != 0: + raise Exception('expected 0 logging handlers') + + """ + self._verifyScriptSucceeds(script) + + def test_handler_found(self): + script = """if True: + import logging + + import grpc + """ + out, err = self._verifyScriptSucceeds(script) + self.assertEqual(0, len(err), 'unexpected output to stderr') + + def test_can_configure_logger(self): + script = """if True: + import logging + import six + + import grpc + + + intended_stream = six.StringIO() + logging.basicConfig(stream=intended_stream) + + if len(logging.getLogger().handlers) != 1: + raise Exception('expected 1 logging handler') + + if logging.getLogger().handlers[0].stream is not intended_stream: + raise Exception('wrong handler stream') + + """ + self._verifyScriptSucceeds(script) + + def test_grpc_logger(self): + script = """if True: + import logging + + import grpc + + if "grpc" not in logging.Logger.manager.loggerDict: + raise Exception('grpc logger not found') + + root_logger = logging.getLogger("grpc") + if len(root_logger.handlers) != 1: + raise Exception('expected 1 root logger handler') + if not isinstance(root_logger.handlers[0], logging.NullHandler): + raise Exception('expected logging.NullHandler') + + """ + self._verifyScriptSucceeds(script) + + def _verifyScriptSucceeds(self, script): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen([INTERPRETER, '-c', script], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env) + out, err = process.communicate() + self.assertEqual( + 0, process.returncode, + 'process failed with exit code %d (stdout: %s, stderr: %s)' % + (process.returncode, out, err)) + return out, err + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py new file mode 100644 index 00000000000..5b06eb2bfe8 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_code_details_test.py @@ -0,0 +1,663 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests application-provided metadata, status code, and details.""" + +import threading +import unittest +import logging + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants +from tests.unit.framework.common import test_control + +_SERIALIZED_REQUEST = b'\x46\x47\x48' +_SERIALIZED_RESPONSE = b'\x49\x50\x51' + +_REQUEST_SERIALIZER = lambda unused_request: _SERIALIZED_REQUEST +_REQUEST_DESERIALIZER = lambda unused_serialized_request: object() +_RESPONSE_SERIALIZER = lambda unused_response: _SERIALIZED_RESPONSE +_RESPONSE_DESERIALIZER = lambda unused_serialized_response: object() + +_SERVICE = 'test.TestService' +_UNARY_UNARY = 'UnaryUnary' +_UNARY_STREAM = 'UnaryStream' +_STREAM_UNARY = 'StreamUnary' +_STREAM_STREAM = 'StreamStream' + +_CLIENT_METADATA = (('client-md-key', 'client-md-key'), ('client-md-key-bin', + b'\x00\x01')) + +_SERVER_INITIAL_METADATA = (('server-initial-md-key', + 'server-initial-md-value'), + ('server-initial-md-key-bin', b'\x00\x02')) + +_SERVER_TRAILING_METADATA = (('server-trailing-md-key', + 'server-trailing-md-value'), + ('server-trailing-md-key-bin', b'\x00\x03')) + +_NON_OK_CODE = grpc.StatusCode.NOT_FOUND +_DETAILS = 'Test details!' + +# calling abort should always fail an RPC, even for "invalid" codes +_ABORT_CODES = (_NON_OK_CODE, 3, grpc.StatusCode.OK) +_EXPECTED_CLIENT_CODES = (_NON_OK_CODE, grpc.StatusCode.UNKNOWN, + grpc.StatusCode.UNKNOWN) +_EXPECTED_DETAILS = (_DETAILS, _DETAILS, '') + + +class _Servicer(object): + + def __init__(self): + self._lock = threading.Lock() + self._abort_call = False + self._code = None + self._details = None + self._exception = False + self._return_none = False + self._received_client_metadata = None + + def unary_unary(self, request, context): + with self._lock: + self._received_client_metadata = context.invocation_metadata() + context.send_initial_metadata(_SERVER_INITIAL_METADATA) + context.set_trailing_metadata(_SERVER_TRAILING_METADATA) + if self._abort_call: + context.abort(self._code, self._details) + else: + if self._code is not None: + context.set_code(self._code) + if self._details is not None: + context.set_details(self._details) + if self._exception: + raise test_control.Defect() + else: + return None if self._return_none else object() + + def unary_stream(self, request, context): + with self._lock: + self._received_client_metadata = context.invocation_metadata() + context.send_initial_metadata(_SERVER_INITIAL_METADATA) + context.set_trailing_metadata(_SERVER_TRAILING_METADATA) + if self._abort_call: + context.abort(self._code, self._details) + else: + if self._code is not None: + context.set_code(self._code) + if self._details is not None: + context.set_details(self._details) + for _ in range(test_constants.STREAM_LENGTH // 2): + yield _SERIALIZED_RESPONSE + if self._exception: + raise test_control.Defect() + + def stream_unary(self, request_iterator, context): + with self._lock: + self._received_client_metadata = context.invocation_metadata() + context.send_initial_metadata(_SERVER_INITIAL_METADATA) + context.set_trailing_metadata(_SERVER_TRAILING_METADATA) + # TODO(https://github.com/grpc/grpc/issues/6891): just ignore the + # request iterator. 
+ list(request_iterator) + if self._abort_call: + context.abort(self._code, self._details) + else: + if self._code is not None: + context.set_code(self._code) + if self._details is not None: + context.set_details(self._details) + if self._exception: + raise test_control.Defect() + else: + return None if self._return_none else _SERIALIZED_RESPONSE + + def stream_stream(self, request_iterator, context): + with self._lock: + self._received_client_metadata = context.invocation_metadata() + context.send_initial_metadata(_SERVER_INITIAL_METADATA) + context.set_trailing_metadata(_SERVER_TRAILING_METADATA) + # TODO(https://github.com/grpc/grpc/issues/6891): just ignore the + # request iterator. + list(request_iterator) + if self._abort_call: + context.abort(self._code, self._details) + else: + if self._code is not None: + context.set_code(self._code) + if self._details is not None: + context.set_details(self._details) + for _ in range(test_constants.STREAM_LENGTH // 3): + yield object() + if self._exception: + raise test_control.Defect() + + def set_abort_call(self): + with self._lock: + self._abort_call = True + + def set_code(self, code): + with self._lock: + self._code = code + + def set_details(self, details): + with self._lock: + self._details = details + + def set_exception(self): + with self._lock: + self._exception = True + + def set_return_none(self): + with self._lock: + self._return_none = True + + def received_client_metadata(self): + with self._lock: + return self._received_client_metadata + + +def _generic_handler(servicer): + method_handlers = { + _UNARY_UNARY: + grpc.unary_unary_rpc_method_handler( + servicer.unary_unary, + request_deserializer=_REQUEST_DESERIALIZER, + response_serializer=_RESPONSE_SERIALIZER), + _UNARY_STREAM: + grpc.unary_stream_rpc_method_handler(servicer.unary_stream), + _STREAM_UNARY: + grpc.stream_unary_rpc_method_handler(servicer.stream_unary), + _STREAM_STREAM: + grpc.stream_stream_rpc_method_handler( + servicer.stream_stream, + request_deserializer=_REQUEST_DESERIALIZER, + response_serializer=_RESPONSE_SERIALIZER), + } + return grpc.method_handlers_generic_handler(_SERVICE, method_handlers) + + +class MetadataCodeDetailsTest(unittest.TestCase): + + def setUp(self): + self._servicer = _Servicer() + self._server = test_common.test_server() + self._server.add_generic_rpc_handlers( + (_generic_handler(self._servicer),)) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + + self._channel = grpc.insecure_channel('localhost:{}'.format(port)) + self._unary_unary = self._channel.unary_unary( + '/'.join(( + '', + _SERVICE, + _UNARY_UNARY, + )), + request_serializer=_REQUEST_SERIALIZER, + response_deserializer=_RESPONSE_DESERIALIZER, + ) + self._unary_stream = self._channel.unary_stream( + '/'.join(( + '', + _SERVICE, + _UNARY_STREAM, + )),) + self._stream_unary = self._channel.stream_unary( + '/'.join(( + '', + _SERVICE, + _STREAM_UNARY, + )),) + self._stream_stream = self._channel.stream_stream( + '/'.join(( + '', + _SERVICE, + _STREAM_STREAM, + )), + request_serializer=_REQUEST_SERIALIZER, + response_deserializer=_RESPONSE_DESERIALIZER, + ) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + + def testSuccessfulUnaryUnary(self): + self._servicer.set_details(_DETAILS) + + unused_response, call = self._unary_unary.with_call( + object(), metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + 
test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + call.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_TRAILING_METADATA, + call.trailing_metadata())) + self.assertIs(grpc.StatusCode.OK, call.code()) + + def testSuccessfulUnaryStream(self): + self._servicer.set_details(_DETAILS) + + response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST, + metadata=_CLIENT_METADATA) + received_initial_metadata = response_iterator_call.initial_metadata() + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(grpc.StatusCode.OK, response_iterator_call.code()) + + def testSuccessfulStreamUnary(self): + self._servicer.set_details(_DETAILS) + + unused_response, call = self._stream_unary.with_call( + iter([_SERIALIZED_REQUEST] * test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + call.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_TRAILING_METADATA, + call.trailing_metadata())) + self.assertIs(grpc.StatusCode.OK, call.code()) + + def testSuccessfulStreamStream(self): + self._servicer.set_details(_DETAILS) + + response_iterator_call = self._stream_stream(iter( + [object()] * test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + received_initial_metadata = response_iterator_call.initial_metadata() + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(grpc.StatusCode.OK, response_iterator_call.code()) + + def testAbortedUnaryUnary(self): + test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, + _EXPECTED_DETAILS) + for abort_code, expected_code, expected_details in test_cases: + self._servicer.set_code(abort_code) + self._servicer.set_details(_DETAILS) + self._servicer.set_abort_call() + + with self.assertRaises(grpc.RpcError) as exception_context: + self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, + self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(expected_code, exception_context.exception.code()) + self.assertEqual(expected_details, + exception_context.exception.details()) + + def testAbortedUnaryStream(self): + test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, + _EXPECTED_DETAILS) + for abort_code, expected_code, expected_details in test_cases: + self._servicer.set_code(abort_code) + self._servicer.set_details(_DETAILS) + 
self._servicer.set_abort_call() + + response_iterator_call = self._unary_stream( + _SERIALIZED_REQUEST, metadata=_CLIENT_METADATA) + received_initial_metadata = \ + response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError): + self.assertEqual(len(list(response_iterator_call)), 0) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, + self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(expected_code, response_iterator_call.code()) + self.assertEqual(expected_details, response_iterator_call.details()) + + def testAbortedStreamUnary(self): + test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, + _EXPECTED_DETAILS) + for abort_code, expected_code, expected_details in test_cases: + self._servicer.set_code(abort_code) + self._servicer.set_details(_DETAILS) + self._servicer.set_abort_call() + + with self.assertRaises(grpc.RpcError) as exception_context: + self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] * + test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, + self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(expected_code, exception_context.exception.code()) + self.assertEqual(expected_details, + exception_context.exception.details()) + + def testAbortedStreamStream(self): + test_cases = zip(_ABORT_CODES, _EXPECTED_CLIENT_CODES, + _EXPECTED_DETAILS) + for abort_code, expected_code, expected_details in test_cases: + self._servicer.set_code(abort_code) + self._servicer.set_details(_DETAILS) + self._servicer.set_abort_call() + + response_iterator_call = self._stream_stream( + iter([object()] * test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + received_initial_metadata = \ + response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError): + self.assertEqual(len(list(response_iterator_call)), 0) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, + self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(expected_code, response_iterator_call.code()) + self.assertEqual(expected_details, response_iterator_call.details()) + + def testCustomCodeUnaryUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + + with self.assertRaises(grpc.RpcError) as exception_context: + self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + 
self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeUnaryStream(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + + response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST, + metadata=_CLIENT_METADATA) + received_initial_metadata = response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError): + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(_NON_OK_CODE, response_iterator_call.code()) + self.assertEqual(_DETAILS, response_iterator_call.details()) + + def testCustomCodeStreamUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + + with self.assertRaises(grpc.RpcError) as exception_context: + self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] * + test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeStreamStream(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + + response_iterator_call = self._stream_stream(iter( + [object()] * test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + received_initial_metadata = response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError) as exception_context: + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeExceptionUnaryUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_exception() + + with self.assertRaises(grpc.RpcError) as exception_context: + self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def 
testCustomCodeExceptionUnaryStream(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_exception() + + response_iterator_call = self._unary_stream(_SERIALIZED_REQUEST, + metadata=_CLIENT_METADATA) + received_initial_metadata = response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError): + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(_NON_OK_CODE, response_iterator_call.code()) + self.assertEqual(_DETAILS, response_iterator_call.details()) + + def testCustomCodeExceptionStreamUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_exception() + + with self.assertRaises(grpc.RpcError) as exception_context: + self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] * + test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeExceptionStreamStream(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_exception() + + response_iterator_call = self._stream_stream(iter( + [object()] * test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + received_initial_metadata = response_iterator_call.initial_metadata() + with self.assertRaises(grpc.RpcError): + list(response_iterator_call) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_SERVER_INITIAL_METADATA, + received_initial_metadata)) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + response_iterator_call.trailing_metadata())) + self.assertIs(_NON_OK_CODE, response_iterator_call.code()) + self.assertEqual(_DETAILS, response_iterator_call.details()) + + def testCustomCodeReturnNoneUnaryUnary(self): + self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_return_none() + + with self.assertRaises(grpc.RpcError) as exception_context: + self._unary_unary.with_call(object(), metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + def testCustomCodeReturnNoneStreamUnary(self): + 
self._servicer.set_code(_NON_OK_CODE) + self._servicer.set_details(_DETAILS) + self._servicer.set_return_none() + + with self.assertRaises(grpc.RpcError) as exception_context: + self._stream_unary.with_call(iter([_SERIALIZED_REQUEST] * + test_constants.STREAM_LENGTH), + metadata=_CLIENT_METADATA) + + self.assertTrue( + test_common.metadata_transmitted( + _CLIENT_METADATA, self._servicer.received_client_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_INITIAL_METADATA, + exception_context.exception.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted( + _SERVER_TRAILING_METADATA, + exception_context.exception.trailing_metadata())) + self.assertIs(_NON_OK_CODE, exception_context.exception.code()) + self.assertEqual(_DETAILS, exception_context.exception.details()) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py new file mode 100644 index 00000000000..e2b36b1c70f --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_flags_test.py @@ -0,0 +1,260 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
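The _metadata_code_details_test.py cases above distinguish two server-side ways of producing a non-OK status: set_code()/set_details(), which let the handler finish normally, and abort(), which ends the RPC on the spot and, per the _ABORT_CODES/_EXPECTED_CLIENT_CODES constants, is surfaced to the client as StatusCode.UNKNOWN when given an invalid code (3) or even StatusCode.OK. A minimal sketch of the two handler shapes (method wiring omitted, names are illustrative):

import grpc


def unary_unary_set_status(request, context):
    # Record a non-OK status but keep executing; the client's call raises
    # grpc.RpcError carrying this code and these details.
    context.set_code(grpc.StatusCode.NOT_FOUND)
    context.set_details('Test details!')
    return b''


def unary_unary_abort(request, context):
    # abort() raises internally, so nothing after it runs; in the tests
    # above, trailing metadata set before the abort still reaches the client.
    context.abort(grpc.StatusCode.NOT_FOUND, 'Test details!')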
+"""Tests metadata flags feature by testing wait-for-ready semantics""" + +import time +import weakref +import unittest +import threading +import logging +import socket +from six.moves import queue + +import grpc + +from tests.unit import test_common +from tests.unit.framework.common import test_constants +import tests.unit.framework.common +from tests.unit.framework.common import get_socket + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + + +def handle_unary_unary(test, request, servicer_context): + return _RESPONSE + + +def handle_unary_stream(test, request, servicer_context): + for _ in range(test_constants.STREAM_LENGTH): + yield _RESPONSE + + +def handle_stream_unary(test, request_iterator, servicer_context): + for _ in request_iterator: + pass + return _RESPONSE + + +def handle_stream_stream(test, request_iterator, servicer_context): + for _ in request_iterator: + yield _RESPONSE + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, test, request_streaming, response_streaming): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + if self.request_streaming and self.response_streaming: + self.stream_stream = lambda req, ctx: handle_stream_stream( + test, req, ctx) + elif self.request_streaming: + self.stream_unary = lambda req, ctx: handle_stream_unary( + test, req, ctx) + elif self.response_streaming: + self.unary_stream = lambda req, ctx: handle_unary_stream( + test, req, ctx) + else: + self.unary_unary = lambda req, ctx: handle_unary_unary( + test, req, ctx) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, test): + self._test = test + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(self._test, False, False) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(self._test, False, True) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(self._test, True, False) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(self._test, True, True) + else: + return None + + +def create_dummy_channel(): + """Creating dummy channels is a workaround for retries""" + host, port, sock = get_socket(sock_options=(socket.SO_REUSEADDR,)) + sock.close() + return grpc.insecure_channel('{}:{}'.format(host, port)) + + +def perform_unary_unary_call(channel, wait_for_ready=None): + channel.unary_unary(_UNARY_UNARY).__call__( + _REQUEST, + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + + +def perform_unary_unary_with_call(channel, wait_for_ready=None): + channel.unary_unary(_UNARY_UNARY).with_call( + _REQUEST, + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + + +def perform_unary_unary_future(channel, wait_for_ready=None): + channel.unary_unary(_UNARY_UNARY).future( + _REQUEST, + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready).result( + timeout=test_constants.LONG_TIMEOUT) + + +def perform_unary_stream_call(channel, wait_for_ready=None): + response_iterator = channel.unary_stream(_UNARY_STREAM).__call__( + _REQUEST, + timeout=test_constants.LONG_TIMEOUT, + 
wait_for_ready=wait_for_ready) + for _ in response_iterator: + pass + + +def perform_stream_unary_call(channel, wait_for_ready=None): + channel.stream_unary(_STREAM_UNARY).__call__( + iter([_REQUEST] * test_constants.STREAM_LENGTH), + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + + +def perform_stream_unary_with_call(channel, wait_for_ready=None): + channel.stream_unary(_STREAM_UNARY).with_call( + iter([_REQUEST] * test_constants.STREAM_LENGTH), + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + + +def perform_stream_unary_future(channel, wait_for_ready=None): + channel.stream_unary(_STREAM_UNARY).future( + iter([_REQUEST] * test_constants.STREAM_LENGTH), + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready).result( + timeout=test_constants.LONG_TIMEOUT) + + +def perform_stream_stream_call(channel, wait_for_ready=None): + response_iterator = channel.stream_stream(_STREAM_STREAM).__call__( + iter([_REQUEST] * test_constants.STREAM_LENGTH), + timeout=test_constants.LONG_TIMEOUT, + wait_for_ready=wait_for_ready) + for _ in response_iterator: + pass + + +_ALL_CALL_CASES = [ + perform_unary_unary_call, perform_unary_unary_with_call, + perform_unary_unary_future, perform_unary_stream_call, + perform_stream_unary_call, perform_stream_unary_with_call, + perform_stream_unary_future, perform_stream_stream_call +] + + +class MetadataFlagsTest(unittest.TestCase): + + def check_connection_does_failfast(self, fn, channel, wait_for_ready=None): + try: + fn(channel, wait_for_ready) + self.fail("The Call should fail") + except BaseException as e: # pylint: disable=broad-except + self.assertIs(grpc.StatusCode.UNAVAILABLE, e.code()) + + def test_call_wait_for_ready_default(self): + for perform_call in _ALL_CALL_CASES: + with create_dummy_channel() as channel: + self.check_connection_does_failfast(perform_call, channel) + + def test_call_wait_for_ready_disabled(self): + for perform_call in _ALL_CALL_CASES: + with create_dummy_channel() as channel: + self.check_connection_does_failfast(perform_call, + channel, + wait_for_ready=False) + + def test_call_wait_for_ready_enabled(self): + # To test the wait mechanism, Python thread is required to make + # client set up first without handling them case by case. + # Also, Python thread don't pass the unhandled exceptions to + # main thread. So, it need another method to store the + # exceptions and raise them again in main thread. + unhandled_exceptions = queue.Queue() + + # We just need an unused TCP port + host, port, sock = get_socket(sock_options=(socket.SO_REUSEADDR,)) + sock.close() + + addr = '{}:{}'.format(host, port) + wg = test_common.WaitGroup(len(_ALL_CALL_CASES)) + + def wait_for_transient_failure(channel_connectivity): + if channel_connectivity == grpc.ChannelConnectivity.TRANSIENT_FAILURE: + wg.done() + + def test_call(perform_call): + with grpc.insecure_channel(addr) as channel: + try: + channel.subscribe(wait_for_transient_failure) + perform_call(channel, wait_for_ready=True) + except BaseException as e: # pylint: disable=broad-except + # If the call failed, the thread would be destroyed. The + # channel object can be collected before calling the + # callback, which will result in a deadlock. 
+ wg.done() + unhandled_exceptions.put(e, True) + + test_threads = [] + for perform_call in _ALL_CALL_CASES: + test_thread = threading.Thread(target=test_call, + args=(perform_call,)) + test_thread.daemon = True + test_thread.exception = None + test_thread.start() + test_threads.append(test_thread) + + # Start the server after the connections are waiting + wg.wait() + server = test_common.test_server(reuse_port=True) + server.add_generic_rpc_handlers((_GenericHandler(weakref.proxy(self)),)) + server.add_insecure_port(addr) + server.start() + + for test_thread in test_threads: + test_thread.join() + + # Stop the server to make test end properly + server.stop(0) + + if not unhandled_exceptions.empty(): + raise unhandled_exceptions.get(True) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.DEBUG) + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py new file mode 100644 index 00000000000..3e7717b04c7 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_metadata_test.py @@ -0,0 +1,242 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests server and client side metadata API.""" + +import unittest +import weakref +import logging + +import grpc +from grpc import _channel + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_CHANNEL_ARGS = (('grpc.primary_user_agent', 'primary-agent'), + ('grpc.secondary_user_agent', 'secondary-agent')) + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + +_INVOCATION_METADATA = ( + ( + b'invocation-md-key', + u'invocation-md-value', + ), + ( + u'invocation-md-key-bin', + b'\x00\x01', + ), +) +_EXPECTED_INVOCATION_METADATA = ( + ( + 'invocation-md-key', + 'invocation-md-value', + ), + ( + 'invocation-md-key-bin', + b'\x00\x01', + ), +) + +_INITIAL_METADATA = ((b'initial-md-key', u'initial-md-value'), + (u'initial-md-key-bin', b'\x00\x02')) +_EXPECTED_INITIAL_METADATA = ( + ( + 'initial-md-key', + 'initial-md-value', + ), + ( + 'initial-md-key-bin', + b'\x00\x02', + ), +) + +_TRAILING_METADATA = ( + ( + 'server-trailing-md-key', + 'server-trailing-md-value', + ), + ( + 'server-trailing-md-key-bin', + b'\x00\x03', + ), +) +_EXPECTED_TRAILING_METADATA = _TRAILING_METADATA + + +def _user_agent(metadata): + for key, val in metadata: + if key == 'user-agent': + return val + raise KeyError('No user agent!') + + +def validate_client_metadata(test, servicer_context): + invocation_metadata = servicer_context.invocation_metadata() + test.assertTrue( + test_common.metadata_transmitted(_EXPECTED_INVOCATION_METADATA, + invocation_metadata)) + user_agent = _user_agent(invocation_metadata) + test.assertTrue( + user_agent.startswith('primary-agent ' + _channel._USER_AGENT)) + 
test.assertTrue(user_agent.endswith('secondary-agent')) + + +def handle_unary_unary(test, request, servicer_context): + validate_client_metadata(test, servicer_context) + servicer_context.send_initial_metadata(_INITIAL_METADATA) + servicer_context.set_trailing_metadata(_TRAILING_METADATA) + return _RESPONSE + + +def handle_unary_stream(test, request, servicer_context): + validate_client_metadata(test, servicer_context) + servicer_context.send_initial_metadata(_INITIAL_METADATA) + servicer_context.set_trailing_metadata(_TRAILING_METADATA) + for _ in range(test_constants.STREAM_LENGTH): + yield _RESPONSE + + +def handle_stream_unary(test, request_iterator, servicer_context): + validate_client_metadata(test, servicer_context) + servicer_context.send_initial_metadata(_INITIAL_METADATA) + servicer_context.set_trailing_metadata(_TRAILING_METADATA) + # TODO(issue:#6891) We should be able to remove this loop + for request in request_iterator: + pass + return _RESPONSE + + +def handle_stream_stream(test, request_iterator, servicer_context): + validate_client_metadata(test, servicer_context) + servicer_context.send_initial_metadata(_INITIAL_METADATA) + servicer_context.set_trailing_metadata(_TRAILING_METADATA) + # TODO(issue:#6891) We should be able to remove this loop, + # and replace with return; yield + for request in request_iterator: + yield _RESPONSE + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, test, request_streaming, response_streaming): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + if self.request_streaming and self.response_streaming: + self.stream_stream = lambda x, y: handle_stream_stream(test, x, y) + elif self.request_streaming: + self.stream_unary = lambda x, y: handle_stream_unary(test, x, y) + elif self.response_streaming: + self.unary_stream = lambda x, y: handle_unary_stream(test, x, y) + else: + self.unary_unary = lambda x, y: handle_unary_unary(test, x, y) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, test): + self._test = test + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(self._test, False, False) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(self._test, False, True) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(self._test, True, False) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(self._test, True, True) + else: + return None + + +class MetadataTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + self._server.add_generic_rpc_handlers( + (_GenericHandler(weakref.proxy(self)),)) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + self._channel = grpc.insecure_channel('localhost:%d' % port, + options=_CHANNEL_ARGS) + + def tearDown(self): + self._server.stop(0) + self._channel.close() + + def testUnaryUnary(self): + multi_callable = self._channel.unary_unary(_UNARY_UNARY) + unused_response, call = multi_callable.with_call( + _REQUEST, metadata=_INVOCATION_METADATA) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, + call.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, + 
call.trailing_metadata())) + + def testUnaryStream(self): + multi_callable = self._channel.unary_stream(_UNARY_STREAM) + call = multi_callable(_REQUEST, metadata=_INVOCATION_METADATA) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, + call.initial_metadata())) + for _ in call: + pass + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, + call.trailing_metadata())) + + def testStreamUnary(self): + multi_callable = self._channel.stream_unary(_STREAM_UNARY) + unused_response, call = multi_callable.with_call( + iter([_REQUEST] * test_constants.STREAM_LENGTH), + metadata=_INVOCATION_METADATA) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, + call.initial_metadata())) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, + call.trailing_metadata())) + + def testStreamStream(self): + multi_callable = self._channel.stream_stream(_STREAM_STREAM) + call = multi_callable(iter([_REQUEST] * test_constants.STREAM_LENGTH), + metadata=_INVOCATION_METADATA) + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_INITIAL_METADATA, + call.initial_metadata())) + for _ in call: + pass + self.assertTrue( + test_common.metadata_transmitted(_EXPECTED_TRAILING_METADATA, + call.trailing_metadata())) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py new file mode 100644 index 00000000000..16feb4b1ff4 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_reconnect_test.py @@ -0,0 +1,69 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
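_metadata_test.py above also documents, via _INVOCATION_METADATA versus _EXPECTED_INVOCATION_METADATA, how metadata is normalized: text keys and values may be supplied as bytes or str and come back as str, while keys ending in -bin carry bytes values. A short client-side sketch, assuming a server like the one in the test is listening on the hard-coded address:

import grpc

channel = grpc.insecure_channel('localhost:50051')  # assumed address
multi_callable = channel.unary_unary('/test/UnaryUnary')
response, call = multi_callable.with_call(
    b'\x00\x00\x00',
    metadata=(
        ('invocation-md-key', 'invocation-md-value'),  # text value
        ('invocation-md-key-bin', b'\x00\x01'),  # '-bin' suffix carries bytes
    ))
print(call.initial_metadata())
print(call.trailing_metadata())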
+"""Tests that a channel will reconnect if a connection is dropped""" + +import socket +import time +import logging +import unittest + +import grpc +from grpc.framework.foundation import logging_pool + +from tests.unit.framework.common import test_constants +from tests.unit.framework.common import bound_socket + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x01' + +_UNARY_UNARY = '/test/UnaryUnary' + + +def _handle_unary_unary(unused_request, unused_servicer_context): + return _RESPONSE + + +class ReconnectTest(unittest.TestCase): + + def test_reconnect(self): + server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) + handler = grpc.method_handlers_generic_handler('test', { + 'UnaryUnary': + grpc.unary_unary_rpc_method_handler(_handle_unary_unary) + }) + options = (('grpc.so_reuseport', 1),) + with bound_socket() as (host, port): + addr = '{}:{}'.format(host, port) + server = grpc.server(server_pool, (handler,), options=options) + server.add_insecure_port(addr) + server.start() + channel = grpc.insecure_channel(addr) + multi_callable = channel.unary_unary(_UNARY_UNARY) + self.assertEqual(_RESPONSE, multi_callable(_REQUEST)) + server.stop(None) + # By default, the channel connectivity is checked every 5s + # GRPC_CLIENT_CHANNEL_BACKUP_POLL_INTERVAL_MS can be set to change + # this. + time.sleep(5.1) + server = grpc.server(server_pool, (handler,), options=options) + server.add_insecure_port(addr) + server.start() + self.assertEqual(_RESPONSE, multi_callable(_REQUEST)) + server.stop(None) + channel.close() + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_resource_exhausted_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_resource_exhausted_test.py new file mode 100644 index 00000000000..ecd2ccadbde --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_resource_exhausted_test.py @@ -0,0 +1,259 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests server responding with RESOURCE_EXHAUSTED.""" + +import threading +import unittest +import logging + +import grpc +from grpc import _channel +from grpc.framework.foundation import logging_pool + +from tests.unit import test_common +from tests.unit.framework.common import test_constants + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' + + +class _TestTrigger(object): + + def __init__(self, total_call_count): + self._total_call_count = total_call_count + self._pending_calls = 0 + self._triggered = False + self._finish_condition = threading.Condition() + self._start_condition = threading.Condition() + + # Wait for all calls be blocked in their handler + def await_calls(self): + with self._start_condition: + while self._pending_calls < self._total_call_count: + self._start_condition.wait() + + # Block in a response handler and wait for a trigger + def await_trigger(self): + with self._start_condition: + self._pending_calls += 1 + self._start_condition.notify() + + with self._finish_condition: + if not self._triggered: + self._finish_condition.wait() + + # Finish all response handlers + def trigger(self): + with self._finish_condition: + self._triggered = True + self._finish_condition.notify_all() + + +def handle_unary_unary(trigger, request, servicer_context): + trigger.await_trigger() + return _RESPONSE + + +def handle_unary_stream(trigger, request, servicer_context): + trigger.await_trigger() + for _ in range(test_constants.STREAM_LENGTH): + yield _RESPONSE + + +def handle_stream_unary(trigger, request_iterator, servicer_context): + trigger.await_trigger() + # TODO(issue:#6891) We should be able to remove this loop + for request in request_iterator: + pass + return _RESPONSE + + +def handle_stream_stream(trigger, request_iterator, servicer_context): + trigger.await_trigger() + # TODO(issue:#6891) We should be able to remove this loop, + # and replace with return; yield + for request in request_iterator: + yield _RESPONSE + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, trigger, request_streaming, response_streaming): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = None + self.response_serializer = None + self.unary_unary = None + self.unary_stream = None + self.stream_unary = None + self.stream_stream = None + if self.request_streaming and self.response_streaming: + self.stream_stream = ( + lambda x, y: handle_stream_stream(trigger, x, y)) + elif self.request_streaming: + self.stream_unary = lambda x, y: handle_stream_unary(trigger, x, y) + elif self.response_streaming: + self.unary_stream = lambda x, y: handle_unary_stream(trigger, x, y) + else: + self.unary_unary = lambda x, y: handle_unary_unary(trigger, x, y) + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, trigger): + self._trigger = trigger + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(self._trigger, False, False) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(self._trigger, False, True) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(self._trigger, True, False) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(self._trigger, True, True) + else: + return None + + +class 
ResourceExhaustedTest(unittest.TestCase): + + def setUp(self): + self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) + self._trigger = _TestTrigger(test_constants.THREAD_CONCURRENCY) + self._server = grpc.server( + self._server_pool, + handlers=(_GenericHandler(self._trigger),), + options=(('grpc.so_reuseport', 0),), + maximum_concurrent_rpcs=test_constants.THREAD_CONCURRENCY) + port = self._server.add_insecure_port('[::]:0') + self._server.start() + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(0) + self._channel.close() + + def testUnaryUnary(self): + multi_callable = self._channel.unary_unary(_UNARY_UNARY) + futures = [] + for _ in range(test_constants.THREAD_CONCURRENCY): + futures.append(multi_callable.future(_REQUEST)) + + self._trigger.await_calls() + + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable(_REQUEST) + + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + exception_context.exception.code()) + + future_exception = multi_callable.future(_REQUEST) + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + future_exception.exception().code()) + + self._trigger.trigger() + for future in futures: + self.assertEqual(_RESPONSE, future.result()) + + # Ensure a new request can be handled + self.assertEqual(_RESPONSE, multi_callable(_REQUEST)) + + def testUnaryStream(self): + multi_callable = self._channel.unary_stream(_UNARY_STREAM) + calls = [] + for _ in range(test_constants.THREAD_CONCURRENCY): + calls.append(multi_callable(_REQUEST)) + + self._trigger.await_calls() + + with self.assertRaises(grpc.RpcError) as exception_context: + next(multi_callable(_REQUEST)) + + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + exception_context.exception.code()) + + self._trigger.trigger() + + for call in calls: + for response in call: + self.assertEqual(_RESPONSE, response) + + # Ensure a new request can be handled + new_call = multi_callable(_REQUEST) + for response in new_call: + self.assertEqual(_RESPONSE, response) + + def testStreamUnary(self): + multi_callable = self._channel.stream_unary(_STREAM_UNARY) + futures = [] + request = iter([_REQUEST] * test_constants.STREAM_LENGTH) + for _ in range(test_constants.THREAD_CONCURRENCY): + futures.append(multi_callable.future(request)) + + self._trigger.await_calls() + + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable(request) + + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + exception_context.exception.code()) + + future_exception = multi_callable.future(request) + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + future_exception.exception().code()) + + self._trigger.trigger() + + for future in futures: + self.assertEqual(_RESPONSE, future.result()) + + # Ensure a new request can be handled + self.assertEqual(_RESPONSE, multi_callable(request)) + + def testStreamStream(self): + multi_callable = self._channel.stream_stream(_STREAM_STREAM) + calls = [] + request = iter([_REQUEST] * test_constants.STREAM_LENGTH) + for _ in range(test_constants.THREAD_CONCURRENCY): + calls.append(multi_callable(request)) + + self._trigger.await_calls() + + with self.assertRaises(grpc.RpcError) as exception_context: + next(multi_callable(request)) + + self.assertEqual(grpc.StatusCode.RESOURCE_EXHAUSTED, + exception_context.exception.code()) + + self._trigger.trigger() + + for call in calls: + for response in call: + self.assertEqual(_RESPONSE, response) + + # Ensure a new request can be handled + new_call = 
multi_callable(request) + for response in new_call: + self.assertEqual(_RESPONSE, response) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_1_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_1_test.py new file mode 100644 index 00000000000..9b0cb29a0d5 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_1_test.py @@ -0,0 +1,232 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test of RPCs made against gRPC Python's application-layer API.""" + +import itertools +import threading +import unittest +import logging +from concurrent import futures + +import grpc +from grpc.framework.foundation import logging_pool + +from tests.unit._rpc_test_helpers import ( + TIMEOUT_SHORT, Callback, unary_unary_multi_callable, + unary_stream_multi_callable, unary_stream_non_blocking_multi_callable, + stream_unary_multi_callable, stream_stream_multi_callable, + stream_stream_non_blocking_multi_callable, BaseRPCTest) +from tests.unit.framework.common import test_constants + + +class RPCPart1Test(BaseRPCTest, unittest.TestCase): + + def testExpiredStreamRequestBlockingUnaryResponse(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + multi_callable = stream_unary_multi_callable(self._channel) + with self._control.pause(): + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable( + request_iterator, + timeout=TIMEOUT_SHORT, + metadata=(('test', + 'ExpiredStreamRequestBlockingUnaryResponse'),)) + + self.assertIsInstance(exception_context.exception, grpc.RpcError) + self.assertIsInstance(exception_context.exception, grpc.Call) + self.assertIsNotNone(exception_context.exception.initial_metadata()) + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, + exception_context.exception.code()) + self.assertIsNotNone(exception_context.exception.details()) + self.assertIsNotNone(exception_context.exception.trailing_metadata()) + + def testExpiredStreamRequestFutureUnaryResponse(self): + requests = tuple( + b'\x07\x18' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + callback = Callback() + + multi_callable = stream_unary_multi_callable(self._channel) + with self._control.pause(): + response_future = multi_callable.future( + request_iterator, + timeout=TIMEOUT_SHORT, + metadata=(('test', 'ExpiredStreamRequestFutureUnaryResponse'),)) + with self.assertRaises(grpc.FutureTimeoutError): + response_future.result(timeout=TIMEOUT_SHORT / 2.0) + response_future.add_done_callback(callback) + value_passed_to_callback = callback.value() + + with self.assertRaises(grpc.RpcError) as exception_context: + response_future.result() + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code()) + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, + exception_context.exception.code()) + 
self.assertIsInstance(response_future.exception(), grpc.RpcError) + self.assertIsNotNone(response_future.traceback()) + self.assertIs(response_future, value_passed_to_callback) + self.assertIsNotNone(response_future.initial_metadata()) + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code()) + self.assertIsNotNone(response_future.details()) + self.assertIsNotNone(response_future.trailing_metadata()) + + def testExpiredStreamRequestStreamResponse(self): + self._expired_stream_request_stream_response( + stream_stream_multi_callable(self._channel)) + + def testExpiredStreamRequestStreamResponseNonBlocking(self): + self._expired_stream_request_stream_response( + stream_stream_non_blocking_multi_callable(self._channel)) + + def testFailedUnaryRequestBlockingUnaryResponse(self): + request = b'\x37\x17' + + multi_callable = unary_unary_multi_callable(self._channel) + with self._control.fail(): + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable.with_call( + request, + metadata=(('test', + 'FailedUnaryRequestBlockingUnaryResponse'),)) + + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + # sanity checks on to make sure returned string contains default members + # of the error + debug_error_string = exception_context.exception.debug_error_string() + self.assertIn('created', debug_error_string) + self.assertIn('description', debug_error_string) + self.assertIn('file', debug_error_string) + self.assertIn('file_line', debug_error_string) + + def testFailedUnaryRequestFutureUnaryResponse(self): + request = b'\x37\x17' + callback = Callback() + + multi_callable = unary_unary_multi_callable(self._channel) + with self._control.fail(): + response_future = multi_callable.future( + request, + metadata=(('test', 'FailedUnaryRequestFutureUnaryResponse'),)) + response_future.add_done_callback(callback) + value_passed_to_callback = callback.value() + + self.assertIsInstance(response_future, grpc.Future) + self.assertIsInstance(response_future, grpc.Call) + with self.assertRaises(grpc.RpcError) as exception_context: + response_future.result() + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + self.assertIsInstance(response_future.exception(), grpc.RpcError) + self.assertIsNotNone(response_future.traceback()) + self.assertIs(grpc.StatusCode.UNKNOWN, + response_future.exception().code()) + self.assertIs(response_future, value_passed_to_callback) + + def testFailedUnaryRequestStreamResponse(self): + self._failed_unary_request_stream_response( + unary_stream_multi_callable(self._channel)) + + def testFailedUnaryRequestStreamResponseNonBlocking(self): + self._failed_unary_request_stream_response( + unary_stream_non_blocking_multi_callable(self._channel)) + + def testFailedStreamRequestBlockingUnaryResponse(self): + requests = tuple( + b'\x47\x58' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + multi_callable = stream_unary_multi_callable(self._channel) + with self._control.fail(): + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable( + request_iterator, + metadata=(('test', + 'FailedStreamRequestBlockingUnaryResponse'),)) + + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + + def testFailedStreamRequestFutureUnaryResponse(self): + requests = tuple( + b'\x07\x18' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + callback = Callback() + + multi_callable = 
stream_unary_multi_callable(self._channel) + with self._control.fail(): + response_future = multi_callable.future( + request_iterator, + metadata=(('test', 'FailedStreamRequestFutureUnaryResponse'),)) + response_future.add_done_callback(callback) + value_passed_to_callback = callback.value() + + with self.assertRaises(grpc.RpcError) as exception_context: + response_future.result() + self.assertIs(grpc.StatusCode.UNKNOWN, response_future.code()) + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + self.assertIsInstance(response_future.exception(), grpc.RpcError) + self.assertIsNotNone(response_future.traceback()) + self.assertIs(response_future, value_passed_to_callback) + + def testFailedStreamRequestStreamResponse(self): + self._failed_stream_request_stream_response( + stream_stream_multi_callable(self._channel)) + + def testFailedStreamRequestStreamResponseNonBlocking(self): + self._failed_stream_request_stream_response( + stream_stream_non_blocking_multi_callable(self._channel)) + + def testIgnoredUnaryRequestFutureUnaryResponse(self): + request = b'\x37\x17' + + multi_callable = unary_unary_multi_callable(self._channel) + multi_callable.future( + request, + metadata=(('test', 'IgnoredUnaryRequestFutureUnaryResponse'),)) + + def testIgnoredUnaryRequestStreamResponse(self): + self._ignored_unary_stream_request_future_unary_response( + unary_stream_multi_callable(self._channel)) + + def testIgnoredUnaryRequestStreamResponseNonBlocking(self): + self._ignored_unary_stream_request_future_unary_response( + unary_stream_non_blocking_multi_callable(self._channel)) + + def testIgnoredStreamRequestFutureUnaryResponse(self): + requests = tuple( + b'\x07\x18' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + multi_callable = stream_unary_multi_callable(self._channel) + multi_callable.future( + request_iterator, + metadata=(('test', 'IgnoredStreamRequestFutureUnaryResponse'),)) + + def testIgnoredStreamRequestStreamResponse(self): + self._ignored_stream_request_stream_response( + stream_stream_multi_callable(self._channel)) + + def testIgnoredStreamRequestStreamResponseNonBlocking(self): + self._ignored_stream_request_stream_response( + stream_stream_non_blocking_multi_callable(self._channel)) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_2_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_2_test.py new file mode 100644 index 00000000000..0e559efec2a --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_part_2_test.py @@ -0,0 +1,426 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
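_rpc_part_1_test.py above leans on the fact that the object returned by .future() is both a grpc.Future and a grpc.Call, so a failed RPC can be inspected either through the raised error or through the future itself. A compact sketch, assuming a server is reachable at the hard-coded address and that the deliberately short deadline expires:

import grpc

channel = grpc.insecure_channel('localhost:50051')  # assumed address
response_future = channel.unary_unary('/test/UnaryUnary').future(
    b'\x07\x08', timeout=0.1)  # deliberately short deadline

try:
    response_future.result()
except grpc.RpcError:
    # The same status is available on the future acting as a grpc.Call.
    print(response_future.code(), response_future.details())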
+"""Test of RPCs made against gRPC Python's application-layer API.""" + +import itertools +import threading +import unittest +import logging +from concurrent import futures + +import grpc +from grpc.framework.foundation import logging_pool + +from tests.unit._rpc_test_helpers import ( + TIMEOUT_SHORT, Callback, unary_unary_multi_callable, + unary_stream_multi_callable, unary_stream_non_blocking_multi_callable, + stream_unary_multi_callable, stream_stream_multi_callable, + stream_stream_non_blocking_multi_callable, BaseRPCTest) +from tests.unit.framework.common import test_constants + + +class RPCPart2Test(BaseRPCTest, unittest.TestCase): + + def testDefaultThreadPoolIsUsed(self): + self._consume_one_stream_response_unary_request( + unary_stream_multi_callable(self._channel)) + self.assertFalse(self._thread_pool.was_used()) + + def testExperimentalThreadPoolIsUsed(self): + self._consume_one_stream_response_unary_request( + unary_stream_non_blocking_multi_callable(self._channel)) + self.assertTrue(self._thread_pool.was_used()) + + def testUnrecognizedMethod(self): + request = b'abc' + + with self.assertRaises(grpc.RpcError) as exception_context: + self._channel.unary_unary('NoSuchMethod')(request) + + self.assertEqual(grpc.StatusCode.UNIMPLEMENTED, + exception_context.exception.code()) + + def testSuccessfulUnaryRequestBlockingUnaryResponse(self): + request = b'\x07\x08' + expected_response = self._handler.handle_unary_unary(request, None) + + multi_callable = unary_unary_multi_callable(self._channel) + response = multi_callable( + request, + metadata=(('test', 'SuccessfulUnaryRequestBlockingUnaryResponse'),)) + + self.assertEqual(expected_response, response) + + def testSuccessfulUnaryRequestBlockingUnaryResponseWithCall(self): + request = b'\x07\x08' + expected_response = self._handler.handle_unary_unary(request, None) + + multi_callable = unary_unary_multi_callable(self._channel) + response, call = multi_callable.with_call( + request, + metadata=(('test', + 'SuccessfulUnaryRequestBlockingUnaryResponseWithCall'),)) + + self.assertEqual(expected_response, response) + self.assertIs(grpc.StatusCode.OK, call.code()) + self.assertEqual('', call.debug_error_string()) + + def testSuccessfulUnaryRequestFutureUnaryResponse(self): + request = b'\x07\x08' + expected_response = self._handler.handle_unary_unary(request, None) + + multi_callable = unary_unary_multi_callable(self._channel) + response_future = multi_callable.future( + request, + metadata=(('test', 'SuccessfulUnaryRequestFutureUnaryResponse'),)) + response = response_future.result() + + self.assertIsInstance(response_future, grpc.Future) + self.assertIsInstance(response_future, grpc.Call) + self.assertEqual(expected_response, response) + self.assertIsNone(response_future.exception()) + self.assertIsNone(response_future.traceback()) + + def testSuccessfulUnaryRequestStreamResponse(self): + request = b'\x37\x58' + expected_responses = tuple( + self._handler.handle_unary_stream(request, None)) + + multi_callable = unary_stream_multi_callable(self._channel) + response_iterator = multi_callable( + request, + metadata=(('test', 'SuccessfulUnaryRequestStreamResponse'),)) + responses = tuple(response_iterator) + + self.assertSequenceEqual(expected_responses, responses) + + def testSuccessfulStreamRequestBlockingUnaryResponse(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + expected_response = self._handler.handle_stream_unary( + iter(requests), None) + request_iterator = iter(requests) + + multi_callable = 
stream_unary_multi_callable(self._channel) + response = multi_callable( + request_iterator, + metadata=(('test', + 'SuccessfulStreamRequestBlockingUnaryResponse'),)) + + self.assertEqual(expected_response, response) + + def testSuccessfulStreamRequestBlockingUnaryResponseWithCall(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + expected_response = self._handler.handle_stream_unary( + iter(requests), None) + request_iterator = iter(requests) + + multi_callable = stream_unary_multi_callable(self._channel) + response, call = multi_callable.with_call( + request_iterator, + metadata=( + ('test', + 'SuccessfulStreamRequestBlockingUnaryResponseWithCall'),)) + + self.assertEqual(expected_response, response) + self.assertIs(grpc.StatusCode.OK, call.code()) + + def testSuccessfulStreamRequestFutureUnaryResponse(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + expected_response = self._handler.handle_stream_unary( + iter(requests), None) + request_iterator = iter(requests) + + multi_callable = stream_unary_multi_callable(self._channel) + response_future = multi_callable.future( + request_iterator, + metadata=(('test', 'SuccessfulStreamRequestFutureUnaryResponse'),)) + response = response_future.result() + + self.assertEqual(expected_response, response) + self.assertIsNone(response_future.exception()) + self.assertIsNone(response_future.traceback()) + + def testSuccessfulStreamRequestStreamResponse(self): + requests = tuple( + b'\x77\x58' for _ in range(test_constants.STREAM_LENGTH)) + + expected_responses = tuple( + self._handler.handle_stream_stream(iter(requests), None)) + request_iterator = iter(requests) + + multi_callable = stream_stream_multi_callable(self._channel) + response_iterator = multi_callable( + request_iterator, + metadata=(('test', 'SuccessfulStreamRequestStreamResponse'),)) + responses = tuple(response_iterator) + + self.assertSequenceEqual(expected_responses, responses) + + def testSequentialInvocations(self): + first_request = b'\x07\x08' + second_request = b'\x0809' + expected_first_response = self._handler.handle_unary_unary( + first_request, None) + expected_second_response = self._handler.handle_unary_unary( + second_request, None) + + multi_callable = unary_unary_multi_callable(self._channel) + first_response = multi_callable(first_request, + metadata=(('test', + 'SequentialInvocations'),)) + second_response = multi_callable(second_request, + metadata=(('test', + 'SequentialInvocations'),)) + + self.assertEqual(expected_first_response, first_response) + self.assertEqual(expected_second_response, second_response) + + def testConcurrentBlockingInvocations(self): + pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + expected_response = self._handler.handle_stream_unary( + iter(requests), None) + expected_responses = [expected_response + ] * test_constants.THREAD_CONCURRENCY + response_futures = [None] * test_constants.THREAD_CONCURRENCY + + multi_callable = stream_unary_multi_callable(self._channel) + for index in range(test_constants.THREAD_CONCURRENCY): + request_iterator = iter(requests) + response_future = pool.submit( + multi_callable, + request_iterator, + metadata=(('test', 'ConcurrentBlockingInvocations'),)) + response_futures[index] = response_future + responses = tuple( + response_future.result() for response_future in response_futures) + + pool.shutdown(wait=True) + 
self.assertSequenceEqual(expected_responses, responses) + + def testConcurrentFutureInvocations(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + expected_response = self._handler.handle_stream_unary( + iter(requests), None) + expected_responses = [expected_response + ] * test_constants.THREAD_CONCURRENCY + response_futures = [None] * test_constants.THREAD_CONCURRENCY + + multi_callable = stream_unary_multi_callable(self._channel) + for index in range(test_constants.THREAD_CONCURRENCY): + request_iterator = iter(requests) + response_future = multi_callable.future( + request_iterator, + metadata=(('test', 'ConcurrentFutureInvocations'),)) + response_futures[index] = response_future + responses = tuple( + response_future.result() for response_future in response_futures) + + self.assertSequenceEqual(expected_responses, responses) + + def testWaitingForSomeButNotAllConcurrentFutureInvocations(self): + pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY) + request = b'\x67\x68' + expected_response = self._handler.handle_unary_unary(request, None) + response_futures = [None] * test_constants.THREAD_CONCURRENCY + lock = threading.Lock() + test_is_running_cell = [True] + + def wrap_future(future): + + def wrap(): + try: + return future.result() + except grpc.RpcError: + with lock: + if test_is_running_cell[0]: + raise + return None + + return wrap + + multi_callable = unary_unary_multi_callable(self._channel) + for index in range(test_constants.THREAD_CONCURRENCY): + inner_response_future = multi_callable.future( + request, + metadata=( + ('test', + 'WaitingForSomeButNotAllConcurrentFutureInvocations'),)) + outer_response_future = pool.submit( + wrap_future(inner_response_future)) + response_futures[index] = outer_response_future + + some_completed_response_futures_iterator = itertools.islice( + futures.as_completed(response_futures), + test_constants.THREAD_CONCURRENCY // 2) + for response_future in some_completed_response_futures_iterator: + self.assertEqual(expected_response, response_future.result()) + with lock: + test_is_running_cell[0] = False + + def testConsumingOneStreamResponseUnaryRequest(self): + self._consume_one_stream_response_unary_request( + unary_stream_multi_callable(self._channel)) + + def testConsumingOneStreamResponseUnaryRequestNonBlocking(self): + self._consume_one_stream_response_unary_request( + unary_stream_non_blocking_multi_callable(self._channel)) + + def testConsumingSomeButNotAllStreamResponsesUnaryRequest(self): + self._consume_some_but_not_all_stream_responses_unary_request( + unary_stream_multi_callable(self._channel)) + + def testConsumingSomeButNotAllStreamResponsesUnaryRequestNonBlocking(self): + self._consume_some_but_not_all_stream_responses_unary_request( + unary_stream_non_blocking_multi_callable(self._channel)) + + def testConsumingSomeButNotAllStreamResponsesStreamRequest(self): + self._consume_some_but_not_all_stream_responses_stream_request( + stream_stream_multi_callable(self._channel)) + + def testConsumingSomeButNotAllStreamResponsesStreamRequestNonBlocking(self): + self._consume_some_but_not_all_stream_responses_stream_request( + stream_stream_non_blocking_multi_callable(self._channel)) + + def testConsumingTooManyStreamResponsesStreamRequest(self): + self._consume_too_many_stream_responses_stream_request( + stream_stream_multi_callable(self._channel)) + + def testConsumingTooManyStreamResponsesStreamRequestNonBlocking(self): + self._consume_too_many_stream_responses_stream_request( + 
stream_stream_non_blocking_multi_callable(self._channel)) + + def testCancelledUnaryRequestUnaryResponse(self): + request = b'\x07\x17' + + multi_callable = unary_unary_multi_callable(self._channel) + with self._control.pause(): + response_future = multi_callable.future( + request, + metadata=(('test', 'CancelledUnaryRequestUnaryResponse'),)) + response_future.cancel() + + self.assertIs(grpc.StatusCode.CANCELLED, response_future.code()) + self.assertTrue(response_future.cancelled()) + with self.assertRaises(grpc.FutureCancelledError): + response_future.result() + with self.assertRaises(grpc.FutureCancelledError): + response_future.exception() + with self.assertRaises(grpc.FutureCancelledError): + response_future.traceback() + + def testCancelledUnaryRequestStreamResponse(self): + self._cancelled_unary_request_stream_response( + unary_stream_multi_callable(self._channel)) + + def testCancelledUnaryRequestStreamResponseNonBlocking(self): + self._cancelled_unary_request_stream_response( + unary_stream_non_blocking_multi_callable(self._channel)) + + def testCancelledStreamRequestUnaryResponse(self): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + multi_callable = stream_unary_multi_callable(self._channel) + with self._control.pause(): + response_future = multi_callable.future( + request_iterator, + metadata=(('test', 'CancelledStreamRequestUnaryResponse'),)) + self._control.block_until_paused() + response_future.cancel() + + self.assertIs(grpc.StatusCode.CANCELLED, response_future.code()) + self.assertTrue(response_future.cancelled()) + with self.assertRaises(grpc.FutureCancelledError): + response_future.result() + with self.assertRaises(grpc.FutureCancelledError): + response_future.exception() + with self.assertRaises(grpc.FutureCancelledError): + response_future.traceback() + self.assertIsNotNone(response_future.initial_metadata()) + self.assertIsNotNone(response_future.details()) + self.assertIsNotNone(response_future.trailing_metadata()) + + def testCancelledStreamRequestStreamResponse(self): + self._cancelled_stream_request_stream_response( + stream_stream_multi_callable(self._channel)) + + def testCancelledStreamRequestStreamResponseNonBlocking(self): + self._cancelled_stream_request_stream_response( + stream_stream_non_blocking_multi_callable(self._channel)) + + def testExpiredUnaryRequestBlockingUnaryResponse(self): + request = b'\x07\x17' + + multi_callable = unary_unary_multi_callable(self._channel) + with self._control.pause(): + with self.assertRaises(grpc.RpcError) as exception_context: + multi_callable.with_call( + request, + timeout=TIMEOUT_SHORT, + metadata=(('test', + 'ExpiredUnaryRequestBlockingUnaryResponse'),)) + + self.assertIsInstance(exception_context.exception, grpc.Call) + self.assertIsNotNone(exception_context.exception.initial_metadata()) + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, + exception_context.exception.code()) + self.assertIsNotNone(exception_context.exception.details()) + self.assertIsNotNone(exception_context.exception.trailing_metadata()) + + def testExpiredUnaryRequestFutureUnaryResponse(self): + request = b'\x07\x17' + callback = Callback() + + multi_callable = unary_unary_multi_callable(self._channel) + with self._control.pause(): + response_future = multi_callable.future( + request, + timeout=TIMEOUT_SHORT, + metadata=(('test', 'ExpiredUnaryRequestFutureUnaryResponse'),)) + response_future.add_done_callback(callback) + value_passed_to_callback = callback.value() + + 
self.assertIs(response_future, value_passed_to_callback) + self.assertIsNotNone(response_future.initial_metadata()) + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, response_future.code()) + self.assertIsNotNone(response_future.details()) + self.assertIsNotNone(response_future.trailing_metadata()) + with self.assertRaises(grpc.RpcError) as exception_context: + response_future.result() + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, + exception_context.exception.code()) + self.assertIsInstance(response_future.exception(), grpc.RpcError) + self.assertIsNotNone(response_future.traceback()) + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, + response_future.exception().code()) + + def testExpiredUnaryRequestStreamResponse(self): + self._expired_unary_request_stream_response( + unary_stream_multi_callable(self._channel)) + + def testExpiredUnaryRequestStreamResponseNonBlocking(self): + self._expired_unary_request_stream_response( + unary_stream_non_blocking_multi_callable(self._channel)) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_test_helpers.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_test_helpers.py new file mode 100644 index 00000000000..a3f18a9a490 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_rpc_test_helpers.py @@ -0,0 +1,417 @@ +# Copyright 2020 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
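testExpiredUnaryRequestFutureUnaryResponse above relies on a per-call deadline: passing timeout= to .future() makes an unresponsive RPC terminate with DEADLINE_EXCEEDED, observable on the future, on its exception(), and on the raised RpcError. A rough standalone sketch, with an assumed server address and method path:

    import grpc

    channel = grpc.insecure_channel('localhost:50051')  # assumed address
    multi_callable = channel.unary_unary('/test/UnaryUnary')  # assumed method
    response_future = multi_callable.future(b'\x07\x17', timeout=0.1)
    try:
        response_future.result()
    except grpc.RpcError as rpc_error:
        # Against a slow or paused server this reports DEADLINE_EXCEEDED;
        # with no server listening at all it would be UNAVAILABLE instead.
        print('status:', rpc_error.code(), response_future.code())
    channel.close()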
+"""Test helpers for RPC invocation tests.""" + +import datetime +import threading + +import grpc +from grpc.framework.foundation import logging_pool + +from tests.unit import test_common +from tests.unit import thread_pool +from tests.unit.framework.common import test_constants +from tests.unit.framework.common import test_control + +_SERIALIZE_REQUEST = lambda bytestring: bytestring * 2 +_DESERIALIZE_REQUEST = lambda bytestring: bytestring[len(bytestring) // 2:] +_SERIALIZE_RESPONSE = lambda bytestring: bytestring * 3 +_DESERIALIZE_RESPONSE = lambda bytestring: bytestring[:len(bytestring) // 3] + +_UNARY_UNARY = '/test/UnaryUnary' +_UNARY_STREAM = '/test/UnaryStream' +_UNARY_STREAM_NON_BLOCKING = '/test/UnaryStreamNonBlocking' +_STREAM_UNARY = '/test/StreamUnary' +_STREAM_STREAM = '/test/StreamStream' +_STREAM_STREAM_NON_BLOCKING = '/test/StreamStreamNonBlocking' + +TIMEOUT_SHORT = datetime.timedelta(seconds=1).total_seconds() + + +class Callback(object): + + def __init__(self): + self._condition = threading.Condition() + self._value = None + self._called = False + + def __call__(self, value): + with self._condition: + self._value = value + self._called = True + self._condition.notify_all() + + def value(self): + with self._condition: + while not self._called: + self._condition.wait() + return self._value + + +class _Handler(object): + + def __init__(self, control, thread_pool): + self._control = control + self._thread_pool = thread_pool + non_blocking_functions = (self.handle_unary_stream_non_blocking, + self.handle_stream_stream_non_blocking) + for non_blocking_function in non_blocking_functions: + non_blocking_function.__func__.experimental_non_blocking = True + non_blocking_function.__func__.experimental_thread_pool = self._thread_pool + + def handle_unary_unary(self, request, servicer_context): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + # TODO(https://github.com/grpc/grpc/issues/8483): test the values + # returned by these methods rather than only "smoke" testing that + # the return after having been called. 
+ servicer_context.is_active() + servicer_context.time_remaining() + return request + + def handle_unary_stream(self, request, servicer_context): + for _ in range(test_constants.STREAM_LENGTH): + self._control.control() + yield request + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + + def handle_unary_stream_non_blocking(self, request, servicer_context, + on_next): + for _ in range(test_constants.STREAM_LENGTH): + self._control.control() + on_next(request) + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + on_next(None) + + def handle_stream_unary(self, request_iterator, servicer_context): + if servicer_context is not None: + servicer_context.invocation_metadata() + self._control.control() + response_elements = [] + for request in request_iterator: + self._control.control() + response_elements.append(request) + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + return b''.join(response_elements) + + def handle_stream_stream(self, request_iterator, servicer_context): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + for request in request_iterator: + self._control.control() + yield request + self._control.control() + + def handle_stream_stream_non_blocking(self, request_iterator, + servicer_context, on_next): + self._control.control() + if servicer_context is not None: + servicer_context.set_trailing_metadata((( + 'testkey', + 'testvalue', + ),)) + for request in request_iterator: + self._control.control() + on_next(request) + self._control.control() + on_next(None) + + +class _MethodHandler(grpc.RpcMethodHandler): + + def __init__(self, request_streaming, response_streaming, + request_deserializer, response_serializer, unary_unary, + unary_stream, stream_unary, stream_stream): + self.request_streaming = request_streaming + self.response_streaming = response_streaming + self.request_deserializer = request_deserializer + self.response_serializer = response_serializer + self.unary_unary = unary_unary + self.unary_stream = unary_stream + self.stream_unary = stream_unary + self.stream_stream = stream_stream + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self, handler): + self._handler = handler + + def service(self, handler_call_details): + if handler_call_details.method == _UNARY_UNARY: + return _MethodHandler(False, False, None, None, + self._handler.handle_unary_unary, None, None, + None) + elif handler_call_details.method == _UNARY_STREAM: + return _MethodHandler(False, True, _DESERIALIZE_REQUEST, + _SERIALIZE_RESPONSE, None, + self._handler.handle_unary_stream, None, None) + elif handler_call_details.method == _UNARY_STREAM_NON_BLOCKING: + return _MethodHandler( + False, True, _DESERIALIZE_REQUEST, _SERIALIZE_RESPONSE, None, + self._handler.handle_unary_stream_non_blocking, None, None) + elif handler_call_details.method == _STREAM_UNARY: + return _MethodHandler(True, False, _DESERIALIZE_REQUEST, + _SERIALIZE_RESPONSE, None, None, + self._handler.handle_stream_unary, None) + elif handler_call_details.method == _STREAM_STREAM: + return _MethodHandler(True, True, None, None, None, None, None, + self._handler.handle_stream_stream) + elif handler_call_details.method == _STREAM_STREAM_NON_BLOCKING: + return 
_MethodHandler( + True, True, None, None, None, None, None, + self._handler.handle_stream_stream_non_blocking) + else: + return None + + +def unary_unary_multi_callable(channel): + return channel.unary_unary(_UNARY_UNARY) + + +def unary_stream_multi_callable(channel): + return channel.unary_stream(_UNARY_STREAM, + request_serializer=_SERIALIZE_REQUEST, + response_deserializer=_DESERIALIZE_RESPONSE) + + +def unary_stream_non_blocking_multi_callable(channel): + return channel.unary_stream(_UNARY_STREAM_NON_BLOCKING, + request_serializer=_SERIALIZE_REQUEST, + response_deserializer=_DESERIALIZE_RESPONSE) + + +def stream_unary_multi_callable(channel): + return channel.stream_unary(_STREAM_UNARY, + request_serializer=_SERIALIZE_REQUEST, + response_deserializer=_DESERIALIZE_RESPONSE) + + +def stream_stream_multi_callable(channel): + return channel.stream_stream(_STREAM_STREAM) + + +def stream_stream_non_blocking_multi_callable(channel): + return channel.stream_stream(_STREAM_STREAM_NON_BLOCKING) + + +class BaseRPCTest(object): + + def setUp(self): + self._control = test_control.PauseFailControl() + self._thread_pool = thread_pool.RecordingThreadPool(max_workers=None) + self._handler = _Handler(self._control, self._thread_pool) + + self._server = test_common.test_server() + port = self._server.add_insecure_port('[::]:0') + self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),)) + self._server.start() + + self._channel = grpc.insecure_channel('localhost:%d' % port) + + def tearDown(self): + self._server.stop(None) + self._channel.close() + + def _consume_one_stream_response_unary_request(self, multi_callable): + request = b'\x57\x38' + + response_iterator = multi_callable( + request, + metadata=(('test', 'ConsumingOneStreamResponseUnaryRequest'),)) + next(response_iterator) + + def _consume_some_but_not_all_stream_responses_unary_request( + self, multi_callable): + request = b'\x57\x38' + + response_iterator = multi_callable( + request, + metadata=(('test', + 'ConsumingSomeButNotAllStreamResponsesUnaryRequest'),)) + for _ in range(test_constants.STREAM_LENGTH // 2): + next(response_iterator) + + def _consume_some_but_not_all_stream_responses_stream_request( + self, multi_callable): + requests = tuple( + b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + response_iterator = multi_callable( + request_iterator, + metadata=(('test', + 'ConsumingSomeButNotAllStreamResponsesStreamRequest'),)) + for _ in range(test_constants.STREAM_LENGTH // 2): + next(response_iterator) + + def _consume_too_many_stream_responses_stream_request(self, multi_callable): + requests = tuple( + b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + response_iterator = multi_callable( + request_iterator, + metadata=(('test', + 'ConsumingTooManyStreamResponsesStreamRequest'),)) + for _ in range(test_constants.STREAM_LENGTH): + next(response_iterator) + for _ in range(test_constants.STREAM_LENGTH): + with self.assertRaises(StopIteration): + next(response_iterator) + + self.assertIsNotNone(response_iterator.initial_metadata()) + self.assertIs(grpc.StatusCode.OK, response_iterator.code()) + self.assertIsNotNone(response_iterator.details()) + self.assertIsNotNone(response_iterator.trailing_metadata()) + + def _cancelled_unary_request_stream_response(self, multi_callable): + request = b'\x07\x19' + + with self._control.pause(): + response_iterator = multi_callable( + request, + metadata=(('test', 
'CancelledUnaryRequestStreamResponse'),)) + self._control.block_until_paused() + response_iterator.cancel() + + with self.assertRaises(grpc.RpcError) as exception_context: + next(response_iterator) + self.assertIs(grpc.StatusCode.CANCELLED, + exception_context.exception.code()) + self.assertIsNotNone(response_iterator.initial_metadata()) + self.assertIs(grpc.StatusCode.CANCELLED, response_iterator.code()) + self.assertIsNotNone(response_iterator.details()) + self.assertIsNotNone(response_iterator.trailing_metadata()) + + def _cancelled_stream_request_stream_response(self, multi_callable): + requests = tuple( + b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + with self._control.pause(): + response_iterator = multi_callable( + request_iterator, + metadata=(('test', 'CancelledStreamRequestStreamResponse'),)) + response_iterator.cancel() + + with self.assertRaises(grpc.RpcError): + next(response_iterator) + self.assertIsNotNone(response_iterator.initial_metadata()) + self.assertIs(grpc.StatusCode.CANCELLED, response_iterator.code()) + self.assertIsNotNone(response_iterator.details()) + self.assertIsNotNone(response_iterator.trailing_metadata()) + + def _expired_unary_request_stream_response(self, multi_callable): + request = b'\x07\x19' + + with self._control.pause(): + with self.assertRaises(grpc.RpcError) as exception_context: + response_iterator = multi_callable( + request, + timeout=test_constants.SHORT_TIMEOUT, + metadata=(('test', 'ExpiredUnaryRequestStreamResponse'),)) + next(response_iterator) + + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, + exception_context.exception.code()) + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, + response_iterator.code()) + + def _expired_stream_request_stream_response(self, multi_callable): + requests = tuple( + b'\x67\x18' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + with self._control.pause(): + with self.assertRaises(grpc.RpcError) as exception_context: + response_iterator = multi_callable( + request_iterator, + timeout=test_constants.SHORT_TIMEOUT, + metadata=(('test', 'ExpiredStreamRequestStreamResponse'),)) + next(response_iterator) + + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, + exception_context.exception.code()) + self.assertIs(grpc.StatusCode.DEADLINE_EXCEEDED, + response_iterator.code()) + + def _failed_unary_request_stream_response(self, multi_callable): + request = b'\x37\x17' + + with self.assertRaises(grpc.RpcError) as exception_context: + with self._control.fail(): + response_iterator = multi_callable( + request, + metadata=(('test', 'FailedUnaryRequestStreamResponse'),)) + next(response_iterator) + + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + + def _failed_stream_request_stream_response(self, multi_callable): + requests = tuple( + b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + with self._control.fail(): + with self.assertRaises(grpc.RpcError) as exception_context: + response_iterator = multi_callable( + request_iterator, + metadata=(('test', 'FailedStreamRequestStreamResponse'),)) + tuple(response_iterator) + + self.assertIs(grpc.StatusCode.UNKNOWN, + exception_context.exception.code()) + self.assertIs(grpc.StatusCode.UNKNOWN, response_iterator.code()) + + def _ignored_unary_stream_request_future_unary_response( + self, multi_callable): + request = b'\x37\x17' + + multi_callable(request, + metadata=(('test', + 'IgnoredUnaryRequestStreamResponse'),)) 
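The cancellation helpers above hinge on the fact that the iterator returned by a response-streaming call is also a grpc.Call, so it can be cancelled and then reports CANCELLED when drained. A small sketch of that client-side pattern, with a placeholder target and method name:

    import grpc

    channel = grpc.insecure_channel('localhost:50051')  # placeholder target
    stream_call = channel.unary_stream('/test/UnaryStream')(b'\x57\x38')
    stream_call.cancel()
    try:
        next(stream_call)
    except grpc.RpcError as rpc_error:
        # Draining a locally cancelled call raises RpcError with StatusCode.CANCELLED,
        # and the call object itself reports the same code.
        print('after cancel:', rpc_error.code(), stream_call.code())
    channel.close()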
+ + def _ignored_stream_request_stream_response(self, multi_callable): + requests = tuple( + b'\x67\x88' for _ in range(test_constants.STREAM_LENGTH)) + request_iterator = iter(requests) + + multi_callable(request_iterator, + metadata=(('test', + 'IgnoredStreamRequestStreamResponse'),)) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py new file mode 100644 index 00000000000..1d1fdba11ee --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py @@ -0,0 +1,97 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Defines a number of module-scope gRPC scenarios to test server shutdown.""" + +import argparse +import os +import threading +import time +import logging + +import grpc +from tests.unit import test_common + +from concurrent import futures +from six.moves import queue + +WAIT_TIME = 1000 + +REQUEST = b'request' +RESPONSE = b'response' + +SERVER_RAISES_EXCEPTION = 'server_raises_exception' +SERVER_DEALLOCATED = 'server_deallocated' +SERVER_FORK_CAN_EXIT = 'server_fork_can_exit' + +FORK_EXIT = '/test/ForkExit' + + +def fork_and_exit(request, servicer_context): + pid = os.fork() + if pid == 0: + os._exit(0) + return RESPONSE + + +class GenericHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + if handler_call_details.method == FORK_EXIT: + return grpc.unary_unary_rpc_method_handler(fork_and_exit) + else: + return None + + +def run_server(port_queue): + server = test_common.test_server() + port = server.add_insecure_port('[::]:0') + port_queue.put(port) + server.add_generic_rpc_handlers((GenericHandler(),)) + server.start() + # threading.Event.wait() does not exhibit the bug identified in + # https://github.com/grpc/grpc/issues/17093, sleep instead + time.sleep(WAIT_TIME) + + +def run_test(args): + if args.scenario == SERVER_RAISES_EXCEPTION: + server = test_common.test_server() + server.start() + raise Exception() + elif args.scenario == SERVER_DEALLOCATED: + server = test_common.test_server() + server.start() + server.__del__() + while server._state.stage != grpc._server._ServerStage.STOPPED: + pass + elif args.scenario == SERVER_FORK_CAN_EXIT: + port_queue = queue.Queue() + thread = threading.Thread(target=run_server, args=(port_queue,)) + thread.daemon = True + thread.start() + port = port_queue.get() + channel = grpc.insecure_channel('localhost:%d' % port) + multi_callable = channel.unary_unary(FORK_EXIT) + result, call = multi_callable.with_call(REQUEST, wait_for_ready=True) + os.wait() + else: + raise ValueError('unknown test scenario') + + +if __name__ == '__main__': + logging.basicConfig() + parser = argparse.ArgumentParser() + parser.add_argument('scenario', type=str) + args = parser.parse_args() + run_test(args) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_test.py 
b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_test.py new file mode 100644 index 00000000000..c1dc7585f80 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_shutdown_test.py @@ -0,0 +1,95 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests clean shutdown of server on various interpreter exit conditions. + +The tests in this module spawn a subprocess for each test case, the +test is considered successful if it doesn't hang/timeout. +""" + +import atexit +import os +import subprocess +import sys +import threading +import unittest +import logging + +from tests.unit import _server_shutdown_scenarios + +INTERPRETER = sys.executable +BASE_COMMAND = [INTERPRETER, '-m', 'tests.unit._server_shutdown_scenarios'] + +processes = [] +process_lock = threading.Lock() + + +# Make sure we attempt to clean up any +# processes we may have left running +def cleanup_processes(): + with process_lock: + for process in processes: + try: + process.kill() + except Exception: # pylint: disable=broad-except + pass + + +atexit.register(cleanup_processes) + + +def wait(process): + with process_lock: + processes.append(process) + process.wait() + + +class ServerShutdown(unittest.TestCase): + + # Currently we shut down a server (if possible) after the Python server + # instance is garbage collected. This behavior may change in the future. + def test_deallocated_server_stops(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_server_shutdown_scenarios.SERVER_DEALLOCATED], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + def test_server_exception_exits(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_server_shutdown_scenarios.SERVER_RAISES_EXCEPTION], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + @unittest.skipIf(os.name == 'nt', 'fork not supported on windows') + def test_server_fork_can_exit(self): + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + process = subprocess.Popen( + BASE_COMMAND + [_server_shutdown_scenarios.SERVER_FORK_CAN_EXIT], + stdout=sys.stdout, + stderr=sys.stderr, + env=env) + wait(process) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py new file mode 100644 index 00000000000..35d992a33d6 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_ssl_cert_config_test.py @@ -0,0 +1,511 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests server certificate rotation. + +Here we test various aspects of gRPC Python, and in some cases gRPC +Core by extension, support for server certificate rotation. + +* ServerSSLCertReloadTestWithClientAuth: test ability to rotate + server's SSL cert for use in future channels with clients while not + affecting any existing channel. The server requires client + authentication. + +* ServerSSLCertReloadTestWithoutClientAuth: like + ServerSSLCertReloadTestWithClientAuth except that the server does + not authenticate the client. + +* ServerSSLCertReloadTestCertConfigReuse: tests gRPC Python's ability + to deal with user's reuse of ServerCertificateConfiguration instances. +""" + +import abc +import collections +import os +import six +import threading +import unittest +import logging + +from concurrent import futures + +import grpc +from tests.unit import resources +from tests.unit import test_common +from tests.testing import _application_common +from tests.testing import _server_application +from tests.testing.proto import services_pb2_grpc + +CA_1_PEM = resources.cert_hier_1_root_ca_cert() +CA_2_PEM = resources.cert_hier_2_root_ca_cert() + +CLIENT_KEY_1_PEM = resources.cert_hier_1_client_1_key() +CLIENT_CERT_CHAIN_1_PEM = (resources.cert_hier_1_client_1_cert() + + resources.cert_hier_1_intermediate_ca_cert()) + +CLIENT_KEY_2_PEM = resources.cert_hier_2_client_1_key() +CLIENT_CERT_CHAIN_2_PEM = (resources.cert_hier_2_client_1_cert() + + resources.cert_hier_2_intermediate_ca_cert()) + +SERVER_KEY_1_PEM = resources.cert_hier_1_server_1_key() +SERVER_CERT_CHAIN_1_PEM = (resources.cert_hier_1_server_1_cert() + + resources.cert_hier_1_intermediate_ca_cert()) + +SERVER_KEY_2_PEM = resources.cert_hier_2_server_1_key() +SERVER_CERT_CHAIN_2_PEM = (resources.cert_hier_2_server_1_cert() + + resources.cert_hier_2_intermediate_ca_cert()) + +# for use with the CertConfigFetcher. 
Roughly a simple custom mock +# implementation +Call = collections.namedtuple('Call', ['did_raise', 'returned_cert_config']) + + +def _create_channel(port, credentials): + return grpc.secure_channel('localhost:{}'.format(port), credentials) + + +def _create_client_stub(channel, expect_success): + if expect_success: + # per Nathaniel: there's some robustness issue if we start + # using a channel without waiting for it to be actually ready + grpc.channel_ready_future(channel).result(timeout=10) + return services_pb2_grpc.FirstServiceStub(channel) + + +class CertConfigFetcher(object): + + def __init__(self): + self._lock = threading.Lock() + self._calls = [] + self._should_raise = False + self._cert_config = None + + def reset(self): + with self._lock: + self._calls = [] + self._should_raise = False + self._cert_config = None + + def configure(self, should_raise, cert_config): + assert not (should_raise and cert_config), ( + "should not specify both should_raise and a cert_config at the same time" + ) + with self._lock: + self._should_raise = should_raise + self._cert_config = cert_config + + def getCalls(self): + with self._lock: + return self._calls + + def __call__(self): + with self._lock: + if self._should_raise: + self._calls.append(Call(True, None)) + raise ValueError('just for fun, should not affect the test') + else: + self._calls.append(Call(False, self._cert_config)) + return self._cert_config + + +class _ServerSSLCertReloadTest( + six.with_metaclass(abc.ABCMeta, unittest.TestCase)): + + def __init__(self, *args, **kwargs): + super(_ServerSSLCertReloadTest, self).__init__(*args, **kwargs) + self.server = None + self.port = None + + @abc.abstractmethod + def require_client_auth(self): + raise NotImplementedError() + + def setUp(self): + self.server = test_common.test_server() + services_pb2_grpc.add_FirstServiceServicer_to_server( + _server_application.FirstServiceServicer(), self.server) + switch_cert_on_client_num = 10 + initial_cert_config = grpc.ssl_server_certificate_configuration( + [(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)], + root_certificates=CA_2_PEM) + self.cert_config_fetcher = CertConfigFetcher() + server_credentials = grpc.dynamic_ssl_server_credentials( + initial_cert_config, + self.cert_config_fetcher, + require_client_authentication=self.require_client_auth()) + self.port = self.server.add_secure_port('[::]:0', server_credentials) + self.server.start() + + def tearDown(self): + if self.server: + self.server.stop(None) + + def _perform_rpc(self, client_stub, expect_success): + # we don't care about the actual response of the rpc; only + # whether we can perform it or not, and if not, the status + # code must be UNAVAILABLE + request = _application_common.UNARY_UNARY_REQUEST + if expect_success: + response = client_stub.UnUn(request) + self.assertEqual(response, _application_common.UNARY_UNARY_RESPONSE) + else: + with self.assertRaises(grpc.RpcError) as exception_context: + client_stub.UnUn(request) + # If TLS 1.2 is used, then the client receives an alert message + # before the handshake is complete, so the status is UNAVAILABLE. If + # TLS 1.3 is used, then the client receives the alert message after + # the handshake is complete, so the TSI handshaker returns the + # TSI_PROTOCOL_FAILURE result. This result does not have a + # corresponding status code, so this yields an UNKNOWN status. 
+ self.assertTrue(exception_context.exception.code( + ) in [grpc.StatusCode.UNAVAILABLE, grpc.StatusCode.UNKNOWN]) + + def _do_one_shot_client_rpc(self, + expect_success, + root_certificates=None, + private_key=None, + certificate_chain=None): + credentials = grpc.ssl_channel_credentials( + root_certificates=root_certificates, + private_key=private_key, + certificate_chain=certificate_chain) + with _create_channel(self.port, credentials) as client_channel: + client_stub = _create_client_stub(client_channel, expect_success) + self._perform_rpc(client_stub, expect_success) + + def _test(self): + # things should work... + self.cert_config_fetcher.configure(False, None) + self._do_one_shot_client_rpc(True, + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + # client should reject server... + # fails because client trusts ca2 and so will reject server + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + self._do_one_shot_client_rpc(False, + root_certificates=CA_2_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) + + # should work again... + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(True, None) + self._do_one_shot_client_rpc(True, + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertTrue(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + # if with_client_auth, then client should be rejected by + # server because client uses key/cert1, but server trusts ca2, + # so server will reject + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + self._do_one_shot_client_rpc(not self.require_client_auth(), + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_1_PEM, + certificate_chain=CLIENT_CERT_CHAIN_1_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) + + # should work again... 
+ self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + self._do_one_shot_client_rpc(True, + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + # now create the "persistent" clients + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + channel_A = _create_channel( + self.port, + grpc.ssl_channel_credentials( + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM)) + persistent_client_stub_A = _create_client_stub(channel_A, True) + self._perform_rpc(persistent_client_stub_A, True) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + channel_B = _create_channel( + self.port, + grpc.ssl_channel_credentials( + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM)) + persistent_client_stub_B = _create_client_stub(channel_B, True) + self._perform_rpc(persistent_client_stub_B, True) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + # moment of truth!! client should reject server because the + # server switch cert... + cert_config = grpc.ssl_server_certificate_configuration( + [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)], + root_certificates=CA_1_PEM) + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, cert_config) + self._do_one_shot_client_rpc(False, + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertEqual(call.returned_cert_config, cert_config, + 'i= {}'.format(i)) + + # now should work again... + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + self._do_one_shot_client_rpc(True, + root_certificates=CA_2_PEM, + private_key=CLIENT_KEY_1_PEM, + certificate_chain=CLIENT_CERT_CHAIN_1_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertIsNone(actual_calls[0].returned_cert_config) + + # client should be rejected by server if with_client_auth + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + self._do_one_shot_client_rpc(not self.require_client_auth(), + root_certificates=CA_2_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) + + # here client should reject server... 
+ self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + self._do_one_shot_client_rpc(False, + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertIsNone(call.returned_cert_config, 'i= {}'.format(i)) + + # persistent clients should continue to work + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + self._perform_rpc(persistent_client_stub_A, True) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 0) + + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, None) + self._perform_rpc(persistent_client_stub_B, True) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 0) + + channel_A.close() + channel_B.close() + + +class ServerSSLCertConfigFetcherParamsChecks(unittest.TestCase): + + def test_check_on_initial_config(self): + with self.assertRaises(TypeError): + grpc.dynamic_ssl_server_credentials(None, str) + with self.assertRaises(TypeError): + grpc.dynamic_ssl_server_credentials(1, str) + + def test_check_on_config_fetcher(self): + cert_config = grpc.ssl_server_certificate_configuration( + [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)], + root_certificates=CA_1_PEM) + with self.assertRaises(TypeError): + grpc.dynamic_ssl_server_credentials(cert_config, None) + with self.assertRaises(TypeError): + grpc.dynamic_ssl_server_credentials(cert_config, 1) + + +class ServerSSLCertReloadTestWithClientAuth(_ServerSSLCertReloadTest): + + def require_client_auth(self): + return True + + test = _ServerSSLCertReloadTest._test + + +class ServerSSLCertReloadTestWithoutClientAuth(_ServerSSLCertReloadTest): + + def require_client_auth(self): + return False + + test = _ServerSSLCertReloadTest._test + + +class ServerSSLCertReloadTestCertConfigReuse(_ServerSSLCertReloadTest): + """Ensures that `ServerCertificateConfiguration` instances can be reused. + + Because gRPC Core takes ownership of the + `grpc_ssl_server_certificate_config` encapsulated by + `ServerCertificateConfiguration`, this test reuses the same + `ServerCertificateConfiguration` instances multiple times to make sure + gRPC Python takes care of maintaining the validity of + `ServerCertificateConfiguration` instances, so that such instances can be + re-used by user application. 
+ """ + + def require_client_auth(self): + return True + + def setUp(self): + self.server = test_common.test_server() + services_pb2_grpc.add_FirstServiceServicer_to_server( + _server_application.FirstServiceServicer(), self.server) + self.cert_config_A = grpc.ssl_server_certificate_configuration( + [(SERVER_KEY_1_PEM, SERVER_CERT_CHAIN_1_PEM)], + root_certificates=CA_2_PEM) + self.cert_config_B = grpc.ssl_server_certificate_configuration( + [(SERVER_KEY_2_PEM, SERVER_CERT_CHAIN_2_PEM)], + root_certificates=CA_1_PEM) + self.cert_config_fetcher = CertConfigFetcher() + server_credentials = grpc.dynamic_ssl_server_credentials( + self.cert_config_A, + self.cert_config_fetcher, + require_client_authentication=True) + self.port = self.server.add_secure_port('[::]:0', server_credentials) + self.server.start() + + def test_cert_config_reuse(self): + + # succeed with A + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_A) + self._do_one_shot_client_rpc(True, + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertEqual(actual_calls[0].returned_cert_config, + self.cert_config_A) + + # fail with A + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_A) + self._do_one_shot_client_rpc(False, + root_certificates=CA_2_PEM, + private_key=CLIENT_KEY_1_PEM, + certificate_chain=CLIENT_CERT_CHAIN_1_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertEqual(call.returned_cert_config, self.cert_config_A, + 'i= {}'.format(i)) + + # succeed again with A + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_A) + self._do_one_shot_client_rpc(True, + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertEqual(actual_calls[0].returned_cert_config, + self.cert_config_A) + + # succeed with B + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_B) + self._do_one_shot_client_rpc(True, + root_certificates=CA_2_PEM, + private_key=CLIENT_KEY_1_PEM, + certificate_chain=CLIENT_CERT_CHAIN_1_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertEqual(actual_calls[0].returned_cert_config, + self.cert_config_B) + + # fail with B + self.cert_config_fetcher.reset() + self.cert_config_fetcher.configure(False, self.cert_config_B) + self._do_one_shot_client_rpc(False, + root_certificates=CA_1_PEM, + private_key=CLIENT_KEY_2_PEM, + certificate_chain=CLIENT_CERT_CHAIN_2_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertGreaterEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + for i, call in enumerate(actual_calls): + self.assertFalse(call.did_raise, 'i= {}'.format(i)) + self.assertEqual(call.returned_cert_config, self.cert_config_B, + 'i= {}'.format(i)) + + # succeed again with B + self.cert_config_fetcher.reset() + 
self.cert_config_fetcher.configure(False, self.cert_config_B) + self._do_one_shot_client_rpc(True, + root_certificates=CA_2_PEM, + private_key=CLIENT_KEY_1_PEM, + certificate_chain=CLIENT_CERT_CHAIN_1_PEM) + actual_calls = self.cert_config_fetcher.getCalls() + self.assertEqual(len(actual_calls), 1) + self.assertFalse(actual_calls[0].did_raise) + self.assertEqual(actual_calls[0].returned_cert_config, + self.cert_config_B) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py new file mode 100644 index 00000000000..3c519219d59 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_test.py @@ -0,0 +1,69 @@ +# Copyright 2018 The gRPC Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from concurrent import futures +import unittest +import logging + +import grpc + +from tests.unit import resources + + +class _ActualGenericRpcHandler(grpc.GenericRpcHandler): + + def service(self, handler_call_details): + return None + + +class ServerTest(unittest.TestCase): + + def test_not_a_generic_rpc_handler_at_construction(self): + with self.assertRaises(AttributeError) as exception_context: + grpc.server(futures.ThreadPoolExecutor(max_workers=5), + handlers=[ + _ActualGenericRpcHandler(), + object(), + ]) + self.assertIn('grpc.GenericRpcHandler', + str(exception_context.exception)) + + def test_not_a_generic_rpc_handler_after_construction(self): + server = grpc.server(futures.ThreadPoolExecutor(max_workers=5)) + with self.assertRaises(AttributeError) as exception_context: + server.add_generic_rpc_handlers([ + _ActualGenericRpcHandler(), + object(), + ]) + self.assertIn('grpc.GenericRpcHandler', + str(exception_context.exception)) + + def test_failed_port_binding_exception(self): + server = grpc.server(None, options=(('grpc.so_reuseport', 0),)) + port = server.add_insecure_port('localhost:0') + bind_address = "localhost:%d" % port + + with self.assertRaises(RuntimeError): + server.add_insecure_port(bind_address) + + server_credentials = grpc.ssl_server_credentials([ + (resources.private_key(), resources.certificate_chain()) + ]) + with self.assertRaises(RuntimeError): + server.add_secure_port(bind_address, server_credentials) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py new file mode 100644 index 00000000000..3dd95ea8bf6 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_server_wait_for_termination_test.py @@ -0,0 +1,91 @@ +# Copyright 2019 The gRPC Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import division + +import datetime +from concurrent import futures +import unittest +import time +import threading +import six + +import grpc +from tests.unit.framework.common import test_constants + +_WAIT_FOR_BLOCKING = datetime.timedelta(seconds=1) + + +def _block_on_waiting(server, termination_event, timeout=None): + server.start() + server.wait_for_termination(timeout=timeout) + termination_event.set() + + +class ServerWaitForTerminationTest(unittest.TestCase): + + def test_unblock_by_invoking_stop(self): + termination_event = threading.Event() + server = grpc.server(futures.ThreadPoolExecutor()) + + wait_thread = threading.Thread(target=_block_on_waiting, + args=( + server, + termination_event, + )) + wait_thread.daemon = True + wait_thread.start() + time.sleep(_WAIT_FOR_BLOCKING.total_seconds()) + + server.stop(None) + termination_event.wait(timeout=test_constants.SHORT_TIMEOUT) + self.assertTrue(termination_event.is_set()) + + def test_unblock_by_del(self): + termination_event = threading.Event() + server = grpc.server(futures.ThreadPoolExecutor()) + + wait_thread = threading.Thread(target=_block_on_waiting, + args=( + server, + termination_event, + )) + wait_thread.daemon = True + wait_thread.start() + time.sleep(_WAIT_FOR_BLOCKING.total_seconds()) + + # Invoke manually here, in Python 2 it will be invoked by GC sometime. + server.__del__() + termination_event.wait(timeout=test_constants.SHORT_TIMEOUT) + self.assertTrue(termination_event.is_set()) + + def test_unblock_by_timeout(self): + termination_event = threading.Event() + server = grpc.server(futures.ThreadPoolExecutor()) + + wait_thread = threading.Thread(target=_block_on_waiting, + args=( + server, + termination_event, + test_constants.SHORT_TIMEOUT / 2, + )) + wait_thread.daemon = True + wait_thread.start() + + termination_event.wait(timeout=test_constants.SHORT_TIMEOUT) + self.assertTrue(termination_event.is_set()) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py new file mode 100644 index 00000000000..9bff4d2af00 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_session_cache_test.py @@ -0,0 +1,140 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
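The wait_for_termination tests above show the three ways that call can return: an explicit stop(), deletion of the server object, or an elapsed timeout. A brief sketch of the stop() case, using an arbitrary worker count and an ephemeral port as assumptions:

    from concurrent import futures
    import threading
    import grpc

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=2))
    server.add_insecure_port('[::]:0')  # ephemeral port; no handlers needed for this sketch
    server.start()

    # Stop the server from another thread shortly after starting it,
    # so the blocking wait below can return.
    threading.Timer(0.5, server.stop, args=(None,)).start()

    # Blocks until the server has fully stopped (or until the optional timeout elapses).
    server.wait_for_termination(timeout=5)
    print('server terminated or timeout elapsed')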
+"""Tests experimental TLS Session Resumption API""" + +import pickle +import unittest +import logging + +import grpc +from grpc import _channel +from grpc.experimental import session_cache + +from tests.unit import test_common +from tests.unit import resources + +_REQUEST = b'\x00\x00\x00' +_RESPONSE = b'\x00\x00\x00' + +_UNARY_UNARY = '/test/UnaryUnary' + +_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' +_ID = 'id' +_ID_KEY = 'id_key' +_AUTH_CTX = 'auth_ctx' + +_PRIVATE_KEY = resources.private_key() +_CERTIFICATE_CHAIN = resources.certificate_chain() +_TEST_ROOT_CERTIFICATES = resources.test_root_certificates() +_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),) +_PROPERTY_OPTIONS = (( + 'grpc.ssl_target_name_override', + _SERVER_HOST_OVERRIDE, +),) + + +def handle_unary_unary(request, servicer_context): + return pickle.dumps({ + _ID: servicer_context.peer_identities(), + _ID_KEY: servicer_context.peer_identity_key(), + _AUTH_CTX: servicer_context.auth_context() + }) + + +def start_secure_server(): + handler = grpc.method_handlers_generic_handler( + 'test', + {'UnaryUnary': grpc.unary_unary_rpc_method_handler(handle_unary_unary)}) + server = test_common.test_server() + server.add_generic_rpc_handlers((handler,)) + server_cred = grpc.ssl_server_credentials(_SERVER_CERTS) + port = server.add_secure_port('[::]:0', server_cred) + server.start() + + return server, port + + +class SSLSessionCacheTest(unittest.TestCase): + + def _do_one_shot_client_rpc(self, channel_creds, channel_options, port, + expect_ssl_session_reused): + channel = grpc.secure_channel('localhost:{}'.format(port), + channel_creds, + options=channel_options) + response = channel.unary_unary(_UNARY_UNARY)(_REQUEST) + auth_data = pickle.loads(response) + self.assertEqual(expect_ssl_session_reused, + auth_data[_AUTH_CTX]['ssl_session_reused']) + channel.close() + + def testSSLSessionCacheLRU(self): + server_1, port_1 = start_secure_server() + + cache = session_cache.ssl_session_cache_lru(1) + channel_creds = grpc.ssl_channel_credentials( + root_certificates=_TEST_ROOT_CERTIFICATES) + channel_options = _PROPERTY_OPTIONS + ( + ('grpc.ssl_session_cache', cache),) + + # Initial connection has no session to resume + self._do_one_shot_client_rpc(channel_creds, + channel_options, + port_1, + expect_ssl_session_reused=[b'false']) + + # Connection to server_1 resumes from initial session + self._do_one_shot_client_rpc(channel_creds, + channel_options, + port_1, + expect_ssl_session_reused=[b'true']) + + # Connection to a different server with the same name overwrites the cache entry + server_2, port_2 = start_secure_server() + self._do_one_shot_client_rpc(channel_creds, + channel_options, + port_2, + expect_ssl_session_reused=[b'false']) + self._do_one_shot_client_rpc(channel_creds, + channel_options, + port_2, + expect_ssl_session_reused=[b'true']) + server_2.stop(None) + + # Connection to server_1 now falls back to full TLS handshake + self._do_one_shot_client_rpc(channel_creds, + channel_options, + port_1, + expect_ssl_session_reused=[b'false']) + + # Re-creating server_1 causes old sessions to become invalid + server_1.stop(None) + server_1, port_1 = start_secure_server() + + # Old sessions should no longer be valid + self._do_one_shot_client_rpc(channel_creds, + channel_options, + port_1, + expect_ssl_session_reused=[b'false']) + + # Resumption should work for subsequent connections + self._do_one_shot_client_rpc(channel_creds, + channel_options, + port_1, + expect_ssl_session_reused=[b'true']) + server_1.stop(None) + + +if 
__name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py new file mode 100644 index 00000000000..0be1270749c --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_client.py @@ -0,0 +1,119 @@ +# Copyright 2019 the gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Client for testing responsiveness to signals.""" + +from __future__ import print_function + +import argparse +import functools +import logging +import signal +import sys + +import grpc + +SIGTERM_MESSAGE = "Handling sigterm!" + +UNARY_UNARY = "/test/Unary" +UNARY_STREAM = "/test/ServerStreaming" + +_MESSAGE = b'\x00\x00\x00' + +_ASSERTION_MESSAGE = "Control flow should never reach here." + +# NOTE(gnossen): We use a global variable here so that the signal handler can be +# installed before the RPC begins. If we do not do this, then we may receive the +# SIGINT before the signal handler is installed. I'm not happy with per-process +# global state, but the per-process global state that is signal handlers +# somewhat forces my hand. +per_process_rpc_future = None + + +def handle_sigint(unused_signum, unused_frame): + print(SIGTERM_MESSAGE) + if per_process_rpc_future is not None: + per_process_rpc_future.cancel() + sys.stderr.flush() + # This sys.exit(0) avoids an exception caused by the cancelled RPC. + sys.exit(0) + + +def main_unary(server_target): + """Initiate a unary RPC to be interrupted by a SIGINT.""" + global per_process_rpc_future # pylint: disable=global-statement + with grpc.insecure_channel(server_target) as channel: + multicallable = channel.unary_unary(UNARY_UNARY) + signal.signal(signal.SIGINT, handle_sigint) + per_process_rpc_future = multicallable.future(_MESSAGE, + wait_for_ready=True) + result = per_process_rpc_future.result() + assert False, _ASSERTION_MESSAGE + + +def main_streaming(server_target): + """Initiate a streaming RPC to be interrupted by a SIGINT.""" + global per_process_rpc_future # pylint: disable=global-statement + with grpc.insecure_channel(server_target) as channel: + signal.signal(signal.SIGINT, handle_sigint) + per_process_rpc_future = channel.unary_stream(UNARY_STREAM)( + _MESSAGE, wait_for_ready=True) + for result in per_process_rpc_future: + pass + assert False, _ASSERTION_MESSAGE + + +def main_unary_with_exception(server_target): + """Initiate a unary RPC with a signal handler that will raise.""" + channel = grpc.insecure_channel(server_target) + try: + channel.unary_unary(UNARY_UNARY)(_MESSAGE, wait_for_ready=True) + except KeyboardInterrupt: + sys.stderr.write("Running signal handler.\n") + sys.stderr.flush() + + # This call should not hang. 
+ channel.close() + + +def main_streaming_with_exception(server_target): + """Initiate a streaming RPC with a signal handler that will raise.""" + channel = grpc.insecure_channel(server_target) + try: + for _ in channel.unary_stream(UNARY_STREAM)(_MESSAGE, + wait_for_ready=True): + pass + except KeyboardInterrupt: + sys.stderr.write("Running signal handler.\n") + sys.stderr.flush() + + # This call should not hang. + channel.close() + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Signal test client.') + parser.add_argument('server', help='Server target') + parser.add_argument('arity', help='Arity', choices=('unary', 'streaming')) + parser.add_argument('--exception', + help='Whether the signal throws an exception', + action='store_true') + args = parser.parse_args() + if args.arity == 'unary' and not args.exception: + main_unary(args.server) + elif args.arity == 'streaming' and not args.exception: + main_streaming(args.server) + elif args.arity == 'unary' and args.exception: + main_unary_with_exception(args.server) + else: + main_streaming_with_exception(args.server) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_handling_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_handling_test.py new file mode 100644 index 00000000000..a05e42d5a31 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_signal_handling_test.py @@ -0,0 +1,200 @@ +# Copyright 2019 the gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test of responsiveness to signals.""" + +import logging +import os +import signal +import subprocess +import tempfile +import threading +import unittest +import sys + +import grpc + +from tests.unit import test_common +from tests.unit import _signal_client + +_CLIENT_PATH = None +if sys.executable is not None: + _CLIENT_PATH = 'tests.unit._signal_client' +else: + # NOTE(rbellevi): For compatibility with internal testing. + if len(sys.argv) != 2: + raise RuntimeError("Must supply path to executable client.") + client_name = sys.argv[1].split("/")[-1] + del sys.argv[1] # For compatibility with test runner. 
+ _CLIENT_PATH = os.path.realpath( + os.path.join(os.path.dirname(os.path.abspath(__file__)), client_name)) + +_HOST = 'localhost' + + +class _GenericHandler(grpc.GenericRpcHandler): + + def __init__(self): + self._connected_clients_lock = threading.RLock() + self._connected_clients_event = threading.Event() + self._connected_clients = 0 + + self._unary_unary_handler = grpc.unary_unary_rpc_method_handler( + self._handle_unary_unary) + self._unary_stream_handler = grpc.unary_stream_rpc_method_handler( + self._handle_unary_stream) + + def _on_client_connect(self): + with self._connected_clients_lock: + self._connected_clients += 1 + self._connected_clients_event.set() + + def _on_client_disconnect(self): + with self._connected_clients_lock: + self._connected_clients -= 1 + if self._connected_clients == 0: + self._connected_clients_event.clear() + + def await_connected_client(self): + """Blocks until a client connects to the server.""" + self._connected_clients_event.wait() + + def _handle_unary_unary(self, request, servicer_context): + """Handles a unary RPC. + + Blocks until the client disconnects and then echoes. + """ + stop_event = threading.Event() + + def on_rpc_end(): + self._on_client_disconnect() + stop_event.set() + + servicer_context.add_callback(on_rpc_end) + self._on_client_connect() + stop_event.wait() + return request + + def _handle_unary_stream(self, request, servicer_context): + """Handles a server streaming RPC. + + Blocks until the client disconnects and then echoes. + """ + stop_event = threading.Event() + + def on_rpc_end(): + self._on_client_disconnect() + stop_event.set() + + servicer_context.add_callback(on_rpc_end) + self._on_client_connect() + stop_event.wait() + yield request + + def service(self, handler_call_details): + if handler_call_details.method == _signal_client.UNARY_UNARY: + return self._unary_unary_handler + elif handler_call_details.method == _signal_client.UNARY_STREAM: + return self._unary_stream_handler + else: + return None + + +def _read_stream(stream): + stream.seek(0) + return stream.read() + + +def _start_client(args, stdout, stderr): + invocation = None + if sys.executable is not None: + invocation = (sys.executable, '-m', _CLIENT_PATH) + tuple(args) + else: + invocation = (_CLIENT_PATH,) + tuple(args) + env = os.environ.copy() + env['Y_PYTHON_ENTRY_POINT'] = ':main' + return subprocess.Popen(invocation, stdout=stdout, stderr=stderr, env=env) + + +class SignalHandlingTest(unittest.TestCase): + + def setUp(self): + self._server = test_common.test_server() + self._port = self._server.add_insecure_port('{}:0'.format(_HOST)) + self._handler = _GenericHandler() + self._server.add_generic_rpc_handlers((self._handler,)) + self._server.start() + + def tearDown(self): + self._server.stop(None) + + @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') + def testUnary(self): + """Tests that the server unary code path does not stall signal handlers.""" + server_target = '{}:{}'.format(_HOST, self._port) + with tempfile.TemporaryFile(mode='r') as client_stdout: + with tempfile.TemporaryFile(mode='r') as client_stderr: + client = _start_client((server_target, 'unary'), client_stdout, + client_stderr) + self._handler.await_connected_client() + client.send_signal(signal.SIGINT) + self.assertFalse(client.wait(), msg=_read_stream(client_stderr)) + client_stdout.seek(0) + self.assertIn(_signal_client.SIGTERM_MESSAGE, + client_stdout.read()) + + @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') + def testStreaming(self): + 
"""Tests that the server streaming code path does not stall signal handlers.""" + server_target = '{}:{}'.format(_HOST, self._port) + with tempfile.TemporaryFile(mode='r') as client_stdout: + with tempfile.TemporaryFile(mode='r') as client_stderr: + client = _start_client((server_target, 'streaming'), + client_stdout, client_stderr) + self._handler.await_connected_client() + client.send_signal(signal.SIGINT) + self.assertFalse(client.wait(), msg=_read_stream(client_stderr)) + client_stdout.seek(0) + self.assertIn(_signal_client.SIGTERM_MESSAGE, + client_stdout.read()) + + @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') + def testUnaryWithException(self): + server_target = '{}:{}'.format(_HOST, self._port) + with tempfile.TemporaryFile(mode='r') as client_stdout: + with tempfile.TemporaryFile(mode='r') as client_stderr: + client = _start_client(('--exception', server_target, 'unary'), + client_stdout, client_stderr) + self._handler.await_connected_client() + client.send_signal(signal.SIGINT) + client.wait() + self.assertEqual(0, client.returncode) + + @unittest.skipIf(os.name == 'nt', 'SIGINT not supported on windows') + def testStreamingHandlerWithException(self): + server_target = '{}:{}'.format(_HOST, self._port) + with tempfile.TemporaryFile(mode='r') as client_stdout: + with tempfile.TemporaryFile(mode='r') as client_stderr: + client = _start_client( + ('--exception', server_target, 'streaming'), client_stdout, + client_stderr) + self._handler.await_connected_client() + client.send_signal(signal.SIGINT) + client.wait() + print(_read_stream(client_stderr)) + self.assertEqual(0, client.returncode) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_tcp_proxy.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_tcp_proxy.py new file mode 100644 index 00000000000..84dc0e2d6cf --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_tcp_proxy.py @@ -0,0 +1,141 @@ +# Copyright 2019 the gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" Proxies a TCP connection between a single client-server pair. + +This proxy is not suitable for production, but should work well for cases in +which a test needs to spy on the bytes put on the wire between a server and +a client. 
+""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import datetime +import select +import socket +import threading + +from tests.unit.framework.common import get_socket + +_TCP_PROXY_BUFFER_SIZE = 1024 +_TCP_PROXY_TIMEOUT = datetime.timedelta(milliseconds=500) + + +def _init_proxy_socket(gateway_address, gateway_port): + proxy_socket = socket.create_connection((gateway_address, gateway_port)) + return proxy_socket + + +class TcpProxy(object): + """Proxies a TCP connection between one client and one server.""" + + def __init__(self, bind_address, gateway_address, gateway_port): + self._bind_address = bind_address + self._gateway_address = gateway_address + self._gateway_port = gateway_port + + self._byte_count_lock = threading.RLock() + self._sent_byte_count = 0 + self._received_byte_count = 0 + + self._stop_event = threading.Event() + + self._port = None + self._listen_socket = None + self._proxy_socket = None + + # The following three attributes are owned by the serving thread. + self._northbound_data = b"" + self._southbound_data = b"" + self._client_sockets = [] + + self._thread = threading.Thread(target=self._run_proxy) + + def start(self): + _, self._port, self._listen_socket = get_socket( + bind_address=self._bind_address) + self._proxy_socket = _init_proxy_socket(self._gateway_address, + self._gateway_port) + self._thread.start() + + def get_port(self): + return self._port + + def _handle_reads(self, sockets_to_read): + for socket_to_read in sockets_to_read: + if socket_to_read is self._listen_socket: + client_socket, client_address = socket_to_read.accept() + self._client_sockets.append(client_socket) + elif socket_to_read is self._proxy_socket: + data = socket_to_read.recv(_TCP_PROXY_BUFFER_SIZE) + with self._byte_count_lock: + self._received_byte_count += len(data) + self._northbound_data += data + elif socket_to_read in self._client_sockets: + data = socket_to_read.recv(_TCP_PROXY_BUFFER_SIZE) + if data: + with self._byte_count_lock: + self._sent_byte_count += len(data) + self._southbound_data += data + else: + self._client_sockets.remove(socket_to_read) + else: + raise RuntimeError('Unidentified socket appeared in read set.') + + def _handle_writes(self, sockets_to_write): + for socket_to_write in sockets_to_write: + if socket_to_write is self._proxy_socket: + if self._southbound_data: + self._proxy_socket.sendall(self._southbound_data) + self._southbound_data = b"" + elif socket_to_write in self._client_sockets: + if self._northbound_data: + socket_to_write.sendall(self._northbound_data) + self._northbound_data = b"" + + def _run_proxy(self): + while not self._stop_event.is_set(): + expected_reads = (self._listen_socket, self._proxy_socket) + tuple( + self._client_sockets) + expected_writes = expected_reads + sockets_to_read, sockets_to_write, _ = select.select( + expected_reads, expected_writes, (), + _TCP_PROXY_TIMEOUT.total_seconds()) + self._handle_reads(sockets_to_read) + self._handle_writes(sockets_to_write) + for client_socket in self._client_sockets: + client_socket.close() + + def stop(self): + self._stop_event.set() + self._thread.join() + self._listen_socket.close() + self._proxy_socket.close() + + def get_byte_count(self): + with self._byte_count_lock: + return self._sent_byte_count, self._received_byte_count + + def reset_byte_count(self): + with self._byte_count_lock: + self._byte_count = 0 + self._received_byte_count = 0 + + def __enter__(self): + self.start() + return self + + def 
__exit__(self, exc_type, exc_val, exc_tb): + self.stop() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_version_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_version_test.py new file mode 100644 index 00000000000..3d37b319e5a --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/_version_test.py @@ -0,0 +1,30 @@ +# Copyright 2018 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Test for grpc.__version__""" + +import unittest +import grpc +import logging +from grpc import _grpcio_metadata + + +class VersionTest(unittest.TestCase): + + def test_get_version(self): + self.assertEqual(grpc.__version__, _grpcio_metadata.__version__) + + +if __name__ == '__main__': + logging.basicConfig() + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/__init__.py new file mode 100644 index 00000000000..5fb4f3c3cfd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py new file mode 100644 index 00000000000..a111d687641 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_beta_features_test.py @@ -0,0 +1,354 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
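The next file covers the legacy `grpc.beta` API. A minimal sketch of the server-plus-dynamic-stub wiring its `setUp` performs, with an insecure port and invented group/method names standing in for the test's TLS setup and constants:

```python
# Sketch only: serve a handler through the deprecated grpc.beta API and invoke
# it via a dynamic stub, mirroring the wiring in the test's setUp below.
from grpc.beta import implementations
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities


def unary_unary(request, context):  # hypothetical handler
    return request


methods = {('group', 'method'): utilities.unary_unary_inline(unary_unary)}
cardinalities = {'method': cardinality.Cardinality.UNARY_UNARY}

server = implementations.server(methods)
port = server.add_insecure_port('[::]:0')
server.start()

channel = implementations.insecure_channel('localhost', port)
stub = implementations.dynamic_stub(channel, 'group', cardinalities)
response = getattr(stub, 'method')(b'request', 10)  # request, timeout in seconds
server.stop(0)
```

The test itself goes further, layering SSL credentials, per-call credentials, and compression controls on top of this skeleton.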
+"""Tests Face interface compliance of the gRPC Python Beta API.""" + +import threading +import unittest + +from grpc.beta import implementations +from grpc.beta import interfaces +from grpc.framework.common import cardinality +from grpc.framework.interfaces.face import utilities +from tests.unit import resources +from tests.unit.beta import test_utilities +from tests.unit.framework.common import test_constants + +_SERVER_HOST_OVERRIDE = 'foo.test.google.fr' + +_PER_RPC_CREDENTIALS_METADATA_KEY = b'my-call-credentials-metadata-key' +_PER_RPC_CREDENTIALS_METADATA_VALUE = b'my-call-credentials-metadata-value' + +_GROUP = 'group' +_UNARY_UNARY = 'unary-unary' +_UNARY_STREAM = 'unary-stream' +_STREAM_UNARY = 'stream-unary' +_STREAM_STREAM = 'stream-stream' + +_REQUEST = b'abc' +_RESPONSE = b'123' + + +class _Servicer(object): + + def __init__(self): + self._condition = threading.Condition() + self._peer = None + self._serviced = False + + def unary_unary(self, request, context): + with self._condition: + self._request = request + self._peer = context.protocol_context().peer() + self._invocation_metadata = context.invocation_metadata() + context.protocol_context().disable_next_response_compression() + self._serviced = True + self._condition.notify_all() + return _RESPONSE + + def unary_stream(self, request, context): + with self._condition: + self._request = request + self._peer = context.protocol_context().peer() + self._invocation_metadata = context.invocation_metadata() + context.protocol_context().disable_next_response_compression() + self._serviced = True + self._condition.notify_all() + return + yield # pylint: disable=unreachable + + def stream_unary(self, request_iterator, context): + for request in request_iterator: + self._request = request + with self._condition: + self._peer = context.protocol_context().peer() + self._invocation_metadata = context.invocation_metadata() + context.protocol_context().disable_next_response_compression() + self._serviced = True + self._condition.notify_all() + return _RESPONSE + + def stream_stream(self, request_iterator, context): + for request in request_iterator: + with self._condition: + self._peer = context.protocol_context().peer() + context.protocol_context().disable_next_response_compression() + yield _RESPONSE + with self._condition: + self._invocation_metadata = context.invocation_metadata() + self._serviced = True + self._condition.notify_all() + + def peer(self): + with self._condition: + return self._peer + + def block_until_serviced(self): + with self._condition: + while not self._serviced: + self._condition.wait() + + +class _BlockingIterator(object): + + def __init__(self, upstream): + self._condition = threading.Condition() + self._upstream = upstream + self._allowed = [] + + def __iter__(self): + return self + + def __next__(self): + return self.next() + + def next(self): + with self._condition: + while True: + if self._allowed is None: + raise StopIteration() + elif self._allowed: + return self._allowed.pop(0) + else: + self._condition.wait() + + def allow(self): + with self._condition: + try: + self._allowed.append(next(self._upstream)) + except StopIteration: + self._allowed = None + self._condition.notify_all() + + +def _metadata_plugin(context, callback): + callback([ + (_PER_RPC_CREDENTIALS_METADATA_KEY, _PER_RPC_CREDENTIALS_METADATA_VALUE) + ], None) + + +class BetaFeaturesTest(unittest.TestCase): + + def setUp(self): + self._servicer = _Servicer() + method_implementations = { + (_GROUP, _UNARY_UNARY): + 
utilities.unary_unary_inline(self._servicer.unary_unary), + (_GROUP, _UNARY_STREAM): + utilities.unary_stream_inline(self._servicer.unary_stream), + (_GROUP, _STREAM_UNARY): + utilities.stream_unary_inline(self._servicer.stream_unary), + (_GROUP, _STREAM_STREAM): + utilities.stream_stream_inline(self._servicer.stream_stream), + } + + cardinalities = { + _UNARY_UNARY: cardinality.Cardinality.UNARY_UNARY, + _UNARY_STREAM: cardinality.Cardinality.UNARY_STREAM, + _STREAM_UNARY: cardinality.Cardinality.STREAM_UNARY, + _STREAM_STREAM: cardinality.Cardinality.STREAM_STREAM, + } + + server_options = implementations.server_options( + thread_pool_size=test_constants.POOL_SIZE) + self._server = implementations.server(method_implementations, + options=server_options) + server_credentials = implementations.ssl_server_credentials([ + ( + resources.private_key(), + resources.certificate_chain(), + ), + ]) + port = self._server.add_secure_port('[::]:0', server_credentials) + self._server.start() + self._channel_credentials = implementations.ssl_channel_credentials( + resources.test_root_certificates()) + self._call_credentials = implementations.metadata_call_credentials( + _metadata_plugin) + channel = test_utilities.not_really_secure_channel( + 'localhost', port, self._channel_credentials, _SERVER_HOST_OVERRIDE) + stub_options = implementations.stub_options( + thread_pool_size=test_constants.POOL_SIZE) + self._dynamic_stub = implementations.dynamic_stub(channel, + _GROUP, + cardinalities, + options=stub_options) + + def tearDown(self): + self._dynamic_stub = None + self._server.stop(test_constants.SHORT_TIMEOUT).wait() + + def test_unary_unary(self): + call_options = interfaces.grpc_call_options( + disable_compression=True, credentials=self._call_credentials) + response = getattr(self._dynamic_stub, + _UNARY_UNARY)(_REQUEST, + test_constants.LONG_TIMEOUT, + protocol_options=call_options) + self.assertEqual(_RESPONSE, response) + self.assertIsNotNone(self._servicer.peer()) + invocation_metadata = [ + (metadatum.key, metadatum.value) + for metadatum in self._servicer._invocation_metadata + ] + self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, + _PER_RPC_CREDENTIALS_METADATA_VALUE), + invocation_metadata) + + def test_unary_stream(self): + call_options = interfaces.grpc_call_options( + disable_compression=True, credentials=self._call_credentials) + response_iterator = getattr(self._dynamic_stub, _UNARY_STREAM)( + _REQUEST, + test_constants.LONG_TIMEOUT, + protocol_options=call_options) + self._servicer.block_until_serviced() + self.assertIsNotNone(self._servicer.peer()) + invocation_metadata = [ + (metadatum.key, metadatum.value) + for metadatum in self._servicer._invocation_metadata + ] + self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, + _PER_RPC_CREDENTIALS_METADATA_VALUE), + invocation_metadata) + + def test_stream_unary(self): + call_options = interfaces.grpc_call_options( + credentials=self._call_credentials) + request_iterator = _BlockingIterator(iter((_REQUEST,))) + response_future = getattr(self._dynamic_stub, _STREAM_UNARY).future( + request_iterator, + test_constants.LONG_TIMEOUT, + protocol_options=call_options) + response_future.protocol_context().disable_next_request_compression() + request_iterator.allow() + response_future.protocol_context().disable_next_request_compression() + request_iterator.allow() + self._servicer.block_until_serviced() + self.assertIsNotNone(self._servicer.peer()) + self.assertEqual(_RESPONSE, response_future.result()) + invocation_metadata = [ + (metadatum.key, 
metadatum.value) + for metadatum in self._servicer._invocation_metadata + ] + self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, + _PER_RPC_CREDENTIALS_METADATA_VALUE), + invocation_metadata) + + def test_stream_stream(self): + call_options = interfaces.grpc_call_options( + credentials=self._call_credentials) + request_iterator = _BlockingIterator(iter((_REQUEST,))) + response_iterator = getattr(self._dynamic_stub, _STREAM_STREAM)( + request_iterator, + test_constants.SHORT_TIMEOUT, + protocol_options=call_options) + response_iterator.protocol_context().disable_next_request_compression() + request_iterator.allow() + response = next(response_iterator) + response_iterator.protocol_context().disable_next_request_compression() + request_iterator.allow() + self._servicer.block_until_serviced() + self.assertIsNotNone(self._servicer.peer()) + self.assertEqual(_RESPONSE, response) + invocation_metadata = [ + (metadatum.key, metadatum.value) + for metadatum in self._servicer._invocation_metadata + ] + self.assertIn((_PER_RPC_CREDENTIALS_METADATA_KEY, + _PER_RPC_CREDENTIALS_METADATA_VALUE), + invocation_metadata) + + +class ContextManagementAndLifecycleTest(unittest.TestCase): + + def setUp(self): + self._servicer = _Servicer() + self._method_implementations = { + (_GROUP, _UNARY_UNARY): + utilities.unary_unary_inline(self._servicer.unary_unary), + (_GROUP, _UNARY_STREAM): + utilities.unary_stream_inline(self._servicer.unary_stream), + (_GROUP, _STREAM_UNARY): + utilities.stream_unary_inline(self._servicer.stream_unary), + (_GROUP, _STREAM_STREAM): + utilities.stream_stream_inline(self._servicer.stream_stream), + } + + self._cardinalities = { + _UNARY_UNARY: cardinality.Cardinality.UNARY_UNARY, + _UNARY_STREAM: cardinality.Cardinality.UNARY_STREAM, + _STREAM_UNARY: cardinality.Cardinality.STREAM_UNARY, + _STREAM_STREAM: cardinality.Cardinality.STREAM_STREAM, + } + + self._server_options = implementations.server_options( + thread_pool_size=test_constants.POOL_SIZE) + self._server_credentials = implementations.ssl_server_credentials([ + ( + resources.private_key(), + resources.certificate_chain(), + ), + ]) + self._channel_credentials = implementations.ssl_channel_credentials( + resources.test_root_certificates()) + self._stub_options = implementations.stub_options( + thread_pool_size=test_constants.POOL_SIZE) + + def test_stub_context(self): + server = implementations.server(self._method_implementations, + options=self._server_options) + port = server.add_secure_port('[::]:0', self._server_credentials) + server.start() + + channel = test_utilities.not_really_secure_channel( + 'localhost', port, self._channel_credentials, _SERVER_HOST_OVERRIDE) + dynamic_stub = implementations.dynamic_stub(channel, + _GROUP, + self._cardinalities, + options=self._stub_options) + for _ in range(100): + with dynamic_stub: + pass + for _ in range(10): + with dynamic_stub: + call_options = interfaces.grpc_call_options( + disable_compression=True) + response = getattr(dynamic_stub, + _UNARY_UNARY)(_REQUEST, + test_constants.LONG_TIMEOUT, + protocol_options=call_options) + self.assertEqual(_RESPONSE, response) + self.assertIsNotNone(self._servicer.peer()) + + server.stop(test_constants.SHORT_TIMEOUT).wait() + + def test_server_lifecycle(self): + for _ in range(100): + server = implementations.server(self._method_implementations, + options=self._server_options) + port = server.add_secure_port('[::]:0', self._server_credentials) + server.start() + server.stop(test_constants.SHORT_TIMEOUT).wait() + for _ in range(100): + server 
= implementations.server(self._method_implementations, + options=self._server_options) + server.add_secure_port('[::]:0', self._server_credentials) + server.add_insecure_port('[::]:0') + with server: + server.stop(test_constants.SHORT_TIMEOUT) + server.stop(test_constants.SHORT_TIMEOUT) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_connectivity_channel_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_connectivity_channel_test.py new file mode 100644 index 00000000000..1416902eab8 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_connectivity_channel_test.py @@ -0,0 +1,32 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests of grpc.beta._connectivity_channel.""" + +import unittest + +from grpc.beta import interfaces + + +class ConnectivityStatesTest(unittest.TestCase): + + def testBetaConnectivityStates(self): + self.assertIsNotNone(interfaces.ChannelConnectivity.IDLE) + self.assertIsNotNone(interfaces.ChannelConnectivity.CONNECTING) + self.assertIsNotNone(interfaces.ChannelConnectivity.READY) + self.assertIsNotNone(interfaces.ChannelConnectivity.TRANSIENT_FAILURE) + self.assertIsNotNone(interfaces.ChannelConnectivity.FATAL_FAILURE) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py new file mode 100644 index 00000000000..75a615eeffb --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_implementations_test.py @@ -0,0 +1,55 @@ +# Copyright 2016 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests the implementations module of the gRPC Python Beta API.""" + +import datetime +import unittest + +from oauth2client import client as oauth2client_client + +from grpc.beta import implementations +from tests.unit import resources + + +class ChannelCredentialsTest(unittest.TestCase): + + def test_runtime_provided_root_certificates(self): + channel_credentials = implementations.ssl_channel_credentials() + self.assertIsInstance(channel_credentials, + implementations.ChannelCredentials) + + def test_application_provided_root_certificates(self): + channel_credentials = implementations.ssl_channel_credentials( + resources.test_root_certificates()) + self.assertIsInstance(channel_credentials, + implementations.ChannelCredentials) + + +class CallCredentialsTest(unittest.TestCase): + + def test_google_call_credentials(self): + creds = oauth2client_client.GoogleCredentials( + 'token', 'client_id', 'secret', 'refresh_token', + datetime.datetime(2008, 6, 24), 'https://refresh.uri.com/', + 'user_agent') + call_creds = implementations.google_call_credentials(creds) + self.assertIsInstance(call_creds, implementations.CallCredentials) + + def test_access_token_call_credentials(self): + call_creds = implementations.access_token_call_credentials('token') + self.assertIsInstance(call_creds, implementations.CallCredentials) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py new file mode 100644 index 00000000000..837d2bbebf2 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_not_found_test.py @@ -0,0 +1,59 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests of RPC-method-not-found behavior.""" + +import unittest + +from grpc.beta import implementations +from grpc.beta import interfaces +from grpc.framework.interfaces.face import face +from tests.unit.framework.common import test_constants + + +class NotFoundTest(unittest.TestCase): + + def setUp(self): + self._server = implementations.server({}) + port = self._server.add_insecure_port('[::]:0') + channel = implementations.insecure_channel('localhost', port) + self._generic_stub = implementations.generic_stub(channel) + self._server.start() + + def tearDown(self): + self._server.stop(0).wait() + self._generic_stub = None + + def test_blocking_unary_unary_not_found(self): + with self.assertRaises(face.LocalError) as exception_assertion_context: + self._generic_stub.blocking_unary_unary('groop', + 'meffod', + b'abc', + test_constants.LONG_TIMEOUT, + with_call=True) + self.assertIs(exception_assertion_context.exception.code, + interfaces.StatusCode.UNIMPLEMENTED) + + def test_future_stream_unary_not_found(self): + rpc_future = self._generic_stub.future_stream_unary( + 'grupe', 'mevvod', iter([b'def']), test_constants.LONG_TIMEOUT) + with self.assertRaises(face.LocalError) as exception_assertion_context: + rpc_future.result() + self.assertIs(exception_assertion_context.exception.code, + interfaces.StatusCode.UNIMPLEMENTED) + self.assertIs(rpc_future.exception().code, + interfaces.StatusCode.UNIMPLEMENTED) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_utilities_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_utilities_test.py new file mode 100644 index 00000000000..e0422627962 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/_utilities_test.py @@ -0,0 +1,93 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests of grpc.beta.utilities.""" + +import threading +import time +import unittest + +from grpc.beta import implementations +from grpc.beta import utilities +from grpc.framework.foundation import future +from tests.unit.framework.common import test_constants + + +class _Callback(object): + + def __init__(self): + self._condition = threading.Condition() + self._value = None + + def accept_value(self, value): + with self._condition: + self._value = value + self._condition.notify_all() + + def block_until_called(self): + with self._condition: + while self._value is None: + self._condition.wait() + return self._value + + [email protected]('https://github.com/grpc/grpc/issues/16134') +class ChannelConnectivityTest(unittest.TestCase): + + def test_lonely_channel_connectivity(self): + channel = implementations.insecure_channel('localhost', 12345) + callback = _Callback() + + ready_future = utilities.channel_ready_future(channel) + ready_future.add_done_callback(callback.accept_value) + with self.assertRaises(future.TimeoutError): + ready_future.result(timeout=test_constants.SHORT_TIMEOUT) + self.assertFalse(ready_future.cancelled()) + self.assertFalse(ready_future.done()) + self.assertTrue(ready_future.running()) + ready_future.cancel() + value_passed_to_callback = callback.block_until_called() + self.assertIs(ready_future, value_passed_to_callback) + self.assertTrue(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + + def test_immediately_connectable_channel_connectivity(self): + server = implementations.server({}) + port = server.add_insecure_port('[::]:0') + server.start() + channel = implementations.insecure_channel('localhost', port) + callback = _Callback() + + try: + ready_future = utilities.channel_ready_future(channel) + ready_future.add_done_callback(callback.accept_value) + self.assertIsNone( + ready_future.result(timeout=test_constants.LONG_TIMEOUT)) + value_passed_to_callback = callback.block_until_called() + self.assertIs(ready_future, value_passed_to_callback) + self.assertFalse(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + # Cancellation after maturity has no effect. + ready_future.cancel() + self.assertFalse(ready_future.cancelled()) + self.assertTrue(ready_future.done()) + self.assertFalse(ready_future.running()) + finally: + ready_future.cancel() + server.stop(0) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/test_utilities.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/test_utilities.py new file mode 100644 index 00000000000..c8d920d35e9 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/beta/test_utilities.py @@ -0,0 +1,40 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test-appropriate entry points into the gRPC Python Beta API.""" + +import grpc +from grpc.beta import implementations + + +def not_really_secure_channel(host, port, channel_credentials, + server_host_override): + """Creates an insecure Channel to a remote host. + + Args: + host: The name of the remote host to which to connect. + port: The port of the remote host to which to connect. + channel_credentials: The implementations.ChannelCredentials with which to + connect. + server_host_override: The target name used for SSL host name checking. + + Returns: + An implementations.Channel to the remote host through which RPCs may be + conducted. + """ + target = '%s:%d' % (host, port) + channel = grpc.secure_channel(target, channel_credentials, (( + 'grpc.ssl_target_name_override', + server_host_override, + ),)) + return implementations.Channel(channel) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/README.md b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/README.md new file mode 100644 index 00000000000..100b43c1aaf --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/README.md @@ -0,0 +1,15 @@ +These are test keys *NOT* to be used in production. + +The `certificate_hierarchy_1` and `certificate_hierarchy_2` contain +two disjoint but similarly organized certificate hierarchies. Each +contains: + +* The respective root CA cert in `certs/ca.cert.pem` + +* The intermediate CA cert in + `intermediate/certs/intermediate.cert.pem`, signed by the root CA + +* A client cert and a server cert--both signed by the intermediate + CA--in `intermediate/certs/client.cert.pem` and + `intermediate/certs/localhost-1.cert.pem`; the corresponding keys + are in `intermediate/private` diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem new file mode 100755 index 00000000000..49d39cd8ed5 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/ca.pem @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDWjCCAkKgAwIBAgIUWrP0VvHcy+LP6UuYNtiL9gBhD5owDQYJKoZIhvcNAQEL +BQAwVjELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEPMA0GA1UEAwwGdGVzdGNhMB4XDTIw +MDMxNzE4NTk1MVoXDTMwMDMxNTE4NTk1MVowVjELMAkGA1UEBhMCQVUxEzARBgNV +BAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5IEx0 +ZDEPMA0GA1UEAwwGdGVzdGNhMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAsGL0oXflF0LzoM+Bh+qUU9yhqzw2w8OOX5mu/iNCyUOBrqaHi7mGHx73GD01 +diNzCzvlcQqdNIH6NQSL7DTpBjca66jYT9u73vZe2MDrr1nVbuLvfu9850cdxiUO +Inv5xf8+sTHG0C+a+VAvMhsLiRjsq+lXKRJyk5zkbbsETybqpxoJ+K7CoSy3yc/k +QIY3TipwEtwkKP4hzyo6KiGd/DPexie4nBUInN3bS1BUeNZ5zeaIC2eg3bkeeW7c +qT55b+Yen6CxY0TEkzBK6AKt/WUialKMgT0wbTxRZO7kUCH3Sq6e/wXeFdJ+HvdV +LPlAg5TnMaNpRdQih/8nRFpsdwIDAQABoyAwHjAMBgNVHRMEBTADAQH/MA4GA1Ud +DwEB/wQEAwICBDANBgkqhkiG9w0BAQsFAAOCAQEAkTrKZjBrJXHps/HrjNCFPb5a +THuGPCSsepe1wkKdSp1h4HGRpLoCgcLysCJ5hZhRpHkRihhef+rFHEe60UePQO3S +CVTtdJB4CYWpcNyXOdqefrbJW5QNljxgi6Fhvs7JJkBqdXIkWXtFk2eRgOIP2Eo9 +/OHQHlYnwZFrk6sp4wPyR+A95S0toZBcyDVz7u+hOW0pGK3wviOe9lvRgj/H3Pwt +bewb0l+MhRig0/DVHamyVxrDRbqInU1/GTNCwcZkXKYFWSf92U+kIcTth24Q1gcw +eZiLl5FfrWokUNytFElXob0V0a5/kbhiLc3yWmvWqHTpqCALbVyF+rKJo2f5Kw== +-----END CERTIFICATE----- diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key new file mode 100755 index 
00000000000..086462992cf --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDnE443EknxvxBq +6+hvn/t09hl8hx366EBYvZmVM/NC+7igXRAjiJiA/mIaCvL3MS0Iz5hBLxSGICU+ +WproA3GCIFITIwcf/ETyWj/5xpgZ4AKrLrjQmmX8mhwUajfF3UvwMJrCOVqPp67t +PtP+2kBXaqrXdvnvXR41FsIB8V7zIAuIZB6bHQhiGVlc1sgZYsE2EGG9WMmHtS86 +qkAOTjG2XyjmPTGAwhGDpYkYrpzp99IiDh4/Veai81hn0ssQkbry0XRD/Ig3jcHh +23WiriPNJ0JsbgXUSLKRPZObA9VgOLy2aXoN84IMaeK3yy+cwSYG/99w93fUZJte +MXwz4oYZAgMBAAECggEBAIVn2Ncai+4xbH0OLWckabwgyJ4IM9rDc0LIU368O1kU +koais8qP9dujAWgfoh3sGh/YGgKn96VnsZjKHlyMgF+r4TaDJn3k2rlAOWcurGlj +1qaVlsV4HiEzp7pxiDmHhWvp4672Bb6iBG+bsjCUOEk/n9o9KhZzIBluRhtxCmw5 +nw4Do7z00PTvN81260uPWSc04IrytvZUiAIx/5qxD72bij2xJ8t/I9GI8g4FtoVB +8pB6S/hJX1PZhh9VlU6Yk+TOfOVnbebG4W5138LkB835eqk3Zz0qsbc2euoi8Hxi +y1VGwQEmMQ63jXz4c6g+X55ifvUK9Jpn5E8pq+pMd7ECgYEA93lYq+Cr54K4ey5t +sWMa+ye5RqxjzgXj2Kqr55jb54VWG7wp2iGbg8FMlkQwzTJwebzDyCSatguEZLuB +gRGroRnsUOy9vBvhKPOch9bfKIl6qOgzMJB267fBVWx5ybnRbWN/I7RvMQf3k+9y +biCIVnxDLEEYyx7z85/5qxsXg/MCgYEA7wmWKtCTn032Hy9P8OL49T0X6Z8FlkDC +Rk42ygrc/MUbugq9RGUxcCxoImOG9JXUpEtUe31YDm2j+/nbvrjl6/bP2qWs0V7l +dTJl6dABP51pCw8+l4cWgBBX08Lkeen812AAFNrjmDCjX6rHjWHLJcpS18fnRRkP +V1d/AHWX7MMCgYEA6Gsw2guhp0Zf2GCcaNK5DlQab8OL4Hwrpttzo4kuTlwtqNKp +Q9H4al9qfF4Cr1TFya98+EVYf8yFRM3NLNjZpe3gwYf2EerlJj7VLcahw0KKzoN1 +QBENfwgPLRk5sDkx9VhSmcfl/diLroZdpAwtv3vo4nEoxeuGFbKTGx3Qkf0CgYEA +xyR+dcb05Ygm3w4klHQTowQ10s1H80iaUcZBgQuR1ghEtDbUPZHsoR5t1xCB02ys +DgAwLv1bChIvxvH/L6KM8ovZ2LekBX4AviWxoBxJnfz/EVau98B0b1auRN6eSC83 +FRuGldlSOW1z/nSh8ViizSYE5H5HX1qkXEippvFRE88CgYB3Bfu3YQY60ITWIShv +nNkdcbTT9eoP9suaRJjw92Ln+7ZpALYlQMKUZmJ/5uBmLs4RFwUTQruLOPL4yLTH +awADWUzs3IRr1fwn9E+zM8JVyKCnUEM3w4N5UZskGO2klashAd30hWO+knRv/y0r +uGIYs9Ek7YXlXIRVrzMwcsrt1w== +-----END PRIVATE KEY----- diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem new file mode 100755 index 00000000000..88244f856c6 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/credentials/server1.pem @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDtDCCApygAwIBAgIUbJfTREJ6k6/+oInWhV1O1j3ZT0IwDQYJKoZIhvcNAQEL +BQAwVjELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEPMA0GA1UEAwwGdGVzdGNhMB4XDTIw +MDMxODAzMTA0MloXDTMwMDMxNjAzMTA0MlowZTELMAkGA1UEBhMCVVMxETAPBgNV +BAgMCElsbGlub2lzMRAwDgYDVQQHDAdDaGljYWdvMRUwEwYDVQQKDAxFeGFtcGxl +LCBDby4xGjAYBgNVBAMMESoudGVzdC5nb29nbGUuY29tMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA5xOONxJJ8b8Qauvob5/7dPYZfIcd+uhAWL2ZlTPz +Qvu4oF0QI4iYgP5iGgry9zEtCM+YQS8UhiAlPlqa6ANxgiBSEyMHH/xE8lo/+caY +GeACqy640Jpl/JocFGo3xd1L8DCawjlaj6eu7T7T/tpAV2qq13b5710eNRbCAfFe +8yALiGQemx0IYhlZXNbIGWLBNhBhvVjJh7UvOqpADk4xtl8o5j0xgMIRg6WJGK6c +6ffSIg4eP1XmovNYZ9LLEJG68tF0Q/yIN43B4dt1oq4jzSdCbG4F1EiykT2TmwPV +YDi8tml6DfOCDGnit8svnMEmBv/fcPd31GSbXjF8M+KGGQIDAQABo2swaTAJBgNV +HRMEAjAAMAsGA1UdDwQEAwIF4DBPBgNVHREESDBGghAqLnRlc3QuZ29vZ2xlLmZy +ghh3YXRlcnpvb2kudGVzdC5nb29nbGUuYmWCEioudGVzdC55b3V0dWJlLmNvbYcE +wKgBAzANBgkqhkiG9w0BAQsFAAOCAQEAS8hDQA8PSgipgAml7Q3/djwQ644ghWQv +C2Kb+r30RCY1EyKNhnQnIIh/OUbBZvh0M0iYsy6xqXgfDhCB93AA6j0i5cS8fkhH +Jl4RK0tSkGQ3YNY4NzXwQP/vmUgfkw8VBAZ4Y4GKxppdATjffIW+srbAmdDruIRM +wPeikgOoRrXf0LA1fi4TqxARzeRwenQpayNfGHTvVF9aJkl8HoaMunTAdG5pIVcr +9GKi/gEMpXUJbbVv3U5frX1Wo4CFo+rZWJ/LyCMeb0jciNLxSdMwj/E/ZuExlyeZ +gc9ctPjSMvgSyXEKv6Vwobleeg88V2ZgzenziORoWj4KszG/lbQZvg== 
+-----END CERTIFICATE----- diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/__init__.py new file mode 100644 index 00000000000..5fb4f3c3cfd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py new file mode 100644 index 00000000000..8b58a0c46af --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/__init__.py @@ -0,0 +1,102 @@ +# Copyright 2019 The gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import contextlib +import os +import socket +import errno + +_DEFAULT_SOCK_OPTIONS = (socket.SO_REUSEADDR, + socket.SO_REUSEPORT) if os.name != 'nt' else ( + socket.SO_REUSEADDR,) +_UNRECOVERABLE_ERRNOS = (errno.EADDRINUSE, errno.ENOSR) + + +def get_socket(bind_address='localhost', + port=0, + listen=True, + sock_options=_DEFAULT_SOCK_OPTIONS): + """Opens a socket. + + Useful for reserving a port for a system-under-test. + + Args: + bind_address: The host to which to bind. + port: The port to which to bind. + listen: A boolean value indicating whether or not to listen on the socket. + sock_options: A sequence of socket options to apply to the socket. + + Returns: + A tuple containing: + - the address to which the socket is bound + - the port to which the socket is bound + - the socket object itself + """ + _sock_options = sock_options if sock_options else [] + if socket.has_ipv6: + address_families = (socket.AF_INET6, socket.AF_INET) + else: + address_families = (socket.AF_INET) + for address_family in address_families: + try: + sock = socket.socket(address_family, socket.SOCK_STREAM) + for sock_option in _sock_options: + sock.setsockopt(socket.SOL_SOCKET, sock_option, 1) + sock.bind((bind_address, port)) + if listen: + sock.listen(1) + return bind_address, sock.getsockname()[1], sock + except OSError as os_error: + sock.close() + if os_error.errno in _UNRECOVERABLE_ERRNOS: + raise + else: + continue + # For PY2, socket.error is a child class of IOError; for PY3, it is + # pointing to OSError. We need this catch to make it 2/3 agnostic. 
+ except socket.error: # pylint: disable=duplicate-except + sock.close() + continue + raise RuntimeError("Failed to bind to {} with sock_options {}".format( + bind_address, sock_options)) + + +@contextlib.contextmanager +def bound_socket(bind_address='localhost', + port=0, + listen=True, + sock_options=_DEFAULT_SOCK_OPTIONS): + """Opens a socket bound to an arbitrary port. + + Useful for reserving a port for a system-under-test. + + Args: + bind_address: The host to which to bind. + port: The port to which to bind. + listen: A boolean value indicating whether or not to listen on the socket. + sock_options: A sequence of socket options to apply to the socket. + + Yields: + A tuple containing: + - the address to which the socket is bound + - the port to which the socket is bound + """ + host, port, sock = get_socket(bind_address=bind_address, + port=port, + listen=listen, + sock_options=sock_options) + try: + yield host, port + finally: + sock.close() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_constants.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_constants.py new file mode 100644 index 00000000000..2b9eb2e35bd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_constants.py @@ -0,0 +1,45 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Constants shared among tests throughout RPC Framework.""" + +# Value for maximum duration in seconds that a test is allowed for its actual +# behavioral logic, excluding all time spent deliberately waiting in the test. +TIME_ALLOWANCE = 10 +# Value for maximum duration in seconds of RPCs that may time out as part of a +# test. +SHORT_TIMEOUT = 4 +# Absurdly large value for maximum duration in seconds for should-not-time-out +# RPCs made during tests. +LONG_TIMEOUT = 3000 +# Values to supply on construction of an object that will service RPCs; these +# should not be used as the actual timeout values of any RPCs made during tests. +DEFAULT_TIMEOUT = 300 +MAXIMUM_TIMEOUT = 3600 + +# The number of payloads to transmit in streaming tests. +STREAM_LENGTH = 200 + +# The size of payloads to transmit in tests. +PAYLOAD_SIZE = 256 * 1024 + 17 + +# The concurrency to use in tests of concurrent RPCs that will not create as +# many threads as RPCs. +RPC_CONCURRENCY = 200 + +# The concurrency to use in tests of concurrent RPCs that will create as many +# threads as RPCs. +THREAD_CONCURRENCY = 25 + +# The size of thread pools to use in tests. +POOL_SIZE = 10 diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_control.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_control.py new file mode 100644 index 00000000000..6a422825cc7 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_control.py @@ -0,0 +1,97 @@ +# Copyright 2015 gRPC authors. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Code for instructing systems under test to block or fail."""
+
+import abc
+import contextlib
+import threading
+
+import six
+
+
+class Defect(Exception):
+    """Simulates a programming defect raised into a system under test.
+
+    Use of a standard exception type is too easily misconstrued as an actual
+    defect in either the test infrastructure or the system under test.
+    """
+
+
+class Control(six.with_metaclass(abc.ABCMeta)):
+    """An object that accepts program control from a system under test.
+
+    Systems under test passed a Control should call its control() method
+    frequently during execution. The control() method may block, raise an
+    exception, or do nothing, all according to the enclosing test's desire for
+    the system under test to simulate hanging, failing, or functioning.
+    """
+
+    @abc.abstractmethod
+    def control(self):
+        """Potentially does anything."""
+        raise NotImplementedError()
+
+
+class PauseFailControl(Control):
+    """A Control that can be used to pause or fail code under control.
+
+    This object is only safe for use from two threads: one of the system under
+    test calling control and the other from the test system calling pause,
+    block_until_paused, and fail.
+    """
+
+    def __init__(self):
+        self._condition = threading.Condition()
+        self._pause = False
+        self._paused = False
+        self._fail = False
+
+    def control(self):
+        with self._condition:
+            if self._fail:
+                raise Defect()
+
+            while self._pause:
+                self._paused = True
+                self._condition.notify_all()
+                self._condition.wait()
+            self._paused = False
+
+    @contextlib.contextmanager
+    def pause(self):
+        """Pauses code under control while controlling code is in context."""
+        with self._condition:
+            self._pause = True
+        yield
+        with self._condition:
+            self._pause = False
+            self._condition.notify_all()
+
+    def block_until_paused(self):
+        """Blocks controlling code until code under control is paused.
+
+        May only be called within the context of a pause call.
+        """
+        with self._condition:
+            while not self._paused:
+                self._condition.wait()
+
+    @contextlib.contextmanager
+    def fail(self):
+        """Fails code under control while controlling code is in context."""
+        with self._condition:
+            self._fail = True
+        yield
+        with self._condition:
+            self._fail = False
diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_coverage.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_coverage.py
new file mode 100644
index 00000000000..f90a11963fb
--- /dev/null
+++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/common/test_coverage.py
@@ -0,0 +1,101 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
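A sketch of how a test typically drives PauseFailControl: the worker function and thread wiring below are hypothetical stand-ins for a real system under test, which would call control() at its own checkpoints.

# Hypothetical sketch: pausing a cooperative worker at a checkpoint.
import threading

from tests.unit.framework.common import test_control

control = test_control.PauseFailControl()

def worker():
    for _ in range(3):
        control.control()  # checkpoint: may block, raise Defect, or pass through

thread = threading.Thread(target=worker)
with control.pause():
    thread.start()
    control.block_until_paused()  # worker is now parked at a checkpoint
    # ...assertions about intermediate state would go here...
thread.join()  # leaving the pause() context releases the worker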
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Governs coverage for tests of RPCs throughout RPC Framework.""" + +import abc + +import six + +# This code is designed for use with the unittest module. +# pylint: disable=invalid-name + + +class Coverage(six.with_metaclass(abc.ABCMeta)): + """Specification of test coverage.""" + + @abc.abstractmethod + def testSuccessfulUnaryRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testSuccessfulUnaryRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testSuccessfulStreamRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testSuccessfulStreamRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testSequentialInvocations(self): + raise NotImplementedError() + + @abc.abstractmethod + def testParallelInvocations(self): + raise NotImplementedError() + + @abc.abstractmethod + def testWaitingForSomeButNotAllParallelInvocations(self): + raise NotImplementedError() + + @abc.abstractmethod + def testCancelledUnaryRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testCancelledUnaryRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testCancelledStreamRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testCancelledStreamRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testExpiredUnaryRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testExpiredUnaryRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testExpiredStreamRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testExpiredStreamRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testFailedUnaryRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testFailedUnaryRequestStreamResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testFailedStreamRequestUnaryResponse(self): + raise NotImplementedError() + + @abc.abstractmethod + def testFailedStreamRequestStreamResponse(self): + raise NotImplementedError() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/__init__.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/__init__.py new file mode 100644 index 00000000000..5fb4f3c3cfd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
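Coverage is intended to be mixed into concrete test classes alongside unittest.TestCase; because every method is abstract, a class that omits any entry in the RPC-shape matrix cannot be instantiated. A small illustrative check of that enforcement (the class name and its single method body are hypothetical):

# Illustrative only: an incomplete subclass cannot be instantiated.
import unittest

from tests.unit.framework.common import test_coverage

class _IncompleteCoverageTest(unittest.TestCase, test_coverage.Coverage):
    # Only one of the required matrix methods is provided here.
    def testSuccessfulUnaryRequestUnaryResponse(self):
        pass

try:
    _IncompleteCoverageTest('testSuccessfulUnaryRequestUnaryResponse')
except TypeError as error:
    print('still missing coverage methods:', error)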
+# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/_logging_pool_test.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/_logging_pool_test.py new file mode 100644 index 00000000000..c4ea03177cc --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/_logging_pool_test.py @@ -0,0 +1,73 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Tests for grpc.framework.foundation.logging_pool.""" + +import threading +import unittest + +from grpc.framework.foundation import logging_pool + +_POOL_SIZE = 16 + + +class _CallableObject(object): + + def __init__(self): + self._lock = threading.Lock() + self._passed_values = [] + + def __call__(self, value): + with self._lock: + self._passed_values.append(value) + + def passed_values(self): + with self._lock: + return tuple(self._passed_values) + + +class LoggingPoolTest(unittest.TestCase): + + def testUpAndDown(self): + pool = logging_pool.pool(_POOL_SIZE) + pool.shutdown(wait=True) + + with logging_pool.pool(_POOL_SIZE) as pool: + self.assertIsNotNone(pool) + + def testTaskExecuted(self): + test_list = [] + + with logging_pool.pool(_POOL_SIZE) as pool: + pool.submit(lambda: test_list.append(object())).result() + + self.assertTrue(test_list) + + def testException(self): + with logging_pool.pool(_POOL_SIZE) as pool: + raised_exception = pool.submit(lambda: 1 / 0).exception() + + self.assertIsNotNone(raised_exception) + + def testCallableObjectExecuted(self): + callable_object = _CallableObject() + passed_object = object() + with logging_pool.pool(_POOL_SIZE) as pool: + future = pool.submit(callable_object, passed_object) + self.assertIsNone(future.result()) + self.assertSequenceEqual((passed_object,), + callable_object.passed_values()) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/stream_testing.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/stream_testing.py new file mode 100644 index 00000000000..dd5c5b3b031 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/framework/foundation/stream_testing.py @@ -0,0 +1,57 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
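Outside of unittest, the pattern exercised by LoggingPoolTest above looks roughly like this: logging_pool.pool is usable as a context manager, and an exception raised by a submitted callable is captured on the returned future (the module's purpose is to also log such exceptions rather than let them vanish silently). A minimal sketch:

# Sketch of the behavior exercised by LoggingPoolTest above.
from grpc.framework.foundation import logging_pool

with logging_pool.pool(4) as pool:            # shuts the pool down on exit
    ok = pool.submit(lambda: 2 + 2)
    failed = pool.submit(lambda: 1 / 0)       # exception captured on the future
    print(ok.result())                        # 4
    print(type(failed.exception()).__name__)  # ZeroDivisionError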
+"""Utilities for testing stream-related code.""" + +from grpc.framework.foundation import stream + + +class TestConsumer(stream.Consumer): + """A stream.Consumer instrumented for testing. + + Attributes: + calls: A sequence of value-termination pairs describing the history of calls + made on this object. + """ + + def __init__(self): + self.calls = [] + + def consume(self, value): + """See stream.Consumer.consume for specification.""" + self.calls.append((value, False)) + + def terminate(self): + """See stream.Consumer.terminate for specification.""" + self.calls.append((None, True)) + + def consume_and_terminate(self, value): + """See stream.Consumer.consume_and_terminate for specification.""" + self.calls.append((value, True)) + + def is_legal(self): + """Reports whether or not a legal sequence of calls has been made.""" + terminated = False + for value, terminal in self.calls: + if terminated: + return False + elif terminal: + terminated = True + elif value is None: + return False + else: # pylint: disable=useless-else-on-loop + return True + + def values(self): + """Returns the sequence of values that have been passed to this Consumer.""" + return [value for value, _ in self.calls if value] diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/resources.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/resources.py new file mode 100644 index 00000000000..6efd870fc86 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/resources.py @@ -0,0 +1,113 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Constants and functions for data used in testing.""" + +import os +import pkgutil + +_ROOT_CERTIFICATES_RESOURCE_PATH = 'credentials/ca.pem' +_PRIVATE_KEY_RESOURCE_PATH = 'credentials/server1.key' +_CERTIFICATE_CHAIN_RESOURCE_PATH = 'credentials/server1.pem' + + +def test_root_certificates(): + return pkgutil.get_data(__name__, _ROOT_CERTIFICATES_RESOURCE_PATH) + + +def private_key(): + return pkgutil.get_data(__name__, _PRIVATE_KEY_RESOURCE_PATH) + + +def certificate_chain(): + return pkgutil.get_data(__name__, _CERTIFICATE_CHAIN_RESOURCE_PATH) + + +def cert_hier_1_root_ca_cert(): + return pkgutil.get_data( + __name__, 'credentials/certificate_hierarchy_1/certs/ca.cert.pem') + + +def cert_hier_1_intermediate_ca_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_1/intermediate/certs/intermediate.cert.pem' + ) + + +def cert_hier_1_client_1_key(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_1/intermediate/private/client.key.pem' + ) + + +def cert_hier_1_client_1_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_1/intermediate/certs/client.cert.pem' + ) + + +def cert_hier_1_server_1_key(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_1/intermediate/private/localhost-1.key.pem' + ) + + +def cert_hier_1_server_1_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_1/intermediate/certs/localhost-1.cert.pem' + ) + + +def cert_hier_2_root_ca_cert(): + return pkgutil.get_data( + __name__, 'credentials/certificate_hierarchy_2/certs/ca.cert.pem') + + +def cert_hier_2_intermediate_ca_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_2/intermediate/certs/intermediate.cert.pem' + ) + + +def cert_hier_2_client_1_key(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_2/intermediate/private/client.key.pem' + ) + + +def cert_hier_2_client_1_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_2/intermediate/certs/client.cert.pem' + ) + + +def cert_hier_2_server_1_key(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_2/intermediate/private/localhost-1.key.pem' + ) + + +def cert_hier_2_server_1_cert(): + return pkgutil.get_data( + __name__, + 'credentials/certificate_hierarchy_2/intermediate/certs/localhost-1.cert.pem' + ) diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py new file mode 100644 index 00000000000..59ded0752fd --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/test_common.py @@ -0,0 +1,134 @@ +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Common code used throughout tests of gRPC.""" + +import collections +import threading + +from concurrent import futures +import grpc +import six + +INVOCATION_INITIAL_METADATA = ( + ('0', 'abc'), + ('1', 'def'), + ('2', 'ghi'), +) +SERVICE_INITIAL_METADATA = ( + ('3', 'jkl'), + ('4', 'mno'), + ('5', 'pqr'), +) +SERVICE_TERMINAL_METADATA = ( + ('6', 'stu'), + ('7', 'vwx'), + ('8', 'yza'), +) +DETAILS = 'test details' + + +def metadata_transmitted(original_metadata, transmitted_metadata): + """Judges whether or not metadata was acceptably transmitted. + + gRPC is allowed to insert key-value pairs into the metadata values given by + applications and to reorder key-value pairs with different keys but it is not + allowed to alter existing key-value pairs or to reorder key-value pairs with + the same key. + + Args: + original_metadata: A metadata value used in a test of gRPC. An iterable over + iterables of length 2. + transmitted_metadata: A metadata value corresponding to original_metadata + after having been transmitted via gRPC. An iterable over iterables of + length 2. + + Returns: + A boolean indicating whether transmitted_metadata accurately reflects + original_metadata after having been transmitted via gRPC. + """ + original = collections.defaultdict(list) + for key, value in original_metadata: + original[key].append(value) + transmitted = collections.defaultdict(list) + for key, value in transmitted_metadata: + transmitted[key].append(value) + + for key, values in six.iteritems(original): + transmitted_values = transmitted[key] + transmitted_iterator = iter(transmitted_values) + try: + for value in values: + while True: + transmitted_value = next(transmitted_iterator) + if value == transmitted_value: + break + except StopIteration: + return False + else: + return True + + +def test_secure_channel(target, channel_credentials, server_host_override): + """Creates an insecure Channel to a remote host. + + Args: + host: The name of the remote host to which to connect. + port: The port of the remote host to which to connect. + channel_credentials: The implementations.ChannelCredentials with which to + connect. + server_host_override: The target name used for SSL host name checking. + + Returns: + An implementations.Channel to the remote host through which RPCs may be + conducted. + """ + channel = grpc.secure_channel(target, channel_credentials, (( + 'grpc.ssl_target_name_override', + server_host_override, + ),)) + return channel + + +def test_server(max_workers=10, reuse_port=False): + """Creates an insecure grpc server. + + These servers have SO_REUSEPORT disabled to prevent cross-talk. + """ + return grpc.server(futures.ThreadPoolExecutor(max_workers=max_workers), + options=(('grpc.so_reuseport', int(reuse_port)),)) + + +class WaitGroup(object): + + def __init__(self, n=0): + self.count = n + self.cv = threading.Condition() + + def add(self, n): + self.cv.acquire() + self.count += n + self.cv.release() + + def done(self): + self.cv.acquire() + self.count -= 1 + if self.count == 0: + self.cv.notify_all() + self.cv.release() + + def wait(self): + self.cv.acquire() + while self.count > 0: + self.cv.wait() + self.cv.release() diff --git a/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/thread_pool.py b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/thread_pool.py new file mode 100644 index 00000000000..094e203cd95 --- /dev/null +++ b/contrib/libs/grpc/src/python/grpcio_tests/tests/unit/thread_pool.py @@ -0,0 +1,34 @@ +# Copyright 2016 gRPC authors. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import threading
+from concurrent import futures
+
+
+class RecordingThreadPool(futures.ThreadPoolExecutor):
+    """A thread pool that records if used."""
+
+    def __init__(self, max_workers):
+        self._tp_executor = futures.ThreadPoolExecutor(max_workers=max_workers)
+        self._lock = threading.Lock()
+        self._was_used = False
+
+    def submit(self, fn, *args, **kwargs):  # pylint: disable=arguments-differ
+        with self._lock:
+            self._was_used = True
+        self._tp_executor.submit(fn, *args, **kwargs)
+
+    def was_used(self):
+        with self._lock:
+            return self._was_used
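RecordingThreadPool only records whether anything was ever submitted; note that its submit() deliberately does not return the inner future. In the suite it is typically handed to grpc.server(...) so a test can later assert whether the server dispatched work to that pool. A self-contained sketch of the recording behavior on its own:

# Sketch: observing the was_used flag (illustrative, outside any gRPC server).
from tests.unit import thread_pool

pool = thread_pool.RecordingThreadPool(max_workers=2)
assert not pool.was_used()
pool.submit(print, 'ran on the recorded pool')  # returns None, not a future
assert pool.was_used()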